lib/dumper/job.rb in dumper-0.0.7 vs lib/dumper/job.rb in dumper-0.1.0

- old
+ new

@@ -3,10 +3,12 @@ module Dumper
   class Job
     include POSIX::Spawn
     include Dumper::Utility::LoggingMethods
 
+    MAX_FILESIZE = 4.gigabytes
+
     def initialize(agent, job)
      @agent = agent
      @stack = agent.stack
      @job = job
    end
@@ -16,66 +18,73 @@
        perform(server)
      end
    ensure
      log_last_error if $!
      log 'exiting...'
-      exit
+      exit!(true) # Do not use exit or abort to skip at_exit execution, or pid could get deleted on thin
    end
 
    def perform(server)
-      # Prepare
-      json = @agent.send_request(api: 'backup/prepare', params: { server_id: server[:id], manual: server[:manual].to_s })
-      return unless json[:status] == 'ok'
-
+      # Initialize database
      case server[:type]
      when 'mysql'
        @database = Dumper::Database::MySQL.new(@stack)
+      when 'mongodb'
+        @database = Dumper::Database::MongoDB.new(@stack, :tmpdir => Dir.mktmpdir)
      else
-        abort 'invalid server type!' # TBD
+        log "invalid server type: #{server[:type]}"
+        exit!
      end
+
+      # Prepare
+      json = @agent.api_request('backup/prepare', :params => { :server_id => server[:id], :manual => server[:manual].to_s, :ext => @database.file_ext })
+      return unless json[:status] == 'ok'
+
      backup_id = json[:backup][:id]
      filename = json[:backup][:filename]
 
      # Dump
      start_at = Time.now
      tempfile = ruby19? ? Tempfile.new(filename, encoding: 'ascii-8bit') : Tempfile.new(filename)
+      @database.tempfile = tempfile
      log 'starting backup...'
      log "tempfile = #{tempfile.path}"
      log "command = #{@database.command}"
 
      begin
        pid, stdin, stdout, stderr = popen4(@database.command)
        stdin.close
-        # Reuse buffer: http://www.ruby-forum.com/topic/134164
-        buffer_size = 1.megabytes
-        buffer = "\x00" * buffer_size # fixed-size malloc optimization
-        while stdout.read(buffer_size, buffer)
-          tempfile.write buffer
-          if tempfile.size > Backup::MAX_FILESIZE
-            raise 'Max filesize exceeded.'
-          end
-        end
+        # # Reuse buffer: http://www.ruby-forum.com/topic/134164
+        # buffer_size = 1.megabytes
+        # buffer = "\x00" * buffer_size # fixed-size malloc optimization
+        # while stdout.read(buffer_size, buffer)
+        #   tempfile.write buffer
+        #   if tempfile.size > MAX_FILESIZE
+        #     raise 'Max filesize exceeded.'
+        #   end
+        # end
+        # tempfile.flush
      rescue
        Process.kill(:INT, pid) rescue SystemCallError
-        @agent.send_request(api: 'backup/fail', params: { backup_id: backup_id, code: 'dump_error', message: $!.to_s })
-        abort
+        @database.finalize if @database.respond_to?(:finalize)
+        @agent.api_request('backup/fail', :params => { :backup_id => backup_id, :code => 'dump_error', :message => $!.to_s })
+        exit!
      ensure
        [stdin, stdout, stderr].each{|io| io.close unless io.closed? }
        Process.waitpid(pid)
      end
-      tempfile.flush
-
      dump_duration = Time.now - start_at
      log "dump_duration = #{dump_duration}"
 
      upload_to_s3(json[:url], json[:fields], tempfile.path, filename)
-      json = @agent.send_request(api: 'backup/commit', params: { backup_id: backup_id, dump_duration: dump_duration.to_i })
+      json = @agent.api_request('backup/commit', :params => { :backup_id => backup_id, :dump_duration => dump_duration.to_i })
    rescue
      log_last_error
    ensure
      tempfile.close(true)
+      @database.finalize if @database.respond_to?(:finalize)
    end
 
    # Upload
    def upload_to_s3(url, fields, local_file, remote_file)
      require 'net/http/post/multipart'