lib/dbox/syncer.rb in dbox-0.5.2 vs lib/dbox/syncer.rb in dbox-0.5.3

- old
+ new

@@ -211,11 +211,12 @@ def execute remove_tmpfiles dir = database.root_dir changes = calculate_changes(dir) log.debug "Executing changes:\n" + changes.map {|c| c.inspect }.join("\n") - changelist = { :created => [], :deleted => [], :updated => [] } + parent_ids_of_failed_entries = [] + changelist = { :created => [], :deleted => [], :updated => [], :failed => [] } # spin up a parallel task queue ptasks = ParallelTasks.new(MAX_PARALLEL_DBOX_OPS - 1) { clone_api_into_current_thread() } ptasks.start @@ -235,10 +236,12 @@ create_file(c) database.add_entry(c[:path], false, c[:parent_id], c[:modified], c[:revision], c[:hash]) changelist[:created] << c[:path] rescue Dbox::ServerError => e log.error "Error while downloading #{c[:path]}: #{e.inspect}" + parent_ids_of_failed_entries << c[:parent_id] + changelist[:failed] << { :operation => :create, :path => c[:path], :error => e } end end end when :update if c[:is_dir] @@ -251,10 +254,12 @@ update_file(c) database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :hash => c[:hash]) changelist[:updated] << c[:path] rescue Dbox::ServerError => e log.error "Error while downloading #{c[:path]}: #{e.inspect}" + parent_ids_of_failed_entries << c[:parent_id] + changelist[:failed] << { :operation => :update, :path => c[:path], :error => e } end end end when :delete c[:is_dir] ? delete_dir(c) : delete_file(c) @@ -266,10 +271,16 @@ end # wait for operations to finish ptasks.finish + # clear hashes on any dirs with children that failed so that + # they are processed again on next pull + parent_ids_of_failed_entries.uniq.each do |id| + database.update_entry_by_id(id, :hash => nil) + end + # sort & return output changelist.keys.each {|k| changelist[k].sort! 
} changelist end @@ -417,11 +428,11 @@ def execute dir = database.root_dir changes = calculate_changes(dir) log.debug "Executing changes:\n" + changes.map {|c| c.inspect }.join("\n") - changelist = { :created => [], :deleted => [], :updated => [] } + changelist = { :created => [], :deleted => [], :updated => [], :failed => [] } # spin up a parallel task queue ptasks = ParallelTasks.new(MAX_PARALLEL_DBOX_OPS - 1) { clone_api_into_current_thread() } ptasks.start @@ -429,28 +440,27 @@ case op when :create c[:parent_id] ||= lookup_id_by_path(c[:parent_path]) if c[:is_dir] - database.add_entry(c[:path], true, c[:parent_id], nil, nil, nil) - # directory creation cannot go in a thread, since later # operations might depend on the directory being there create_dir(c) + database.add_entry(c[:path], true, c[:parent_id], nil, nil, nil) force_metadata_update_from_server(c) changelist[:created] << c[:path] else - database.add_entry(c[:path], false, c[:parent_id], nil, nil, nil) - # spin up a thread to upload the file ptasks.add do begin upload_file(c) + database.add_entry(c[:path], false, c[:parent_id], nil, nil, nil) force_metadata_update_from_server(c) changelist[:created] << c[:path] rescue Dbox::ServerError => e log.error "Error while uploading #{c[:path]}: #{e.inspect}" + changelist[:failed] << { :operation => :create, :path => c[:path], :error => e } end end end when :update existing = database.find_by_path(c[:path]) @@ -467,10 +477,11 @@ upload_file(c) force_metadata_update_from_server(c) changelist[:updated] << c[:path] rescue Dbox::ServerError => e log.error "Error while uploading #{c[:path]}: #{e.inspect}" + changelist[:failed] << { :operation => :update, :path => c[:path], :error => e } end end end when :delete # spin up a thread to delete the file/dir @@ -485,11 +496,12 @@ rescue Dbox::RemoteMissing # safe to delete even if remote is already gone end database.delete_entry_by_path(c[:path]) changelist[:deleted] << c[:path] - rescue Dbox::ServerError + rescue 
Dbox::ServerError => e log.error "Error while deleting #{c[:path]}: #{e.inspect}" + changelist[:failed] << { :operation => :delete, :path => c[:path], :error => e } end end else raise(RuntimeError, "Unknown operation type: #{op}") end