lib/s3sync/sync.rb in s3sync-2.0.0 vs lib/s3sync/sync.rb in s3sync-2.0.1

- old
+ new

@@ -109,10 +109,20 @@
       @source = source
     end
 
     def list_files
       nodes = {}
+
+      # Create the directory if it does not exist
+      if not File.exists?(@source)
+        FileUtils.mkdir_p(@source)
+        return nodes
+      end
+
+      # The path declared in `@source` exists, yay! Now we need read
+      # the whole directory and add all the readable files to the
+      # `nodes` hash.
       Find.find(@source) do |file|
         begin
           st = File.stat file # Might fail
           raise if not st.readable? # We're not interested in things we can't read
         rescue
@@ -329,10 +339,11 @@
         @args.s3.buckets[remote.bucket].objects.delete_if { |obj| list.map(&:path).include? obj.key }
       end
     end
 
     def download_files destination, source, list
+      puts list
       list.each {|e|
         path = File.join destination.path, e.path
 
         if @args.verbose
           puts " + #{source}#{e.path} => #{path}"
@@ -342,13 +353,26 @@
         obj = @args.s3.buckets[source.bucket].objects[e.path]
 
         # Making sure this new file will have a safe shelter
         FileUtils.mkdir_p File.dirname(path)
 
-        # Downloading and saving the files
-        File.open(path, 'wb') do |file|
-          obj.read do |chunk|
-            file.write chunk
+        # in some cases the s3 object will have a trailing '/' indicating
+        # a folder (this behavior noticed when the s3 folder is
+        # created by Transmit)
+        if path[-1] == '/'
+          FileUtils.mkdir_p path
+        else
+          # Downloading and saving the files
+          File.open(path, 'wb') do |file|
+            begin
+              obj.read do |chunk|
+                file.write chunk
+              end
+            rescue AWS::Core::Http::NetHttpHandler::TruncatedBodyError => e
+              $stderr.puts "WARNING: (retryable) TruncatedBodyError occured, retrying in a second #{file.basename}"
+              sleep 1
+              retry
+            end
           end
         end
       }
     end
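
A note on the first hunk: `File.exists?` was already a deprecated alias of `File.exist?` when 2.0.1 shipped, and the alias was removed outright in Ruby 3.2. A minimal sketch of the same guard written against a current Ruby, with `source` passed in as a plain argument (a hypothetical standalone method, not s3sync's actual API):

    require 'fileutils'

    # Mirror of the 2.0.1 guard: if the local sync root is missing,
    # create it and report an empty node list, since a directory that
    # was just created has nothing to sync yet.
    def list_files(source)
      nodes = {}
      unless File.exist?(source)  # File.exists? is gone in Ruby >= 3.2
        FileUtils.mkdir_p(source)
        return nodes
      end
      nodes  # the Find.find walk from the diff would continue here
    end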
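
The trailing-slash branch in the last hunk exists because S3 has no real directories: GUI clients such as Transmit simulate a folder by writing a zero-byte object whose key ends in '/'. Opening such a path with `File.open(path, 'wb')` would fail, since the path names a directory, so the code materializes the directory instead. A small illustration of the same test in isolation, with made-up keys:

    require 'fileutils'

    # Keys as a bucket listing might return them; 'photos/' is the kind
    # of zero-byte placeholder a client like Transmit writes for a folder.
    ['photos/', 'photos/cat.jpg', 'README.txt'].each do |key|
      if key[-1] == '/'
        FileUtils.mkdir_p key         # placeholder object: just make the dir
      else
        puts "would download #{key}"  # regular object: stream it to disk
      end
    end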
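
Two things worth knowing about the new rescue clause: `retry` re-runs the whole `begin` block with no attempt limit, and the warning message calls `file.basename`, which Ruby's File instances do not respond to (`basename` exists only as the class method `File.basename`), so the warning itself would raise NoMethodError if the rescue ever fired. Below is a bounded variant of the same idea; the attempt cap and the rewind before retrying are assumptions for illustration, not s3sync 2.0.1 behavior, which retries forever and re-reads into a file still holding the partial data:

    require 'aws-sdk'   # aws-sdk v1, which defines the error class below
    require 'fileutils'

    MAX_ATTEMPTS = 3  # hypothetical cap; not part of s3sync

    def download_with_retry(obj, path)
      FileUtils.mkdir_p File.dirname(path)
      attempts = 0
      File.open(path, 'wb') do |file|
        begin
          obj.read { |chunk| file.write chunk }
        rescue AWS::Core::Http::NetHttpHandler::TruncatedBodyError
          attempts += 1
          raise if attempts >= MAX_ATTEMPTS
          $stderr.puts "WARNING: truncated body for #{File.basename(path)}, " \
                       "attempt #{attempts}/#{MAX_ATTEMPTS}"
          sleep 1
          file.rewind      # discard the partial write before re-reading
          file.truncate 0
          retry
        end
      end
    end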