lib/s3sync/sync.rb in s3sync-2.0.2 vs lib/s3sync/sync.rb in s3sync-2.0.4
- old
+ new
@@ -170,10 +170,11 @@
[same, to_add_to_2, to_remove_from_2]
end
def initialize args, source, destination
+ # @args.s3 should be of type Aws::S3::Resource
@args = args
@source = source
@destination = destination
end
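The new comment pins down the contract: @args.s3 is now an aws-sdk v2 Aws::S3::Resource rather than the v1 AWS::S3 object. A minimal sketch of constructing one, assuming aws-sdk v2 is installed (the region and credential values are placeholders, not values from s3sync):

    require 'aws-sdk'  # aws-sdk v2
    # Credentials may also be picked up from ENV or ~/.aws/credentials.
    s3 = Aws::S3::Resource.new(
      region: 'us-east-1',                                   # placeholder
      credentials: Aws::Credentials.new('KEY_ID', 'SECRET')  # placeholder
    )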
@@ -300,15 +301,18 @@
def read_tree_remote location
dir = location.path
dir += '/' if not dir.empty? and not dir.end_with?('/')
nodes = {}
- @args.s3.buckets[location.bucket].objects.with_prefix(dir || "").to_a.collect do |obj|
+
+ @args.s3.bucket(location.bucket).objects(prefix: (dir || "")).to_a.collect do |obj|
# etag comes back with quotes: obj.etag.inspect # => "\"abc...def\""
small_comparator = lambda { obj.etag[/[a-z0-9]+/] }
node = Node.new(location.path, obj.key, obj.content_length, small_comparator)
- nodes[node.path] = node
+ # The key is the path relative to dir. The range form keeps the
+ # whole remainder even when dir is empty, instead of dropping the
+ # final character.
+ key = node.path[(dir || "").length..-1]
+ nodes[key] = node
end
return nodes
end
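For reference, the v2 listing call used above can be exercised on its own; each yielded item is an Aws::S3::ObjectSummary. A sketch reusing the s3 resource from the earlier snippet (bucket name and prefix are made up):

    bucket = s3.bucket('example-bucket')             # hypothetical bucket
    bucket.objects(prefix: 'photos/').each do |obj|  # paginates transparently
      # obj.etag.inspect # => "\"abc...def\"" -- quotes included, as noted above
      puts "#{obj.key} (#{obj.size} bytes)"
    end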
def read_trees source, destination
@@ -324,16 +328,16 @@
end
def upload_files remote, list
list.each do |e|
if @args.verbose
- puts " + #{e.full} => #{remote}#{e.path}"
+ puts " + #{e.full} => #{remote.path}#{e.path}"
end
unless @args.dry_run
remote_path = "#{remote.path}#{e.path}"
- @args.s3.buckets[remote.bucket].objects[remote_path].write Pathname.new(e.full), :acl => @args.acl
+ @args.s3.bucket(remote.bucket).object(remote_path).upload_file(Pathname.new(e.full), acl: @args.acl)
end
end
end
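upload_file is the v2 managed uploader: it streams the file from disk and switches to multipart uploads for large files on its own. A standalone sketch reusing the s3 resource from above (bucket, key, and local path are placeholders):

    obj = s3.bucket('example-bucket').object('docs/report.pdf')
    obj.upload_file('/tmp/report.pdf', acl: 'public-read')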
def remove_files remote, list
@@ -342,25 +346,27 @@
puts " - #{remote}#{e.path}"
}
end
unless @args.dry_run
- @args.s3.buckets[remote.bucket].objects.delete_if { |obj| list.map(&:path).include? obj.key }
+ list.map(&:path).each do |object_key|
+ @args.s3.bucket(remote.bucket).object(object_key).delete
+ end
end
end
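The loop above issues one DELETE request per key. If that ever matters, the v2 client also offers delete_objects, which removes up to 1000 keys per request; a sketch of that alternative body, under the same @args, remote, and list assumptions as remove_files:

    keys = list.map(&:path)
    keys.each_slice(1000) do |slice|  # delete_objects caps out at 1000 keys
      @args.s3.client.delete_objects(
        bucket: remote.bucket,
        delete: { objects: slice.map { |k| { key: k } } }
      )
    end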
def download_files destination, source, list
puts list
list.each {|e|
path = File.join destination.path, e.path
if @args.verbose
- puts " + #{source}#{e.path} => #{path}"
+ puts " + #{source.bucket}:#{e.path} => #{path}"
end
unless @args.dry_run
- obj = @args.s3.buckets[source.bucket].objects[e.path]
+ obj = @args.s3.bucket(source.bucket).object(e.path)
# Making sure this new file will have a safe shelter
FileUtils.mkdir_p File.dirname(path)
# in some cases the s3 object will have a trailing '/' indicating
@@ -369,17 +375,13 @@
if path[-1] == '/'
FileUtils.mkdir_p path
else
# Downloading and saving the files
File.open(path, 'wb') do |file|
- begin
- obj.read do |chunk|
- file.write chunk
- end
- rescue AWS::Core::Http::NetHttpHandler::TruncatedBodyError => e
- $stderr.puts "WARNING: (retryable) TruncatedBodyError occured, retrying in a second #{file.basename}"
- sleep 1
- retry
+ # By default Aws::S3::Client will retry 3 times if there is a network error.
+ # To increase this number or disable it, set :retry_limit when instantiating the S3 client.
+ obj.get do |chunk|
+ file.write chunk
end
end
end
end
}
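Two footnotes on the download path: besides the chunked block form used above, get(response_target:) writes an object straight to disk, and the retry count mentioned in the comment is set when the client is built. A sketch of both (the local path and limit value are placeholders):

    # Save an object directly to a file instead of streaming chunks.
    obj.get(response_target: '/tmp/download.bin')

    # Raise the network-error retry count from its default of 3 (0 disables it).
    client = Aws::S3::Client.new(retry_limit: 5)
    s3 = Aws::S3::Resource.new(client: client)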