lib/io_streams/paths/s3.rb in iostreams-1.6.2 vs lib/io_streams/paths/s3.rb in iostreams-1.7.0
- old
+ new
@@ -1,11 +1,11 @@
require "uri"
module IOStreams
module Paths
class S3 < IOStreams::Path
- attr_reader :bucket_name, :client, :options
+ attr_reader :bucket_name, :options
# Largest file size supported by the S3 copy object api.
S3_COPY_OBJECT_SIZE_LIMIT = 5 * 1024 * 1024 * 1024
# Arguments:
@@ -139,21 +139,22 @@
uri = Utils::URI.new(url)
raise "Invalid URI. Required Format: 's3://<bucket_name>/<key>'" unless uri.scheme == "s3"
@bucket_name = uri.hostname
key = uri.path.sub(%r{\A/}, "")
- if client.is_a?(Hash)
- client[:access_key_id] = access_key_id if access_key_id
- client[:secret_access_key] = secret_access_key if secret_access_key
- @client = ::Aws::S3::Client.new(client)
+
+ if client && !client.is_a?(Hash)
+ @client = client
else
- @client = client || ::Aws::S3::Client.new(access_key_id: access_key_id, secret_access_key: secret_access_key)
+ @client_options = client.is_a?(Hash) ? client.dup : {}
+ @client_options[:access_key_id] = access_key_id if access_key_id
+ @client_options[:secret_access_key] = secret_access_key if secret_access_key
end
+
@options = args
+ @options.merge!(uri.query.transform_keys(&:to_sym)) if uri.query
- @options.merge(uri.query) if uri.query
-
super(key)
end
def to_s
::File.join("s3://", bucket_name, path)
@@ -188,27 +189,29 @@
delete
target
end
# Copy this S3 object directly within S3 itself when possible, avoiding a
# download/upload round-trip.
#
# Falls back to the streamed copy in the superclass when streams must be
# applied (convert: true), when the object exceeds the S3 copy_object API
# size limit, or when the target path is not also on S3.
def copy_to(target_path, convert: true, **args)
  streamed_copy_required = convert || size.to_i >= S3_COPY_OBJECT_SIZE_LIMIT
  return super(target_path, convert: convert, **args) if streamed_copy_required

  target = IOStreams.new(target_path)
  unless target.is_a?(self.class)
    return super(target, convert: convert, **args)
  end

  copy_source = ::File.join(bucket_name, path)
  request     = options.merge(bucket: target.bucket_name, key: target.path, copy_source: copy_source)
  client.copy_object(request)
  target
end
# Copy an object into this S3 path directly within S3 itself when possible,
# avoiding a download/upload round-trip.
#
# Falls back to the streamed copy in the superclass when streams must be
# applied (convert: true), when the source path is not also on S3, or when
# the source object exceeds the S3 copy_object API size limit.
def copy_from(source_path, convert: true, **args)
  # Forward `convert` by variable (it is necessarily true here) so this call
  # stays consistent with #copy_to and remains correct if the guard changes.
  return super(source_path, convert: convert, **args) if convert

  source = IOStreams.new(source_path)
  if !source.is_a?(self.class) || (source.size.to_i >= S3_COPY_OBJECT_SIZE_LIMIT)
    return super(source, convert: convert, **args)
  end

  source_name = ::File.join(source.bucket_name, source.path)
  client.copy_object(options.merge(bucket: bucket_name, key: path, copy_source: source_name))
end
@@ -309,9 +312,14 @@
end
# On S3, partially written files are never visible: an object only appears
# once its upload has completed successfully, so readers never observe
# in-progress writes.
def partial_files_visible?
  false
end

# Lazily build the AWS S3 client on first use, since constructing
# Aws::S3::Client is expensive (takes around two seconds). The instance is
# memoized, so subsequent calls return the same client.
def client
  return @client if @client

  @client = ::Aws::S3::Client.new(@client_options)
end
end
end
end