lib/google/cloud/storage/file.rb in google-cloud-storage-1.10.0 vs lib/google/cloud/storage/file.rb in google-cloud-storage-1.11.0
- old
+ new
@@ -409,24 +409,25 @@
updater.check_for_changed_metadata!
update_gapi! updater.updates unless updater.updates.empty?
end
##
- # Download the file's contents to a local file or an IO instance.
+ # Download the file's contents to a local file or a File-like object.
#
# By default, the download is verified by calculating the MD5 digest.
#
# If a [customer-supplied encryption
# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
# was used with {Bucket#create_file}, the `encryption_key` option must
# be provided.
#
- # @param [String, IO] path The path on the local file system to write
- # the data to. The path provided must be writable. Can also be an IO
- # object, or IO-ish object like StringIO. If an IO object, the object
- # will be written to, not the filesystem. If omitted, a new StringIO
- # instance will be written to and returned. Optional.
+ # @param [String, ::File] path The path on the local file system to
+ # write the data to. The path provided must be writable. Can also be
+ #   a File object, or a File-like object such as StringIO. If a file
+ # object, the object will be written to, not the filesystem. If
+ # omitted, a new StringIO instance will be written to and returned.
+ # Optional.
# @param [Symbol] verify The verification algorithm used to ensure the
# downloaded file contents are correct. Default is `:md5`.
#
# Acceptable values are:
#
@@ -436,23 +437,30 @@
# * `none` - Don't perform file content verification.
#
# @param [String] encryption_key Optional. The customer-supplied,
# AES-256 encryption key used to encrypt the file, if one was provided
# to {Bucket#create_file}.
+ #
+ # @param [Range, String] range Optional. The byte range of the file's
+ # contents to download or a string header value. Provide this to
+ # perform a partial download. When a range is provided, no
+ # verification is performed regardless of the `verify` parameter's
+ # value.
+ #
# @param [Boolean] skip_decompress Optional. If `true`, the data for a
# Storage object returning a `Content-Encoding: gzip` response header
# will *not* be automatically decompressed by this client library. The
# default is `nil`. Note that all requests by this client library send
# the `Accept-Encoding: gzip` header, so decompressive transcoding is
# not performed in the Storage service. (See [Transcoding of
# gzip-compressed files](https://cloud.google.com/storage/docs/transcoding))
#
- # @return [IO] Returns an IO object representing the file data. This
- # will ordinarily be a `::File` object referencing the local file
- # system. However, if the argument to `path` is `nil`, a StringIO
- # instance will be returned. If the argument to `path` is an IO
- # object, then that object will be returned.
+ # @return [::File, StringIO] Returns a file object representing the file
+ # data. This will ordinarily be a `::File` object referencing the
+ # local file system. However, if the argument to `path` is `nil`, a
+ #   StringIO instance will be returned. If the argument to `path` is a
+ #   File-like object, then that object will be returned.
#
# @example
# require "google/cloud/storage"
#
# storage = Google::Cloud::Storage.new
@@ -540,21 +548,36 @@
#
# # The downloaded data remains compressed with skip_decompress.
# file.download "path/to/downloaded/gzipped.txt",
# skip_decompress: true
#
- def download path = nil, verify: :md5, encryption_key: nil,
+ # @example Partially download the file's contents:
+ #
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ # bucket = storage.bucket "my-bucket"
+ # file = bucket.file "path/to/my-file.ext"
+ #
+ # downloaded = file.download range: 6..10
+ # downloaded.rewind
+ # downloaded.read #=> "world"
+ #
+ def download path = nil, verify: :md5, encryption_key: nil, range: nil,
skip_decompress: nil
ensure_service!
if path.nil?
path = StringIO.new
path.set_encoding "ASCII-8BIT"
end
- file, resp = service.download_file \
- bucket, name, path, key: encryption_key, user_project: user_project
+ file, resp =
+ service.download_file bucket, name, path,
+ key: encryption_key, range: range,
+ user_project: user_project
# FIX: downloading with encryption key will return nil
file ||= ::File.new(path)
+ verify = :none if range
verify_file! file, verify
if !skip_decompress &&
Array(resp.header["Content-Encoding"]).include?("gzip")
file = gzip_decompress file
end
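A quick sketch of the new range parameter, reusing the hypothetical "hello world" object from the @example above: the value may also be given as a raw Range header string, and checksum verification is skipped whenever a range is supplied, so verify: is effectively ignored for partial downloads.

  require "google/cloud/storage"

  storage = Google::Cloud::Storage.new
  bucket = storage.bucket "my-bucket"
  file = bucket.file "path/to/my-file.ext"

  # "bytes=0-4" covers the first five bytes, the same as range: 0..4;
  # no MD5/CRC32c check is performed for a partial download.
  downloaded = file.download range: "bytes=0-4"
  downloaded.rewind
  downloaded.read #=> "hello"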
@@ -644,35 +667,159 @@
# file.copy "new-destination-bucket",
# "path/to/destination/file.ext" do |f|
# f.metadata["copied_from"] = "#{file.bucket}/#{file.name}"
# end
#
- def copy dest_bucket_or_path, dest_path = nil, acl: nil,
- generation: nil, encryption_key: nil
+ def copy dest_bucket_or_path, dest_path = nil,
+ acl: nil, generation: nil, encryption_key: nil
+ rewrite dest_bucket_or_path, dest_path,
+ acl: acl, generation: generation,
+ encryption_key: encryption_key,
+ new_encryption_key: encryption_key do |updater|
+ yield updater if block_given?
+ end
+ end
+
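With this change copy is simply rewrite called with the same customer-supplied key as both the read key and the write key, so a copy of an encrypted object remains readable with the original key. A brief sketch, assuming encryption_key holds the key that was passed to Bucket#create_file:

  file = bucket.file "path/to/my-file.ext"
  copied = file.copy "path/to/destination/file.ext",
                     encryption_key: encryption_key
  # The copy is encrypted with the same key as the source.
  copied.download encryption_key: encryption_key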
+ ##
+ # [Rewrites](https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite)
+ # the file to a new location. The same location can also be provided
+ # to rewrite the file in place.
+ #
+ # If a [customer-supplied encryption
+ # key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
+ # was used with {Bucket#create_file}, the `encryption_key` option must
+ # be provided. Unlike {#copy}, separate encryption keys are used to read
+ # (encryption_key) and to write (new_encryption_key) file contents.
+ #
+ # @param [String] dest_bucket_or_path Either the bucket to rewrite the
+ # file to, or the path to rewrite the file to in the current bucket.
+ # @param [String] dest_path If a bucket was provided in the first
+ # parameter, this contains the path to rewrite the file to in the
+ # given bucket.
+ # @param [String] acl A predefined set of access controls to apply to
+ #   the new file.
+ #
+ # Acceptable values are:
+ #
+ # * `auth`, `auth_read`, `authenticated`, `authenticated_read`,
+ # `authenticatedRead` - File owner gets OWNER access, and
+ # allAuthenticatedUsers get READER access.
+ # * `owner_full`, `bucketOwnerFullControl` - File owner gets OWNER
+ # access, and project team owners get OWNER access.
+ # * `owner_read`, `bucketOwnerRead` - File owner gets OWNER access,
+ # and project team owners get READER access.
+ # * `private` - File owner gets OWNER access.
+ # * `project_private`, `projectPrivate` - File owner gets OWNER
+ # access, and project team members get access according to their
+ # roles.
+ # * `public`, `public_read`, `publicRead` - File owner gets OWNER
+ # access, and allUsers get READER access.
+ # @param [Integer] generation Select a specific revision of the file to
+ # rewrite. The default is the latest version.
+ # @param [String] encryption_key Optional. The customer-supplied,
+ # AES-256 encryption key used to decrypt the file, if the existing
+ # file is encrypted.
+ # @param [String] new_encryption_key Optional. The customer-supplied,
+ # AES-256 encryption key used to encrypt the file, if the rewritten
+ # file is intended to be encrypted.
+ # @yield [file] a block yielding a delegate object for updating
+ #
+ # @return [Google::Cloud::Storage::File]
+ #
+ # @example The file can be rewritten to a new path in the bucket:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # file = bucket.file "path/to/my-file.ext"
+ # file.rewrite "path/to/destination/file.ext"
+ #
+ # @example The file can also be rewritten to a different bucket:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # file = bucket.file "path/to/my-file.ext"
+ # file.rewrite "new-destination-bucket",
+ # "path/to/destination/file.ext"
+ #
+ # @example The file can also be rewritten by specifying a generation:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # file = bucket.file "path/to/my-file.ext"
+ # file.rewrite "copy/of/previous/generation/file.ext",
+ # generation: 123456
+ #
+ # @example The file can be modified during rewriting:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # file = bucket.file "path/to/my-file.ext"
+ # file.rewrite "new-destination-bucket",
+ # "path/to/destination/file.ext" do |f|
+ # f.metadata["rewritten_from"] = "#{file.bucket}/#{file.name}"
+ # end
+ #
+ # @example The file can be rewritten with a new encryption key:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # # Old key was stored securely for later use.
+ # old_key = "y\x03\"\x0E\xB6\xD3\x9B\x0E\xAB*\x19\xFAv\xDEY\xBEI..."
+ #
+ # # Key generation shown for example purposes only. Write your own.
+ # cipher = OpenSSL::Cipher.new "aes-256-cfb"
+ # cipher.encrypt
+ # new_key = cipher.random_key
+ #
+ # file = bucket.file "path/to/my-file.ext"
+ # file.rewrite "new-destination-bucket",
+ # "path/to/destination/file.ext",
+ # encryption_key: old_key,
+ # new_encryption_key: new_key do |f|
+ # f.metadata["rewritten_from"] = "#{file.bucket}/#{file.name}"
+ # end
+ #
+ def rewrite dest_bucket_or_path, dest_path = nil,
+ acl: nil, generation: nil,
+ encryption_key: nil, new_encryption_key: nil
ensure_service!
- options = { acl: acl, generation: generation, key: encryption_key,
- user_project: user_project }
- dest_bucket, dest_path, options = fix_copy_args dest_bucket_or_path,
- dest_path, options
+ dest_bucket, dest_path = fix_rewrite_args dest_bucket_or_path,
+ dest_path
- copy_gapi = nil
+ update_gapi = nil
if block_given?
updater = Updater.new gapi
yield updater
updater.check_for_changed_metadata!
- copy_gapi = gapi_from_attrs(updater.updates) if updater.updates.any?
+ if updater.updates.any?
+ update_gapi = gapi_from_attrs updater.updates
+ end
end
- resp = service.copy_file bucket, name, dest_bucket, dest_path,
- copy_gapi, options
- until resp.done
- sleep 1
- resp = service.copy_file bucket, name, dest_bucket, dest_path,
- copy_gapi,
- options.merge(token: resp.rewrite_token)
- end
- File.from_gapi resp.resource, service, user_project: user_project
+ new_gapi = rewrite_gapi bucket, name, update_gapi,
+ new_bucket: dest_bucket, new_name: dest_path,
+ acl: acl, generation: generation,
+ encryption_key: encryption_key,
+ new_encryption_key: new_encryption_key,
+ user_project: user_project
+
+ File.from_gapi new_gapi, service, user_project: user_project
end
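Because the destination may equal the source, rewrite also handles in-place rewrites, for example to stamp new metadata onto the object without moving it. A sketch reusing the hypothetical file handle from the examples above:

  file = bucket.file "path/to/my-file.ext"
  file.rewrite file.bucket, file.name do |f|
    f.metadata["rewritten_at"] = Time.now.utc.to_s
  end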
##
# [Rewrites](https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite)
# the file to the same {#bucket} and {#name} with a new
@@ -717,21 +864,12 @@
# new_key = cipher.random_key
#
# file.rotate encryption_key: old_key, new_encryption_key: new_key
#
def rotate encryption_key: nil, new_encryption_key: nil
- ensure_service!
- options = { source_key: encryption_key,
- destination_key: new_encryption_key,
- user_project: user_project }
- gapi = service.rewrite_file bucket, name, bucket, name, nil, options
- until gapi.done
- sleep 1
- options[:token] = gapi.rewrite_token
- gapi = service.rewrite_file bucket, name, bucket, name, nil, options
- end
- File.from_gapi gapi.resource, service, user_project: user_project
+ rewrite bucket, name, encryption_key: encryption_key,
+ new_encryption_key: new_encryption_key
end
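rotate is now just an in-place rewrite with distinct source and destination keys, so the two calls below should behave identically (old_key and new_key as in the rotate example above):

  file.rotate encryption_key: old_key, new_encryption_key: new_key
  # ...is equivalent to:
  file.rewrite file.bucket, file.name,
               encryption_key: old_key,
               new_encryption_key: new_key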
##
# Permanently deletes the file.
#
@@ -1094,11 +1232,12 @@
return if update_gapi.nil?
ensure_service!
@gapi = if attributes.include? :storage_class
- rewrite_gapi bucket, name, update_gapi
+ rewrite_gapi \
+ bucket, name, update_gapi, user_project: user_project
else
service.patch_file \
bucket, name, update_gapi, user_project: user_project
end
end
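The rewrite path also backs the one attribute update that cannot be patched in place: a storage class change is sent through rewrite_gapi rather than patch_file. A one-line sketch, assuming the storage_class= setter defined elsewhere in this class:

  file.storage_class = "nearline"  # loops on rewrite_file until resp.done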
@@ -1110,33 +1249,39 @@
[attr, @gapi.send(attr)]
end]
Google::Apis::StorageV1::Object.new attr_params
end
- def rewrite_gapi bucket, name, update_gapi
+ def rewrite_gapi bucket, name, updated_gapi,
+ new_bucket: nil, new_name: nil, acl: nil,
+ generation: nil, encryption_key: nil,
+ new_encryption_key: nil, user_project: nil
+ new_bucket ||= bucket
+ new_name ||= name
+ options = { acl: File::Acl.predefined_rule_for(acl),
+ generation: generation, source_key: encryption_key,
+ destination_key: new_encryption_key,
+ user_project: user_project }.delete_if { |_k, v| v.nil? }
+
resp = service.rewrite_file \
- bucket, name, bucket, name, update_gapi, user_project: user_project
+ bucket, name, new_bucket, new_name, updated_gapi, options
until resp.done
sleep 1
+ retry_options = options.merge token: resp.rewrite_token
resp = service.rewrite_file \
- bucket, name, bucket, name, update_gapi,
- token: resp.rewrite_token, user_project: user_project
+ bucket, name, new_bucket, new_name, updated_gapi, retry_options
end
resp.resource
end
- def fix_copy_args dest_bucket, dest_path, options = {}
- if dest_path.respond_to?(:to_hash) && options.empty?
- options = dest_path
- dest_path = nil
- end
+ def fix_rewrite_args dest_bucket, dest_path
if dest_path.nil?
dest_path = dest_bucket
dest_bucket = bucket
end
dest_bucket = dest_bucket.name if dest_bucket.respond_to? :name
- options[:acl] = File::Acl.predefined_rule_for options[:acl]
- [dest_bucket, dest_path, options]
+ dest_path = dest_path.name if dest_path.respond_to? :name
+ [dest_bucket, dest_path]
end
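The argument fix-up above means rewrite (and therefore copy) accepts either a single destination path, a bucket name plus a path, or a Bucket object, which is reduced to its name. A short sketch with hypothetical names:

  file.rewrite "path/to/copy.ext"                    # same bucket, new path
  file.rewrite "other-bucket", "path/to/copy.ext"    # another bucket, by name
  other = storage.bucket "other-bucket"
  file.rewrite other, "path/to/copy.ext"             # Bucket object reduced to its name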
def verify_file! file, verify = :md5
verify_md5 = verify == :md5 || verify == :all
verify_crc32c = verify == :crc32c || verify == :all