generated/google/apis/storagetransfer_v1/classes.rb in google-api-client-0.43.0 vs generated/google/apis/storagetransfer_v1/classes.rb in google-api-client-0.44.0
- old
+ new
@@ -20,24 +20,21 @@
module Google
module Apis
module StoragetransferV1
- # AWS access key (see
- # [AWS Security
- # Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-
- # credentials.html)).
+ # AWS access key (see [AWS Security Credentials](https://docs.aws.amazon.com/
+ # general/latest/gr/aws-security-credentials.html)).
class AwsAccessKey
include Google::Apis::Core::Hashable
# Required. AWS access key ID.
# Corresponds to the JSON property `accessKeyId`
# @return [String]
attr_accessor :access_key_id
- # Required. AWS secret access key. This field is not returned in RPC
- # responses.
+ # Required. AWS secret access key. This field is not returned in RPC responses.
# Corresponds to the JSON property `secretAccessKey`
# @return [String]
attr_accessor :secret_access_key
def initialize(**args)
@@ -49,27 +46,23 @@
@access_key_id = args[:access_key_id] if args.key?(:access_key_id)
@secret_access_key = args[:secret_access_key] if args.key?(:secret_access_key)
end
end
- # An AwsS3Data resource can be a data source, but not a data sink.
- # In an AwsS3Data resource, an object's name is the S3 object's key name.
+ # An AwsS3Data resource can be a data source, but not a data sink. In an
+ # AwsS3Data resource, an object's name is the S3 object's key name.
class AwsS3Data
include Google::Apis::Core::Hashable
- # AWS access key (see
- # [AWS Security
- # Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-
- # credentials.html)).
+ # AWS access key (see [AWS Security Credentials](https://docs.aws.amazon.com/
+ # general/latest/gr/aws-security-credentials.html)).
# Corresponds to the JSON property `awsAccessKey`
# @return [Google::Apis::StoragetransferV1::AwsAccessKey]
attr_accessor :aws_access_key
- # Required. S3 Bucket name (see
- # [Creating a
- # bucket](https://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-
- # location-example.html)).
+ # Required. S3 Bucket name (see [Creating a bucket](https://docs.aws.amazon.com/
+ # AmazonS3/latest/dev/create-bucket-get-location-example.html)).
# Corresponds to the JSON property `bucketName`
# @return [String]
attr_accessor :bucket_name
def initialize(**args)
@@ -81,19 +74,17 @@
@aws_access_key = args[:aws_access_key] if args.key?(:aws_access_key)
@bucket_name = args[:bucket_name] if args.key?(:bucket_name)
end
end
- # An AzureBlobStorageData resource can be a data source, but not a data sink.
- # An AzureBlobStorageData resource represents one Azure container. The storage
- # account determines the [Azure
- # endpoint](https://docs.microsoft.com/en-us/azure/storage/common/storage-create-
- # storage-account#storage-account-endpoints).
- # In an AzureBlobStorageData resource, a blobs's name is the [Azure Blob
- # Storage blob's key
- # name](https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-
- # referencing-containers--blobs--and-metadata#blob-names).
+ # An AzureBlobStorageData resource can be a data source, but not a data sink. An
+ # AzureBlobStorageData resource represents one Azure container. The storage
+ # account determines the [Azure endpoint](https://docs.microsoft.com/en-us/azure/
+ # storage/common/storage-create-storage-account#storage-account-endpoints). In
+ # an AzureBlobStorageData resource, a blobs's name is the [Azure Blob Storage
+ # blob's key name](https://docs.microsoft.com/en-us/rest/api/storageservices/
+ # naming-and-referencing-containers--blobs--and-metadata#blob-names).
class AzureBlobStorageData
include Google::Apis::Core::Hashable
# Azure credentials
# Corresponds to the JSON property `azureCredentials`
@@ -124,15 +115,13 @@
# Azure credentials
class AzureCredentials
include Google::Apis::Core::Hashable
- # Required. Azure shared access signature. (see
- # [Grant limited access to Azure Storage resources using shared access
- # signatures
- # (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-
- # overview)).
+ # Required. Azure shared access signature. (see [Grant limited access to Azure
+ # Storage resources using shared access signatures (SAS)](https://docs.microsoft.
+ # com/en-us/azure/storage/common/storage-sas-overview)).
# Corresponds to the JSON property `sasToken`
# @return [String]
attr_accessor :sas_token
def initialize(**args)
@@ -145,34 +134,33 @@
end
end
# Represents a whole or partial calendar date, e.g. a birthday. The time of day
# and time zone are either specified elsewhere or are not significant. The date
- # is relative to the Proleptic Gregorian Calendar. This can represent:
- # * A full date, with non-zero year, month and day values
- # * A month and day value, with a zero year, e.g. an anniversary
- # * A year on its own, with zero month and day values
- # * A year and month value, with a zero day, e.g. a credit card expiration date
- # Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.
+ # is relative to the Proleptic Gregorian Calendar. This can represent: * A full
+ # date, with non-zero year, month and day values * A month and day value, with a
+ # zero year, e.g. an anniversary * A year on its own, with zero month and day
+ # values * A year and month value, with a zero day, e.g. a credit card
+ # expiration date Related types are google.type.TimeOfDay and `google.protobuf.
+ # Timestamp`.
class Date
include Google::Apis::Core::Hashable
- # Day of month. Must be from 1 to 31 and valid for the year and month, or 0
- # if specifying a year by itself or a year and month where the day is not
+ # Day of month. Must be from 1 to 31 and valid for the year and month, or 0 if
+ # specifying a year by itself or a year and month where the day is not
# significant.
# Corresponds to the JSON property `day`
# @return [Fixnum]
attr_accessor :day
- # Month of year. Must be from 1 to 12, or 0 if specifying a year without a
- # month and day.
+ # Month of year. Must be from 1 to 12, or 0 if specifying a year without a month
+ # and day.
# Corresponds to the JSON property `month`
# @return [Fixnum]
attr_accessor :month
- # Year of date. Must be from 1 to 9999, or 0 if specifying a date without
- # a year.
+ # Year of date. Must be from 1 to 9999, or 0 if specifying a date without a year.
# Corresponds to the JSON property `year`
# @return [Fixnum]
attr_accessor :year
def initialize(**args)
@@ -185,17 +173,15 @@
@month = args[:month] if args.key?(:month)
@year = args[:year] if args.key?(:year)
end
end
- # A generic empty message that you can re-use to avoid defining duplicated
- # empty messages in your APIs. A typical example is to use it as the request
- # or the response type of an API method. For instance:
- # service Foo `
- # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- # `
- # The JSON representation for `Empty` is empty JSON object ````.
+ # A generic empty message that you can re-use to avoid defining duplicated empty
+ # messages in your APIs. A typical example is to use it as the request or the
+ # response type of an API method. For instance: service Foo ` rpc Bar(google.
+ # protobuf.Empty) returns (google.protobuf.Empty); ` The JSON representation for
+ # `Empty` is empty JSON object ````.
class Empty
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
@@ -213,12 +199,12 @@
# A list of messages that carry the error details.
# Corresponds to the JSON property `errorDetails`
# @return [Array<String>]
attr_accessor :error_details
- # Required. A URL that refers to the target (a data source, a data sink,
- # or an object) with which the error is associated.
+ # Required. A URL that refers to the target (a data source, a data sink, or an
+ # object) with which the error is associated.
# Corresponds to the JSON property `url`
# @return [String]
attr_accessor :url
def initialize(**args)
@@ -230,12 +216,11 @@
@error_details = args[:error_details] if args.key?(:error_details)
@url = args[:url] if args.key?(:url)
end
end
- # A summary of errors by error code, plus a count and sample error log
- # entries.
+ # A summary of errors by error code, plus a count and sample error log entries.
class ErrorSummary
include Google::Apis::Core::Hashable
# Required.
# Corresponds to the JSON property `errorCode`
@@ -245,13 +230,12 @@
# Required. Count of this type of error.
# Corresponds to the JSON property `errorCount`
# @return [Fixnum]
attr_accessor :error_count
- # Error samples.
- # At most 5 error log entries will be recorded for a given
- # error code for a single transfer operation.
+ # Error samples. At most 5 error log entries will be recorded for a given error
+ # code for a single transfer operation.
# Corresponds to the JSON property `errorLogEntries`
# @return [Array<Google::Apis::StoragetransferV1::ErrorLogEntry>]
attr_accessor :error_log_entries
def initialize(**args)
@@ -264,20 +248,19 @@
@error_count = args[:error_count] if args.key?(:error_count)
@error_log_entries = args[:error_log_entries] if args.key?(:error_log_entries)
end
end
- # In a GcsData resource, an object's name is the Cloud Storage object's
- # name and its "last modification time" refers to the object's `updated`
- # property of Cloud Storage objects, which changes when the content or the
- # metadata of the object is updated.
+ # In a GcsData resource, an object's name is the Cloud Storage object's name and
+ # its "last modification time" refers to the object's `updated` property of
+ # Cloud Storage objects, which changes when the content or the metadata of the
+ # object is updated.
class GcsData
include Google::Apis::Core::Hashable
- # Required. Cloud Storage bucket name (see
- # [Bucket Name
- # Requirements](https://cloud.google.com/storage/docs/naming#requirements)).
+ # Required. Cloud Storage bucket name (see [Bucket Name Requirements](https://
+ # cloud.google.com/storage/docs/naming#requirements)).
# Corresponds to the JSON property `bucketName`
# @return [String]
attr_accessor :bucket_name
def initialize(**args)
@@ -308,46 +291,38 @@
@account_email = args[:account_email] if args.key?(:account_email)
end
end
# An HttpData resource specifies a list of objects on the web to be transferred
- # over HTTP. The information of the objects to be transferred is contained in
- # a file referenced by a URL. The first line in the file must be
- # `"TsvHttpData-1.0"`, which specifies the format of the file. Subsequent
- # lines specify the information of the list of objects, one object per list
- # entry. Each entry has the following tab-delimited fields:
- # * **HTTP URL** — The location of the object.
- # * **Length** — The size of the object in bytes.
- # * **MD5** — The base64-encoded MD5 hash of the object.
- # For an example of a valid TSV file, see
- # [Transferring data from
- # URLs](https://cloud.google.com/storage-transfer/docs/create-url-list).
- # When transferring data based on a URL list, keep the following in mind:
- # * When an object located at `http(s)://hostname:port/<URL-path>` is
- # transferred to a data sink, the name of the object at the data sink is
- # `<hostname>/<URL-path>`.
- # * If the specified size of an object does not match the actual size of the
- # object fetched, the object will not be transferred.
- # * If the specified MD5 does not match the MD5 computed from the transferred
- # bytes, the object transfer will fail. For more information, see
- # [Generating MD5
- # hashes](https://cloud.google.com/storage-transfer/docs/create-url-list#md5)
- # * Ensure that each URL you specify is publicly accessible. For
- # example, in Cloud Storage you can
- # [share an object publicly]
- # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get
- # a link to it.
- # * Storage Transfer Service obeys `robots.txt` rules and requires the source
- # HTTP server to support `Range` requests and to return a `Content-Length`
- # header in each response.
- # * ObjectConditions have no effect when filtering objects to transfer.
+ # over HTTP. The information of the objects to be transferred is contained in a
+ # file referenced by a URL. The first line in the file must be `"TsvHttpData-1.0"
+ # `, which specifies the format of the file. Subsequent lines specify the
+ # information of the list of objects, one object per list entry. Each entry has
+ # the following tab-delimited fields: * **HTTP URL** — The location of the
+ # object. * **Length** — The size of the object in bytes. * **MD5** — The base64-
+ # encoded MD5 hash of the object. For an example of a valid TSV file, see [
+ # Transferring data from URLs](https://cloud.google.com/storage-transfer/docs/
+ # create-url-list). When transferring data based on a URL list, keep the
+ # following in mind: * When an object located at `http(s)://hostname:port/` is
+ # transferred to a data sink, the name of the object at the data sink is `/`. *
+ # If the specified size of an object does not match the actual size of the
+ # object fetched, the object will not be transferred. * If the specified MD5
+ # does not match the MD5 computed from the transferred bytes, the object
+ # transfer will fail. For more information, see [Generating MD5 hashes](https://
+ # cloud.google.com/storage-transfer/docs/create-url-list#md5) * Ensure that each
+ # URL you specify is publicly accessible. For example, in Cloud Storage you can [
+ # share an object publicly] (https://cloud.google.com/storage/docs/cloud-console#
+ # _sharingdata) and get a link to it. * Storage Transfer Service obeys `robots.
+ # txt` rules and requires the source HTTP server to support `Range` requests and
+ # to return a `Content-Length` header in each response. * ObjectConditions have
+ # no effect when filtering objects to transfer.
class HttpData
include Google::Apis::Core::Hashable
- # Required. The URL that points to the file that stores the object list
- # entries. This file must allow public access. Currently, only URLs with
- # HTTP and HTTPS schemes are supported.
+ # Required. The URL that points to the file that stores the object list entries.
+ # This file must allow public access. Currently, only URLs with HTTP and HTTPS
+ # schemes are supported.
# Corresponds to the JSON property `listUrl`
# @return [String]
attr_accessor :list_url
def initialize(**args)
@@ -410,40 +385,34 @@
end
end
# Specification to configure notifications published to Cloud Pub/Sub.
# Notifications will be published to the customer-provided topic using the
- # following `PubsubMessage.attributes`:
- # * `"eventType"`: one of the EventType values
- # * `"payloadFormat"`: one of the PayloadFormat values
- # * `"projectId"`: the project_id of the
- # `TransferOperation`
- # * `"transferJobName"`: the
- # transfer_job_name of the
- # `TransferOperation`
- # * `"transferOperationName"`: the name of the
- # `TransferOperation`
- # The `PubsubMessage.data` will contain a TransferOperation resource
- # formatted according to the specified `PayloadFormat`.
+ # following `PubsubMessage.attributes`: * `"eventType"`: one of the EventType
+ # values * `"payloadFormat"`: one of the PayloadFormat values * `"projectId"`:
+ # the project_id of the `TransferOperation` * `"transferJobName"`: the
+ # transfer_job_name of the `TransferOperation` * `"transferOperationName"`: the
+ # name of the `TransferOperation` The `PubsubMessage.data` will contain a
+ # TransferOperation resource formatted according to the specified `PayloadFormat`
+ # .
class NotificationConfig
include Google::Apis::Core::Hashable
- # Event types for which a notification is desired. If empty, send
- # notifications for all event types.
+ # Event types for which a notification is desired. If empty, send notifications
+ # for all event types.
# Corresponds to the JSON property `eventTypes`
# @return [Array<String>]
attr_accessor :event_types
# Required. The desired format of the notification message payloads.
# Corresponds to the JSON property `payloadFormat`
# @return [String]
attr_accessor :payload_format
# Required. The `Topic.name` of the Cloud Pub/Sub topic to which to publish
- # notifications. Must be of the format: `projects/`project`/topics/`topic``.
- # Not matching this format will result in an
- # INVALID_ARGUMENT error.
+ # notifications. Must be of the format: `projects/`project`/topics/`topic``. Not
+ # matching this format will result in an INVALID_ARGUMENT error.
# Corresponds to the JSON property `pubsubTopic`
# @return [String]
attr_accessor :pubsub_topic
def initialize(**args)
@@ -456,52 +425,44 @@
@payload_format = args[:payload_format] if args.key?(:payload_format)
@pubsub_topic = args[:pubsub_topic] if args.key?(:pubsub_topic)
end
end
- # Conditions that determine which objects will be transferred. Applies only
- # to Cloud Data Sources such as S3, Azure, and Cloud Storage.
- # The "last modification time" refers to the time of the
- # last change to the object's content or metadata — specifically, this is
- # the `updated` property of Cloud Storage objects, the `LastModified` field of
- # S3 objects, and the `Last-Modified` header of Azure blobs.
+ # Conditions that determine which objects will be transferred. Applies only to
+ # Cloud Data Sources such as S3, Azure, and Cloud Storage. The "last
+ # modification time" refers to the time of the last change to the object's
+ # content or metadata — specifically, this is the `updated` property of Cloud
+ # Storage objects, the `LastModified` field of S3 objects, and the `Last-
+ # Modified` header of Azure blobs.
class ObjectConditions
include Google::Apis::Core::Hashable
- # `exclude_prefixes` must follow the requirements described for
- # include_prefixes.
+ # `exclude_prefixes` must follow the requirements described for include_prefixes.
# The max size of `exclude_prefixes` is 1000.
# Corresponds to the JSON property `excludePrefixes`
# @return [Array<String>]
attr_accessor :exclude_prefixes
- # If `include_prefixes` is specified, objects that satisfy the object
- # conditions must have names that start with one of the `include_prefixes`
- # and that do not start with any of the exclude_prefixes. If
- # `include_prefixes` is not specified, all objects except those that have
- # names starting with one of the `exclude_prefixes` must satisfy the object
- # conditions.
- # Requirements:
- # * Each include-prefix and exclude-prefix can contain any sequence of
- # Unicode characters, to a max length of 1024 bytes when UTF8-encoded,
- # and must not contain Carriage Return or Line Feed characters. Wildcard
- # matching and regular expression matching are not supported.
- # * Each include-prefix and exclude-prefix must omit the leading slash.
- # For example, to include the `requests.gz` object in a transfer from
- # `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include
- # prefix as `logs/y=2015/requests.gz`.
- # * None of the include-prefix or the exclude-prefix values can be empty,
- # if specified.
- # * Each include-prefix must include a distinct portion of the object
- # namespace. No include-prefix may be a prefix of another
- # include-prefix.
- # * Each exclude-prefix must exclude a distinct portion of the object
- # namespace. No exclude-prefix may be a prefix of another
- # exclude-prefix.
- # * If `include_prefixes` is specified, then each exclude-prefix must start
- # with the value of a path explicitly included by `include_prefixes`.
- # The max size of `include_prefixes` is 1000.
+ # If `include_prefixes` is specified, objects that satisfy the object conditions
+ # must have names that start with one of the `include_prefixes` and that do not
+ # start with any of the exclude_prefixes. If `include_prefixes` is not specified,
+ # all objects except those that have names starting with one of the `
+ # exclude_prefixes` must satisfy the object conditions. Requirements: * Each
+ # include-prefix and exclude-prefix can contain any sequence of Unicode
+ # characters, to a max length of 1024 bytes when UTF8-encoded, and must not
+ # contain Carriage Return or Line Feed characters. Wildcard matching and regular
+ # expression matching are not supported. * Each include-prefix and exclude-
+ # prefix must omit the leading slash. For example, to include the `requests.gz`
+ # object in a transfer from `s3://my-aws-bucket/logs/y=2015/requests.gz`,
+ # specify the include prefix as `logs/y=2015/requests.gz`. * None of the include-
+ # prefix or the exclude-prefix values can be empty, if specified. * Each include-
+ # prefix must include a distinct portion of the object namespace. No include-
+ # prefix may be a prefix of another include-prefix. * Each exclude-prefix must
+ # exclude a distinct portion of the object namespace. No exclude-prefix may be a
+ # prefix of another exclude-prefix. * If `include_prefixes` is specified, then
+ # each exclude-prefix must start with the value of a path explicitly included by
+ # `include_prefixes`. The max size of `include_prefixes` is 1000.
# Corresponds to the JSON property `includePrefixes`
# @return [Array<String>]
attr_accessor :include_prefixes
# If specified, only objects with a "last modification time" before this
@@ -509,39 +470,33 @@
# transferred.
# Corresponds to the JSON property `lastModifiedBefore`
# @return [String]
attr_accessor :last_modified_before
- # If specified, only objects with a "last modification time" on or after
- # this timestamp and objects that don't have a "last modification time" are
- # transferred.
- # The `last_modified_since` and `last_modified_before` fields can be used
- # together for chunked data processing. For example, consider a script that
- # processes each day's worth of data at a time. For that you'd set each
- # of the fields as follows:
- # * `last_modified_since` to the start of the day
- # * `last_modified_before` to the end of the day
+ # If specified, only objects with a "last modification time" on or after this
+ # timestamp and objects that don't have a "last modification time" are
+ # transferred. The `last_modified_since` and `last_modified_before` fields can
+ # be used together for chunked data processing. For example, consider a script
+ # that processes each day's worth of data at a time. For that you'd set each of
+ # the fields as follows: * `last_modified_since` to the start of the day * `
+ # last_modified_before` to the end of the day
# Corresponds to the JSON property `lastModifiedSince`
# @return [String]
attr_accessor :last_modified_since
- # If specified, only objects with a "last modification time" on or after
- # `NOW` - `max_time_elapsed_since_last_modification` and objects that don't
- # have a "last modification time" are transferred.
- # For each TransferOperation started by this TransferJob,
- # `NOW` refers to the start_time of the
- # `TransferOperation`.
+ # If specified, only objects with a "last modification time" on or after `NOW` -
+ # `max_time_elapsed_since_last_modification` and objects that don't have a "last
+ # modification time" are transferred. For each TransferOperation started by this
+ # TransferJob, `NOW` refers to the start_time of the `TransferOperation`.
# Corresponds to the JSON property `maxTimeElapsedSinceLastModification`
# @return [String]
attr_accessor :max_time_elapsed_since_last_modification
- # If specified, only objects with a "last modification time" before
- # `NOW` - `min_time_elapsed_since_last_modification` and objects that don't
- # have a "last modification time" are transferred.
- # For each TransferOperation started by this TransferJob, `NOW`
- # refers to the start_time of the
- # `TransferOperation`.
+ # If specified, only objects with a "last modification time" before `NOW` - `
+ # min_time_elapsed_since_last_modification` and objects that don't have a "last
+ # modification time" are transferred. For each TransferOperation started by this
+ # TransferJob, `NOW` refers to the start_time of the `TransferOperation`.
# Corresponds to the JSON property `minTimeElapsedSinceLastModification`
# @return [String]
attr_accessor :min_time_elapsed_since_last_modification
def initialize(**args)
@@ -562,24 +517,23 @@
# This resource represents a long-running operation that is the result of a
# network API call.
class Operation
include Google::Apis::Core::Hashable
- # If the value is `false`, it means the operation is still in progress.
- # If `true`, the operation is completed, and either `error` or `response` is
- # available.
+ # If the value is `false`, it means the operation is still in progress. If `true`
+ # , the operation is completed, and either `error` or `response` is available.
# Corresponds to the JSON property `done`
# @return [Boolean]
attr_accessor :done
alias_method :done?, :done
- # The `Status` type defines a logical error model that is suitable for
- # different programming environments, including REST APIs and RPC APIs. It is
- # used by [gRPC](https://github.com/grpc). Each `Status` message contains
- # three pieces of data: error code, error message, and error details.
- # You can find out more about this error model and how to work with it in the
- # [API Design Guide](https://cloud.google.com/apis/design/errors).
+ # The `Status` type defines a logical error model that is suitable for different
+ # programming environments, including REST APIs and RPC APIs. It is used by [
+ # gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
+ # data: error code, error message, and error details. You can find out more
+ # about this error model and how to work with it in the [API Design Guide](https:
+ # //cloud.google.com/apis/design/errors).
# Corresponds to the JSON property `error`
# @return [Google::Apis::StoragetransferV1::Status]
attr_accessor :error
# Represents the transfer operation object. To request a TransferOperation
@@ -593,18 +547,17 @@
# have the format of `transferOperations/some/unique/name`.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
- # The normal response of the operation in case of success. If the original
- # method returns no data on success, such as `Delete`, the response is
- # `google.protobuf.Empty`. If the original method is standard
- # `Get`/`Create`/`Update`, the response should be the resource. For other
- # methods, the response should have the type `XxxResponse`, where `Xxx`
- # is the original method name. For example, if the original method name
- # is `TakeSnapshot()`, the inferred response type is
- # `TakeSnapshotResponse`.
+ # The normal response of the operation in case of success. If the original
+ # method returns no data on success, such as `Delete`, the response is `google.
+ # protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`,
+ # the response should be the resource. For other methods, the response should
+ # have the type `XxxResponse`, where `Xxx` is the original method name. For
+ # example, if the original method name is `TakeSnapshot()`, the inferred
+ # response type is `TakeSnapshotResponse`.
# Corresponds to the JSON property `response`
# @return [Hash<String,Object>]
attr_accessor :response
def initialize(**args)
@@ -651,34 +604,34 @@
class Schedule
include Google::Apis::Core::Hashable
# Represents a whole or partial calendar date, e.g. a birthday. The time of day
# and time zone are either specified elsewhere or are not significant. The date
- # is relative to the Proleptic Gregorian Calendar. This can represent:
- # * A full date, with non-zero year, month and day values
- # * A month and day value, with a zero year, e.g. an anniversary
- # * A year on its own, with zero month and day values
- # * A year and month value, with a zero day, e.g. a credit card expiration date
- # Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.
+ # is relative to the Proleptic Gregorian Calendar. This can represent: * A full
+ # date, with non-zero year, month and day values * A month and day value, with a
+ # zero year, e.g. an anniversary * A year on its own, with zero month and day
+ # values * A year and month value, with a zero day, e.g. a credit card
+ # expiration date Related types are google.type.TimeOfDay and `google.protobuf.
+ # Timestamp`.
# Corresponds to the JSON property `scheduleEndDate`
# @return [Google::Apis::StoragetransferV1::Date]
attr_accessor :schedule_end_date
# Represents a whole or partial calendar date, e.g. a birthday. The time of day
# and time zone are either specified elsewhere or are not significant. The date
- # is relative to the Proleptic Gregorian Calendar. This can represent:
- # * A full date, with non-zero year, month and day values
- # * A month and day value, with a zero year, e.g. an anniversary
- # * A year on its own, with zero month and day values
- # * A year and month value, with a zero day, e.g. a credit card expiration date
- # Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.
+ # is relative to the Proleptic Gregorian Calendar. This can represent: * A full
+ # date, with non-zero year, month and day values * A month and day value, with a
+ # zero year, e.g. an anniversary * A year on its own, with zero month and day
+ # values * A year and month value, with a zero day, e.g. a credit card
+ # expiration date Related types are google.type.TimeOfDay and `google.protobuf.
+ # Timestamp`.
# Corresponds to the JSON property `scheduleStartDate`
# @return [Google::Apis::StoragetransferV1::Date]
attr_accessor :schedule_start_date
- # Represents a time of day. The date and time zone are either not significant
- # or are specified elsewhere. An API may choose to allow leap seconds. Related
+ # Represents a time of day. The date and time zone are either not significant or
+ # are specified elsewhere. An API may choose to allow leap seconds. Related
# types are google.type.Date and `google.protobuf.Timestamp`.
# Corresponds to the JSON property `startTimeOfDay`
# @return [Google::Apis::StoragetransferV1::TimeOfDay]
attr_accessor :start_time_of_day
@@ -692,33 +645,33 @@
@schedule_start_date = args[:schedule_start_date] if args.key?(:schedule_start_date)
@start_time_of_day = args[:start_time_of_day] if args.key?(:start_time_of_day)
end
end
- # The `Status` type defines a logical error model that is suitable for
- # different programming environments, including REST APIs and RPC APIs. It is
- # used by [gRPC](https://github.com/grpc). Each `Status` message contains
- # three pieces of data: error code, error message, and error details.
- # You can find out more about this error model and how to work with it in the
- # [API Design Guide](https://cloud.google.com/apis/design/errors).
+ # The `Status` type defines a logical error model that is suitable for different
+ # programming environments, including REST APIs and RPC APIs. It is used by [
+ # gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
+ # data: error code, error message, and error details. You can find out more
+ # about this error model and how to work with it in the [API Design Guide](https:
+ # //cloud.google.com/apis/design/errors).
class Status
include Google::Apis::Core::Hashable
# The status code, which should be an enum value of google.rpc.Code.
# Corresponds to the JSON property `code`
# @return [Fixnum]
attr_accessor :code
- # A list of messages that carry the error details. There is a common set of
+ # A list of messages that carry the error details. There is a common set of
# message types for APIs to use.
# Corresponds to the JSON property `details`
# @return [Array<Hash<String,Object>>]
attr_accessor :details
- # A developer-facing error message, which should be in English. Any
- # user-facing error message should be localized and sent in the
- # google.rpc.Status.details field, or localized by the client.
+ # A developer-facing error message, which should be in English. Any user-facing
+ # error message should be localized and sent in the google.rpc.Status.details
+ # field, or localized by the client.
# Corresponds to the JSON property `message`
# @return [String]
attr_accessor :message
def initialize(**args)
@@ -731,18 +684,18 @@
@details = args[:details] if args.key?(:details)
@message = args[:message] if args.key?(:message)
end
end
- # Represents a time of day. The date and time zone are either not significant
- # or are specified elsewhere. An API may choose to allow leap seconds. Related
+ # Represents a time of day. The date and time zone are either not significant or
+ # are specified elsewhere. An API may choose to allow leap seconds. Related
# types are google.type.Date and `google.protobuf.Timestamp`.
class TimeOfDay
include Google::Apis::Core::Hashable
- # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose
- # to allow the value "24:00:00" for scenarios like business closing time.
+ # Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to
+ # allow the value "24:00:00" for scenarios like business closing time.
# Corresponds to the JSON property `hours`
# @return [Fixnum]
attr_accessor :hours
# Minutes of hour of day. Must be from 0 to 59.
@@ -796,30 +749,29 @@
# Bytes that failed to be deleted from the data sink.
# Corresponds to the JSON property `bytesFailedToDeleteFromSink`
# @return [Fixnum]
attr_accessor :bytes_failed_to_delete_from_sink
- # Bytes found in the data source that are scheduled to be transferred,
- # excluding any that are filtered based on object conditions or skipped due
- # to sync.
+ # Bytes found in the data source that are scheduled to be transferred, excluding
+ # any that are filtered based on object conditions or skipped due to sync.
# Corresponds to the JSON property `bytesFoundFromSource`
# @return [Fixnum]
attr_accessor :bytes_found_from_source
# Bytes found only in the data sink that are scheduled to be deleted.
# Corresponds to the JSON property `bytesFoundOnlyFromSink`
# @return [Fixnum]
attr_accessor :bytes_found_only_from_sink
- # Bytes in the data source that failed to be transferred or that failed to
- # be deleted after being transferred.
+ # Bytes in the data source that failed to be transferred or that failed to be
+ # deleted after being transferred.
# Corresponds to the JSON property `bytesFromSourceFailed`
# @return [Fixnum]
attr_accessor :bytes_from_source_failed
- # Bytes in the data source that are not transferred because they already
- # exist in the data sink.
+ # Bytes in the data source that are not transferred because they already exist
+ # in the data sink.
# Corresponds to the JSON property `bytesFromSourceSkippedBySync`
# @return [Fixnum]
attr_accessor :bytes_from_source_skipped_by_sync
# Objects that are copied to the data sink.
@@ -841,29 +793,29 @@
# Corresponds to the JSON property `objectsFailedToDeleteFromSink`
# @return [Fixnum]
attr_accessor :objects_failed_to_delete_from_sink
# Objects found in the data source that are scheduled to be transferred,
- # excluding any that are filtered based on object conditions or skipped due
- # to sync.
+ # excluding any that are filtered based on object conditions or skipped due to
+ # sync.
# Corresponds to the JSON property `objectsFoundFromSource`
# @return [Fixnum]
attr_accessor :objects_found_from_source
# Objects found only in the data sink that are scheduled to be deleted.
# Corresponds to the JSON property `objectsFoundOnlyFromSink`
# @return [Fixnum]
attr_accessor :objects_found_only_from_sink
- # Objects in the data source that failed to be transferred or that failed
- # to be deleted after being transferred.
+ # Objects in the data source that failed to be transferred or that failed to be
+ # deleted after being transferred.
# Corresponds to the JSON property `objectsFromSourceFailed`
# @return [Fixnum]
attr_accessor :objects_from_source_failed
- # Objects in the data source that are not transferred because they already
- # exist in the data sink.
+ # Objects in the data source that are not transferred because they already exist
+ # in the data sink.
# Corresponds to the JSON property `objectsFromSourceSkippedBySync`
# @return [Fixnum]
attr_accessor :objects_from_source_skipped_by_sync
def initialize(**args)
@@ -904,50 +856,42 @@
# Output only. The time that the transfer job was deleted.
# Corresponds to the JSON property `deletionTime`
# @return [String]
attr_accessor :deletion_time
- # A description provided by the user for the job. Its max length is 1024
- # bytes when Unicode-encoded.
+ # A description provided by the user for the job. Its max length is 1024 bytes
+ # when Unicode-encoded.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# Output only. The time that the transfer job was last modified.
# Corresponds to the JSON property `lastModificationTime`
# @return [String]
attr_accessor :last_modification_time
- # A unique name (within the transfer project) assigned when the job is
- # created. If this field is empty in a CreateTransferJobRequest, Storage
- # Transfer Service will assign a unique name. Otherwise, the specified name
- # is used as the unique name for this job.
- # If the specified name is in use by a job, the creation request fails with
- # an ALREADY_EXISTS error.
- # This name must start with `"transferJobs/"` prefix and end with a letter or
- # a number, and should be no more than 128 characters.
- # Example: `"transferJobs/[A-Za-z0-9-._~]*[A-Za-z0-9]$"`
- # Invalid job names will fail with an
- # INVALID_ARGUMENT error.
+ # A unique name (within the transfer project) assigned when the job is created.
+ # If this field is empty in a CreateTransferJobRequest, Storage Transfer Service
+ # will assign a unique name. Otherwise, the specified name is used as the unique
+ # name for this job. If the specified name is in use by a job, the creation
+ # request fails with an ALREADY_EXISTS error. This name must start with `"
+ # transferJobs/"` prefix and end with a letter or a number, and should be no
+ # more than 128 characters. Example: `"transferJobs/[A-Za-z0-9-._~]*[A-Za-z0-9]$"
+ # ` Invalid job names will fail with an INVALID_ARGUMENT error.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Specification to configure notifications published to Cloud Pub/Sub.
# Notifications will be published to the customer-provided topic using the
- # following `PubsubMessage.attributes`:
- # * `"eventType"`: one of the EventType values
- # * `"payloadFormat"`: one of the PayloadFormat values
- # * `"projectId"`: the project_id of the
- # `TransferOperation`
- # * `"transferJobName"`: the
- # transfer_job_name of the
- # `TransferOperation`
- # * `"transferOperationName"`: the name of the
- # `TransferOperation`
- # The `PubsubMessage.data` will contain a TransferOperation resource
- # formatted according to the specified `PayloadFormat`.
+ # following `PubsubMessage.attributes`: * `"eventType"`: one of the EventType
+ # values * `"payloadFormat"`: one of the PayloadFormat values * `"projectId"`:
+ # the project_id of the `TransferOperation` * `"transferJobName"`: the
+ # transfer_job_name of the `TransferOperation` * `"transferOperationName"`: the
+ # name of the `TransferOperation` The `PubsubMessage.data` will contain a
+ # TransferOperation resource formatted according to the specified `PayloadFormat`
+ # .
# Corresponds to the JSON property `notificationConfig`
# @return [Google::Apis::StoragetransferV1::NotificationConfig]
attr_accessor :notification_config
# The ID of the Google Cloud Platform Project that owns the job.
@@ -958,17 +902,15 @@
# Transfers can be scheduled to recur or to run just once.
# Corresponds to the JSON property `schedule`
# @return [Google::Apis::StoragetransferV1::Schedule]
attr_accessor :schedule
- # Status of the job. This value MUST be specified for
- # `CreateTransferJobRequests`.
- # **Note:** The effect of the new job status takes place during a subsequent
- # job run. For example, if you change the job status from
- # ENABLED to DISABLED, and an operation
- # spawned by the transfer is running, the status change would not affect the
- # current operation.
+ # Status of the job. This value MUST be specified for `CreateTransferJobRequests`
+ # . **Note:** The effect of the new job status takes place during a subsequent
+ # job run. For example, if you change the job status from ENABLED to DISABLED,
+ # and an operation spawned by the transfer is running, the status change would
+ # not affect the current operation.
# Corresponds to the JSON property `status`
# @return [String]
attr_accessor :status
# Configuration for running a transfer.
@@ -1019,22 +961,17 @@
# @return [String]
attr_accessor :name
# Specification to configure notifications published to Cloud Pub/Sub.
# Notifications will be published to the customer-provided topic using the
- # following `PubsubMessage.attributes`:
- # * `"eventType"`: one of the EventType values
- # * `"payloadFormat"`: one of the PayloadFormat values
- # * `"projectId"`: the project_id of the
- # `TransferOperation`
- # * `"transferJobName"`: the
- # transfer_job_name of the
- # `TransferOperation`
- # * `"transferOperationName"`: the name of the
- # `TransferOperation`
- # The `PubsubMessage.data` will contain a TransferOperation resource
- # formatted according to the specified `PayloadFormat`.
+ # following `PubsubMessage.attributes`: * `"eventType"`: one of the EventType
+ # values * `"payloadFormat"`: one of the PayloadFormat values * `"projectId"`:
+ # the project_id of the `TransferOperation` * `"transferJobName"`: the
+ # transfer_job_name of the `TransferOperation` * `"transferOperationName"`: the
+ # name of the `TransferOperation` The `PubsubMessage.data` will contain a
+ # TransferOperation resource formatted according to the specified `PayloadFormat`
+ # .
# Corresponds to the JSON property `notificationConfig`
# @return [Google::Apis::StoragetransferV1::NotificationConfig]
attr_accessor :notification_config
# The ID of the Google Cloud Platform Project that owns the operation.
@@ -1083,22 +1020,20 @@
# TransferOptions define the actions to be performed on objects in a transfer.
class TransferOptions
include Google::Apis::Core::Hashable
- # Whether objects should be deleted from the source after they are
- # transferred to the sink.
- # **Note:** This option and delete_objects_unique_in_sink are mutually
- # exclusive.
+ # Whether objects should be deleted from the source after they are transferred
+ # to the sink. **Note:** This option and delete_objects_unique_in_sink are
+ # mutually exclusive.
# Corresponds to the JSON property `deleteObjectsFromSourceAfterTransfer`
# @return [Boolean]
attr_accessor :delete_objects_from_source_after_transfer
alias_method :delete_objects_from_source_after_transfer?, :delete_objects_from_source_after_transfer
- # Whether objects that exist only in the sink should be deleted.
- # **Note:** This option and delete_objects_from_source_after_transfer are
- # mutually exclusive.
+ # Whether objects that exist only in the sink should be deleted. **Note:** This
+ # option and delete_objects_from_source_after_transfer are mutually exclusive.
# Corresponds to the JSON property `deleteObjectsUniqueInSink`
# @return [Boolean]
attr_accessor :delete_objects_unique_in_sink
alias_method :delete_objects_unique_in_sink?, :delete_objects_unique_in_sink
@@ -1122,86 +1057,76 @@
# Configuration for running a transfer.
class TransferSpec
include Google::Apis::Core::Hashable
- # An AwsS3Data resource can be a data source, but not a data sink.
- # In an AwsS3Data resource, an object's name is the S3 object's key name.
+ # An AwsS3Data resource can be a data source, but not a data sink. In an
+ # AwsS3Data resource, an object's name is the S3 object's key name.
# Corresponds to the JSON property `awsS3DataSource`
# @return [Google::Apis::StoragetransferV1::AwsS3Data]
attr_accessor :aws_s3_data_source
- # An AzureBlobStorageData resource can be a data source, but not a data sink.
- # An AzureBlobStorageData resource represents one Azure container. The storage
- # account determines the [Azure
- # endpoint](https://docs.microsoft.com/en-us/azure/storage/common/storage-create-
- # storage-account#storage-account-endpoints).
- # In an AzureBlobStorageData resource, a blobs's name is the [Azure Blob
- # Storage blob's key
- # name](https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-
- # referencing-containers--blobs--and-metadata#blob-names).
+ # An AzureBlobStorageData resource can be a data source, but not a data sink. An
+ # AzureBlobStorageData resource represents one Azure container. The storage
+ # account determines the [Azure endpoint](https://docs.microsoft.com/en-us/azure/
+ # storage/common/storage-create-storage-account#storage-account-endpoints). In
+ # an AzureBlobStorageData resource, a blob's name is the [Azure Blob Storage
+ # blob's key name](https://docs.microsoft.com/en-us/rest/api/storageservices/
+ # naming-and-referencing-containers--blobs--and-metadata#blob-names).
# Corresponds to the JSON property `azureBlobStorageDataSource`
# @return [Google::Apis::StoragetransferV1::AzureBlobStorageData]
attr_accessor :azure_blob_storage_data_source
- # In a GcsData resource, an object's name is the Cloud Storage object's
- # name and its "last modification time" refers to the object's `updated`
- # property of Cloud Storage objects, which changes when the content or the
- # metadata of the object is updated.
+ # In a GcsData resource, an object's name is the Cloud Storage object's name and
+ # its "last modification time" refers to the object's `updated` property of
+ # Cloud Storage objects, which changes when the content or the metadata of the
+ # object is updated.
# Corresponds to the JSON property `gcsDataSink`
# @return [Google::Apis::StoragetransferV1::GcsData]
attr_accessor :gcs_data_sink
- # In a GcsData resource, an object's name is the Cloud Storage object's
- # name and its "last modification time" refers to the object's `updated`
- # property of Cloud Storage objects, which changes when the content or the
- # metadata of the object is updated.
+ # In a GcsData resource, an object's name is the Cloud Storage object's name and
+ # its "last modification time" refers to the object's `updated` property of
+ # Cloud Storage objects, which changes when the content or the metadata of the
+ # object is updated.
# Corresponds to the JSON property `gcsDataSource`
# @return [Google::Apis::StoragetransferV1::GcsData]
attr_accessor :gcs_data_source
# An HttpData resource specifies a list of objects on the web to be transferred
- # over HTTP. The information of the objects to be transferred is contained in
- # a file referenced by a URL. The first line in the file must be
- # `"TsvHttpData-1.0"`, which specifies the format of the file. Subsequent
- # lines specify the information of the list of objects, one object per list
- # entry. Each entry has the following tab-delimited fields:
- # * **HTTP URL** — The location of the object.
- # * **Length** — The size of the object in bytes.
- # * **MD5** — The base64-encoded MD5 hash of the object.
- # For an example of a valid TSV file, see
- # [Transferring data from
- # URLs](https://cloud.google.com/storage-transfer/docs/create-url-list).
- # When transferring data based on a URL list, keep the following in mind:
- # * When an object located at `http(s)://hostname:port/<URL-path>` is
- # transferred to a data sink, the name of the object at the data sink is
- # `<hostname>/<URL-path>`.
- # * If the specified size of an object does not match the actual size of the
- # object fetched, the object will not be transferred.
- # * If the specified MD5 does not match the MD5 computed from the transferred
- # bytes, the object transfer will fail. For more information, see
- # [Generating MD5
- # hashes](https://cloud.google.com/storage-transfer/docs/create-url-list#md5)
- # * Ensure that each URL you specify is publicly accessible. For
- # example, in Cloud Storage you can
- # [share an object publicly]
- # (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get
- # a link to it.
- # * Storage Transfer Service obeys `robots.txt` rules and requires the source
- # HTTP server to support `Range` requests and to return a `Content-Length`
- # header in each response.
- # * ObjectConditions have no effect when filtering objects to transfer.
+ # over HTTP. The information of the objects to be transferred is contained in a
+ # file referenced by a URL. The first line in the file must be `"TsvHttpData-1.0"
+ # `, which specifies the format of the file. Subsequent lines specify the
+ # information of the list of objects, one object per list entry. Each entry has
+ # the following tab-delimited fields: * **HTTP URL** — The location of the
+ # object. * **Length** — The size of the object in bytes. * **MD5** — The base64-
+ # encoded MD5 hash of the object. For an example of a valid TSV file, see [
+ # Transferring data from URLs](https://cloud.google.com/storage-transfer/docs/
+ # create-url-list). When transferring data based on a URL list, keep the
+ # following in mind: * When an object located at `http(s)://hostname:port/<URL-path>`
+ # is transferred to a data sink, the name of the object at the data sink is
+ # `<hostname>/<URL-path>`. *
+ # If the specified size of an object does not match the actual size of the
+ # object fetched, the object will not be transferred. * If the specified MD5
+ # does not match the MD5 computed from the transferred bytes, the object
+ # transfer will fail. For more information, see [Generating MD5 hashes](https://
+ # cloud.google.com/storage-transfer/docs/create-url-list#md5) * Ensure that each
+ # URL you specify is publicly accessible. For example, in Cloud Storage you can [
+ # share an object publicly](https://cloud.google.com/storage/docs/cloud-console#
+ # _sharingdata) and get a link to it. * Storage Transfer Service obeys `robots.
+ # txt` rules and requires the source HTTP server to support `Range` requests and
+ # to return a `Content-Length` header in each response. * ObjectConditions have
+ # no effect when filtering objects to transfer.
# Corresponds to the JSON property `httpDataSource`
# @return [Google::Apis::StoragetransferV1::HttpData]
attr_accessor :http_data_source
- # Conditions that determine which objects will be transferred. Applies only
- # to Cloud Data Sources such as S3, Azure, and Cloud Storage.
- # The "last modification time" refers to the time of the
- # last change to the object's content or metadata — specifically, this is
- # the `updated` property of Cloud Storage objects, the `LastModified` field of
- # S3 objects, and the `Last-Modified` header of Azure blobs.
+ # Conditions that determine which objects will be transferred. Applies only to
+ # Cloud Data Sources such as S3, Azure, and Cloud Storage. The "last
+ # modification time" refers to the time of the last change to the object's
+ # content or metadata — specifically, this is the `updated` property of Cloud
+ # Storage objects, the `LastModified` field of S3 objects, and the `Last-
+ # Modified` header of Azure blobs.
# Corresponds to the JSON property `objectConditions`
# @return [Google::Apis::StoragetransferV1::ObjectConditions]
attr_accessor :object_conditions
# TransferOptions define the actions to be performed on objects in a transfer.
@@ -1239,16 +1164,13 @@
# periodically.
# Corresponds to the JSON property `transferJob`
# @return [Google::Apis::StoragetransferV1::TransferJob]
attr_accessor :transfer_job
- # The field mask of the fields in `transferJob` that are to be updated in
- # this request. Fields in `transferJob` that can be updated are:
- # description,
- # transfer_spec,
- # notification_config, and
- # status. To update the `transfer_spec` of the job, a
- # complete transfer specification must be provided. An incomplete
+ # The field mask of the fields in `transferJob` that are to be updated in this
+ # request. Fields in `transferJob` that can be updated are: description,
+ # transfer_spec, notification_config, and status. To update the `transfer_spec`
+ # of the job, a complete transfer specification must be provided. An incomplete
# specification missing any required fields will be rejected with the error
# INVALID_ARGUMENT.
# Corresponds to the JSON property `updateTransferJobFieldMask`
# @return [String]
attr_accessor :update_transfer_job_field_mask