# frozen_string_literal: true

# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

require "google/cloud/errors"
require "google/cloud/bigquery/storage/v1/storage_pb"

module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          module BigQueryWrite
            ##
            # Client for the BigQueryWrite service.
            #
            # BigQuery Write API.
            #
            # The Write API can be used to write data to BigQuery.
            #
            # For supplementary information about the Write API, see:
            # https://cloud.google.com/bigquery/docs/write-api
            #
            class Client
              # @private
              API_VERSION = ""

              # @private
              DEFAULT_ENDPOINT_TEMPLATE = "bigquerystorage.$UNIVERSE_DOMAIN$"

              include Paths

              # @private
              attr_reader :big_query_write_stub

              ##
              # Configure the BigQueryWrite Client class.
              #
              # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
              # for a description of the configuration fields.
              #
              # @example
              #
              #   # Modify the configuration for all BigQueryWrite clients
              #   ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
              #     config.timeout = 10.0
              #   end
              #
              # @yield [config] Configure the BigQueryWrite client.
              # @yieldparam config [Client::Configuration]
              #
              # @return [Client::Configuration]
              #
              def self.configure
                @configure ||= begin
                  namespace = ["Google", "Cloud", "Bigquery", "Storage", "V1"]
                  parent_config = while namespace.any?
                                    parent_name = namespace.join "::"
                                    parent_const = const_get parent_name
                                    break parent_const.configure if parent_const.respond_to? :configure
                                    namespace.pop
                                  end
                  default_config = Client::Configuration.new parent_config

                  default_config.rpcs.create_write_stream.timeout = 1200.0
                  default_config.rpcs.create_write_stream.retry_policy = {
                    initial_delay: 10.0, max_delay: 120.0, multiplier: 1.3, retry_codes: [4, 14, 8]
                  }

                  default_config.rpcs.append_rows.timeout = 86_400.0
                  default_config.rpcs.append_rows.retry_policy = {
                    initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.get_write_stream.timeout = 600.0
                  default_config.rpcs.get_write_stream.retry_policy = {
                    initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14, 8]
                  }

                  default_config.rpcs.finalize_write_stream.timeout = 600.0
                  default_config.rpcs.finalize_write_stream.retry_policy = {
                    initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14, 8]
                  }

                  default_config.rpcs.batch_commit_write_streams.timeout = 600.0
                  default_config.rpcs.batch_commit_write_streams.retry_policy = {
                    initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14, 8]
                  }

                  default_config.rpcs.flush_rows.timeout = 600.0
                  default_config.rpcs.flush_rows.retry_policy = {
                    initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14, 8]
                  }

                  default_config
                end
                yield @configure if block_given?
                @configure
              end

              ##
              # Configure the BigQueryWrite Client instance.
              #
              # The configuration is set to the derived mode, meaning that values can be changed,
              # but structural changes (adding new fields, etc.) are not allowed. Structural changes
              # should be made on {Client.configure}.
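              #
              # For example (a hedged sketch, not generated documentation; the values
              # shown are illustrative, not defaults), a per-RPC retry policy can be
              # tightened on a single client instance:
              #
              #   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #   client.configure do |config|
              #     config.rpcs.append_rows.retry_policy = {
              #       initial_delay: 0.5, max_delay: 30.0, multiplier: 2.0, retry_codes: [14]
              #     }
              #   end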
              #
              # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
              # for a description of the configuration fields.
              #
              # @yield [config] Configure the BigQueryWrite client.
              # @yieldparam config [Client::Configuration]
              #
              # @return [Client::Configuration]
              #
              def configure
                yield @config if block_given?
                @config
              end

              ##
              # The effective universe domain
              #
              # @return [String]
              #
              def universe_domain
                @big_query_write_stub.universe_domain
              end

              ##
              # Create a new BigQueryWrite client object.
              #
              # @example
              #
              #   # Create a client using the default configuration
              #   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a client using a custom configuration
              #   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
              #     config.timeout = 10.0
              #   end
              #
              # @yield [config] Configure the BigQueryWrite client.
              # @yieldparam config [Client::Configuration]
              #
              def initialize
                # These require statements are intentionally placed here to initialize
                # the gRPC module only when it's required.
                # See https://github.com/googleapis/toolkit/issues/446
                require "gapic/grpc"
                require "google/cloud/bigquery/storage/v1/storage_services_pb"

                # Create the configuration object
                @config = Configuration.new Client.configure

                # Yield the configuration if needed
                yield @config if block_given?

                # Create credentials
                credentials = @config.credentials
                # Use self-signed JWT if the endpoint is unchanged from default,
                # but only if the default endpoint does not have a region prefix.
                enable_self_signed_jwt = @config.endpoint.nil? ||
                                         (@config.endpoint == Configuration::DEFAULT_ENDPOINT &&
                                          !@config.endpoint.split(".").first.include?("-"))
                credentials ||= Credentials.default scope: @config.scope,
                                                    enable_self_signed_jwt: enable_self_signed_jwt
                if credentials.is_a?(::String) || credentials.is_a?(::Hash)
                  credentials = Credentials.new credentials, scope: @config.scope
                end
                @quota_project_id = @config.quota_project
                @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id

                @big_query_write_stub = ::Gapic::ServiceStub.new(
                  ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Stub,
                  credentials: credentials,
                  endpoint: @config.endpoint,
                  endpoint_template: DEFAULT_ENDPOINT_TEMPLATE,
                  universe_domain: @config.universe_domain,
                  channel_args: @config.channel_args,
                  interceptors: @config.interceptors,
                  channel_pool_config: @config.channel_pool,
                  logger: @config.logger
                )

                @big_query_write_stub.stub_logger&.info do |entry|
                  entry.set_system_name
                  entry.set_service
                  entry.message = "Created client for #{entry.service}"
                  entry.set_credentials_fields credentials
                  entry.set "customEndpoint", @config.endpoint if @config.endpoint
                  entry.set "defaultTimeout", @config.timeout if @config.timeout
                  entry.set "quotaProject", @quota_project_id if @quota_project_id
                end
              end

              ##
              # The logger used for request/response debug logging.
              #
              # @return [Logger]
              #
              def logger
                @big_query_write_stub.logger
              end

              # Service calls

              ##
              # Creates a write stream to the given table.
              # Additionally, every table has a special stream named '_default'
              # to which data can be written. This stream doesn't need to be created using
              # CreateWriteStream. It is a stream that can be used simultaneously by any
              # number of clients. Data written to this stream is considered committed as
              # soon as an acknowledgement is received.
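              #
              # As a hedged illustration (not generated documentation): the default
              # stream's resource name follows the write stream format with the literal
              # id `_default`, so, assuming the generated path helper from the included
              # Paths module, it can be built without calling this RPC. The project,
              # dataset, and table ids below are hypothetical placeholders:
              #
              #   name = client.write_stream_path project: "my-project",
              #                                   dataset: "my_dataset",
              #                                   table: "my_table",
              #                                   stream: "_default"
              #   # => "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"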
              #
              # @overload create_write_stream(request, options = nil)
              #   Pass arguments to `create_write_stream` via a request object, either of type
              #   {::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload create_write_stream(parent: nil, write_stream: nil)
              #   Pass arguments to `create_write_stream` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. Reference to the table to which the stream belongs, in the format
              #     of `projects/{project}/datasets/{dataset}/tables/{table}`.
              #   @param write_stream [::Google::Cloud::Bigquery::Storage::V1::WriteStream, ::Hash]
              #     Required. Stream to be created.
              #
              # @yield [response, operation] Access the result along with the RPC operation
              # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
              # @yieldparam operation [::GRPC::ActiveCall::Operation]
              #
              # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
              #
              # @raise [::Google::Cloud::Error] if the RPC is aborted.
              #
              # @example Basic example
              #   require "google/cloud/bigquery/storage/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest.new
              #
              #   # Call the create_write_stream method.
              #   result = client.create_write_stream request
              #
              #   # The returned object is of type Google::Cloud::Bigquery::Storage::V1::WriteStream.
              #   p result
              #
              def create_write_stream request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.create_write_stream.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
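
                # The quota project is sent via the x-goog-user-project header below;
                # the x-goog-request-params header carries the `parent` table so the
                # backend can route the request by resource.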
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.parent header_params["parent"] = request.parent end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.create_write_stream.timeout, metadata: metadata, retry_policy: @config.rpcs.create_write_stream.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @big_query_write_stub.call_rpc :create_write_stream, request, options: options do |response, operation| yield response, operation if block_given? end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Appends data to the given stream. # # If `offset` is specified, the `offset` is checked against the end of # stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an # attempt is made to append to an offset beyond the current end of the stream # or `ALREADY_EXISTS` if user provides an `offset` that has already been # written to. User can retry with adjusted offset within the same RPC # connection. If `offset` is not specified, append happens at the end of the # stream. # # The response contains an optional offset at which the append # happened. No offset information will be returned for appends to a # default stream. # # Responses are received in the same order in which requests are sent. # There will be one response for each successful inserted request. Responses # may optionally embed error information if the originating AppendRequest was # not successfully processed. # # The specifics of when successfully appended data is made visible to the # table are governed by the type of stream: # # * For COMMITTED streams (which includes the default stream), data is # visible immediately upon successful append. # # * For BUFFERED streams, data is made visible via a subsequent `FlushRows` # rpc which advances a cursor to a newer offset in the stream. # # * For PENDING streams, data is not made visible until the stream itself is # finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly # committed via the `BatchCommitWriteStreams` rpc. # # @param request [::Gapic::StreamInput, ::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest, ::Hash>] # An enumerable of {::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest} instances. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # # @example Basic example # require "google/cloud/bigquery/storage/v1" # # # Create a client object. The client can be reused for multiple calls. # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new # # # Create an input stream. # input = Gapic::StreamInput.new # # # Call the append_rows method to start streaming. # output = client.append_rows input # # # Send requests on the stream. For each request object, set fields by # # passing keyword arguments. Be sure to close the stream when done. 
              #   input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
              #   input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
              #   input.close
              #
              #   # The returned object is a streamed enumerable yielding elements of type
              #   # ::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse
              #   output.each do |current_response|
              #     p current_response
              #   end
              #
              def append_rows request, options = nil
                unless request.is_a? ::Enumerable
                  raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum
                  request = request.to_enum
                end

                request = request.lazy.map do |req|
                  ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest
                end

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.append_rows.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?

                metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout:      @config.rpcs.append_rows.timeout,
                                       metadata:     metadata,
                                       retry_policy: @config.rpcs.append_rows.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @big_query_write_stub.call_rpc :append_rows, request, options: options do |response, operation|
                  yield response, operation if block_given?
                end
              rescue ::GRPC::BadStatus => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Gets information about a write stream.
              #
              # @overload get_write_stream(request, options = nil)
              #   Pass arguments to `get_write_stream` via a request object, either of type
              #   {::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload get_write_stream(name: nil, view: nil)
              #   Pass arguments to `get_write_stream` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. Name of the stream to get, in the form of
              #     `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
              #   @param view [::Google::Cloud::Bigquery::Storage::V1::WriteStreamView]
              #     Indicates whether to get a full or partial view of the WriteStream. If
              #     not set, the returned view will be basic.
              #
              # @yield [response, operation] Access the result along with the RPC operation
              # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
              # @yieldparam operation [::GRPC::ActiveCall::Operation]
              #
              # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
              #
              # @raise [::Google::Cloud::Error] if the RPC is aborted.
              #
              # @example Basic example
              #   require "google/cloud/bigquery/storage/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest.new
              #
              #   # Call the get_write_stream method.
              #   result = client.get_write_stream request
              #
              #   # The returned object is of type Google::Cloud::Bigquery::Storage::V1::WriteStream.
              #   p result
              #
              def get_write_stream request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.get_write_stream.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?

                metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                header_params = {}
                if request.name
                  header_params["name"] = request.name
                end

                request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
                metadata[:"x-goog-request-params"] ||= request_params_header

                options.apply_defaults timeout:      @config.rpcs.get_write_stream.timeout,
                                       metadata:     metadata,
                                       retry_policy: @config.rpcs.get_write_stream.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @big_query_write_stub.call_rpc :get_write_stream, request, options: options do |response, operation|
                  yield response, operation if block_given?
                end
              rescue ::GRPC::BadStatus => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Finalize a write stream so that no new data can be appended to the
              # stream. Finalize is not supported on the '_default' stream.
              #
              # @overload finalize_write_stream(request, options = nil)
              #   Pass arguments to `finalize_write_stream` via a request object, either of type
              #   {::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload finalize_write_stream(name: nil)
              #   Pass arguments to `finalize_write_stream` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. Name of the stream to finalize, in the form of
              #     `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
              #
              # @yield [response, operation] Access the result along with the RPC operation
              # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
              # @yieldparam operation [::GRPC::ActiveCall::Operation]
              #
              # @return [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
              #
              # @raise [::Google::Cloud::Error] if the RPC is aborted.
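              #
              # A hedged sketch (not generated documentation) of where finalize fits in
              # the PENDING-stream workflow; `stream_name` and `table_path` are
              # hypothetical placeholders:
              #
              #   # After all appends on a PENDING stream have been acknowledged:
              #   client.finalize_write_stream name: stream_name
              #   # The finalized stream can then be committed atomically with others.
              #   client.batch_commit_write_streams parent: table_path,
              #                                     write_streams: [stream_name]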
              #
              # @example Basic example
              #   require "google/cloud/bigquery/storage/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest.new
              #
              #   # Call the finalize_write_stream method.
              #   result = client.finalize_write_stream request
              #
              #   # The returned object is of type Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse.
              #   p result
              #
              def finalize_write_stream request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.finalize_write_stream.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?

                metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                header_params = {}
                if request.name
                  header_params["name"] = request.name
                end

                request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
                metadata[:"x-goog-request-params"] ||= request_params_header

                options.apply_defaults timeout:      @config.rpcs.finalize_write_stream.timeout,
                                       metadata:     metadata,
                                       retry_policy: @config.rpcs.finalize_write_stream.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @big_query_write_stub.call_rpc :finalize_write_stream, request, options: options do |response, operation|
                  yield response, operation if block_given?
                end
              rescue ::GRPC::BadStatus => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Atomically commits a group of `PENDING` streams that belong to the same
              # `parent` table.
              #
              # Streams must be finalized before commit and cannot be committed multiple
              # times. Once a stream is committed, data in the stream becomes available
              # for read operations.
              #
              # @overload batch_commit_write_streams(request, options = nil)
              #   Pass arguments to `batch_commit_write_streams` via a request object, either of type
              #   {::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload batch_commit_write_streams(parent: nil, write_streams: nil)
              #   Pass arguments to `batch_commit_write_streams` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required.
              #     Parent table that all the streams should belong to, in the form
              #     of `projects/{project}/datasets/{dataset}/tables/{table}`.
              #   @param write_streams [::Array<::String>]
              #     Required. The group of streams that will be committed atomically.
              #
              # @yield [response, operation] Access the result along with the RPC operation
              # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
              # @yieldparam operation [::GRPC::ActiveCall::Operation]
              #
              # @return [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
              #
              # @raise [::Google::Cloud::Error] if the RPC is aborted.
              #
              # @example Basic example
              #   require "google/cloud/bigquery/storage/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest.new
              #
              #   # Call the batch_commit_write_streams method.
              #   result = client.batch_commit_write_streams request
              #
              #   # The returned object is of type Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse.
              #   p result
              #
              def batch_commit_write_streams request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.batch_commit_write_streams.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?

                metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                header_params = {}
                if request.parent
                  header_params["parent"] = request.parent
                end

                request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
                metadata[:"x-goog-request-params"] ||= request_params_header

                options.apply_defaults timeout:      @config.rpcs.batch_commit_write_streams.timeout,
                                       metadata:     metadata,
                                       retry_policy: @config.rpcs.batch_commit_write_streams.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @big_query_write_stub.call_rpc :batch_commit_write_streams, request, options: options do |response, operation|
                  yield response, operation if block_given?
                end
              rescue ::GRPC::BadStatus => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Flushes rows to a BUFFERED stream.
              #
              # If users are appending rows to a BUFFERED stream, a flush operation is
              # required in order for the rows to become available for reading. A flush
              # operation advances the flush cursor from any previously flushed offset in
              # a BUFFERED stream to the offset specified in the request.
              #
              # Flush is not supported on the _default stream, since it is not BUFFERED.
              #
              # @overload flush_rows(request, options = nil)
              #   Pass arguments to `flush_rows` via a request object, either of type
              #   {::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload flush_rows(write_stream: nil, offset: nil)
              #   Pass arguments to `flush_rows` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param write_stream [::String]
              #     Required. The stream that is the target of the flush operation.
              #   @param offset [::Google::Protobuf::Int64Value, ::Hash]
              #     Ending offset of the flush operation. Rows before this offset (including
              #     this offset) will be flushed.
              #
              # @yield [response, operation] Access the result along with the RPC operation
              # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
              # @yieldparam operation [::GRPC::ActiveCall::Operation]
              #
              # @return [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
              #
              # @raise [::Google::Cloud::Error] if the RPC is aborted.
              #
              # @example Basic example
              #   require "google/cloud/bigquery/storage/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest.new
              #
              #   # Call the flush_rows method.
              #   result = client.flush_rows request
              #
              #   # The returned object is of type Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse.
              #   p result
              #
              def flush_rows request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                metadata = @config.rpcs.flush_rows.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
                metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?

                metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                header_params = {}
                if request.write_stream
                  header_params["write_stream"] = request.write_stream
                end

                request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
                metadata[:"x-goog-request-params"] ||= request_params_header

                options.apply_defaults timeout:      @config.rpcs.flush_rows.timeout,
                                       metadata:     metadata,
                                       retry_policy: @config.rpcs.flush_rows.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @big_query_write_stub.call_rpc :flush_rows, request, options: options do |response, operation|
                  yield response, operation if block_given?
                end
              rescue ::GRPC::BadStatus => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Configuration class for the BigQueryWrite API.
              #
              # This class represents the configuration for BigQueryWrite,
              # providing control over timeouts, retry behavior, logging, transport
              # parameters, and other low-level controls. Certain parameters can also be
              # applied individually to specific RPCs. See
              # {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration::Rpcs}
              # for a list of RPCs that can be configured independently.
              #
              # Configuration can be applied globally to all clients, or to a single client
              # on construction.
              #
              # @example
              #
              #   # Modify the global config, setting the timeout for
              #   # create_write_stream to 20 seconds,
              #   # and all remaining timeouts to 10 seconds.
              #   ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
              #     config.timeout = 10.0
              #     config.rpcs.create_write_stream.timeout = 20.0
              #   end
              #
              #   # Apply the above configuration only to a new client.
              #   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
              #     config.timeout = 10.0
              #     config.rpcs.create_write_stream.timeout = 20.0
              #   end
              #
              # @!attribute [rw] endpoint
              #   A custom service endpoint, as a hostname or hostname:port. The default is
              #   nil, indicating to use the default endpoint in the current universe domain.
              #   @return [::String,nil]
              # @!attribute [rw] credentials
              #   Credentials to send with calls. You may provide any of the following types:
              #   * (`String`) The path to a service account key file in JSON format
              #   * (`Hash`) A service account key as a Hash
              #   * (`Google::Auth::Credentials`) A googleauth credentials object
              #     (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
              #   * (`Signet::OAuth2::Client`) A signet oauth2 client object
              #     (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
              #   * (`GRPC::Core::Channel`) a gRPC channel with included credentials
              #   * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
              #   * (`nil`) indicating no credentials
              #   @return [::Object]
              # @!attribute [rw] scope
              #   The OAuth scopes
              #   @return [::Array<::String>]
              # @!attribute [rw] lib_name
              #   The library name as recorded in instrumentation and logging
              #   @return [::String]
              # @!attribute [rw] lib_version
              #   The library version as recorded in instrumentation and logging
              #   @return [::String]
              # @!attribute [rw] channel_args
              #   Extra parameters passed to the gRPC channel. Note: this is ignored if a
              #   `GRPC::Core::Channel` object is provided as the credential.
              #   @return [::Hash]
              # @!attribute [rw] interceptors
              #   An array of interceptors that are run before calls are executed.
              #   @return [::Array<::GRPC::ClientInterceptor>]
              # @!attribute [rw] timeout
              #   The call timeout in seconds.
              #   @return [::Numeric]
              # @!attribute [rw] metadata
              #   Additional gRPC headers to be sent with the call.
              #   @return [::Hash{::Symbol=>::String}]
              # @!attribute [rw] retry_policy
              #   The retry policy. The value is a hash with the following keys:
              #   * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
              #   * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
              #   * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
              #   * `:retry_codes` (*type:* `Array`) - The error codes that should
              #     trigger a retry.
              #   @return [::Hash]
              # @!attribute [rw] quota_project
              #   A separate project against which to charge quota.
              #   @return [::String]
              # @!attribute [rw] universe_domain
              #   The universe domain within which to make requests. This determines the
              #   default endpoint URL.
              #   The default value of nil uses the environment
              #   universe (usually the default "googleapis.com" universe).
              #   @return [::String,nil]
              # @!attribute [rw] logger
              #   A custom logger to use for request/response debug logging, or the value
              #   `:default` (the default) to construct a default logger, or `nil` to
              #   explicitly disable logging.
              #   @return [::Logger,:default,nil]
              #
              class Configuration
                extend ::Gapic::Config

                # @private
                # The endpoint specific to the default "googleapis.com" universe. Deprecated.
                DEFAULT_ENDPOINT = "bigquerystorage.googleapis.com"

                config_attr :endpoint, nil, ::String, nil
                config_attr :credentials, nil do |value|
                  allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
                  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
                  allowed.any? { |klass| klass === value }
                end
                config_attr :scope, nil, ::String, ::Array, nil
                config_attr :lib_name, nil, ::String, nil
                config_attr :lib_version, nil, ::String, nil
                config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
                config_attr :interceptors, nil, ::Array, nil
                config_attr :timeout, nil, ::Numeric, nil
                config_attr :metadata, nil, ::Hash, nil
                config_attr :retry_policy, nil, ::Hash, ::Proc, nil
                config_attr :quota_project, nil, ::String, nil
                config_attr :universe_domain, nil, ::String, nil
                config_attr :logger, :default, ::Logger, nil, :default

                # @private
                def initialize parent_config = nil
                  @parent_config = parent_config unless parent_config.nil?

                  yield self if block_given?
                end

                ##
                # Configurations for individual RPCs
                # @return [Rpcs]
                #
                def rpcs
                  @rpcs ||= begin
                    parent_rpcs = nil
                    parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
                    Rpcs.new parent_rpcs
                  end
                end

                ##
                # Configuration for the channel pool
                # @return [::Gapic::ServiceStub::ChannelPool::Configuration]
                #
                def channel_pool
                  @channel_pool ||= ::Gapic::ServiceStub::ChannelPool::Configuration.new
                end

                ##
                # Configuration RPC class for the BigQueryWrite API.
                #
                # Includes fields providing the configuration for each RPC in this service.
                # Each configuration object is of type `Gapic::Config::Method` and includes
                # the following configuration fields:
                #
                # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
                # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
                # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
                #   include the following keys:
                #   * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
                #   * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
                #   * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
                #   * `:retry_codes` (*type:* `Array`) - The error codes that should
                #     trigger a retry.
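                #
                # As an illustrative sketch (the values here are hypothetical, not
                # defaults): with `initial_delay: 1.0` and `multiplier: 2.0`, retry
                # delays grow roughly as 1s, 2s, 4s, ... until capped at `max_delay`:
                #
                #   rpcs.create_write_stream.retry_policy = {
                #     initial_delay: 1.0, max_delay: 8.0, multiplier: 2.0, retry_codes: [14]
                #   }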
                #
                class Rpcs
                  ##
                  # RPC-specific configuration for `create_write_stream`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :create_write_stream
                  ##
                  # RPC-specific configuration for `append_rows`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :append_rows
                  ##
                  # RPC-specific configuration for `get_write_stream`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :get_write_stream
                  ##
                  # RPC-specific configuration for `finalize_write_stream`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :finalize_write_stream
                  ##
                  # RPC-specific configuration for `batch_commit_write_streams`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :batch_commit_write_streams
                  ##
                  # RPC-specific configuration for `flush_rows`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :flush_rows

                  # @private
                  def initialize parent_rpcs = nil
                    create_write_stream_config = parent_rpcs.create_write_stream if parent_rpcs.respond_to? :create_write_stream
                    @create_write_stream = ::Gapic::Config::Method.new create_write_stream_config
                    append_rows_config = parent_rpcs.append_rows if parent_rpcs.respond_to? :append_rows
                    @append_rows = ::Gapic::Config::Method.new append_rows_config
                    get_write_stream_config = parent_rpcs.get_write_stream if parent_rpcs.respond_to? :get_write_stream
                    @get_write_stream = ::Gapic::Config::Method.new get_write_stream_config
                    finalize_write_stream_config = parent_rpcs.finalize_write_stream if parent_rpcs.respond_to? :finalize_write_stream
                    @finalize_write_stream = ::Gapic::Config::Method.new finalize_write_stream_config
                    batch_commit_write_streams_config = parent_rpcs.batch_commit_write_streams if parent_rpcs.respond_to? :batch_commit_write_streams
                    @batch_commit_write_streams = ::Gapic::Config::Method.new batch_commit_write_streams_config
                    flush_rows_config = parent_rpcs.flush_rows if parent_rpcs.respond_to? :flush_rows
                    @flush_rows = ::Gapic::Config::Method.new flush_rows_config

                    yield self if block_given?
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end
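
# A hedged end-to-end sketch (not part of the generated surface) of the
# PENDING-stream workflow documented above. The project, dataset, and table
# ids are placeholders, and the AppendRowsRequest payload is omitted because
# building `proto_rows` requires a schema-specific protobuf message, which is
# out of scope here.
#
#   require "google/cloud/bigquery/storage/v1"
#
#   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
#   parent = client.table_path project: "my-project", dataset: "my_dataset", table: "my_table"
#
#   # 1. Create a PENDING stream: rows stay invisible until commit.
#   stream = client.create_write_stream parent: parent,
#                                       write_stream: { type: :PENDING }
#
#   # 2. Append rows on the bidirectional stream (payload omitted).
#   input = Gapic::StreamInput.new
#   responses = client.append_rows input
#   input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(write_stream: stream.name)
#   input.close
#   responses.each { |r| p r }
#
#   # 3. Finalize, then commit atomically to make the data visible.
#   client.finalize_write_stream name: stream.name
#   client.batch_commit_write_streams parent: parent, write_streams: [stream.name]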