# frozen_string_literal: true

# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

require "google/cloud/errors"
require "google/cloud/managedkafka/v1/managed_kafka_pb"
require "google/cloud/managed_kafka/v1/managed_kafka/rest/service_stub"
require "google/cloud/location/rest"

module Google
  module Cloud
    module ManagedKafka
      module V1
        module ManagedKafka
          module Rest
            ##
            # REST client for the ManagedKafka service.
            #
            # The service that a client application uses to manage Apache Kafka clusters,
            # topics and consumer groups.
            #
            class Client
              # @private
              API_VERSION = ""

              # @private
              DEFAULT_ENDPOINT_TEMPLATE = "managedkafka.$UNIVERSE_DOMAIN$"

              include Paths

              # @private
              attr_reader :managed_kafka_stub

              ##
              # Configure the ManagedKafka Client class.
              #
              # See {::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client::Configuration}
              # for a description of the configuration fields.
              #
              # @example
              #
              #   # Modify the configuration for all ManagedKafka clients
              #   ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.configure do |config|
              #     config.timeout = 10.0
              #   end
              #
              # @yield [config] Configure the Client client.
              # @yieldparam config [Client::Configuration]
              #
              # @return [Client::Configuration]
              #
              def self.configure
                @configure ||= begin
                  namespace = ["Google", "Cloud", "ManagedKafka", "V1"]
                  parent_config = while namespace.any?
                                    parent_name = namespace.join "::"
                                    parent_const = const_get parent_name
                                    break parent_const.configure if parent_const.respond_to? :configure
                                    namespace.pop
                                  end
                  default_config = Client::Configuration.new parent_config

                  default_config.rpcs.list_clusters.timeout = 60.0
                  default_config.rpcs.list_clusters.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.get_cluster.timeout = 60.0
                  default_config.rpcs.get_cluster.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.create_cluster.timeout = 60.0

                  default_config.rpcs.update_cluster.timeout = 60.0

                  default_config.rpcs.delete_cluster.timeout = 60.0

                  default_config.rpcs.list_topics.timeout = 60.0
                  default_config.rpcs.list_topics.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.get_topic.timeout = 60.0
                  default_config.rpcs.get_topic.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.create_topic.timeout = 60.0

                  default_config.rpcs.update_topic.timeout = 60.0

                  default_config.rpcs.delete_topic.timeout = 60.0

                  default_config.rpcs.list_consumer_groups.timeout = 60.0
                  default_config.rpcs.list_consumer_groups.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.get_consumer_group.timeout = 60.0
                  default_config.rpcs.get_consumer_group.retry_policy = {
                    initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                  }

                  default_config.rpcs.update_consumer_group.timeout = 60.0

                  default_config.rpcs.delete_consumer_group.timeout = 60.0

                  default_config
                end
                yield @configure if block_given?
                @configure
              end

              ##
              # Configure the ManagedKafka Client instance.
              #
              # The configuration is set to the derived mode, meaning that values can be changed,
              # but structural changes (adding new fields, etc.) are not allowed. Structural changes
              # should be made on {Client.configure}.
              #
              # See {::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client::Configuration}
              # for a description of the configuration fields.
              #
              # @yield [config] Configure the Client client.
              # @yieldparam config [Client::Configuration]
              #
              # @return [Client::Configuration]
              #
              def configure
                yield @config if block_given?
                @config
              end

              ##
              # The effective universe domain
              #
              # @return [String]
              #
              def universe_domain
                @managed_kafka_stub.universe_domain
              end
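              # Illustrative sketch (not part of the generated surface): a per-client override
              # of one RPC's timeout and retry policy, assuming only the Configuration fields
              # documented below (`timeout`, `rpcs`, and the `retry_policy` hash keys).
              #
              #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new do |config|
              #     config.timeout = 30.0
              #     config.rpcs.list_clusters.timeout = 20.0
              #     config.rpcs.list_clusters.retry_policy = {
              #       initial_delay: 0.5, max_delay: 5.0, multiplier: 2.0, retry_codes: [14]
              #     }
              #   end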
              ##
              # Create a new ManagedKafka REST client object.
              #
              # @example
              #
              #   # Create a client using the default configuration
              #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a client using a custom configuration
              #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new do |config|
              #     config.timeout = 10.0
              #   end
              #
              # @yield [config] Configure the ManagedKafka client.
              # @yieldparam config [Client::Configuration]
              #
              def initialize
                # Create the configuration object
                @config = Configuration.new Client.configure

                # Yield the configuration if needed
                yield @config if block_given?

                # Create credentials
                credentials = @config.credentials
                # Use self-signed JWT if the endpoint is unchanged from default,
                # but only if the default endpoint does not have a region prefix.
                enable_self_signed_jwt = @config.endpoint.nil? ||
                                         (@config.endpoint == Configuration::DEFAULT_ENDPOINT &&
                                          !@config.endpoint.split(".").first.include?("-"))
                credentials ||= Credentials.default scope: @config.scope,
                                                    enable_self_signed_jwt: enable_self_signed_jwt
                if credentials.is_a?(::String) || credentials.is_a?(::Hash)
                  credentials = Credentials.new credentials, scope: @config.scope
                end

                @quota_project_id = @config.quota_project
                @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id

                @operations_client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Operations.new do |config|
                  config.credentials = credentials
                  config.quota_project = @quota_project_id
                  config.endpoint = @config.endpoint
                  config.universe_domain = @config.universe_domain
                end

                @managed_kafka_stub = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::ServiceStub.new(
                  endpoint: @config.endpoint,
                  endpoint_template: DEFAULT_ENDPOINT_TEMPLATE,
                  universe_domain: @config.universe_domain,
                  credentials: credentials
                )

                @location_client = Google::Cloud::Location::Locations::Rest::Client.new do |config|
                  config.credentials = credentials
                  config.quota_project = @quota_project_id
                  config.endpoint = @managed_kafka_stub.endpoint
                  config.universe_domain = @managed_kafka_stub.universe_domain
                  config.bindings_override = @config.bindings_override
                end
              end

              ##
              # Get the associated client for long-running operations.
              #
              # @return [::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Operations]
              #
              attr_reader :operations_client

              ##
              # Get the associated client for mix-in of the Locations.
              #
              # @return [Google::Cloud::Location::Locations::Rest::Client]
              #
              attr_reader :location_client

              # Service calls
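              # Illustrative sketch (not part of the generated surface): each RPC below also
              # accepts keyword arguments instead of a request object, and the Locations
              # mix-in is reachable through `location_client`. The resource names here are
              # hypothetical example values.
              #
              #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Keyword-argument form of list_clusters (see its @overload documentation).
              #   results = client.list_clusters parent: "projects/my-project/locations/us-central1"
              #
              #   # The Locations mix-in client, e.g. to enumerate available locations.
              #   client.location_client.list_locations name: "projects/my-project"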
              ##
              # Lists the clusters in a given project and location.
              #
              # @overload list_clusters(request, options = nil)
              #   Pass arguments to `list_clusters` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::ListClustersRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::ListClustersRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload list_clusters(parent: nil, page_size: nil, page_token: nil, filter: nil, order_by: nil)
              #   Pass arguments to `list_clusters` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. The parent location whose clusters are to be listed. Structured
              #     like `projects/{project}/locations/{location}`.
              #   @param page_size [::Integer]
              #     Optional. The maximum number of clusters to return. The service may return
              #     fewer than this value. If unspecified, server will pick an appropriate
              #     default.
              #   @param page_token [::String]
              #     Optional. A page token, received from a previous `ListClusters` call.
              #     Provide this to retrieve the subsequent page.
              #
              #     When paginating, all other parameters provided to `ListClusters` must match
              #     the call that provided the page token.
              #   @param filter [::String]
              #     Optional. Filter expression for the result.
              #   @param order_by [::String]
              #     Optional. Order by fields for the result.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::Cluster>]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::Cluster>]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::ListClustersRequest.new
              #
              #   # Call the list_clusters method.
              #   result = client.list_clusters request
              #
              #   # The returned object is of type Gapic::PagedEnumerable. You can iterate
              #   # over elements, and API calls will be issued to fetch pages as needed.
              #   result.each do |item|
              #     # Each element is of type ::Google::Cloud::ManagedKafka::V1::Cluster.
              #     p item
              #   end
              #
              def list_clusters request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::ListClustersRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.list_clusters.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.list_clusters.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.list_clusters.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.list_clusters request, options do |result, operation|
                  result = ::Gapic::Rest::PagedEnumerable.new @managed_kafka_stub, :list_clusters, "clusters", request, result, options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end
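              # Illustrative sketch (not part of the generated surface): page-by-page
              # iteration over the paged result, assuming the page enumeration helpers of
              # Gapic::Rest::PagedEnumerable (e.g. `each_page`); the parent is a hypothetical
              # example value.
              #
              #   client.list_clusters(parent: "projects/my-project/locations/us-central1", page_size: 10).each_page do |page|
              #     page.each { |cluster| p cluster.name }
              #   end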
              ##
              # Returns the properties of a single cluster.
              #
              # @overload get_cluster(request, options = nil)
              #   Pass arguments to `get_cluster` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::GetClusterRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::GetClusterRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload get_cluster(name: nil)
              #   Pass arguments to `get_cluster` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the cluster whose configuration to return.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::Cluster]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::Cluster]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::GetClusterRequest.new
              #
              #   # Call the get_cluster method.
              #   result = client.get_cluster request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::Cluster.
              #   p result
              #
              def get_cluster request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::GetClusterRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.get_cluster.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.get_cluster.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.get_cluster.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.get_cluster request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Creates a new cluster in a given project and location.
              #
              # @overload create_cluster(request, options = nil)
              #   Pass arguments to `create_cluster` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::CreateClusterRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::CreateClusterRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload create_cluster(parent: nil, cluster_id: nil, cluster: nil, request_id: nil)
              #   Pass arguments to `create_cluster` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. The parent region in which to create the cluster. Structured like
              #     `projects/{project}/locations/{location}`.
              #   @param cluster_id [::String]
              #     Required. The ID to use for the cluster, which will become the final
              #     component of the cluster's name.
              #     The ID must be 1-63 characters long, and
              #     match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` to comply with
              #     RFC 1035.
              #
              #     This value is structured like: `my-cluster-id`.
              #   @param cluster [::Google::Cloud::ManagedKafka::V1::Cluster, ::Hash]
              #     Required. Configuration of the cluster to create. Its `name` field is
              #     ignored.
              #   @param request_id [::String]
              #     Optional. An optional request ID to identify requests. Specify a unique
              #     request ID to avoid duplication of requests. If a request times out or
              #     fails, retrying with the same ID allows the server to recognize the
              #     previous attempt. For at least 60 minutes, the server ignores duplicate
              #     requests bearing the same ID.
              #
              #     For example, consider a situation where you make an initial request and the
              #     request times out. If you make the request again with the same request ID
              #     within 60 minutes of the last request, the server checks if an original
              #     operation with the same request ID was received. If so, the server ignores
              #     the second request.
              #
              #     The request ID must be a valid UUID. A zero UUID is not supported
              #     (00000000-0000-0000-0000-000000000000).
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Operation]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Operation]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::CreateClusterRequest.new
              #
              #   # Call the create_cluster method.
              #   result = client.create_cluster request
              #
              #   # The returned object is of type Gapic::Operation. You can use it to
              #   # check the status of an operation, cancel it, or wait for results.
              #   # Here is how to wait for a response.
              #   result.wait_until_done! timeout: 60
              #   if result.response?
              #     p result.response
              #   else
              #     puts "No response received."
              #   end
              #
              def create_cluster request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::CreateClusterRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.create_cluster.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.create_cluster.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.create_cluster.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.create_cluster request, options do |result, operation|
                  result = ::Gapic::Operation.new result, @operations_client, options: options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end
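              # Illustrative sketch (not part of the generated surface): checking a
              # long-running operation for an error instead of only waiting on a response,
              # using the Gapic::Operation accessors shown in the examples above.
              #
              #   operation = client.create_cluster request
              #   operation.wait_until_done! timeout: 300
              #   if operation.error?
              #     # operation.error describes the failure.
              #     warn "create_cluster failed: #{operation.error.message}"
              #   else
              #     p operation.response
              #   end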
              ##
              # Updates the properties of a single cluster.
              #
              # @overload update_cluster(request, options = nil)
              #   Pass arguments to `update_cluster` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::UpdateClusterRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::UpdateClusterRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload update_cluster(update_mask: nil, cluster: nil, request_id: nil)
              #   Pass arguments to `update_cluster` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
              #     Required. Field mask is used to specify the fields to be overwritten in the
              #     cluster resource by the update. The fields specified in the update_mask are
              #     relative to the resource, not the full request. A field will be overwritten
              #     if it is in the mask. The mask is required and a value of * will update all
              #     fields.
              #   @param cluster [::Google::Cloud::ManagedKafka::V1::Cluster, ::Hash]
              #     Required. The cluster to update. Its `name` field must be populated.
              #   @param request_id [::String]
              #     Optional. An optional request ID to identify requests. Specify a unique
              #     request ID to avoid duplication of requests. If a request times out or
              #     fails, retrying with the same ID allows the server to recognize the
              #     previous attempt. For at least 60 minutes, the server ignores duplicate
              #     requests bearing the same ID.
              #
              #     For example, consider a situation where you make an initial request and the
              #     request times out. If you make the request again with the same request ID
              #     within 60 minutes of the last request, the server checks if an original
              #     operation with the same request ID was received. If so, the server ignores
              #     the second request.
              #
              #     The request ID must be a valid UUID. A zero UUID is not supported
              #     (00000000-0000-0000-0000-000000000000).
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Operation]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Operation]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::UpdateClusterRequest.new
              #
              #   # Call the update_cluster method.
              #   result = client.update_cluster request
              #
              #   # The returned object is of type Gapic::Operation. You can use it to
              #   # check the status of an operation, cancel it, or wait for results.
              #   # Here is how to wait for a response.
              #   result.wait_until_done! timeout: 60
              #   if result.response?
              #     p result.response
              #   else
              #     puts "No response received."
              #   end
              #
              def update_cluster request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?
                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::UpdateClusterRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.update_cluster.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.update_cluster.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.update_cluster.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.update_cluster request, options do |result, operation|
                  result = ::Gapic::Operation.new result, @operations_client, options: options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Deletes a single cluster.
              #
              # @overload delete_cluster(request, options = nil)
              #   Pass arguments to `delete_cluster` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::DeleteClusterRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::DeleteClusterRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload delete_cluster(name: nil, request_id: nil)
              #   Pass arguments to `delete_cluster` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the cluster to delete.
              #   @param request_id [::String]
              #     Optional. An optional request ID to identify requests. Specify a unique
              #     request ID to avoid duplication of requests. If a request times out or
              #     fails, retrying with the same ID allows the server to recognize the
              #     previous attempt. For at least 60 minutes, the server ignores duplicate
              #     requests bearing the same ID.
              #
              #     For example, consider a situation where you make an initial request and the
              #     request times out. If you make the request again with the same request ID
              #     within 60 minutes of the last request, the server checks if an original
              #     operation with the same request ID was received. If so, the server ignores
              #     the second request.
              #
              #     The request ID must be a valid UUID. A zero UUID is not supported
              #     (00000000-0000-0000-0000-000000000000).
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Operation]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Operation]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::DeleteClusterRequest.new
              #
              #   # Call the delete_cluster method.
              #   result = client.delete_cluster request
              #
              #   # The returned object is of type Gapic::Operation. You can use it to
              #   # check the status of an operation, cancel it, or wait for results.
              #   # Here is how to wait for a response.
              #   result.wait_until_done! timeout: 60
              #   if result.response?
              #     p result.response
              #   else
              #     puts "No response received."
              #   end
              #
              def delete_cluster request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::DeleteClusterRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.delete_cluster.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.delete_cluster.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.delete_cluster.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.delete_cluster request, options do |result, operation|
                  result = ::Gapic::Operation.new result, @operations_client, options: options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Lists the topics in a given cluster.
              #
              # @overload list_topics(request, options = nil)
              #   Pass arguments to `list_topics` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::ListTopicsRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::ListTopicsRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload list_topics(parent: nil, page_size: nil, page_token: nil)
              #   Pass arguments to `list_topics` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. The parent cluster whose topics are to be listed. Structured like
              #     `projects/{project}/locations/{location}/clusters/{cluster}`.
              #   @param page_size [::Integer]
              #     Optional. The maximum number of topics to return. The service may return
              #     fewer than this value. If unset or zero, all topics for the parent are
              #     returned.
              #   @param page_token [::String]
              #     Optional. A page token, received from a previous `ListTopics` call.
              #     Provide this to retrieve the subsequent page.
              #
              #     When paginating, all other parameters provided to `ListTopics` must match
              #     the call that provided the page token.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::Topic>]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::Topic>]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::ListTopicsRequest.new
              #
              #   # Call the list_topics method.
              #   result = client.list_topics request
              #
              #   # The returned object is of type Gapic::PagedEnumerable. You can iterate
              #   # over elements, and API calls will be issued to fetch pages as needed.
              #   result.each do |item|
              #     # Each element is of type ::Google::Cloud::ManagedKafka::V1::Topic.
              #     p item
              #   end
              #
              def list_topics request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::ListTopicsRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.list_topics.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.list_topics.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.list_topics.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.list_topics request, options do |result, operation|
                  result = ::Gapic::Rest::PagedEnumerable.new @managed_kafka_stub, :list_topics, "topics", request, result, options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Returns the properties of a single topic.
              #
              # @overload get_topic(request, options = nil)
              #   Pass arguments to `get_topic` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::GetTopicRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::GetTopicRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload get_topic(name: nil)
              #   Pass arguments to `get_topic` via keyword arguments. Note that at
              #   least one keyword argument is required.
              #   To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the topic whose configuration to return. Structured
              #     like:
              #     projects/\\{project}/locations/\\{location}/clusters/\\{cluster}/topics/\\{topic}.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::Topic]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::Topic]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::GetTopicRequest.new
              #
              #   # Call the get_topic method.
              #   result = client.get_topic request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::Topic.
              #   p result
              #
              def get_topic request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::GetTopicRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.get_topic.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.get_topic.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.get_topic.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.get_topic request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Creates a new topic in a given project and location.
              #
              # @overload create_topic(request, options = nil)
              #   Pass arguments to `create_topic` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::CreateTopicRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::CreateTopicRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload create_topic(parent: nil, topic_id: nil, topic: nil)
              #   Pass arguments to `create_topic` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. The parent cluster in which to create the topic.
              #     Structured like
              #     `projects/{project}/locations/{location}/clusters/{cluster}`.
              #   @param topic_id [::String]
              #     Required. The ID to use for the topic, which will become the final
              #     component of the topic's name.
              #
              #     This value is structured like: `my-topic-name`.
              #   @param topic [::Google::Cloud::ManagedKafka::V1::Topic, ::Hash]
              #     Required. Configuration of the topic to create. Its `name` field is
              #     ignored.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::Topic]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::Topic]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::CreateTopicRequest.new
              #
              #   # Call the create_topic method.
              #   result = client.create_topic request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::Topic.
              #   p result
              #
              def create_topic request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::CreateTopicRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.create_topic.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.create_topic.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.create_topic.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.create_topic request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Updates the properties of a single topic.
              #
              # @overload update_topic(request, options = nil)
              #   Pass arguments to `update_topic` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::UpdateTopicRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::UpdateTopicRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload update_topic(update_mask: nil, topic: nil)
              #   Pass arguments to `update_topic` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
              #     Required. Field mask is used to specify the fields to be overwritten in the
              #     Topic resource by the update. The fields specified in the update_mask are
              #     relative to the resource, not the full request. A field will be overwritten
              #     if it is in the mask. The mask is required and a value of * will update all
              #     fields.
              #   @param topic [::Google::Cloud::ManagedKafka::V1::Topic, ::Hash]
              #     Required. The topic to update. Its `name` field must be populated.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::Topic]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::Topic]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::UpdateTopicRequest.new
              #
              #   # Call the update_topic method.
              #   result = client.update_topic request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::Topic.
              #   p result
              #
              def update_topic request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::UpdateTopicRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.update_topic.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.update_topic.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.update_topic.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.update_topic request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end
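              # Illustrative sketch (not part of the generated surface): the keyword-argument
              # form of update_topic with an explicit field mask. The topic name and the
              # `partition_count` field are assumed example values, not taken from this file.
              #
              #   client.update_topic(
              #     update_mask: { paths: ["partition_count"] },
              #     topic: {
              #       name: "projects/my-project/locations/us-central1/clusters/my-cluster/topics/my-topic",
              #       partition_count: 10
              #     }
              #   )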
              ##
              # Deletes a single topic.
              #
              # @overload delete_topic(request, options = nil)
              #   Pass arguments to `delete_topic` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::DeleteTopicRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::DeleteTopicRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload delete_topic(name: nil)
              #   Pass arguments to `delete_topic` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the topic to delete.
              #     `projects/{project}/locations/{location}/clusters/{cluster}/topics/{topic}`.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Protobuf::Empty]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Protobuf::Empty]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::DeleteTopicRequest.new
              #
              #   # Call the delete_topic method.
              #   result = client.delete_topic request
              #
              #   # The returned object is of type Google::Protobuf::Empty.
              #   p result
              #
              def delete_topic request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::DeleteTopicRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.delete_topic.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.delete_topic.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.delete_topic.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.delete_topic request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Lists the consumer groups in a given cluster.
              #
              # @overload list_consumer_groups(request, options = nil)
              #   Pass arguments to `list_consumer_groups` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::ListConsumerGroupsRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::ListConsumerGroupsRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload list_consumer_groups(parent: nil, page_size: nil, page_token: nil)
              #   Pass arguments to `list_consumer_groups` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param parent [::String]
              #     Required. The parent cluster whose consumer groups are to be listed.
              #     Structured like
              #     `projects/{project}/locations/{location}/clusters/{cluster}`.
              #   @param page_size [::Integer]
              #     Optional. The maximum number of consumer groups to return. The service may
              #     return fewer than this value. If unset or zero, all consumer groups for the
              #     parent are returned.
              #   @param page_token [::String]
              #     Optional. A page token, received from a previous `ListConsumerGroups` call.
              #     Provide this to retrieve the subsequent page.
              #
              #     When paginating, all other parameters provided to `ListConsumerGroups` must
              #     match the call that provided the page token.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::ConsumerGroup>]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::ManagedKafka::V1::ConsumerGroup>]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::ListConsumerGroupsRequest.new
              #
              #   # Call the list_consumer_groups method.
              #   result = client.list_consumer_groups request
              #
              #   # The returned object is of type Gapic::PagedEnumerable. You can iterate
              #   # over elements, and API calls will be issued to fetch pages as needed.
              #   result.each do |item|
              #     # Each element is of type ::Google::Cloud::ManagedKafka::V1::ConsumerGroup.
              #     p item
              #   end
              #
              def list_consumer_groups request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::ListConsumerGroupsRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.list_consumer_groups.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.list_consumer_groups.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.list_consumer_groups.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.list_consumer_groups request, options do |result, operation|
                  result = ::Gapic::Rest::PagedEnumerable.new @managed_kafka_stub, :list_consumer_groups, "consumer_groups", request, result, options
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Returns the properties of a single consumer group.
              #
              # @overload get_consumer_group(request, options = nil)
              #   Pass arguments to `get_consumer_group` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::GetConsumerGroupRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::GetConsumerGroupRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload get_consumer_group(name: nil)
              #   Pass arguments to `get_consumer_group` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the consumer group whose configuration to return.
              #     `projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}`.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::ConsumerGroup]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::ConsumerGroup]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::GetConsumerGroupRequest.new
              #
              #   # Call the get_consumer_group method.
              #   result = client.get_consumer_group request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::ConsumerGroup.
              #   p result
              #
              def get_consumer_group request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::GetConsumerGroupRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.get_consumer_group.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.get_consumer_group.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.get_consumer_group.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.get_consumer_group request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end

              ##
              # Updates the properties of a single consumer group.
              #
              # @overload update_consumer_group(request, options = nil)
              #   Pass arguments to `update_consumer_group` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::UpdateConsumerGroupRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::UpdateConsumerGroupRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
              #
              # @overload update_consumer_group(update_mask: nil, consumer_group: nil)
              #   Pass arguments to `update_consumer_group` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
              #     Required. Field mask is used to specify the fields to be overwritten in the
              #     ConsumerGroup resource by the update.
              #     The fields specified in the update_mask are relative to the resource, not
              #     the full request. A field will be overwritten if it is in the mask. The
              #     mask is required and a value of * will update all fields.
              #   @param consumer_group [::Google::Cloud::ManagedKafka::V1::ConsumerGroup, ::Hash]
              #     Required. The consumer group to update. Its `name` field must be populated.
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Cloud::ManagedKafka::V1::ConsumerGroup]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Cloud::ManagedKafka::V1::ConsumerGroup]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::UpdateConsumerGroupRequest.new
              #
              #   # Call the update_consumer_group method.
              #   result = client.update_consumer_group request
              #
              #   # The returned object is of type Google::Cloud::ManagedKafka::V1::ConsumerGroup.
              #   p result
              #
              def update_consumer_group request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::UpdateConsumerGroupRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.update_consumer_group.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout: @config.rpcs.update_consumer_group.timeout,
                                       metadata: call_metadata,
                                       retry_policy: @config.rpcs.update_consumer_group.retry_policy

                options.apply_defaults timeout: @config.timeout,
                                       metadata: @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.update_consumer_group request, options do |result, operation|
                  yield result, operation if block_given?

              ##
              # Deletes a single consumer group.
              #
              # @overload delete_consumer_group(request, options = nil)
              #   Pass arguments to `delete_consumer_group` via a request object, either of type
              #   {::Google::Cloud::ManagedKafka::V1::DeleteConsumerGroupRequest} or an equivalent Hash.
              #
              #   @param request [::Google::Cloud::ManagedKafka::V1::DeleteConsumerGroupRequest, ::Hash]
              #     A request object representing the call parameters. Required. To specify no
              #     parameters, or to keep all the default parameter values, pass an empty Hash.
              #   @param options [::Gapic::CallOptions, ::Hash]
              #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
              #
              # @overload delete_consumer_group(name: nil)
              #   Pass arguments to `delete_consumer_group` via keyword arguments. Note that at
              #   least one keyword argument is required. To specify no parameters, or to keep all
              #   the default parameter values, pass an empty Hash as a request object (see above).
              #
              #   @param name [::String]
              #     Required. The name of the consumer group to delete.
              #     `projects/{project}/locations/{location}/clusters/{cluster}/consumerGroups/{consumerGroup}`.
              #
              # @yield [result, operation] Access the result along with the TransportOperation object
              # @yieldparam result [::Google::Protobuf::Empty]
              # @yieldparam operation [::Gapic::Rest::TransportOperation]
              #
              # @return [::Google::Protobuf::Empty]
              #
              # @raise [::Google::Cloud::Error] if the REST call is aborted.
              #
              # @example Basic example
              #   require "google/cloud/managed_kafka/v1"
              #
              #   # Create a client object. The client can be reused for multiple calls.
              #   client = Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new
              #
              #   # Create a request. To set request fields, pass in keyword arguments.
              #   request = Google::Cloud::ManagedKafka::V1::DeleteConsumerGroupRequest.new
              #
              #   # Call the delete_consumer_group method.
              #   result = client.delete_consumer_group request
              #
              #   # The returned object is of type Google::Protobuf::Empty.
              #   p result
              #
              def delete_consumer_group request, options = nil
                raise ::ArgumentError, "request must be provided" if request.nil?

                request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::ManagedKafka::V1::DeleteConsumerGroupRequest

                # Converts hash and nil to an options object
                options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

                # Customize the options with defaults
                call_metadata = @config.rpcs.delete_consumer_group.metadata.to_h

                # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
                call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
                  lib_name: @config.lib_name, lib_version: @config.lib_version,
                  gapic_version: ::Google::Cloud::ManagedKafka::V1::VERSION,
                  transports_version_send: [:rest]

                call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
                call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

                options.apply_defaults timeout:      @config.rpcs.delete_consumer_group.timeout,
                                       metadata:     call_metadata,
                                       retry_policy: @config.rpcs.delete_consumer_group.retry_policy

                options.apply_defaults timeout:      @config.timeout,
                                       metadata:     @config.metadata,
                                       retry_policy: @config.retry_policy

                @managed_kafka_stub.delete_consumer_group request, options do |result, operation|
                  yield result, operation if block_given?
                  return result
                end
              rescue ::Gapic::Rest::Error => e
                raise ::Google::Cloud::Error.from_error(e)
              end
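
              # Illustrative (not generated) example: deleting a consumer group by
              # resource name via the `name` keyword argument. The IDs in the path
              # below are placeholders.
              #
              #   name = "projects/my-project/locations/us-central1/clusters/my-cluster/consumerGroups/my-group"
              #   client.delete_consumer_group name: name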

              ##
              # Configuration class for the ManagedKafka REST API.
              #
              # This class represents the configuration for ManagedKafka REST,
              # providing control over timeouts, retry behavior, logging, transport
              # parameters, and other low-level controls. Certain parameters can also be
              # applied individually to specific RPCs. See
              # {::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client::Configuration::Rpcs}
              # for a list of RPCs that can be configured independently.
              #
              # Configuration can be applied globally to all clients, or to a single client
              # on construction.
              #
              # @example
              #
              #   # Modify the global config, setting the timeout for list_clusters
              #   # to 20 seconds, and all remaining timeouts to 10 seconds.
              #   ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.configure do |config|
              #     config.timeout = 10.0
              #     config.rpcs.list_clusters.timeout = 20.0
              #   end
              #
              #   # Apply the above configuration only to a new client.
              #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new do |config|
              #     config.timeout = 10.0
              #     config.rpcs.list_clusters.timeout = 20.0
              #   end
              #
              # @!attribute [rw] endpoint
              #   A custom service endpoint, as a hostname or hostname:port. The default is
              #   nil, indicating to use the default endpoint in the current universe domain.
              #   @return [::String,nil]
              # @!attribute [rw] credentials
              #   Credentials to send with calls. You may provide any of the following types:
              #   * (`String`) The path to a service account key file in JSON format
              #   * (`Hash`) A service account key as a Hash
              #   * (`Google::Auth::Credentials`) A googleauth credentials object
              #     (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
              #   * (`Signet::OAuth2::Client`) A signet oauth2 client object
              #     (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
              #   * (`nil`) indicating no credentials
              #   @return [::Object]
              # @!attribute [rw] scope
              #   The OAuth scopes
              #   @return [::Array<::String>]
              # @!attribute [rw] lib_name
              #   The library name as recorded in instrumentation and logging
              #   @return [::String]
              # @!attribute [rw] lib_version
              #   The library version as recorded in instrumentation and logging
              #   @return [::String]
              # @!attribute [rw] timeout
              #   The call timeout in seconds.
              #   @return [::Numeric]
              # @!attribute [rw] metadata
              #   Additional headers to be sent with the call.
              #   @return [::Hash{::Symbol=>::String}]
              # @!attribute [rw] retry_policy
              #   The retry policy. The value is a hash with the following keys:
              #   * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
              #   * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
              #   * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
              #   * `:retry_codes` (*type:* `Array`) - The error codes that should
              #     trigger a retry.
              #   @return [::Hash]
              # @!attribute [rw] quota_project
              #   A separate project against which to charge quota.
              #   @return [::String]
              # @!attribute [rw] universe_domain
              #   The universe domain within which to make requests. This determines the
              #   default endpoint URL. The default value of nil uses the environment
              #   universe (usually the default "googleapis.com" universe).
              #   @return [::String,nil]
              #
              class Configuration
                extend ::Gapic::Config

                # @private
                # The endpoint specific to the default "googleapis.com" universe. Deprecated.
                DEFAULT_ENDPOINT = "managedkafka.googleapis.com"

                config_attr :endpoint, nil, ::String, nil
                config_attr :credentials, nil do |value|
                  allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
                  allowed.any? { |klass| klass === value }
                end
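
                # Illustrative (not generated) example: the `credentials` setting above
                # accepts, among other types, a path to a service account key file.
                # The path used here is a placeholder.
                #
                #   client = ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.new do |config|
                #     config.credentials = "/path/to/service_account_key.json"
                #   end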
                config_attr :scope, nil, ::String, ::Array, nil
                config_attr :lib_name, nil, ::String, nil
                config_attr :lib_version, nil, ::String, nil
                config_attr :timeout, nil, ::Numeric, nil
                config_attr :metadata, nil, ::Hash, nil
                config_attr :retry_policy, nil, ::Hash, ::Proc, nil
                config_attr :quota_project, nil, ::String, nil
                config_attr :universe_domain, nil, ::String, nil

                # @private
                # Overrides for http bindings for the RPCs of this service
                # are only used when this service is used as mixin, and only
                # by the host service.
                # @return [::Hash{::Symbol=>::Array<::Gapic::Rest::GrpcTranscoder::HttpBinding>}]
                config_attr :bindings_override, {}, ::Hash, nil

                # @private
                def initialize parent_config = nil
                  @parent_config = parent_config unless parent_config.nil?

                  yield self if block_given?
                end

                ##
                # Configurations for individual RPCs
                # @return [Rpcs]
                #
                def rpcs
                  @rpcs ||= begin
                    parent_rpcs = nil
                    parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
                    Rpcs.new parent_rpcs
                  end
                end

                ##
                # Configuration RPC class for the ManagedKafka API.
                #
                # Includes fields providing the configuration for each RPC in this service.
                # Each configuration object is of type `Gapic::Config::Method` and includes
                # the following configuration fields:
                #
                # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
                # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional headers
                # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
                #   include the following keys:
                #   * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
                #   * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
                #   * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
                #   * `:retry_codes` (*type:* `Array`) - The error codes that should
                #     trigger a retry.
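                #
                # @example Configuring a single RPC (illustrative, not generated)
                #   # The timeout and retry values below are placeholders, not the
                #   # library defaults.
                #   ::Google::Cloud::ManagedKafka::V1::ManagedKafka::Rest::Client.configure do |config|
                #     config.rpcs.get_consumer_group.timeout = 30.0
                #     config.rpcs.get_consumer_group.retry_policy = {
                #       initial_delay: 1.0, max_delay: 10.0, multiplier: 1.3, retry_codes: [14]
                #     }
                #   end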
                #
                class Rpcs
                  ##
                  # RPC-specific configuration for `list_clusters`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :list_clusters
                  ##
                  # RPC-specific configuration for `get_cluster`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :get_cluster
                  ##
                  # RPC-specific configuration for `create_cluster`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :create_cluster
                  ##
                  # RPC-specific configuration for `update_cluster`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :update_cluster
                  ##
                  # RPC-specific configuration for `delete_cluster`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :delete_cluster
                  ##
                  # RPC-specific configuration for `list_topics`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :list_topics
                  ##
                  # RPC-specific configuration for `get_topic`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :get_topic
                  ##
                  # RPC-specific configuration for `create_topic`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :create_topic
                  ##
                  # RPC-specific configuration for `update_topic`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :update_topic
                  ##
                  # RPC-specific configuration for `delete_topic`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :delete_topic
                  ##
                  # RPC-specific configuration for `list_consumer_groups`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :list_consumer_groups
                  ##
                  # RPC-specific configuration for `get_consumer_group`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :get_consumer_group
                  ##
                  # RPC-specific configuration for `update_consumer_group`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :update_consumer_group
                  ##
                  # RPC-specific configuration for `delete_consumer_group`
                  # @return [::Gapic::Config::Method]
                  #
                  attr_reader :delete_consumer_group

                  # @private
                  def initialize parent_rpcs = nil
                    list_clusters_config = parent_rpcs.list_clusters if parent_rpcs.respond_to? :list_clusters
                    @list_clusters = ::Gapic::Config::Method.new list_clusters_config
                    get_cluster_config = parent_rpcs.get_cluster if parent_rpcs.respond_to? :get_cluster
                    @get_cluster = ::Gapic::Config::Method.new get_cluster_config
                    create_cluster_config = parent_rpcs.create_cluster if parent_rpcs.respond_to? :create_cluster
                    @create_cluster = ::Gapic::Config::Method.new create_cluster_config
                    update_cluster_config = parent_rpcs.update_cluster if parent_rpcs.respond_to? :update_cluster
                    @update_cluster = ::Gapic::Config::Method.new update_cluster_config
                    delete_cluster_config = parent_rpcs.delete_cluster if parent_rpcs.respond_to? :delete_cluster
                    @delete_cluster = ::Gapic::Config::Method.new delete_cluster_config
                    list_topics_config = parent_rpcs.list_topics if parent_rpcs.respond_to? :list_topics
                    @list_topics = ::Gapic::Config::Method.new list_topics_config
                    get_topic_config = parent_rpcs.get_topic if parent_rpcs.respond_to? :get_topic
                    @get_topic = ::Gapic::Config::Method.new get_topic_config
                    create_topic_config = parent_rpcs.create_topic if parent_rpcs.respond_to? :create_topic
                    @create_topic = ::Gapic::Config::Method.new create_topic_config
                    update_topic_config = parent_rpcs.update_topic if parent_rpcs.respond_to? :update_topic
                    @update_topic = ::Gapic::Config::Method.new update_topic_config
                    delete_topic_config = parent_rpcs.delete_topic if parent_rpcs.respond_to? :delete_topic
                    @delete_topic = ::Gapic::Config::Method.new delete_topic_config
                    list_consumer_groups_config = parent_rpcs.list_consumer_groups if parent_rpcs.respond_to? :list_consumer_groups
                    @list_consumer_groups = ::Gapic::Config::Method.new list_consumer_groups_config
                    get_consumer_group_config = parent_rpcs.get_consumer_group if parent_rpcs.respond_to? :get_consumer_group
                    @get_consumer_group = ::Gapic::Config::Method.new get_consumer_group_config
                    update_consumer_group_config = parent_rpcs.update_consumer_group if parent_rpcs.respond_to? :update_consumer_group
                    @update_consumer_group = ::Gapic::Config::Method.new update_consumer_group_config
                    delete_consumer_group_config = parent_rpcs.delete_consumer_group if parent_rpcs.respond_to? :delete_consumer_group
                    @delete_consumer_group = ::Gapic::Config::Method.new delete_consumer_group_config

                    yield self if block_given?
                  end
                end
              end
            end
          end
        end
      end
    end
  end
end