# frozen_string_literal: true

# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!


module Google
  module Cloud
    module DiscoveryEngine
      module V1
        # Cloud Storage location for input content.
        # @!attribute [rw] input_uris
        #   @return [::Array<::String>]
        #     Required. Cloud Storage URIs to input files. Each URI can be up to
        #     2000 characters long. URIs can match the full object path (for example,
        #     `gs://bucket/directory/object.json`) or a pattern matching one or more
        #     files, such as `gs://bucket/directory/*.json`.
        #
        #     A request can contain at most 100 files (or 100,000 files if `data_schema`
        #     is `content`). Each file can be up to 2 GB (or 100 MB if `data_schema` is
        #     `content`).
        # @!attribute [rw] data_schema
        #   @return [::String]
        #     The schema to use when parsing the data from the source.
        #
        #     Supported values for document imports:
        #
        #     * `document` (default): One JSON
        #       {::Google::Cloud::DiscoveryEngine::V1::Document Document} per line. Each
        #       document must have a valid
        #       {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id}.
        #     * `content`: Unstructured data (e.g. PDF, HTML). Each file matched by
        #       `input_uris` becomes a document, with the ID set to the first 128
        #       bits of SHA256(URI) encoded as a hex string.
        #     * `custom`: One custom data JSON per row in arbitrary format that conforms
        #       to the defined {::Google::Cloud::DiscoveryEngine::V1::Schema Schema} of the
        #       data store. This can only be used by the GENERIC Data Store vertical.
        #     * `csv`: A CSV file with header conforming to the defined
        #       {::Google::Cloud::DiscoveryEngine::V1::Schema Schema} of the
        #       data store. Each entry after the header is imported as a Document.
        #       This can only be used by the GENERIC Data Store vertical.
        #
        #     Supported values for user event imports:
        #
        #     * `user_event` (default): One JSON
        #       {::Google::Cloud::DiscoveryEngine::V1::UserEvent UserEvent} per line.
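        #
        # A minimal, illustrative sketch of building this message for a document
        # import; the bucket and object path below are placeholders.
        #
        # @example Importing newline-delimited JSON documents from Cloud Storage
        #   gcs_source = ::Google::Cloud::DiscoveryEngine::V1::GcsSource.new(
        #     # One Document JSON object per line in each matched file.
        #     input_uris:  ["gs://example-bucket/documents/*.json"],
        #     data_schema: "document"
        #   )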
        class GcsSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # BigQuery source to import data from.
        # @!attribute [rw] partition_date
        #   @return [::Google::Type::Date]
        #     BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID or the project number that contains the BigQuery source. Has
        #     a length limit of 128 characters. If not specified, inherits the project
        #     ID from the parent request.
        # @!attribute [rw] dataset_id
        #   @return [::String]
        #     Required. The BigQuery data set to copy the data from with a length limit
        #     of 1,024 characters.
        # @!attribute [rw] table_id
        #   @return [::String]
        #     Required. The BigQuery table to copy the data from with a length limit of
        #     1,024 characters.
        # @!attribute [rw] gcs_staging_dir
        #   @return [::String]
        #     Intermediate Cloud Storage directory used for the import with a length
        #     limit of 2,000 characters. Can be specified if you want BigQuery to
        #     export to a specific Cloud Storage directory.
        # @!attribute [rw] data_schema
        #   @return [::String]
        #     The schema to use when parsing the data from the source.
        #
        #     Supported values for user event imports:
        #
        #     * `user_event` (default): One
        #       {::Google::Cloud::DiscoveryEngine::V1::UserEvent UserEvent} per row.
        #
        #     Supported values for document imports:
        #
        #     * `document` (default): One
        #       {::Google::Cloud::DiscoveryEngine::V1::Document Document} per row.
        #       Each document must have a valid
        #       {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id} and one of
        #       {::Google::Cloud::DiscoveryEngine::V1::Document#json_data Document.json_data}
        #       or
        #       {::Google::Cloud::DiscoveryEngine::V1::Document#struct_data Document.struct_data}.
        #     * `custom`: One custom data record per row in arbitrary format that conforms to
        #       the defined {::Google::Cloud::DiscoveryEngine::V1::Schema Schema} of the data
        #       store. This can only be used by the GENERIC Data Store vertical.
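        #
        # An illustrative sketch of pointing a document import at a BigQuery table;
        # the project, dataset, and table IDs below are placeholders.
        #
        # @example Importing documents from a BigQuery table
        #   bigquery_source = ::Google::Cloud::DiscoveryEngine::V1::BigQuerySource.new(
        #     project_id:  "example-project",
        #     dataset_id:  "example_dataset",
        #     table_id:    "documents_table",
        #     data_schema: "document"
        #   )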
        class BigQuerySource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # The Spanner source for importing data.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID that contains the Spanner source. Has a length limit of 128
        #     characters. If not specified, inherits the project ID from the parent
        #     request.
        # @!attribute [rw] instance_id
        #   @return [::String]
        #     Required. The instance ID of the source Spanner table.
        # @!attribute [rw] database_id
        #   @return [::String]
        #     Required. The database ID of the source Spanner table.
        # @!attribute [rw] table_id
        #   @return [::String]
        #     Required. The table name of the Spanner database that needs to be imported.
        # @!attribute [rw] enable_data_boost
        #   @return [::Boolean]
        #     Whether to apply data boost on Spanner export. Enabling this option will
        #     incur additional cost. More info can be found
        #     [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
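        #
        # An illustrative sketch; the instance, database, and table names below are
        # placeholders.
        #
        # @example Importing from a Spanner table with Data Boost enabled
        #   spanner_source = ::Google::Cloud::DiscoveryEngine::V1::SpannerSource.new(
        #     instance_id:       "example-instance",
        #     database_id:       "example-database",
        #     table_id:          "Documents",
        #     # Data Boost incurs additional cost; see the pricing link above.
        #     enable_data_boost: true
        #   )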
        class SpannerSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # The Bigtable Options object that contains information to support
        # the import.
        # @!attribute [rw] key_field_name
        #   @return [::String]
        #     The field name used for saving the row key value in the document. The
        #     name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
        # @!attribute [rw] families
        #   @return [::Google::Protobuf::Map{::String => ::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::BigtableColumnFamily}]
        #     The mapping from family names to an object that contains column-family
        #     level information for the given column family. If a family is not present
        #     in this map, it is ignored.
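        #
        # An illustrative sketch that maps a single column family; the family,
        # field, and qualifier names below are placeholders.
        #
        # @example Describing how a Bigtable row maps to document fields
        #   bigtable_options = ::Google::Cloud::DiscoveryEngine::V1::BigtableOptions.new(
        #     key_field_name: "doc_id",
        #     families: {
        #       "cf1" => ::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::BigtableColumnFamily.new(
        #         field_name: "cf1",
        #         type:       :STRING,
        #         encoding:   :TEXT,
        #         columns: [
        #           ::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::BigtableColumn.new(
        #             qualifier:  "title",
        #             field_name: "title"
        #           )
        #         ]
        #       )
        #     }
        #   )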
        class BigtableOptions
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # The column family of the Bigtable.
          # @!attribute [rw] field_name
          #   @return [::String]
          #     The field name to use for this column family in the document. The
          #     name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
          #     it is parsed from the family name with best effort. However, due to
          #     different naming patterns, field name collisions could happen, where
          #     parsing behavior is undefined.
          # @!attribute [rw] encoding
          #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::Encoding]
          #     The encoding mode of the values when the type is not `STRING`.
          #     Acceptable encoding values are:
          #
          #     * `TEXT`: indicates values are alphanumeric text strings.
          #     * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
          #     family of functions. This can be overridden for a specific column
          #     by listing that column in `columns` and specifying an encoding for it.
          # @!attribute [rw] type
          #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::Type]
          #     The type of values in this column family.
          #     The values are expected to be encoded using `HBase Bytes.toBytes`
          #     function when the encoding value is set to `BINARY`.
          # @!attribute [rw] columns
          #   @return [::Array<::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::BigtableColumn>]
          #     The list of objects that contains column level information for each
          #     column. If a column is not present in this list it will be ignored.
          class BigtableColumnFamily
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # The column of the Bigtable.
          # @!attribute [rw] qualifier
          #   @return [::String]
          #     Required. Qualifier of the column. If it cannot be decoded with UTF-8,
          #     use a base-64 encoded string instead.
          # @!attribute [rw] field_name
          #   @return [::String]
          #     The field name to use for this column in the document. The name has to
          #     match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
          #     If not set, it is parsed from the qualifier bytes with best effort.
          #     However, due to different naming patterns, field name collisions could
          #     happen, where parsing behavior is undefined.
          # @!attribute [rw] encoding
          #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::Encoding]
          #     The encoding mode of the values when the type is not `STRING`.
          #     Acceptable encoding values are:
          #
          #     * `TEXT`: indicates values are alphanumeric text strings.
          #     * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
          #     family of functions. This can be overridden for a specific column
          #     by listing that column in `columns` and specifying an encoding for it.
          # @!attribute [rw] type
          #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::Type]
          #     The type of values in this column.
          #     The values are expected to be encoded using `HBase Bytes.toBytes`
          #     function when the encoding value is set to `BINARY`.
          class BigtableColumn
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # @!attribute [rw] key
          #   @return [::String]
          # @!attribute [rw] value
          #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions::BigtableColumnFamily]
          class FamiliesEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # The type of values in a Bigtable column or column family.
          # The values are expected to be encoded using
          # [HBase
          # Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
          # function when the encoding value is set to `BINARY`.
          module Type
            # The type is unspecified.
            TYPE_UNSPECIFIED = 0

            # String type.
            STRING = 1

            # Numerical type.
            NUMBER = 2

            # Integer type.
            INTEGER = 3

            # Variable length integer type.
            VAR_INTEGER = 4

            # BigDecimal type.
            BIG_NUMERIC = 5

            # Boolean type.
            BOOLEAN = 6

            # JSON type.
            JSON = 7
          end

          # The encoding mode of a Bigtable column or column family.
          module Encoding
            # The encoding is unspecified.
            ENCODING_UNSPECIFIED = 0

            # Text encoding.
            TEXT = 1

            # Binary encoding.
            BINARY = 2
          end
        end

        # The Cloud Bigtable source for importing data.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID that contains the Bigtable source. Has a length limit of 128
        #     characters. If not specified, inherits the project ID from the parent
        #     request.
        # @!attribute [rw] instance_id
        #   @return [::String]
        #     Required. The instance ID of the Cloud Bigtable that needs to be imported.
        # @!attribute [rw] table_id
        #   @return [::String]
        #     Required. The table ID of the Cloud Bigtable that needs to be imported.
        # @!attribute [rw] bigtable_options
        #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableOptions]
        #     Required. Bigtable options that contain information needed when parsing
        #     data into typed structures. For example, column type annotations.
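        #
        # An illustrative sketch; the project, instance, and table IDs below are
        # placeholders, and a fuller `BigtableOptions` is shown above.
        #
        # @example Importing from a Cloud Bigtable table
        #   bigtable_source = ::Google::Cloud::DiscoveryEngine::V1::BigtableSource.new(
        #     project_id:  "example-project",
        #     instance_id: "example-instance",
        #     table_id:    "example-table",
        #     bigtable_options: ::Google::Cloud::DiscoveryEngine::V1::BigtableOptions.new(
        #       key_field_name: "doc_id"
        #     )
        #   )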
        class BigtableSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Cloud FhirStore source to import data from.
        # @!attribute [rw] fhir_store
        #   @return [::String]
        #     Required. The full resource name of the FHIR store to import data from, in
        #     the format of
        #     `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
        # @!attribute [rw] gcs_staging_dir
        #   @return [::String]
        #     Intermediate Cloud Storage directory used for the import with a length
        #     limit of 2,000 characters. Can be specified if you want the FhirStore to
        #     export to a specific Cloud Storage directory.
        # @!attribute [rw] resource_types
        #   @return [::Array<::String>]
        #     The FHIR resource types to import. The resource types should be a subset of
        #     all [supported FHIR resource
        #     types](https://cloud.google.com/generative-ai-app-builder/docs/fhir-schema-reference#resource-level-specification).
        #     Defaults to all supported FHIR resource types if empty.
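        #
        # An illustrative sketch; the resource name components, staging bucket, and
        # resource types below are placeholders.
        #
        # @example Importing from a FHIR store
        #   fhir_store_source = ::Google::Cloud::DiscoveryEngine::V1::FhirStoreSource.new(
        #     fhir_store:      "projects/example-project/locations/us-central1/datasets/example-dataset/fhirStores/example-fhir-store",
        #     gcs_staging_dir: "gs://example-bucket/fhir-staging",
        #     resource_types:  ["Patient", "Observation"]
        #   )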
        class FhirStoreSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Cloud SQL source to import data from.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID that contains the Cloud SQL source. Has a length limit of
        #     128 characters. If not specified, inherits the project ID from the parent
        #     request.
        # @!attribute [rw] instance_id
        #   @return [::String]
        #     Required. The Cloud SQL instance to copy the data from with a length limit
        #     of 256 characters.
        # @!attribute [rw] database_id
        #   @return [::String]
        #     Required. The Cloud SQL database to copy the data from with a length limit
        #     of 256 characters.
        # @!attribute [rw] table_id
        #   @return [::String]
        #     Required. The Cloud SQL table to copy the data from with a length limit of
        #     256 characters.
        # @!attribute [rw] gcs_staging_dir
        #   @return [::String]
        #     Intermediate Cloud Storage directory used for the import with a length
        #     limit of 2,000 characters. Can be specified if you want Cloud SQL to
        #     export to a specific Cloud Storage directory.
        #
        #     Ensure that the Cloud SQL service account has the necessary Cloud
        #     Storage Admin permissions to access the specified Cloud Storage directory.
        # @!attribute [rw] offload
        #   @return [::Boolean]
        #     Option for serverless export. Enabling this option will incur additional
        #     cost. More info can be found
        #     [here](https://cloud.google.com/sql/pricing#serverless).
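        #
        # An illustrative sketch; the instance, database, table, and bucket names
        # below are placeholders.
        #
        # @example Importing from a Cloud SQL table
        #   cloud_sql_source = ::Google::Cloud::DiscoveryEngine::V1::CloudSqlSource.new(
        #     instance_id:     "example-instance",
        #     database_id:     "example_database",
        #     table_id:        "documents",
        #     gcs_staging_dir: "gs://example-bucket/cloudsql-staging",
        #     # Serverless export incurs additional cost; see the pricing link above.
        #     offload:         true
        #   )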
        class CloudSqlSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # AlloyDB source to import data from.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID that contains the AlloyDB source.
        #     Has a length limit of 128 characters. If not specified, inherits the
        #     project ID from the parent request.
        # @!attribute [rw] location_id
        #   @return [::String]
        #     Required. The AlloyDB location to copy the data from with a length limit of
        #     256 characters.
        # @!attribute [rw] cluster_id
        #   @return [::String]
        #     Required. The AlloyDB cluster to copy the data from with a length limit of
        #     256 characters.
        # @!attribute [rw] database_id
        #   @return [::String]
        #     Required. The AlloyDB database to copy the data from with a length limit of
        #     256 characters.
        # @!attribute [rw] table_id
        #   @return [::String]
        #     Required. The AlloyDB table to copy the data from with a length limit of
        #     256 characters.
        # @!attribute [rw] gcs_staging_dir
        #   @return [::String]
        #     Intermediate Cloud Storage directory used for the import with a length
        #     limit of 2,000 characters. Can be specified if you want AlloyDB to
        #     export to a specific Cloud Storage directory.
        #
        #     Ensure that the AlloyDB service account has the necessary Cloud
        #     Storage Admin permissions to access the specified Cloud Storage directory.
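        #
        # An illustrative sketch; the location, cluster, database, and table IDs
        # below are placeholders.
        #
        # @example Importing from an AlloyDB table
        #   alloy_db_source = ::Google::Cloud::DiscoveryEngine::V1::AlloyDbSource.new(
        #     location_id: "us-central1",
        #     cluster_id:  "example-cluster",
        #     database_id: "example_database",
        #     table_id:    "documents"
        #   )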
        class AlloyDbSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Firestore source to import data from.
        # @!attribute [rw] project_id
        #   @return [::String]
        #     The project ID that the Firestore source is in with a length limit of 128
        #     characters. If not specified, inherits the project ID from the parent
        #     request.
        # @!attribute [rw] database_id
        #   @return [::String]
        #     Required. The Firestore database to copy the data from with a length limit
        #     of 256 characters.
        # @!attribute [rw] collection_id
        #   @return [::String]
        #     Required. The Firestore collection (or entity) to copy the data from with a
        #     length limit of 1,500 characters.
        # @!attribute [rw] gcs_staging_dir
        #   @return [::String]
        #     Intermediate Cloud Storage directory used for the import with a length
        #     limit of 2,000 characters. Can be specified if you want Firestore to
        #     export to a specific Cloud Storage directory.
        #
        #     Ensure that the Firestore service account has the necessary Cloud
        #     Storage Admin permissions to access the specified Cloud Storage directory.
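        #
        # An illustrative sketch; `(default)` is the default Firestore database ID,
        # and the collection name below is a placeholder.
        #
        # @example Importing from a Firestore collection
        #   firestore_source = ::Google::Cloud::DiscoveryEngine::V1::FirestoreSource.new(
        #     database_id:   "(default)",
        #     collection_id: "documents"
        #   )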
        class FirestoreSource
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Configuration of destination for Import related errors.
        # @!attribute [rw] gcs_prefix
        #   @return [::String]
        #     Cloud Storage prefix for import errors. This must be an empty,
        #     existing Cloud Storage directory. Import errors are written to
        #     sharded files in this directory, one per line, as a JSON-encoded
        #     `google.rpc.Status` message.
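        #
        # An illustrative sketch; the prefix below is a placeholder and must point
        # to an existing, empty Cloud Storage directory.
        #
        # @example Writing import errors to Cloud Storage
        #   error_config = ::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig.new(
        #     gcs_prefix: "gs://example-bucket/import-errors/"
        #   )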
        class ImportErrorConfig
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Request message for the ImportUserEvents method.
        # @!attribute [rw] inline_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportUserEventsRequest::InlineSource]
        #     The Inline source for the input content for UserEvents.
        # @!attribute [rw] gcs_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::GcsSource]
        #     Cloud Storage location for the input content.
        # @!attribute [rw] bigquery_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::BigQuerySource]
        #     BigQuery input source.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. Parent DataStore resource name, of the form
        #     `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     The desired location of errors incurred during the Import. Cannot be set
        #     for inline user event imports.
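        #
        # An illustrative sketch of an inline import; the parent resource name and
        # event values below are placeholders, and `event_type`/`user_pseudo_id` are
        # assumed to be the usual required UserEvent fields.
        #
        # @example Importing user events inline
        #   request = ::Google::Cloud::DiscoveryEngine::V1::ImportUserEventsRequest.new(
        #     parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store",
        #     inline_source: ::Google::Cloud::DiscoveryEngine::V1::ImportUserEventsRequest::InlineSource.new(
        #       user_events: [
        #         ::Google::Cloud::DiscoveryEngine::V1::UserEvent.new(
        #           event_type:     "view-item",
        #           user_pseudo_id: "visitor-123"
        #         )
        #       ]
        #     )
        #   )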
        class ImportUserEventsRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # The inline source for the input config for ImportUserEvents method.
          # @!attribute [rw] user_events
          #   @return [::Array<::Google::Cloud::DiscoveryEngine::V1::UserEvent>]
          #     Required. A list of user events to import. Recommended max of 10k items.
          class InlineSource
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end

        # Response of the ImportUserEventsRequest. If the long-running operation
        # was successful, this message is returned by the
        # google.longrunning.Operations.response field.
        # @!attribute [rw] error_samples
        #   @return [::Array<::Google::Rpc::Status>]
        #     A sample of errors encountered while processing the request.
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     Echoes the destination for the complete errors if this field was set in
        #     the request.
        # @!attribute [rw] joined_events_count
        #   @return [::Integer]
        #     Count of user events imported with complete existing Documents.
        # @!attribute [rw] unjoined_events_count
        #   @return [::Integer]
        #     Count of user events imported, but with Document information not found
        #     in the existing Branch.
        class ImportUserEventsResponse
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metadata related to the progress of the Import operation. This is
        # returned by the google.longrunning.Operation.metadata field.
        # @!attribute [rw] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation create time.
        # @!attribute [rw] update_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation last update time. If the operation is done, this is also the
        #     finish time.
        # @!attribute [rw] success_count
        #   @return [::Integer]
        #     Count of entries that were processed successfully.
        # @!attribute [rw] failure_count
        #   @return [::Integer]
        #     Count of entries that encountered errors while processing.
        class ImportUserEventsMetadata
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metadata related to the progress of the ImportDocuments operation. This is
        # returned by the google.longrunning.Operation.metadata field.
        # @!attribute [rw] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation create time.
        # @!attribute [rw] update_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation last update time. If the operation is done, this is also the
        #     finish time.
        # @!attribute [rw] success_count
        #   @return [::Integer]
        #     Count of entries that were processed successfully.
        # @!attribute [rw] failure_count
        #   @return [::Integer]
        #     Count of entries that encountered errors while processing.
        # @!attribute [rw] total_count
        #   @return [::Integer]
        #     Total count of entries that were processed.
        class ImportDocumentsMetadata
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Request message for Import methods.
        # @!attribute [rw] inline_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest::InlineSource]
        #     The Inline source for the input content for documents.
        # @!attribute [rw] gcs_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::GcsSource]
        #     Cloud Storage location for the input content.
        # @!attribute [rw] bigquery_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::BigQuerySource]
        #     BigQuery input source.
        # @!attribute [rw] fhir_store_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::FhirStoreSource]
        #     FhirStore input source.
        # @!attribute [rw] spanner_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::SpannerSource]
        #     Spanner input source.
        # @!attribute [rw] cloud_sql_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::CloudSqlSource]
        #     Cloud SQL input source.
        # @!attribute [rw] firestore_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::FirestoreSource]
        #     Firestore input source.
        # @!attribute [rw] alloy_db_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::AlloyDbSource]
        #     AlloyDB input source.
        # @!attribute [rw] bigtable_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::BigtableSource]
        #     Cloud Bigtable input source.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. The parent branch resource name, such as
        #     `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`.
        #     Requires create/update permission.
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     The desired location of errors incurred during the Import.
        # @!attribute [rw] reconciliation_mode
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest::ReconciliationMode]
        #     The mode of reconciliation between existing documents and the documents to
        #     be imported. Defaults to
        #     {::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest::ReconciliationMode::INCREMENTAL ReconciliationMode.INCREMENTAL}.
        # @!attribute [rw] update_mask
        #   @return [::Google::Protobuf::FieldMask]
        #     Indicates which fields in the provided imported documents to update. If
        #     not set, the default is to update all fields.
        # @!attribute [rw] auto_generate_ids
        #   @return [::Boolean]
        #     Whether to automatically generate IDs for the documents if absent.
        #
        #     If set to `true`,
        #     {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id}s are
        #     automatically generated based on the hash of the payload, and the IDs may
        #     not be consistent across multiple imports. In that case,
        #     {::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest::ReconciliationMode::FULL ReconciliationMode.FULL}
        #     is highly recommended to avoid duplicate contents. If unset or set to
        #     `false`, {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id}s have
        #     to be specified using
        #     {::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest#id_field id_field},
        #     otherwise, documents without IDs fail to be imported.
        #
        #     Supported data sources:
        #
        #     * {::Google::Cloud::DiscoveryEngine::V1::GcsSource GcsSource}.
        #       {::Google::Cloud::DiscoveryEngine::V1::GcsSource#data_schema GcsSource.data_schema}
        #       must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
        #     * {::Google::Cloud::DiscoveryEngine::V1::BigQuerySource BigQuerySource}.
        #       {::Google::Cloud::DiscoveryEngine::V1::BigQuerySource#data_schema BigQuerySource.data_schema}
        #       must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
        #     * {::Google::Cloud::DiscoveryEngine::V1::SpannerSource SpannerSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::CloudSqlSource CloudSqlSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::FirestoreSource FirestoreSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::BigtableSource BigtableSource}.
        # @!attribute [rw] id_field
        #   @return [::String]
        #     Indicates the field or column to be used as the unique IDs of the
        #     documents.
        #
        #     For {::Google::Cloud::DiscoveryEngine::V1::GcsSource GcsSource} it is the key of
        #     the JSON field. For instance, `my_id` for JSON `{"my_id": "some_uuid"}`.
        #     For others, it may be the column name of the table where the unique ids are
        #     stored.
        #
        #     The values of the JSON field or the table column are used as the
        #     {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id}s. The JSON field
        #     or the table column must be of string type, and the values must be
        #     valid strings conforming to [RFC-1034](https://tools.ietf.org/html/rfc1034)
        #     with 1-63 characters. Otherwise, documents without valid IDs fail to be
        #     imported.
        #
        #     Only set this field when
        #     {::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest#auto_generate_ids auto_generate_ids}
        #     is unset or set as `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
        #
        #     If it is unset, a default value `_id` is used when importing from the
        #     allowed data sources.
        #
        #     Supported data sources:
        #
        #     * {::Google::Cloud::DiscoveryEngine::V1::GcsSource GcsSource}.
        #       {::Google::Cloud::DiscoveryEngine::V1::GcsSource#data_schema GcsSource.data_schema}
        #       must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
        #     * {::Google::Cloud::DiscoveryEngine::V1::BigQuerySource BigQuerySource}.
        #       {::Google::Cloud::DiscoveryEngine::V1::BigQuerySource#data_schema BigQuerySource.data_schema}
        #       must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
        #     * {::Google::Cloud::DiscoveryEngine::V1::SpannerSource SpannerSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::CloudSqlSource CloudSqlSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::FirestoreSource FirestoreSource}.
        #     * {::Google::Cloud::DiscoveryEngine::V1::BigtableSource BigtableSource}.
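        #
        # An illustrative sketch of a full-reconciliation import from Cloud Storage
        # with auto-generated IDs; the resource names and bucket paths below are
        # placeholders.
        #
        # @example Importing custom JSON documents with auto-generated IDs
        #   request = ::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest.new(
        #     parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store/branches/default_branch",
        #     gcs_source: ::Google::Cloud::DiscoveryEngine::V1::GcsSource.new(
        #       input_uris:  ["gs://example-bucket/records/*.json"],
        #       # `custom` (or `csv`) is required when `auto_generate_ids` is true.
        #       data_schema: "custom"
        #     ),
        #     auto_generate_ids:   true,
        #     reconciliation_mode: :FULL,
        #     error_config: ::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig.new(
        #       gcs_prefix: "gs://example-bucket/import-errors/"
        #     )
        #   )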
        class ImportDocumentsRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # The inline source for the input config for ImportDocuments method.
          # @!attribute [rw] documents
          #   @return [::Array<::Google::Cloud::DiscoveryEngine::V1::Document>]
          #     Required. A list of documents to update/create. Each document must have a
          #     valid {::Google::Cloud::DiscoveryEngine::V1::Document#id Document.id}.
          #     Recommended max of 100 items.
          class InlineSource
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # Indicates how imported documents are reconciled with the existing documents
          # created or imported before.
          module ReconciliationMode
            # Defaults to `INCREMENTAL`.
            RECONCILIATION_MODE_UNSPECIFIED = 0

            # Inserts new documents or updates existing documents.
            INCREMENTAL = 1

            # Calculates diff and replaces the entire document dataset. Existing
            # documents may be deleted if they are not present in the source location.
            FULL = 2
          end
        end

        # Response of the
        # {::Google::Cloud::DiscoveryEngine::V1::ImportDocumentsRequest ImportDocumentsRequest}.
        # If the long-running operation is done and was successful, this message is
        # returned by the google.longrunning.Operations.response field.
        # @!attribute [rw] error_samples
        #   @return [::Array<::Google::Rpc::Status>]
        #     A sample of errors encountered while processing the request.
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     Echoes the destination for the complete errors in the request if set.
        class ImportDocumentsResponse
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Request message for
        # {::Google::Cloud::DiscoveryEngine::V1::CompletionService::Client#import_suggestion_deny_list_entries CompletionService.ImportSuggestionDenyListEntries}
        # method.
        # @!attribute [rw] inline_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportSuggestionDenyListEntriesRequest::InlineSource]
        #     The Inline source for the input content for suggestion deny list entries.
        # @!attribute [rw] gcs_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::GcsSource]
        #     Cloud Storage location for the input content.
        #
        #     Only one file can be specified, containing all the entries to import.
        #     Supported values of `gcs_source.data_schema` for autocomplete suggestion
        #     deny list entry imports:
        #
        #     * `suggestion_deny_list` (default): One JSON
        #       {::Google::Cloud::DiscoveryEngine::V1::SuggestionDenyListEntry SuggestionDenyListEntry}
        #       per line.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. The parent data store resource name for which to import denylist
        #     entries. Follows pattern `projects/*/locations/*/collections/*/dataStores/*`.
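        #
        # An illustrative sketch of an inline import; the parent resource name and
        # phrase below are placeholders, and `block_phrase`/`match_operator` are
        # assumed to be the SuggestionDenyListEntry fields.
        #
        # @example Importing suggestion denylist entries inline
        #   request = ::Google::Cloud::DiscoveryEngine::V1::ImportSuggestionDenyListEntriesRequest.new(
        #     parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store",
        #     inline_source: ::Google::Cloud::DiscoveryEngine::V1::ImportSuggestionDenyListEntriesRequest::InlineSource.new(
        #       entries: [
        #         ::Google::Cloud::DiscoveryEngine::V1::SuggestionDenyListEntry.new(
        #           block_phrase:   "example blocked phrase",
        #           match_operator: :EXACT_MATCH
        #         )
        #       ]
        #     )
        #   )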
        class ImportSuggestionDenyListEntriesRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # The inline source for SuggestionDenyListEntry.
          # @!attribute [rw] entries
          #   @return [::Array<::Google::Cloud::DiscoveryEngine::V1::SuggestionDenyListEntry>]
          #     Required. A list of all denylist entries to import. Max of 1000 items.
          class InlineSource
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end

        # Response message for
        # {::Google::Cloud::DiscoveryEngine::V1::CompletionService::Client#import_suggestion_deny_list_entries CompletionService.ImportSuggestionDenyListEntries}
        # method.
        # @!attribute [rw] error_samples
        #   @return [::Array<::Google::Rpc::Status>]
        #     A sample of errors encountered while processing the request.
        # @!attribute [rw] imported_entries_count
        #   @return [::Integer]
        #     Count of deny list entries successfully imported.
        # @!attribute [rw] failed_entries_count
        #   @return [::Integer]
        #     Count of deny list entries that failed to be imported.
        class ImportSuggestionDenyListEntriesResponse
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metadata related to the progress of the ImportSuggestionDenyListEntries
        # operation. This is returned by the google.longrunning.Operation.metadata
        # field.
        # @!attribute [rw] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation create time.
        # @!attribute [rw] update_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation last update time. If the operation is done, this is also the
        #     finish time.
        class ImportSuggestionDenyListEntriesMetadata
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Request message for
        # {::Google::Cloud::DiscoveryEngine::V1::CompletionService::Client#import_completion_suggestions CompletionService.ImportCompletionSuggestions}
        # method.
        # @!attribute [rw] inline_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportCompletionSuggestionsRequest::InlineSource]
        #     The Inline source for suggestion entries.
        # @!attribute [rw] gcs_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::GcsSource]
        #     Cloud Storage location for the input content.
        # @!attribute [rw] bigquery_source
        #   @return [::Google::Cloud::DiscoveryEngine::V1::BigQuerySource]
        #     BigQuery input source.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. The parent data store resource name for which to import customer
        #     autocomplete suggestions.
        #
        #     Follows pattern `projects/*/locations/*/collections/*/dataStores/*`
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     The desired location of errors incurred during the Import.
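        #
        # An illustrative sketch of an inline import; the parent resource name and
        # suggestion text below are placeholders, and `suggestion` is assumed to be
        # the CompletionSuggestion text field.
        #
        # @example Importing completion suggestions inline
        #   request = ::Google::Cloud::DiscoveryEngine::V1::ImportCompletionSuggestionsRequest.new(
        #     parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store",
        #     inline_source: ::Google::Cloud::DiscoveryEngine::V1::ImportCompletionSuggestionsRequest::InlineSource.new(
        #       suggestions: [
        #         ::Google::Cloud::DiscoveryEngine::V1::CompletionSuggestion.new(
        #           suggestion: "example suggestion"
        #         )
        #       ]
        #     )
        #   )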
        class ImportCompletionSuggestionsRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # The inline source for CompletionSuggestions.
          # @!attribute [rw] suggestions
          #   @return [::Array<::Google::Cloud::DiscoveryEngine::V1::CompletionSuggestion>]
          #     Required. A list of all suggestion entries to import. Max of 1000 items.
          class InlineSource
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end

        # Response of the
        # {::Google::Cloud::DiscoveryEngine::V1::CompletionService::Client#import_completion_suggestions CompletionService.ImportCompletionSuggestions}
        # method. If the long-running operation is done and was successful, this
        # message is returned by the google.longrunning.Operations.response field.
        # @!attribute [rw] error_samples
        #   @return [::Array<::Google::Rpc::Status>]
        #     A sample of errors encountered while processing the request.
        # @!attribute [rw] error_config
        #   @return [::Google::Cloud::DiscoveryEngine::V1::ImportErrorConfig]
        #     The desired location of errors incurred during the Import.
        class ImportCompletionSuggestionsResponse
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # Metadata related to the progress of the ImportCompletionSuggestions
        # operation. This is returned by the google.longrunning.Operation.metadata
        # field.
        # @!attribute [rw] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation create time.
        # @!attribute [rw] update_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Operation last update time. If the operation is done, this is also the
        #     finish time.
        # @!attribute [rw] success_count
        #   @return [::Integer]
        #     Count of
        #     {::Google::Cloud::DiscoveryEngine::V1::CompletionSuggestion CompletionSuggestion}s
        #     successfully imported.
        # @!attribute [rw] failure_count
        #   @return [::Integer]
        #     Count of
        #     {::Google::Cloud::DiscoveryEngine::V1::CompletionSuggestion CompletionSuggestion}s
        #     that failed to be imported.
        class ImportCompletionSuggestionsMetadata
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end
      end
    end
  end
end