# frozen_string_literal: true
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/cloud/datacatalog/v1/bigquery.proto

require 'google/protobuf'

require 'google/api/field_behavior_pb'


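# Serialized google.protobuf.FileDescriptorProto for
# google/cloud/datacatalog/v1/bigquery.proto. It defines BigQueryConnectionSpec
# (with its ConnectionType enum and connection_spec oneof),
# CloudSqlBigQueryConnectionSpec (with its DatabaseType enum), and
# BigQueryRoutineSpec, and is registered in the generated descriptor pool below.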
descriptor_data = "\n*google/cloud/datacatalog/v1/bigquery.proto\x12\x1bgoogle.cloud.datacatalog.v1\x1a\x1fgoogle/api/field_behavior.proto\"\xb4\x02\n\x16\x42igQueryConnectionSpec\x12[\n\x0f\x63onnection_type\x18\x01 \x01(\x0e\x32\x42.google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionType\x12P\n\tcloud_sql\x18\x02 \x01(\x0b\x32;.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpecH\x00\x12\x16\n\x0ehas_credential\x18\x03 \x01(\x08\"@\n\x0e\x43onnectionType\x12\x1f\n\x1b\x43ONNECTION_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tCLOUD_SQL\x10\x01\x42\x11\n\x0f\x63onnection_spec\"\xe7\x01\n\x1e\x43loudSqlBigQueryConnectionSpec\x12\x13\n\x0binstance_id\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12V\n\x04type\x18\x03 \x01(\x0e\x32H.google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType\"F\n\x0c\x44\x61tabaseType\x12\x1d\n\x19\x44\x41TABASE_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08POSTGRES\x10\x01\x12\t\n\x05MYSQL\x10\x02\"1\n\x13\x42igQueryRoutineSpec\x12\x1a\n\x12imported_libraries\x18\x01 \x03(\tB\xd2\x01\n\x1f\x63om.google.cloud.datacatalog.v1B\rBigQueryProtoP\x01ZAcloud.google.com/go/datacatalog/apiv1/datacatalogpb;datacatalogpb\xaa\x02\x1bGoogle.Cloud.DataCatalog.V1\xca\x02\x1bGoogle\\Cloud\\DataCatalog\\V1\xea\x02\x1eGoogle::Cloud::DataCatalog::V1b\x06proto3"

pool = Google::Protobuf::DescriptorPool.generated_pool

begin
  pool.add_serialized_file(descriptor_data)
rescue TypeError
  # Compatibility code: will be removed in the next major version.
  # add_serialized_file raises TypeError when a dependency of this file was
  # previously registered under a different filename. Fall back to decoding the
  # descriptor, stripping its dependency list, and re-adding it, then warn about
  # any imports that were loaded from an unexpected path.
  require 'google/protobuf/descriptor_pb'
  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
  parsed.clear_dependency
  serialized = parsed.class.encode(parsed)
  file = pool.add_serialized_file(serialized)
  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
  # This file does not reference any message types from its imports
  # (google/api/field_behavior.proto only supplies field options), so there are
  # no entries to cross-check here.
  imports = []
  imports.each do |type_name, expected_filename|
    import_file = pool.lookup(type_name).file_descriptor
    if import_file.name != expected_filename
      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
    end
  end
  warn "Each proto file must use a consistent fully-qualified name."
  warn "This will become an error in the next major version."
end

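# Expose the message classes and enum modules registered above as constants
# under the Google::Cloud::DataCatalog::V1 namespace.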
module Google
  module Cloud
    module DataCatalog
      module V1
        BigQueryConnectionSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.BigQueryConnectionSpec").msgclass
        BigQueryConnectionSpec::ConnectionType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.BigQueryConnectionSpec.ConnectionType").enummodule
        CloudSqlBigQueryConnectionSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec").msgclass
        CloudSqlBigQueryConnectionSpec::DatabaseType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.CloudSqlBigQueryConnectionSpec.DatabaseType").enummodule
        BigQueryRoutineSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.BigQueryRoutineSpec").msgclass
      end
    end
  end
end
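
# Illustrative usage sketch (not part of the generated output): constructing and
# round-tripping the messages defined above. Field names and enum values come
# from the descriptor data in this file; the instance_id and database values are
# made up, and the $PROGRAM_NAME guard is only so the example runs when this
# file is executed directly rather than required as a library.
if $PROGRAM_NAME == __FILE__
  spec = Google::Cloud::DataCatalog::V1::BigQueryConnectionSpec.new(
    connection_type: :CLOUD_SQL,
    # cloud_sql is the only member of the connection_spec oneof in this file.
    cloud_sql: Google::Cloud::DataCatalog::V1::CloudSqlBigQueryConnectionSpec.new(
      instance_id: "projects/example-project/locations/us/instances/example-instance",
      database: "example_db",
      type: :POSTGRES
    ),
    has_credential: true
  )

  # Round-trip through the binary wire format.
  encoded = Google::Cloud::DataCatalog::V1::BigQueryConnectionSpec.encode(spec)
  decoded = Google::Cloud::DataCatalog::V1::BigQueryConnectionSpec.decode(encoded)
  puts decoded.cloud_sql.instance_id

  # BigQueryRoutineSpec holds only a repeated string of imported libraries.
  routine = Google::Cloud::DataCatalog::V1::BigQueryRoutineSpec.new(
    imported_libraries: ["gs://example-bucket/lib.js"]
  )
  puts routine.imported_libraries.to_a.inspect
end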