generated/google/apis/ml_v1/classes.rb in google-api-client-0.46.2 vs generated/google/apis/ml_v1/classes.rb in google-api-client-0.47.0
- old
+ new
@@ -512,10 +512,22 @@
# Options for automatically scaling a model.
class GoogleCloudMlV1AutoScaling
include Google::Apis::Core::Hashable
+ # The maximum number of nodes to scale this model under load. The actual value
+ # will depend on resource quota and availability.
+ # Corresponds to the JSON property `maxNodes`
+ # @return [Fixnum]
+ attr_accessor :max_nodes
+
+ # MetricSpec contains the specifications to use to calculate the desired nodes
+ # count.
+ # Corresponds to the JSON property `metrics`
+ # @return [Array<Google::Apis::MlV1::GoogleCloudMlV1MetricSpec>]
+ attr_accessor :metrics
+
# Optional. The minimum number of nodes to allocate for this model. These nodes
# are always up, starting from the time the model is deployed. Therefore, the
# cost of operating this model will be at least `rate` * `min_nodes` * number of
# hours since last billing cycle, where `rate` is the cost per node-hour as
# documented in the [pricing guide](/ml-engine/docs/pricing), even if no
@@ -546,10 +558,12 @@
update!(**args)
end
# Update properties of this object
def update!(**args)
+ @max_nodes = args[:max_nodes] if args.key?(:max_nodes)
+ @metrics = args[:metrics] if args.key?(:metrics)
@min_nodes = args[:min_nodes] if args.key?(:min_nodes)
end
end
# Configuration for Automated Early Stopping of Trials. If no
@@ -788,16 +802,18 @@
def update!(**args)
@tpu_service_account = args[:tpu_service_account] if args.key?(:tpu_service_account)
end
end
- # ContainerPort represents a network port in a single container.
+ # Represents a network port in a single container. This message is a subset of
+ # the [Kubernetes ContainerPort v1 core specification](https://kubernetes.io/
+ # docs/reference/generated/kubernetes-api/v1.18/#containerport-v1-core).
class GoogleCloudMlV1ContainerPort
include Google::Apis::Core::Hashable
- # Number of port to expose on the pod's IP address. This must be a valid port
- # number, 0 < x < 65536.
+ # Number of the port to expose on the container. This must be a valid port
+ # number: 0 < PORT_NUMBER < 65536.
# Corresponds to the JSON property `containerPort`
# @return [Fixnum]
attr_accessor :container_port
def initialize(**args)
@@ -808,57 +824,127 @@
def update!(**args)
@container_port = args[:container_port] if args.key?(:container_port)
end
end
- # Specify a custom container to deploy. Our ContainerSpec is a subset of the
- # Kubernetes Container specification. https://kubernetes.io/docs/reference/
- # generated/kubernetes-api/v1.10/#container-v1-core
+ # Specification of a custom container for serving predictions. This message is a
+ # subset of the [Kubernetes Container v1 core specification](https://kubernetes.
+ # io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core).
class GoogleCloudMlV1ContainerSpec
include Google::Apis::Core::Hashable
- # Immutable. Arguments to the entrypoint. The docker image's CMD is used if this
- # is not provided. Variable references $(VAR_NAME) are expanded using the
- # container's environment. If a variable cannot be resolved, the reference in
- # the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with
- # a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded,
- # regardless of whether the variable exists or not. More info: https://
- # kubernetes.io/docs/tasks/inject-data-application/define-command-argument-
- # container/#running-a-command-in-a-shell
+ # Immutable. Specifies arguments for the command that runs when the container
+ # starts. This overrides the container's [`CMD`](https://docs.docker.com/engine/
+ # reference/builder/#cmd). Specify this field as an array of executable and
+ # arguments, similar to a Docker `CMD`'s "default parameters" form. If you don't
+ # specify this field but do specify the command field, then the command from the
+ # `command` field runs without any additional arguments. See the [Kubernetes
+ # documentation about how the `command` and `args` fields interact with a
+ # container's `ENTRYPOINT` and `CMD`](https://kubernetes.io/docs/tasks/inject-
+ # data-application/define-command-argument-container/#notes). If you don't
+ # specify this field and don't specify the `command` field, then the container'
+ # s [`ENTRYPOINT`](https://docs.docker.com/engine/reference/builder/#entrypoint) and `
+ # CMD` determine what runs based on their default behavior. See the [Docker
+ # documentation about how `CMD` and `ENTRYPOINT` interact](https://docs.docker.
+ # com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). In
+ # this field, you can reference [environment variables set by AI Platform
+ # Prediction](/ai-platform/prediction/docs/custom-container-requirements#aip-
+ # variables) and environment variables set in the env field. You cannot
+ # reference environment variables set in the Docker image. In order for
+ # environment variables to be expanded, reference them by using the following
+ # syntax: $(VARIABLE_NAME) Note that this differs from Bash variable expansion,
+ # which does not use parentheses. If a variable cannot be resolved, the
+ # reference in the input string is used unchanged. To avoid variable expansion,
+ # you can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This
+ # field corresponds to the `args` field of the [Kubernetes Containers v1 core
+ # API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#
+ # container-v1-core).
# Corresponds to the JSON property `args`
# @return [Array<String>]
attr_accessor :args
- # Immutable. Entrypoint array. Not executed within a shell. The docker image's
- # ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME)
- # are expanded using the container's environment. If a variable cannot be
- # resolved, the reference in the input string will be unchanged. The $(VAR_NAME)
- # syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references
- # will never be expanded, regardless of whether the variable exists or not. More
- # info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-
- # argument-container/#running-a-command-in-a-shell
+ # Immutable. Specifies the command that runs when the container starts. This
+ # overrides the container's [`ENTRYPOINT`](https://docs.docker.com/engine/
+ # reference/builder/#entrypoint). Specify this field as an array of executable
+ # and arguments, similar to a Docker `ENTRYPOINT`'s "exec" form, not its "shell"
+ # form. If you do not specify this field, then the container's `ENTRYPOINT` runs,
+ # in conjunction with the args field or the container's [`CMD`](https://docs.
+ # docker.com/engine/reference/builder/#cmd), if either exists. If this field is
+ # not specified and the container does not have an `ENTRYPOINT`, then refer to
+ # the [Docker documentation about how `CMD` and `ENTRYPOINT` interact](https://
+ # docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-
+ # interact). If you specify this field, then you can also specify the `args`
+ # field to provide additional arguments for this command. However, if you
+ # specify this field, then the container's `CMD` is ignored. See the [Kubernetes
+ # documentation about how the `command` and `args` fields interact with a
+ # container's `ENTRYPOINT` and `CMD`](https://kubernetes.io/docs/tasks/inject-
+ # data-application/define-command-argument-container/#notes). In this field, you
+ # can reference [environment variables set by AI Platform Prediction](/ai-
+ # platform/prediction/docs/custom-container-requirements#aip-variables) and
+ # environment variables set in the env field. You cannot reference environment
+ # variables set in the Docker image. In order for environment variables to be
+ # expanded, reference them by using the following syntax: $(VARIABLE_NAME) Note
+ # that this differs from Bash variable expansion, which does not use parentheses.
+ # If a variable cannot be resolved, the reference in the input string is used
+ # unchanged. To avoid variable expansion, you can escape this syntax with `$$`;
+ # for example: $$(VARIABLE_NAME) This field corresponds to the `command` field
+ # of the [Kubernetes Containers v1 core API](https://kubernetes.io/docs/
+ # reference/generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `command`
# @return [Array<String>]
attr_accessor :command
- # Immutable. List of environment variables to set in the container.
+ # Immutable. List of environment variables to set in the container. After the
+ # container starts running, code running in the container can read these
+ # environment variables. Additionally, the command and args fields can reference
+ # these variables. Later entries in this list can also reference earlier entries.
+ # For example, the following example sets the variable `VAR_2` to have the
+ # value `foo bar`: ```json [ ` "name": "VAR_1", "value": "foo" `, ` "name": "
+ # VAR_2", "value": "$(VAR_1) bar" ` ] ``` If you switch the order of the
+ # variables in the example, then the expansion does not occur. This field
+ # corresponds to the `env` field of the [Kubernetes Containers v1 core API](
+ # https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-
+ # v1-core).
# Corresponds to the JSON property `env`
# @return [Array<Google::Apis::MlV1::GoogleCloudMlV1EnvVar>]
attr_accessor :env
- # Docker image name. More info: https://kubernetes.io/docs/concepts/containers/
- # images
+ # URI of the Docker image to be used as the custom container for serving
+ # predictions. This URI must identify [an image in Artifact Registry](/artifact-
+ # registry/docs/overview) and begin with the hostname ``REGION`-docker.pkg.dev`,
+ # where ``REGION`` is replaced by the region that matches AI Platform Prediction
+ # [regional endpoint](/ai-platform/prediction/docs/regional-endpoints) that you
+ # are using. For example, if you are using the `us-central1-ml.googleapis.com`
+ # endpoint, then this URI must begin with `us-central1-docker.pkg.dev`. To use a
+ # custom container, the [AI Platform Google-managed service account](/ai-
+ # platform/prediction/docs/custom-service-account#default) must have permission
+ # to pull (read) the Docker image at this URI. The AI Platform Google-managed
+ # service account has the following format: `service-`PROJECT_NUMBER`@cloud-ml.
+ # google.com.iam.gserviceaccount.com` `PROJECT_NUMBER` is replaced by your
+ # Google Cloud project number. By default, this service account has necessary
+ # permissions to pull an Artifact Registry image in the same Google Cloud
+ # project where you are using AI Platform Prediction. In this case, no
+ # configuration is necessary. If you want to use an image from a different
+ # Google Cloud project, learn how to [grant the Artifact Registry Reader (roles/
+ # artifactregistry.reader) role for a repository](/artifact-registry/docs/access-
+ # control#grant-repo) to your project's AI Platform Google-managed service
+ # account. To learn about the requirements for the Docker image itself, read [
+ # Custom container requirements](/ai-platform/prediction/docs/custom-container-
+ # requirements).
# Corresponds to the JSON property `image`
# @return [String]
attr_accessor :image
- # Immutable. List of ports to expose from the container. Exposing a port here
- # gives the system additional information about the network connections a
- # container uses, but is primarily informational. Not specifying a port here
- # DOES NOT prevent that port from being exposed. Any port which is listening on
- # the default "0.0.0.0" address inside a container will be accessible from the
- # network.
+ # Immutable. List of ports to expose from the container. AI Platform Prediction
+ # sends any prediction requests that it receives to the first port on this list.
+ # AI Platform Prediction also sends [liveness and health checks](/ai-platform/
+ # prediction/docs/custom-container-requirements#health) to this port. If you do
+ # not specify this field, it defaults to the following value: ```json [ ` "
+ # containerPort": 8080 ` ] ``` AI Platform Prediction does not use ports other
+ # than the first one listed. This field corresponds to the `ports` field of the [
+ # Kubernetes Containers v1 core API](https://kubernetes.io/docs/reference/
+ # generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `ports`
# @return [Array<Google::Apis::MlV1::GoogleCloudMlV1ContainerPort>]
attr_accessor :ports
def initialize(**args)
@@ -896,25 +982,34 @@
def update!(**args)
@kms_key_name = args[:kms_key_name] if args.key?(:kms_key_name)
end
end
- # EnvVar represents an environment variable present in a Container.
+ # Represents an environment variable to be made available in a container. This
+ # message is a subset of the [Kubernetes EnvVar v1 core specification](https://
+ # kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#envvar-v1-core).
class GoogleCloudMlV1EnvVar
include Google::Apis::Core::Hashable
- # Name of the environment variable. Must be a C_IDENTIFIER.
+ # Name of the environment variable. Must be a [valid C identifier](https://
+ # github.com/kubernetes/kubernetes/blob/v1.18.8/staging/src/k8s.io/apimachinery/
+ # pkg/util/validation/validation.go#L258) and must not begin with the prefix `
+ # AIP_`.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
- # Variable references $(VAR_NAME) are expanded using the previous defined
- # environment variables in the container and any service environment variables.
- # If a variable cannot be resolved, the reference in the input string will be
- # unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(
- # VAR_NAME). Escaped references will never be expanded, regardless of whether
- # the variable exists or not. Defaults to "".
+ # Value of the environment variable. Defaults to an empty string. In this field,
+ # you can reference [environment variables set by AI Platform Prediction](/ai-
+ # platform/prediction/docs/custom-container-requirements#aip-variables) and
+ # environment variables set earlier in the same env field as where this message
+ # occurs. You cannot reference environment variables set in the Docker image. In
+ # order for environment variables to be expanded, reference them by using the
+ # following syntax: $(VARIABLE_NAME) Note that this differs from Bash variable
+ # expansion, which does not use parentheses. If a variable cannot be resolved,
+ # the reference in the input string is used unchanged. To avoid variable
+ # expansion, you can escape this syntax with `$$`; for example: $$(VARIABLE_NAME)
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
def initialize(**args)
@@ -1535,10 +1630,37 @@
@metrics = args[:metrics] if args.key?(:metrics)
@step_count = args[:step_count] if args.key?(:step_count)
end
end
+ # MetricSpec contains the specifications to use to calculate the desired nodes
+ # count when autoscaling is enabled.
+ class GoogleCloudMlV1MetricSpec
+ include Google::Apis::Core::Hashable
+
+ # Metric name.
+ # Corresponds to the JSON property `name`
+ # @return [String]
+ attr_accessor :name
+
+ # Target specifies the target value for the given metric; once real metric
+ # deviates from the threshold by a certain percentage, the node count changes.
+ # Corresponds to the JSON property `target`
+ # @return [Fixnum]
+ attr_accessor :target
+
+ def initialize(**args)
+ update!(**args)
+ end
+
+ # Update properties of this object
+ def update!(**args)
+ @name = args[:name] if args.key?(:name)
+ @target = args[:target] if args.key?(:target)
+ end
+ end
+
# Represents a machine learning solution. A model can have multiple versions,
# each of which is a deployed, trained model ready to receive prediction
# requests. The model itself is just a container.
class GoogleCloudMlV1Model
include Google::Apis::Core::Hashable
@@ -1581,27 +1703,26 @@
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Optional. If true, online prediction nodes send `stderr` and `stdout` streams
- # to Stackdriver Logging. These can be more verbose than the standard access
- # logs (see `onlinePredictionLogging`) and can incur higher cost. However, they
- # are helpful for debugging. Note that [Stackdriver logs may incur a cost](/
- # stackdriver/pricing), especially if your project receives prediction requests
- # at a high QPS. Estimate your costs before enabling this option. Default is
- # false.
+ # to Cloud Logging. These can be more verbose than the standard access logs (see
+ # `onlinePredictionLogging`) and can incur higher cost. However, they are
+ # helpful for debugging. Note that [logs may incur a cost](/stackdriver/pricing),
+ # especially if your project receives prediction requests at a high QPS.
+ # Estimate your costs before enabling this option. Default is false.
# Corresponds to the JSON property `onlinePredictionConsoleLogging`
# @return [Boolean]
attr_accessor :online_prediction_console_logging
alias_method :online_prediction_console_logging?, :online_prediction_console_logging
- # Optional. If true, online prediction access logs are sent to StackDriver
- # Logging. These logs are like standard server access logs, containing
- # information like timestamp and latency for each request. Note that [
- # Stackdriver logs may incur a cost](/stackdriver/pricing), especially if your
- # project receives prediction requests at a high queries per second rate (QPS).
- # Estimate your costs before enabling this option. Default is false.
+ # Optional. If true, online prediction access logs are sent to Cloud Logging.
+ # These logs are like standard server access logs, containing information like
+ # timestamp and latency for each request. Note that [logs may incur a cost](/
+ # stackdriver/pricing), especially if your project receives prediction requests
+ # at a high queries per second rate (QPS). Estimate your costs before enabling
+ # this option. Default is false.
# Corresponds to the JSON property `onlinePredictionLogging`
# @return [Boolean]
attr_accessor :online_prediction_logging
alias_method :online_prediction_logging?, :online_prediction_logging
@@ -2055,22 +2176,53 @@
@bigquery_table_name = args[:bigquery_table_name] if args.key?(:bigquery_table_name)
@sampling_percentage = args[:sampling_percentage] if args.key?(:sampling_percentage)
end
end
- # RouteMap is used to override HTTP paths sent to a Custom Container. If
- # specified, the HTTP server implemented in the ContainerSpec must support the
- # route. If unspecified, standard HTTP paths will be used.
+ # Specifies HTTP paths served by a custom container. AI Platform Prediction
+ # sends requests to these paths on the container; the custom container must run
+ # an HTTP server that responds to these requests with appropriate responses.
+ # Read [Custom container requirements](/ai-platform/prediction/docs/custom-
+ # container-requirements) for details on how to create your container image to
+ # meet these requirements.
class GoogleCloudMlV1RouteMap
include Google::Apis::Core::Hashable
- # HTTP path to send health check requests.
+ # HTTP path on the container to send health checks to. AI Platform Prediction
+ # intermittently sends GET requests to this path on the container's IP address
+ # and port to check that the container is healthy. Read more about [health
+ # checks](/ai-platform/prediction/docs/custom-container-requirements#checks).
+ # For example, if you set this field to `/bar`, then AI Platform Prediction
+ # intermittently sends a GET request to the following URL on the container:
+ # localhost:PORT/bar PORT refers to the first value of Version.container.ports.
+ # If you don't specify this field, it defaults to the following value: /v1/
+ # models/MODEL/versions/VERSION The placeholders in this value are replaced as
+ # follows: * MODEL: The name of the parent Model. This does not include the "
+ # projects/PROJECT_ID/models/" prefix that the API returns in output; it is the
+ # bare model name, as provided to projects.models.create. * VERSION: The name of
+ # the model version. This does not include the "projects/PROJECT_ID/models/MODEL/
+ # versions/" prefix that the API returns in output; it is the bare version name,
+ # as provided to projects.models.versions.create.
# Corresponds to the JSON property `health`
# @return [String]
attr_accessor :health
- # HTTP path to send prediction requests.
+ # HTTP path on the container to send prediction requests to. AI Platform
+ # Prediction forwards requests sent using projects.predict to this path on the
+ # container's IP address and port. AI Platform Prediction then returns the
+ # container's response in the API response. For example, if you set this field
+ # to `/foo`, then when AI Platform Prediction receives a prediction request, it
+ # forwards the request body in a POST request to the following URL on the
+ # container: localhost:PORT/foo PORT refers to the first value of Version.
+ # container.ports. If you don't specify this field, it defaults to the following
+ # value: /v1/models/MODEL/versions/VERSION:predict The placeholders in this
+ # value are replaced as follows: * MODEL: The name of the parent Model. This
+ # does not include the "projects/PROJECT_ID/models/" prefix that the API returns
+ # in output; it is the bare model name, as provided to projects.models.create. *
+ # VERSION: The name of the model version. This does not include the "projects/
+ # PROJECT_ID/models/MODEL/versions/" prefix that the API returns in output; it
+ # is the bare version name, as provided to projects.models.versions.create.
# Corresponds to the JSON property `predict`
# @return [String]
attr_accessor :predict
def initialize(**args)
@@ -2783,29 +2935,34 @@
# Options for automatically scaling a model.
# Corresponds to the JSON property `autoScaling`
# @return [Google::Apis::MlV1::GoogleCloudMlV1AutoScaling]
attr_accessor :auto_scaling
- # Specify a custom container to deploy. Our ContainerSpec is a subset of the
- # Kubernetes Container specification. https://kubernetes.io/docs/reference/
- # generated/kubernetes-api/v1.10/#container-v1-core
+ # Specification of a custom container for serving predictions. This message is a
+ # subset of the [Kubernetes Container v1 core specification](https://kubernetes.
+ # io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `container`
# @return [Google::Apis::MlV1::GoogleCloudMlV1ContainerSpec]
attr_accessor :container
# Output only. The time the version was created.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
- # Required. The Cloud Storage location of the trained model used to create the
- # version. See the [guide to model deployment](/ml-engine/docs/tensorflow/
- # deploying-models) for more information. When passing Version to projects.
- # models.versions.create the model service uses the specified location as the
- # source of the model. Once deployed, the model version is hosted by the
- # prediction service, so this location is useful only as a historical record.
- # The total number of model files can't exceed 1000.
+ # The Cloud Storage URI of a directory containing trained model artifacts to be
+ # used to create the model version. See the [guide to deploying models](/ai-
+ # platform/prediction/docs/deploying-models) for more information. The total
+ # number of files under this directory must not exceed 1000. During projects.
+ # models.versions.create, AI Platform Prediction copies all files from the
+ # specified directory to a location managed by the service. From then on, AI
+ # Platform Prediction uses these copies of the model artifacts to serve
+ # predictions, not the original files in Cloud Storage, so this location is
+ # useful only as a historical record. If you specify container, then this field
+ # is optional. Otherwise, it is required. Learn [how to use this field with a
+ # custom container](/ai-platform/prediction/docs/custom-container-requirements#
+ # artifacts).
# Corresponds to the JSON property `deploymentUri`
# @return [String]
attr_accessor :deployment_uri
# Optional. The description specified for the version when it was created.
@@ -2842,14 +2999,12 @@
# version of the model. Valid values are `TENSORFLOW`, `SCIKIT_LEARN`, `XGBOOST`.
# If you do not specify a framework, AI Platform will analyze files in the
# deployment_uri to determine a framework. If you choose `SCIKIT_LEARN` or `
# XGBOOST`, you must also set the runtime version of the model to 1.4 or greater.
# Do **not** specify a framework if you're deploying a [custom prediction
- # routine](/ml-engine/docs/tensorflow/custom-prediction-routines). If you
- # specify a [Compute Engine (N1) machine type](/ml-engine/docs/machine-types-
- # online-prediction) in the `machineType` field, you must specify `TENSORFLOW`
- # for the framework.
+ # routine](/ai-platform/prediction/docs/custom-prediction-routines) or if you're
+ # using a [custom container](/ai-platform/prediction/docs/use-custom-container).
# Corresponds to the JSON property `framework`
# @return [String]
attr_accessor :framework
# Output only. If true, this version will be used to handle prediction requests
@@ -2878,13 +3033,13 @@
# defaults to `mls1-c1-m2`. Online prediction supports the following machine
# types: * `mls1-c1-m2` * `mls1-c4-m2` * `n1-standard-2` * `n1-standard-4` * `n1-
# standard-8` * `n1-standard-16` * `n1-standard-32` * `n1-highmem-2` * `n1-
# highmem-4` * `n1-highmem-8` * `n1-highmem-16` * `n1-highmem-32` * `n1-highcpu-
# 2` * `n1-highcpu-4` * `n1-highcpu-8` * `n1-highcpu-16` * `n1-highcpu-32` `mls1-
- # c1-m2` is generally available. All other machine types are available in beta.
- # Learn more about the [differences between machine types](/ml-engine/docs/
- # machine-types-online-prediction).
+ # c4-m2` is in beta. All other machine types are generally available. Learn more
+ # about the [differences between machine types](/ml-engine/docs/machine-types-
+ # online-prediction).
# Corresponds to the JSON property `machineType`
# @return [String]
attr_accessor :machine_type
# Options for manually scaling a model.
@@ -2962,13 +3117,16 @@
# automatically enables logging of request-response pairs.
# Corresponds to the JSON property `requestLoggingConfig`
# @return [Google::Apis::MlV1::GoogleCloudMlV1RequestLoggingConfig]
attr_accessor :request_logging_config
- # RouteMap is used to override HTTP paths sent to a Custom Container. If
- # specified, the HTTP server implemented in the ContainerSpec must support the
- # route. If unspecified, standard HTTP paths will be used.
+ # Specifies HTTP paths served by a custom container. AI Platform Prediction
+ # sends requests to these paths on the container; the custom container must run
+ # an HTTP server that responds to these requests with appropriate responses.
+ # Read [Custom container requirements](/ai-platform/prediction/docs/custom-
+ # container-requirements) for details on how to create your container image to
+ # meet these requirements.
# Corresponds to the JSON property `routes`
# @return [Google::Apis::MlV1::GoogleCloudMlV1RouteMap]
attr_accessor :routes
# Required. The AI Platform runtime version to use for this deployment. For more
@@ -2976,10 +3134,13 @@
# list) and [how to manage runtime versions](/ml-engine/docs/versioning).
# Corresponds to the JSON property `runtimeVersion`
# @return [String]
attr_accessor :runtime_version
- # Optional. Specifies the service account for resource access control.
+ # Optional. Specifies the service account for resource access control. If you
+ # specify this field, then you must also specify either the `containerSpec` or
+ # the `predictionClass` field. Learn more about [using a custom service account](
+ # /ai-platform/prediction/docs/custom-service-account).
# Corresponds to the JSON property `serviceAccount`
# @return [String]
attr_accessor :service_account
# Output only. The state of a version.