proto_docs/google/cloud/dataproc/v1/jobs.rb in google-cloud-dataproc-v1-0.2.3 vs proto_docs/google/cloud/dataproc/v1/jobs.rb in google-cloud-dataproc-v1-0.3.0

- old
+ new

@@ -151,16 +151,16 @@
 #   @return [::Array<::String>]
 #     Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
 #     Spark driver and tasks.
 # @!attribute [rw] file_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of files to be copied to the working directory of
-#     Spark drivers and distributed tasks. Useful for naively parallel tasks.
+#     Optional. HCFS URIs of files to be placed in the working directory of
+#     each executor. Useful for naively parallel tasks.
 # @!attribute [rw] archive_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of archives to be extracted in the working directory
-#     of Spark drivers and tasks. Supported file types:
+#     Optional. HCFS URIs of archives to be extracted into the working directory
+#     of each executor. Supported file types:
 #     .jar, .tar, .tar.gz, .tgz, and .zip.
 # @!attribute [rw] properties
 #   @return [::Google::Protobuf::Map{::String => ::String}]
 #     Optional. A mapping of property names to values, used to configure Spark.
 #     Properties that conflict with values set by the Dataproc API may be
@@ -204,15 +204,16 @@
 #   @return [::Array<::String>]
 #     Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
 #     Python driver and tasks.
 # @!attribute [rw] file_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of files to be copied to the working directory of
-#     Python drivers and distributed tasks. Useful for naively parallel tasks.
+#     Optional. HCFS URIs of files to be placed in the working directory of
+#     each executor. Useful for naively parallel tasks.
 # @!attribute [rw] archive_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of archives to be extracted in the working directory of
+#     Optional. HCFS URIs of archives to be extracted into the working directory
+#     of each executor. Supported file types:
 #     .jar, .tar, .tar.gz, .tgz, and .zip.
 # @!attribute [rw] properties
 #   @return [::Google::Protobuf::Map{::String => ::String}]
 #     Optional. A mapping of property names to values, used to configure PySpark.
 #     Properties that conflict with values set by the Dataproc API may be
@@ -419,16 +420,16 @@
 #     Optional. The arguments to pass to the driver. Do not include arguments,
 #     such as `--conf`, that can be set as job properties, since a collision may
 #     occur that causes an incorrect job submission.
 # @!attribute [rw] file_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of files to be copied to the working directory of
-#     R drivers and distributed tasks. Useful for naively parallel tasks.
+#     Optional. HCFS URIs of files to be placed in the working directory of
+#     each executor. Useful for naively parallel tasks.
 # @!attribute [rw] archive_uris
 #   @return [::Array<::String>]
-#     Optional. HCFS URIs of archives to be extracted in the working directory of
-#     Spark drivers and tasks. Supported file types:
+#     Optional. HCFS URIs of archives to be extracted into the working directory
+#     of each executor. Supported file types:
 #     .jar, .tar, .tar.gz, .tgz, and .zip.
 # @!attribute [rw] properties
 #   @return [::Google::Protobuf::Map{::String => ::String}]
 #     Optional. A mapping of property names to values, used to configure SparkR.
 #     Properties that conflict with values set by the Dataproc API may be
@@ -593,11 +594,11 @@
 end
 # Encapsulates the full scoping used to reference a job.
 # @!attribute [rw] project_id
 #   @return [::String]
-#     Required. The ID of the Google Cloud Platform project that the job
-#     belongs to.
+#     Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+#     specified, must match the request project ID.
 # @!attribute [rw] job_id
 #   @return [::String]
 #     Optional. The job ID, which must be unique within the project.
 #
 #     The ID must contain only letters (a-z, A-Z), numbers (0-9),
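
For context, here is a minimal sketch of how the fields documented above are set when building and submitting a job with this gem. The message classes (PySparkJob, Job, JobReference, JobPlacement) and the JobController client's submit_job call are part of google-cloud-dataproc-v1; the bucket, file paths, project, region, and cluster name are hypothetical placeholders.

    require "google/cloud/dataproc/v1"

    # Per the 0.3.0 docs: file_uris are placed in the working directory of
    # each executor, and archive_uris are extracted there.
    pyspark_job = ::Google::Cloud::Dataproc::V1::PySparkJob.new(
      main_python_file_uri: "gs://my-bucket/jobs/word_count.py",    # hypothetical path
      file_uris:            ["gs://my-bucket/data/stopwords.txt"],  # hypothetical path
      archive_uris:         ["gs://my-bucket/deps/libs.tar.gz"],    # hypothetical path
      properties:           { "spark.executor.memory" => "4g" }
    )

    # JobReference.project_id is Optional as of 0.3.0; if set, it must match
    # the project_id passed in the submit_job request.
    job = ::Google::Cloud::Dataproc::V1::Job.new(
      reference:   ::Google::Cloud::Dataproc::V1::JobReference.new(job_id: "word-count-001"),
      placement:   ::Google::Cloud::Dataproc::V1::JobPlacement.new(cluster_name: "my-cluster"),
      pyspark_job: pyspark_job
    )

    client = ::Google::Cloud::Dataproc::V1::JobController::Client.new
    client.submit_job project_id: "my-project", region: "us-central1", job: job

Because project_id in JobReference became Optional, the reference above omits it and lets the request-level project_id scope the job; supplying both with mismatched values would be rejected.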