lib/aws-sdk-glue/client.rb in aws-sdk-glue-1.116.0 vs lib/aws-sdk-glue/client.rb in aws-sdk-glue-1.117.0
- old
+ new
@@ -1442,10 +1442,11 @@
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.name #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs #=> Array
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs[0] #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.database #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.table #=> String
+ # resp.jobs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.jobs_not_found #=> Array
# resp.jobs_not_found[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/BatchGetJobs AWS API Documentation
#
@@ -1593,13 +1594,13 @@
# resp.triggers[0].actions[0].crawler_name #=> String
# resp.triggers[0].predicate.logical #=> String, one of "AND", "ANY"
# resp.triggers[0].predicate.conditions #=> Array
# resp.triggers[0].predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.triggers[0].predicate.conditions[0].job_name #=> String
- # resp.triggers[0].predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.triggers[0].predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.triggers[0].predicate.conditions[0].crawler_name #=> String
- # resp.triggers[0].predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.triggers[0].predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.triggers[0].event_batching_condition.batch_size #=> Integer
# resp.triggers[0].event_batching_condition.batch_window #=> Integer
# resp.triggers_not_found #=> Array
# resp.triggers_not_found[0] #=> String
#
@@ -1660,10 +1661,12 @@
# resp.workflows[0].last_run.statistics.timeout_actions #=> Integer
# resp.workflows[0].last_run.statistics.failed_actions #=> Integer
# resp.workflows[0].last_run.statistics.stopped_actions #=> Integer
# resp.workflows[0].last_run.statistics.succeeded_actions #=> Integer
# resp.workflows[0].last_run.statistics.running_actions #=> Integer
+ # resp.workflows[0].last_run.statistics.errored_actions #=> Integer
+ # resp.workflows[0].last_run.statistics.waiting_actions #=> Integer
# resp.workflows[0].last_run.graph.nodes #=> Array
# resp.workflows[0].last_run.graph.nodes[0].type #=> String, one of "CRAWLER", "JOB", "TRIGGER"
# resp.workflows[0].last_run.graph.nodes[0].name #=> String
# resp.workflows[0].last_run.graph.nodes[0].unique_id #=> String
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.name #=> String
@@ -1683,13 +1686,13 @@
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.workflows[0].last_run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs #=> Array
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -1697,11 +1700,11 @@
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -1715,12 +1718,13 @@
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.workflows[0].last_run.graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls #=> Array
- # resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.workflows[0].last_run.graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -1750,13 +1754,13 @@
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflows[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.workflows[0].graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.workflows[0].graph.nodes[0].job_details.job_runs #=> Array
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -1764,11 +1768,11 @@
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.workflows[0].graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflows[0].graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -1782,12 +1786,13 @@
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.workflows[0].graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.workflows[0].graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.workflows[0].graph.nodes[0].crawler_details.crawls #=> Array
- # resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.workflows[0].graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -2839,10 +2844,22 @@
#
# @option params [Hash<String,Types::CodeGenConfigurationNode>] :code_gen_configuration_nodes
# The representation of a directed acyclic graph on which both the Glue
# Studio visual component and Glue Studio code generation is based.
#
+ # @option params [String] :execution_class
+ # Indicates whether the job is run with a standard or flexible execution
+ # class. The standard execution class is ideal for time-sensitive
+ # workloads that require fast job startup and dedicated resources.
+ #
+ # The flexible execution class is appropriate for time-insensitive jobs
+ # whose start and completion times may vary.
+ #
+ # Only jobs with Glue version 3.0 and above and command type `glueetl`
+ # will be allowed to set `ExecutionClass` to `FLEX`. The flexible
+ # execution class is available for Spark jobs.
+ #
# @return [Types::CreateJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateJobResponse#name #name} => String
#
# @example Request syntax with placeholder values
@@ -3549,10 +3566,11 @@
# database: "EnclosedInStringProperty", # required
# table: "EnclosedInStringProperty", # required
# },
# },
# },
+ # execution_class: "FLEX", # accepts FLEX, STANDARD
# })
#
# @example Response structure
#
# resp.name #=> String
@@ -4546,13 +4564,13 @@
# logical: "AND", # accepts AND, ANY
# conditions: [
# {
# logical_operator: "EQUALS", # accepts EQUALS
# job_name: "NameString",
- # state: "STARTING", # accepts STARTING, RUNNING, STOPPING, STOPPED, SUCCEEDED, FAILED, TIMEOUT
+ # state: "STARTING", # accepts STARTING, RUNNING, STOPPING, STOPPED, SUCCEEDED, FAILED, TIMEOUT, ERROR, WAITING
# crawler_name: "NameString",
- # crawl_state: "RUNNING", # accepts RUNNING, CANCELLING, CANCELLED, SUCCEEDED, FAILED
+ # crawl_state: "RUNNING", # accepts RUNNING, CANCELLING, CANCELLED, SUCCEEDED, FAILED, ERROR
# },
# ],
# },
# actions: [ # required
# {
@@ -7186,10 +7204,11 @@
# resp.job.code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.name #=> String
# resp.job.code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs #=> Array
# resp.job.code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs[0] #=> String
# resp.job.code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.database #=> String
# resp.job.code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.table #=> String
+ # resp.job.execution_class #=> String, one of "FLEX", "STANDARD"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetJob AWS API Documentation
#
# @overload get_job(params = {})
# @param [Hash] params ({})
@@ -7267,11 +7286,11 @@
# resp.job_run.trigger_name #=> String
# resp.job_run.job_name #=> String
# resp.job_run.started_on #=> Time
# resp.job_run.last_modified_on #=> Time
# resp.job_run.completed_on #=> Time
- # resp.job_run.job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.job_run.job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.job_run.arguments #=> Hash
# resp.job_run.arguments["GenericString"] #=> String
# resp.job_run.error_message #=> String
# resp.job_run.predecessor_runs #=> Array
# resp.job_run.predecessor_runs[0].job_name #=> String
@@ -7285,10 +7304,11 @@
# resp.job_run.security_configuration #=> String
# resp.job_run.log_group_name #=> String
# resp.job_run.notification_property.notify_delay_after #=> Integer
# resp.job_run.glue_version #=> String
# resp.job_run.dpu_seconds #=> Float
+ # resp.job_run.execution_class #=> String, one of "FLEX", "STANDARD"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetJobRun AWS API Documentation
#
# @overload get_job_run(params = {})
# @param [Hash] params ({})
@@ -7332,11 +7352,11 @@
# resp.job_runs[0].trigger_name #=> String
# resp.job_runs[0].job_name #=> String
# resp.job_runs[0].started_on #=> Time
# resp.job_runs[0].last_modified_on #=> Time
# resp.job_runs[0].completed_on #=> Time
- # resp.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.job_runs[0].arguments #=> Hash
# resp.job_runs[0].arguments["GenericString"] #=> String
# resp.job_runs[0].error_message #=> String
# resp.job_runs[0].predecessor_runs #=> Array
# resp.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -7350,10 +7370,11 @@
# resp.job_runs[0].security_configuration #=> String
# resp.job_runs[0].log_group_name #=> String
# resp.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.job_runs[0].glue_version #=> String
# resp.job_runs[0].dpu_seconds #=> Float
+ # resp.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetJobRuns AWS API Documentation
#
# @overload get_job_runs(params = {})
@@ -7891,10 +7912,11 @@
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.name #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs #=> Array
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.inputs[0] #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.database #=> String
# resp.jobs[0].code_gen_configuration_nodes["NodeId"].postgre_sql_catalog_target.table #=> String
+ # resp.jobs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetJobs AWS API Documentation
#
# @overload get_jobs(params = {})
@@ -9855,13 +9877,13 @@
# resp.trigger.actions[0].crawler_name #=> String
# resp.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.trigger.predicate.conditions #=> Array
# resp.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.trigger.predicate.conditions[0].job_name #=> String
- # resp.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.trigger.event_batching_condition.batch_size #=> Integer
# resp.trigger.event_batching_condition.batch_window #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetTrigger AWS API Documentation
#
@@ -9920,13 +9942,13 @@
# resp.triggers[0].actions[0].crawler_name #=> String
# resp.triggers[0].predicate.logical #=> String, one of "AND", "ANY"
# resp.triggers[0].predicate.conditions #=> Array
# resp.triggers[0].predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.triggers[0].predicate.conditions[0].job_name #=> String
- # resp.triggers[0].predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.triggers[0].predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.triggers[0].predicate.conditions[0].crawler_name #=> String
- # resp.triggers[0].predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.triggers[0].predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.triggers[0].event_batching_condition.batch_size #=> Integer
# resp.triggers[0].event_batching_condition.batch_window #=> Integer
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetTriggers AWS API Documentation
@@ -10409,10 +10431,12 @@
# resp.workflow.last_run.statistics.timeout_actions #=> Integer
# resp.workflow.last_run.statistics.failed_actions #=> Integer
# resp.workflow.last_run.statistics.stopped_actions #=> Integer
# resp.workflow.last_run.statistics.succeeded_actions #=> Integer
# resp.workflow.last_run.statistics.running_actions #=> Integer
+ # resp.workflow.last_run.statistics.errored_actions #=> Integer
+ # resp.workflow.last_run.statistics.waiting_actions #=> Integer
# resp.workflow.last_run.graph.nodes #=> Array
# resp.workflow.last_run.graph.nodes[0].type #=> String, one of "CRAWLER", "JOB", "TRIGGER"
# resp.workflow.last_run.graph.nodes[0].name #=> String
# resp.workflow.last_run.graph.nodes[0].unique_id #=> String
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.name #=> String
@@ -10432,13 +10456,13 @@
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.workflow.last_run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs #=> Array
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -10446,11 +10470,11 @@
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -10464,12 +10488,13 @@
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.workflow.last_run.graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls #=> Array
- # resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.workflow.last_run.graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -10499,13 +10524,13 @@
# resp.workflow.graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflow.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflow.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.workflow.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.workflow.graph.nodes[0].job_details.job_runs #=> Array
# resp.workflow.graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -10513,11 +10538,11 @@
# resp.workflow.graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.workflow.graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.workflow.graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.workflow.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.workflow.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.workflow.graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.workflow.graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.workflow.graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -10531,12 +10556,13 @@
# resp.workflow.graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.workflow.graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.workflow.graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.workflow.graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.workflow.graph.nodes[0].crawler_details.crawls #=> Array
- # resp.workflow.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.workflow.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.workflow.graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.workflow.graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.workflow.graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.workflow.graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.workflow.graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -10594,10 +10620,12 @@
# resp.run.statistics.timeout_actions #=> Integer
# resp.run.statistics.failed_actions #=> Integer
# resp.run.statistics.stopped_actions #=> Integer
# resp.run.statistics.succeeded_actions #=> Integer
# resp.run.statistics.running_actions #=> Integer
+ # resp.run.statistics.errored_actions #=> Integer
+ # resp.run.statistics.waiting_actions #=> Integer
# resp.run.graph.nodes #=> Array
# resp.run.graph.nodes[0].type #=> String, one of "CRAWLER", "JOB", "TRIGGER"
# resp.run.graph.nodes[0].name #=> String
# resp.run.graph.nodes[0].unique_id #=> String
# resp.run.graph.nodes[0].trigger_details.trigger.name #=> String
@@ -10617,13 +10645,13 @@
# resp.run.graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.run.graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.run.graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.run.graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.run.graph.nodes[0].job_details.job_runs #=> Array
# resp.run.graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -10631,11 +10659,11 @@
# resp.run.graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.run.graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.run.graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.run.graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.run.graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.run.graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.run.graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -10649,12 +10677,13 @@
# resp.run.graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.run.graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.run.graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.run.graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.run.graph.nodes[0].crawler_details.crawls #=> Array
- # resp.run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.run.graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.run.graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.run.graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.run.graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.run.graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.run.graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -10752,10 +10781,12 @@
# resp.runs[0].statistics.timeout_actions #=> Integer
# resp.runs[0].statistics.failed_actions #=> Integer
# resp.runs[0].statistics.stopped_actions #=> Integer
# resp.runs[0].statistics.succeeded_actions #=> Integer
# resp.runs[0].statistics.running_actions #=> Integer
+ # resp.runs[0].statistics.errored_actions #=> Integer
+ # resp.runs[0].statistics.waiting_actions #=> Integer
# resp.runs[0].graph.nodes #=> Array
# resp.runs[0].graph.nodes[0].type #=> String, one of "CRAWLER", "JOB", "TRIGGER"
# resp.runs[0].graph.nodes[0].name #=> String
# resp.runs[0].graph.nodes[0].unique_id #=> String
# resp.runs[0].graph.nodes[0].trigger_details.trigger.name #=> String
@@ -10775,13 +10806,13 @@
# resp.runs[0].graph.nodes[0].trigger_details.trigger.actions[0].crawler_name #=> String
# resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions #=> Array
# resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].job_name #=> String
- # resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.runs[0].graph.nodes[0].trigger_details.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.runs[0].graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_size #=> Integer
# resp.runs[0].graph.nodes[0].trigger_details.trigger.event_batching_condition.batch_window #=> Integer
# resp.runs[0].graph.nodes[0].job_details.job_runs #=> Array
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].id #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].attempt #=> Integer
@@ -10789,11 +10820,11 @@
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].trigger_name #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].job_name #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].started_on #=> Time
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].last_modified_on #=> Time
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].completed_on #=> Time
- # resp.runs[0].graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.runs[0].graph.nodes[0].job_details.job_runs[0].job_run_state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].arguments #=> Hash
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].arguments["GenericString"] #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].error_message #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].predecessor_runs #=> Array
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].predecessor_runs[0].job_name #=> String
@@ -10807,12 +10838,13 @@
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].security_configuration #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].log_group_name #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].notification_property.notify_delay_after #=> Integer
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].glue_version #=> String
# resp.runs[0].graph.nodes[0].job_details.job_runs[0].dpu_seconds #=> Float
+ # resp.runs[0].graph.nodes[0].job_details.job_runs[0].execution_class #=> String, one of "FLEX", "STANDARD"
# resp.runs[0].graph.nodes[0].crawler_details.crawls #=> Array
- # resp.runs[0].graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.runs[0].graph.nodes[0].crawler_details.crawls[0].state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.runs[0].graph.nodes[0].crawler_details.crawls[0].started_on #=> Time
# resp.runs[0].graph.nodes[0].crawler_details.crawls[0].completed_on #=> Time
# resp.runs[0].graph.nodes[0].crawler_details.crawls[0].error_message #=> String
# resp.runs[0].graph.nodes[0].crawler_details.crawls[0].log_group #=> String
# resp.runs[0].graph.nodes[0].crawler_details.crawls[0].log_stream #=> String
@@ -12599,10 +12631,22 @@
#
# @option params [Integer] :number_of_workers
# The number of workers of a defined `workerType` that are allocated
# when a job runs.
#
+ # @option params [String] :execution_class
+ # Indicates whether the job is run with a standard or flexible execution
# class. The standard execution class is ideal for time-sensitive
+ # workloads that require fast job startup and dedicated resources.
+ #
+ # The flexible execution class is appropriate for time-insensitive jobs
+ # whose start and completion times may vary.
+ #
+ # Only jobs with Glue version 3.0 and above and command type `glueetl`
+ # will be allowed to set `ExecutionClass` to `FLEX`. The flexible
+ # execution class is available for Spark jobs.
+ #
# @return [Types::StartJobRunResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::StartJobRunResponse#job_run_id #job_run_id} => String
#
# @example Request syntax with placeholder values
@@ -12620,10 +12664,11 @@
# notification_property: {
# notify_delay_after: 1,
# },
# worker_type: "Standard", # accepts Standard, G.1X, G.2X, G.025X
# number_of_workers: 1,
+ # execution_class: "FLEX", # accepts FLEX, STANDARD
# })
#
# @example Response structure
#
# resp.job_run_id #=> String
@@ -14416,10 +14461,11 @@
# database: "EnclosedInStringProperty", # required
# table: "EnclosedInStringProperty", # required
# },
# },
# },
+ # execution_class: "FLEX", # accepts FLEX, STANDARD
# },
# })
#
# @example Response structure
#
@@ -14934,13 +14980,13 @@
# logical: "AND", # accepts AND, ANY
# conditions: [
# {
# logical_operator: "EQUALS", # accepts EQUALS
# job_name: "NameString",
- # state: "STARTING", # accepts STARTING, RUNNING, STOPPING, STOPPED, SUCCEEDED, FAILED, TIMEOUT
+ # state: "STARTING", # accepts STARTING, RUNNING, STOPPING, STOPPED, SUCCEEDED, FAILED, TIMEOUT, ERROR, WAITING
# crawler_name: "NameString",
- # crawl_state: "RUNNING", # accepts RUNNING, CANCELLING, CANCELLED, SUCCEEDED, FAILED
+ # crawl_state: "RUNNING", # accepts RUNNING, CANCELLING, CANCELLED, SUCCEEDED, FAILED, ERROR
# },
# ],
# },
# event_batching_condition: {
# batch_size: 1, # required
@@ -14968,13 +15014,13 @@
# resp.trigger.actions[0].crawler_name #=> String
# resp.trigger.predicate.logical #=> String, one of "AND", "ANY"
# resp.trigger.predicate.conditions #=> Array
# resp.trigger.predicate.conditions[0].logical_operator #=> String, one of "EQUALS"
# resp.trigger.predicate.conditions[0].job_name #=> String
- # resp.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
+ # resp.trigger.predicate.conditions[0].state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT", "ERROR", "WAITING"
# resp.trigger.predicate.conditions[0].crawler_name #=> String
- # resp.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED"
+ # resp.trigger.predicate.conditions[0].crawl_state #=> String, one of "RUNNING", "CANCELLING", "CANCELLED", "SUCCEEDED", "FAILED", "ERROR"
# resp.trigger.event_batching_condition.batch_size #=> Integer
# resp.trigger.event_batching_condition.batch_window #=> Integer
#
# @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/UpdateTrigger AWS API Documentation
#
@@ -15092,10 +15138,10 @@
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-glue'
- context[:gem_version] = '1.116.0'
+ context[:gem_version] = '1.117.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated