lib/google/cloud/bigquery/dataset.rb in google-cloud-bigquery-1.8.2 vs lib/google/cloud/bigquery/dataset.rb in google-cloud-bigquery-1.9.0
- old
+ new
@@ -872,10 +872,36 @@
# job.data.each do |row|
# puts row[:name]
# end
# end
#
+ # @example Execute a DDL statement:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # job = bigquery.query_job "CREATE TABLE my_table (x INT64)"
+ #
+ # job.wait_until_done!
+ # if !job.failed?
+ # table_ref = job.ddl_target_table
+ # end
+ #
+ # @example Execute a DML statement:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # job = bigquery.query_job "UPDATE my_table " \
+ # "SET x = x + 1 " \
+ # "WHERE x IS NOT NULL"
+ #
+ # job.wait_until_done!
+ # if !job.failed?
+ # puts job.num_dml_affected_rows
+ # end
+ #
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
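Aside: the new examples above use the project-level `bigquery.query_job`. A minimal sketch of the dataset-scoped equivalent, assuming a pre-existing dataset named `my_dataset` (a placeholder), which lets the DDL statement reference `my_table` without a dataset qualifier:

  require "google/cloud/bigquery"

  bigquery = Google::Cloud::Bigquery.new
  dataset  = bigquery.dataset "my_dataset" # placeholder dataset name

  # Dataset#query_job sets this dataset as the query's default dataset,
  # so "my_table" needs no project/dataset qualifier.
  job = dataset.query_job "CREATE TABLE my_table (x INT64)"

  job.wait_until_done!
  table_ref = job.ddl_target_table unless job.failed?

As in the example above, `ddl_target_table` returns a reference to the table created by the statement.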
@@ -928,11 +954,12 @@
##
# Queries data and waits for the results. In this method, a {QueryJob}
# is created and its results are saved to a temporary table, then read
# from the table. Timeouts and transient errors are generally handled
- # as needed to complete the query.
+ # as needed to complete the query. When used for executing DDL/DML
+ # statements, this method does not return row data.
#
# Sets the current dataset as the default dataset in the query. Useful
# for using unqualified table names.
#
# When using standard SQL and passing arguments using `params`, Ruby
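A minimal sketch of the default-dataset behavior described above, assuming `my_dataset` contains a `my_table` table (both placeholder names):

  require "google/cloud/bigquery"

  bigquery = Google::Cloud::Bigquery.new
  dataset  = bigquery.dataset "my_dataset" # placeholder dataset name

  # Dataset#query injects this dataset as the default dataset, so the
  # table can be referenced without a "project.dataset." qualifier.
  data = dataset.query "SELECT COUNT(*) AS row_count FROM my_table"

  data.each { |row| puts row[:row_count] }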
@@ -1064,10 +1091,30 @@
#
# data.each do |row|
# puts row[:name]
# end
#
+ # @example Execute a DDL statement:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # data = bigquery.query "CREATE TABLE my_table (x INT64)"
+ #
+ # table_ref = data.ddl_target_table
+ #
+ # @example Execute a DML statement:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # data = bigquery.query "UPDATE my_table " \
+ # "SET x = x + 1 " \
+ # "WHERE x IS NOT NULL"
+ #
+ # puts data.num_dml_affected_rows
+ #
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
@@ -1796,10 +1843,10 @@
end
##
# @private New lazy Dataset object without making an HTTP request.
def self.new_reference project_id, dataset_id, service
- # TODO: raise if dataset_id is nil?
+ raise ArgumentError, "dataset_id is required" unless dataset_id
new.tap do |b|
reference_gapi = Google::Apis::BigqueryV2::DatasetReference.new(
project_id: project_id,
dataset_id: dataset_id
)
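The resolved TODO now raises `ArgumentError` when `dataset_id` is nil. A sketch of how the check surfaces through the public API, assuming `Project#dataset` with `skip_lookup: true` builds its lazy reference via `new_reference` rather than calling the service:

  require "google/cloud/bigquery"

  bigquery = Google::Cloud::Bigquery.new

  # Returns a lazy Dataset reference without an API request.
  dataset = bigquery.dataset "my_dataset", skip_lookup: true

  begin
    bigquery.dataset nil, skip_lookup: true
  rescue ArgumentError => e
    puts e.message # => "dataset_id is required"
  end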