lib/google/cloud/bigquery/service.rb: google-cloud-bigquery-1.0.0 vs google-cloud-bigquery-1.1.0
- removed in 1.1.0 (old)
+ added in 1.1.0 (new)
@@ -199,26 +199,36 @@
JSON.parse json_txt, symbolize_names: true
end
end
def insert_tabledata dataset_id, table_id, rows, options = {}
- insert_rows = Array(rows).map do |row|
- Google::Apis::BigqueryV2::InsertAllTableDataRequest::Row.new(
- insert_id: SecureRandom.uuid,
- json: Convert.to_json_row(row)
- )
+ json_rows = Array(rows).map { |row| Convert.to_json_row row }
+
+ insert_tabledata_json_rows dataset_id, table_id, json_rows, options
+ end
+
+ def insert_tabledata_json_rows dataset_id, table_id, json_rows,
+ options = {}
+ insert_rows = Array(json_rows).map do |json_row|
+ {
+ insertId: SecureRandom.uuid,
+ json: json_row
+ }
end
- insert_req = Google::Apis::BigqueryV2::InsertAllTableDataRequest.new(
+
+ insert_req = {
rows: insert_rows,
- ignore_unknown_values: options[:ignore_unknown],
- skip_invalid_rows: options[:skip_invalid]
- )
+ ignoreUnknownValues: options[:ignore_unknown],
+ skipInvalidRows: options[:skip_invalid]
+ }.to_json
# The insertAll with insertId operation is considered idempotent
execute backoff: true do
service.insert_all_table_data(
- @project, dataset_id, table_id, insert_req)
+ @project, dataset_id, table_id, insert_req,
+ options: { skip_serialization: true }
+ )
end
end
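For orientation, a minimal usage sketch of the new pre-serialized path, assuming `service` is an instance of this Service class; the dataset, table, and row values are hypothetical, while `Convert.to_json_row` and the `skip_serialization` behaviour come from the diff above:

  rows = [{ name: "Alice", age: 30 }]

  # Rows are first converted to plain JSON-compatible hashes...
  json_rows = rows.map { |row| Google::Cloud::Bigquery::Convert.to_json_row row }

  # ...then sent as an already-encoded JSON body; skip_serialization: true tells
  # google-api-client not to serialize the request object a second time.
  service.insert_tabledata_json_rows "my_dataset", "my_table", json_rows,
                                     skip_invalid: true, ignore_unknown: true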
##
# Lists all jobs in the specified project to which you have
@@ -278,11 +288,12 @@
def copy_table source, target, options = {}
# Jobs have generated id, so this operation is considered idempotent
execute backoff: true do
service.insert_job @project, copy_table_config(
- source, target, options)
+ source, target, options
+ )
end
end
def extract_table table, storage_files, options = {}
# Jobs have generated id, so this operation is considered idempotent
@@ -304,11 +315,12 @@
def load_table_file dataset_id, table_id, file, options = {}
# Jobs have generated id, so this operation is considered idempotent
execute backoff: true do
service.insert_job \
@project, load_table_file_config(
- dataset_id, table_id, file, options),
+ dataset_id, table_id, file, options
+ ),
upload_source: file, content_type: mime_type_for(file)
end
end
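A hedged caller-side sketch of the load path above; the dataset, table, file name, and option values are hypothetical, and the option keys mirror the options[:create], options[:write], and options[:format] reads in the config helpers further down:

  # The file is passed both to the load-job config builder and as the upload
  # body, with its MIME type (from mime_type_for) as the content type.
  service.load_table_file "my_dataset", "my_table", "data/rows.csv",
                          format: "csv", create: "needed", write: "append"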
##
@@ -318,11 +330,11 @@
# dataset_id from the default table if they are missing.
def self.table_ref_from_s str, default_table_ref
str = str.to_s
m = /\A(((?<prj>\S*):)?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match str
unless m
- fail ArgumentError, "unable to identify table from #{str.inspect}"
+ raise ArgumentError, "unable to identify table from #{str.inspect}"
end
str_table_ref_hash = {
project_id: m["prj"],
dataset_id: m["dts"],
table_id: m["tbl"]
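To make the regex above concrete, a few hypothetical inputs and their captures; missing project and dataset parts are then filled in from default_table_ref:

  m = /\A(((?<prj>\S*):)?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match "my-project:my_dataset.my_table"
  m["prj"] # => "my-project"
  m["dts"] # => "my_dataset"
  m["tbl"] # => "my_table"

  # "my_dataset.my_table" => prj nil, dts "my_dataset", tbl "my_table"
  # "my_table"            => prj nil, dts nil,          tbl "my_table"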
@@ -389,11 +401,12 @@
def load_table_file_opts dataset_id, table_id, file, options = {}
path = Pathname(file).to_path
{
destination_table: Google::Apis::BigqueryV2::TableReference.new(
- project_id: @project, dataset_id: dataset_id, table_id: table_id),
+ project_id: @project, dataset_id: dataset_id, table_id: table_id
+ ),
create_disposition: create_disposition(options[:create]),
write_disposition: write_disposition(options[:write]),
source_format: source_format(path, options[:format]),
projection_fields: projection_fields(options[:projection_fields]),
allow_jagged_rows: options[:jagged_rows],
@@ -421,11 +434,12 @@
end
def load_table_url_opts dataset_id, table_id, url, options = {}
{
destination_table: Google::Apis::BigqueryV2::TableReference.new(
- project_id: @project, dataset_id: dataset_id, table_id: table_id),
+ project_id: @project, dataset_id: dataset_id, table_id: table_id
+ ),
source_uris: Array(url),
create_disposition: create_disposition(options[:create]),
write_disposition: write_disposition(options[:write]),
source_format: source_format(url, options[:format]),
projection_fields: projection_fields(options[:projection_fields]),
@@ -498,11 +512,11 @@
Convert.to_query_param(param).tap do |named_param|
named_param.name = String name
end
end
else
- fail "Query parameters must be an Array or a Hash."
+ raise "Query parameters must be an Array or a Hash."
end
end
if options[:external]
external_table_pairs = options[:external].map do |name, obj|
@@ -543,11 +557,11 @@
Convert.to_query_param(param).tap do |named_param|
named_param.name = String name
end
end
else
- fail "Query parameters must be an Array or a Hash."
+ raise "Query parameters must be an Array or a Hash."
end
end
req
end
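As a rough illustration of the branch above (values hypothetical): an Array of params produces positional parameters, a Hash produces named parameters, each converted through Convert.to_query_param and given a name:

  # Positional: WHERE name = ? AND age = ?
  options = { params: ["alice", 30] }

  # Named: WHERE name = @name AND age = @age; each key becomes the parameter name.
  options = { params: { name: "alice", age: 30 } }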
@@ -622,18 +636,19 @@
{ "batch" => "BATCH",
"interactive" => "INTERACTIVE" }[str.to_s.downcase]
end
def source_format path, format
- val = { "csv" => "CSV",
- "json" => "NEWLINE_DELIMITED_JSON",
- "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
- "avro" => "AVRO",
- "datastore" => "DATASTORE_BACKUP",
- "backup" => "DATASTORE_BACKUP",
- "datastore_backup" => "DATASTORE_BACKUP"
- }[format.to_s.downcase]
+ val = {
+ "csv" => "CSV",
+ "json" => "NEWLINE_DELIMITED_JSON",
+ "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
+ "avro" => "AVRO",
+ "datastore" => "DATASTORE_BACKUP",
+ "backup" => "DATASTORE_BACKUP",
+ "datastore_backup" => "DATASTORE_BACKUP"
+ }[format.to_s.downcase]
return val unless val.nil?
return nil if path.nil?
return "CSV" if path.end_with? ".csv"
return "NEWLINE_DELIMITED_JSON" if path.end_with? ".json"
return "AVRO" if path.end_with? ".avro"
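A short worked example of the lookup above (file names hypothetical): an explicit format takes precedence, otherwise the file extension decides, and nil is returned when neither matches:

  source_format "data.txt",  "json"       # => "NEWLINE_DELIMITED_JSON" (explicit format wins)
  source_format "data.csv",  nil          # => "CSV"  (falls through to the extension)
  source_format "data.avro", nil          # => "AVRO"
  source_format "export",    "datastore"  # => "DATASTORE_BACKUP"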
@@ -647,11 +662,11 @@
def mime_type_for file
mime_type = MIME::Types.of(Pathname(file).to_path).first.to_s
return nil if mime_type.empty?
mime_type
- rescue
+ rescue StandardError
nil
end
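For example (hypothetical file names), the lookup above resolves a content type from the file extension, and a failed lookup now rescues StandardError rather than every exception:

  MIME::Types.of("rows.json").first.to_s # => "application/json"
  MIME::Types.of("rows.csv").first.to_s  # => "text/csv"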
def udfs array_or_str
Array(array_or_str).map do |uri_or_code|
@@ -680,17 +695,17 @@
attr_accessor :retries
attr_accessor :reasons
attr_accessor :backoff
end
self.retries = 5
- self.reasons = %w(rateLimitExceeded backendError)
+ self.reasons = %w[rateLimitExceeded backendError]
self.backoff = lambda do |retries|
# Max delay is 32 seconds
# See "Back-off Requirements" here:
# https://cloud.google.com/bigquery/sla
retries = 5 if retries > 5
- delay = 2 ** retries
+ delay = 2**retries
sleep delay
end
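For reference, the schedule produced by the lambda above: the delay is 2**retries seconds with retries clamped to 5, so the sleep tops out at 32 seconds:

  # retries: 0 => 1s, 1 => 2s, 2 => 4s, 3 => 8s, 4 => 16s, 5 or more => 32s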
def initialize options = {}
@retries = (options[:retries] || Backoff.retries).to_i
@@ -727,10 +742,10 @@
return false if json_errors.empty?
json_errors.each do |json_error|
return false unless @reasons.include? json_error["reason"]
end
true
- rescue
+ rescue StandardError
false
end
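A hedged illustration of the check above, assuming the default reasons list (rateLimitExceeded, backendError): every listed error's reason must be retryable, and any parse failure now rescues StandardError and reports false:

  # Hypothetical json_errors payloads:
  #   [{ "reason" => "rateLimitExceeded" }]                         # => true  (retry)
  #   [{ "reason" => "backendError" }, { "reason" => "notFound" }]  # => false (one non-retryable reason)
  #   []                                                            # => false (no errors to inspect)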
end
end
end