lib/fluent/plugin/bigquery/writer.rb in fluent-plugin-bigquery-2.0.0.beta vs lib/fluent/plugin/bigquery/writer.rb in fluent-plugin-bigquery-2.0.0
- old (2.0.0.beta)
+ new (2.0.0)
@@ -35,10 +35,11 @@
}
if @options[:time_partitioning_type]
definition[:time_partitioning] = {
type: @options[:time_partitioning_type].to_s.upcase,
+ field: @options[:time_partitioning_field] ? @options[:time_partitioning_field].to_s : nil,
expiration_ms: @options[:time_partitioning_expiration] ? @options[:time_partitioning_expiration] * 1000 : nil
}.select { |_, value| !value.nil? }
end
client.insert_table(project, dataset, definition, {})
log.debug "create table", project_id: project, dataset: dataset, table: table_id
@@ -225,12 +226,13 @@
@num_errors_per_chunk.delete(chunk_id_hex)
raise Fluent::BigQuery::UnRetryableError.new("failed to load into bigquery, and cannot retry")
end
end
+ # `stats` can be nil if we receive a warning like "Warning: Load job succeeded with data imported, however statistics may be lost due to internal error."
stats = response.statistics.load
duration = (response.statistics.end_time - response.statistics.creation_time) / 1000.0
- log.debug "load job finished", id: job_id, state: response.status.state, input_file_bytes: stats.input_file_bytes, input_files: stats.input_files, output_bytes: stats.output_bytes, output_rows: stats.output_rows, bad_records: stats.bad_records, duration: duration.round(2), project_id: project, dataset: dataset, table: table_id
+ log.debug "load job finished", id: job_id, state: response.status.state, input_file_bytes: stats&.input_file_bytes, input_files: stats&.input_files, output_bytes: stats&.output_bytes, output_rows: stats&.output_rows, bad_records: stats&.bad_records, duration: duration.round(2), project_id: project, dataset: dataset, table: table_id
@num_errors_per_chunk.delete(chunk_id_hex)
end
private
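The switch to the safe navigation operator (`&.`) means the debug log line no longer raises `NoMethodError` when `response.statistics.load` is nil, as described in the comment added above. A minimal sketch of that behavior, using a `Struct` as a stand-in for the statistics object returned by the google-api-client:

    # Stand-in for the load-statistics object; the real class comes from google-api-client.
    LoadStats = Struct.new(:input_files, :output_rows, :bad_records)

    stats = nil                         # what we have when BigQuery drops the load statistics
    p stats&.input_files                # => nil, where `stats.input_files` would raise NoMethodError

    stats = LoadStats.new(3, 1_000, 0)  # normal case: statistics are present
    p stats&.input_files                # => 3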