integration/fluent.conf in fluent-plugin-bigquery-3.0.1 vs integration/fluent.conf in fluent-plugin-bigquery-3.1.0

- old
+ new

@@ -2,10 +2,18 @@
   @type forward
   port 24224
   bind 0.0.0.0
 </source>
 
+<source>
+  @type dummy
+  dummy {"json_field": {"foo": "val1", "bar": "val2", "hoge": 1}, "geography_field": {"type": "LineString", "coordinates": [[-118.4085, 33.9416], [-73.7781, 40.6413]]}, "timestamp_field": "2022-12-15T22:40:21+09:00", "date": "2022-12-15", "record_field": {"inner_field": "hoge", "inner_json": {"key1": "val1", "key2": "val2"}}, "repeated_string_field": ["a", "b", "c"]}
+  auto_increment_key id
+
+  tag insert_data
+</source>
+
 <match insert_data>
   @id bigquery-insert-integration
   @type bigquery_insert
   allow_retry_insert_errors true
@@ -19,11 +27,11 @@
   chunk_limit_size 1m
   chunk_limit_records 1500
   total_limit_size 1g
   path ./log/bigquery-insert-integration
-  flush_interval 30
+  flush_interval 15
   flush_thread_count 4
   flush_at_shutdown true
   retry_max_times 14
   retry_max_interval 30m
@@ -35,10 +43,11 @@
   project "#{ENV["PROJECT_NAME"]}"
   dataset "#{ENV["DATASET_NAME"]}"
   table "#{ENV["TABLE_NAME"]}"
   auto_create_table false
+  # schema_path integration/schema.json
   fetch_schema true
   fetch_schema_table "#{ENV["TABLE_NAME"]}"
   insert_id_field id
@@ -76,9 +85,10 @@
   project "#{ENV["PROJECT_NAME"]}"
   dataset "#{ENV["DATASET_NAME"]}"
   table "#{ENV["TABLE_NAME"]}"
   auto_create_table false
+  # schema_path integration/schema.json
   fetch_schema true
   fetch_schema_table "#{ENV["TABLE_NAME"]}"
   <secondary>
     @type file
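
Note: the `# schema_path integration/schema.json` line added to both match blocks is a commented-out local-schema alternative to fetch_schema. A minimal sketch of that variant, assuming integration/schema.json holds the table's column definitions (this exact switch is not part of the shipped config):

  auto_create_table false
  schema_path integration/schema.json
  # fetch_schema true
  # fetch_schema_table "#{ENV["TABLE_NAME"]}"

With schema_path, the plugin reads the schema from the local JSON file rather than fetching it from BigQuery at startup.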