test/plugin/base_test.rb in fluent-plugin-google-cloud-0.12.10 vs test/plugin/base_test.rb in fluent-plugin-google-cloud-0.12.11
- old (0.12.10)
+ new (0.12.11)
@@ -69,12 +69,12 @@
def test_configure_service_account_private_key
# Using out-of-date config method.
exception_count = 0
begin
create_driver(PRIVATE_KEY_CONFIG)
- rescue Fluent::ConfigError => error
- assert error.message.include? 'Please remove configuration parameters'
+ rescue Fluent::ConfigError => e
+ assert e.message.include? 'Please remove configuration parameters'
exception_count += 1
end
assert_equal 1, exception_count
end
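This hunk only renames the rescued exception from error to e, the default expected by RuboCop's Naming/RescuedExceptionsVariableName cop (the cop attribution is an inference; the diff itself does not name it). A minimal sketch of the pattern:

    begin
      create_driver(PRIVATE_KEY_CONFIG)
    rescue Fluent::ConfigError => e # was: => error
      assert e.message.include? 'Please remove configuration parameters'
    end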
@@ -98,27 +98,26 @@
end
def test_configure_metadata_missing_parts_on_other_platforms
setup_no_metadata_service_stubs
Common::Utils::CredentialsInfo.stubs(:project_id).returns(nil)
- [[CONFIG_MISSING_METADATA_PROJECT_ID, ['project_id'], false],
- [CONFIG_MISSING_METADATA_ZONE, [], true],
- [CONFIG_MISSING_METADATA_VM_ID, [], true],
- [CONFIG_MISSING_METADATA_ALL, ['project_id'], false]
+ [
+ [CONFIG_MISSING_METADATA_PROJECT_ID, ['project_id'], false],
+ [CONFIG_MISSING_METADATA_ZONE, [], true],
+ [CONFIG_MISSING_METADATA_VM_ID, [], true],
+ [CONFIG_MISSING_METADATA_ALL, ['project_id'], false]
].each_with_index do |(config, missing_parts, is_valid_config), index|
- begin
- create_driver(config)
- assert_true is_valid_config, "Invalid config at index #{index} should "\
- 'have raised an error.'
- rescue Fluent::ConfigError => error
- assert_false is_valid_config, "Valid config at index #{index} should "\
- "not have raised an error #{error}."
- assert error.message.include?('Unable to obtain metadata parameters:'),
- "Index #{index} failed."
- missing_parts.each do |part|
- assert error.message.include?(part), "Index #{index} failed."
- end
+ create_driver(config)
+ assert_true is_valid_config, "Invalid config at index #{index} should "\
+ 'have raised an error.'
+ rescue Fluent::ConfigError => e
+ assert_false is_valid_config, "Valid config at index #{index} should "\
+ "not have raised an error #{e}."
+ assert e.message.include?('Unable to obtain metadata parameters:'),
+ "Index #{index} failed."
+ missing_parts.each do |part|
+ assert e.message.include?(part), "Index #{index} failed."
end
end
end
def test_configure_ignores_unknown_monitoring_type
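The hunk above drops the explicit begin/end and attaches rescue directly to the each_with_index block, a Ruby 2.5+ feature that RuboCop's Style/RedundantBegin autocorrects to (cop name inferred). A sketch of the shape, using the hypothetical names items, risky and handle:

    # Before: begin/rescue/end nested one level inside the block.
    items.each do |item|
      begin
        risky(item)
      rescue Fluent::ConfigError => e
        handle(e)
      end
    end

    # After: do...end blocks may carry their own rescue clause (Ruby >= 2.5).
    items.each do |item|
      risky(item)
    rescue Fluent::ConfigError => e
      handle(e)
    end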
@@ -126,42 +125,56 @@
# "prometheus" or "opencensus" (in which case, we simply don't record
# metrics), and that the counters are set to nil.
setup_gce_metadata_stubs
create_driver(CONFIG_UNKNOWN_MONITORING_TYPE)
assert_nil(Prometheus::Client.registry.get(
- :stackdriver_successful_requests_count))
+ :stackdriver_successful_requests_count
+ ))
assert_nil(Prometheus::Client.registry.get(
- :stackdriver_failed_requests_count))
+ :stackdriver_failed_requests_count
+ ))
assert_nil(Prometheus::Client.registry.get(
- :stackdriver_ingested_entries_count))
+ :stackdriver_ingested_entries_count
+ ))
assert_nil(Prometheus::Client.registry.get(
- :stackdriver_dropped_entries_count))
+ :stackdriver_dropped_entries_count
+ ))
assert_nil(Prometheus::Client.registry.get(
- :stackdriver_retried_entries_count))
+ :stackdriver_retried_entries_count
+ ))
assert_nil(OpenCensus::Stats::MeasureRegistry.get(
Monitoring::MetricTranslator.new(
- :stackdriver_successful_requests_count, {})))
+ :stackdriver_successful_requests_count, {}
+ )
+ ))
assert_nil(OpenCensus::Stats::MeasureRegistry.get(
Monitoring::MetricTranslator.new(
- :stackdriver_failed_requests_count, {})))
+ :stackdriver_failed_requests_count, {}
+ )
+ ))
assert_nil(OpenCensus::Stats::MeasureRegistry.get(
Monitoring::MetricTranslator.new(
- :stackdriver_ingested_entries_count, {})))
+ :stackdriver_ingested_entries_count, {}
+ )
+ ))
assert_nil(OpenCensus::Stats::MeasureRegistry.get(
Monitoring::MetricTranslator.new(
- :stackdriver_dropped_entries_count, {})))
+ :stackdriver_dropped_entries_count, {}
+ )
+ ))
assert_nil(OpenCensus::Stats::MeasureRegistry.get(
Monitoring::MetricTranslator.new(
- :stackdriver_retried_entries_count, {})))
+ :stackdriver_retried_entries_count, {}
+ )
+ ))
end
def test_configure_uses_metrics_resource
setup_gce_metadata_stubs
[CONFIG_METRICS_RESOURCE_JSON,
CONFIG_METRICS_RESOURCE_HASH,
- CONFIG_METRICS_RESOURCE_JSON_HASH
- ].each_with_index do |config, index|
+ CONFIG_METRICS_RESOURCE_JSON_HASH].each_with_index do |config, index|
d = create_driver(config)
assert_equal 'custom_resource', d.instance.monitoring_resource.type, \
"Index #{index}"
assert_equal '123', d.instance.monitoring_resource.labels['label1'], \
"Index #{index}"
@@ -169,11 +182,12 @@
"Index #{index}"
assert_true d.instance.instance_variable_get(:@enable_monitoring)
registry = d.instance.instance_variable_get(:@registry)
assert_not_nil registry
monitored_resource = registry.instance_variable_get(
- :@metrics_monitored_resource)
+ :@metrics_monitored_resource
+ )
assert_equal('custom_resource', monitored_resource.type, "Index #{index}")
assert_equal({ 'label1' => '123', 'label2' => 'abc' },
monitored_resource.labels, "Index #{index}")
end
end
@@ -188,18 +202,16 @@
CONFIG_METRICS_RESOURCE_JSON_BAD_KEYS_LABELS =>
/unrecognized keys: \[:"labels\.random"\]/,
CONFIG_METRICS_RESOURCE_JSON_BAD_KEYS_NO_LABELS =>
/unrecognized keys: \[:random\]/
}.each_with_index do |(config, pattern), index|
- begin
- create_driver(config)
- assert false,
- "Invalid config at index #{index} should have raised an error."
- rescue Fluent::ConfigError => error
- assert error.message.match?(pattern), \
- "Index #{index} failed: got #{error.message}."
- end
+ create_driver(config)
+ assert false,
+ "Invalid config at index #{index} should have raised an error."
+ rescue Fluent::ConfigError => e
+ assert e.message.match?(pattern), \
+ "Index #{index} failed: got #{e.message}."
end
end
def test_metadata_loading
setup_gce_metadata_stubs
@@ -275,20 +287,20 @@
def test_ec2_metadata_requires_project_id
setup_ec2_metadata_stubs
exception_count = 0
begin
create_driver
- rescue Fluent::ConfigError => error
- assert error.message.include? 'Unable to obtain metadata parameters:'
- assert error.message.include? 'project_id'
+ rescue Fluent::ConfigError => e
+ assert e.message.include? 'Unable to obtain metadata parameters:'
+ assert e.message.include? 'project_id'
exception_count += 1
end
assert_equal 1, exception_count
end
def test_project_id_from_credentials
- %w(gce ec2).each do |platform|
+ %w[gce ec2].each do |platform|
send("setup_#{platform}_metadata_stubs")
[IAM_CREDENTIALS, NEW_STYLE_CREDENTIALS, LEGACY_CREDENTIALS].each \
do |creds|
ENV[CREDENTIALS_PATH_ENV_VAR] = creds[:path]
d = create_driver
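From here on, %w(...) word arrays become %w[...], RuboCop's Style/PercentLiteralDelimiters default for %w (inferred). The two forms build identical arrays:

    %w(gce ec2) == %w[gce ec2] # => true; both are ["gce", "ec2"]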
@@ -315,22 +327,23 @@
d = create_driver
d.emit('message' => log_entry(0))
d.run
end
verify_log_entries(1, COMPUTE_PARAMS.merge(
- project_id: IAM_CREDENTIALS[:project_id]))
+ project_id: IAM_CREDENTIALS[:project_id]
+ ))
end
def test_invalid_json_credentials
- %w(gce_metadata ec2_metadata no_metadata_service).each do |platform|
+ %w[gce_metadata ec2_metadata no_metadata_service].each do |platform|
send("setup_#{platform}_stubs")
exception_count = 0
ENV[CREDENTIALS_PATH_ENV_VAR] = INVALID_CREDENTIALS[:path]
begin
create_driver
- rescue RuntimeError => error
- assert error.message.include? 'Unable to read the credential file'
+ rescue RuntimeError => e
+ assert e.message.include? 'Unable to read the credential file'
exception_count += 1
end
assert_equal 1, exception_count
end
end
@@ -496,13 +509,13 @@
setup_logging_stubs do
d = create_driver
d.emit(
'int_key' => { 1 => message },
'int_array_key' => { [1, 2, 3, 4] => message },
- 'string_array_key' => { %w(a b c) => message },
+ 'string_array_key' => { %w[a b c] => message },
'hash_key' => { { 'some_key' => 'some_value' } => message },
- 'mixed_key' => { { 'some_key' => %w(a b c) } => message },
+ 'mixed_key' => { { 'some_key' => %w[a b c] } => message },
'symbol_key' => { some_symbol: message },
'nil_key' => { nil => message }
)
d.run
end
@@ -525,13 +538,13 @@
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(APPLICATION_DEFAULT_CONFIG)
- d.emit('message' => 'notJSON ' + json_string)
+ d.emit('message' => "notJSON #{json_string}")
d.emit('message' => json_string)
- d.emit('message' => " \r\n \t" + json_string)
+ d.emit('message' => " \r\n \t#{json_string}")
d.run
end
verify_log_entries(3, COMPUTE_PARAMS, 'textPayload') do
# Only check for the existence of textPayload.
end
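This hunk (and several below) replaces string concatenation with interpolation, matching RuboCop's Style/StringConcatenation (inferred). The results are identical; the whitespace-prefixed case was already a double-quoted string, so \r\n and \t keep their escape meaning:

    json_string = '{"msg": "test"}' # stand-in for the fixture above
    'notJSON ' + json_string       # before
    "notJSON #{json_string}"       # after, same string
    " \r\n \t" + json_string       # before
    " \r\n \t#{json_string}"       # after, same string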
@@ -541,14 +554,14 @@
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(APPLICATION_DEFAULT_CONFIG)
- %w(log msg).each do |field|
- d.emit(field => 'notJSON ' + json_string)
+ %w[log msg].each do |field|
+ d.emit(field => "notJSON #{json_string}")
d.emit(field => json_string)
- d.emit(field => " \r\n \t" + json_string)
+ d.emit(field => " \r\n \t#{json_string}")
end
d.run
end
verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
fields = entry['jsonPayload']
@@ -562,11 +575,11 @@
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(DETECT_JSON_CONFIG)
- d.emit('message' => 'notJSON ' + json_string)
+ d.emit('message' => "notJSON #{json_string}")
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'textPayload') do
# Only check for the existence of textPayload.
end
@@ -576,12 +589,12 @@
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(DETECT_JSON_CONFIG)
- %w(log msg).each do |field|
- d.emit(field => 'notJSON ' + json_string)
+ %w[log msg].each do |field|
+ d.emit(field => "notJSON #{json_string}")
end
d.run
end
verify_log_entries(2, COMPUTE_PARAMS, 'jsonPayload') do |entry|
fields = entry['jsonPayload']
@@ -666,13 +679,13 @@
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(DETECT_JSON_CONFIG)
- %w(message log msg).each do |field|
+ %w[message log msg].each do |field|
d.emit(field => json_string)
- d.emit(field => " \r\n \t" + json_string)
+ d.emit(field => " \r\n \t#{json_string}")
end
d.run
end
verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
fields = entry['jsonPayload']
@@ -689,13 +702,13 @@
setup_k8s_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
- d.emit(container_log_entry_with_metadata('notJSON' + json_string))
+ d.emit(container_log_entry_with_metadata("notJSON#{json_string}"))
d.emit(container_log_entry_with_metadata(json_string))
- d.emit(container_log_entry_with_metadata(" \r\n \t" + json_string))
+ d.emit(container_log_entry_with_metadata(" \r\n \t#{json_string}"))
d.run
end
verify_log_entries(3, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
# Only check for the existence of textPayload.
end
@@ -706,11 +719,11 @@
setup_k8s_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
- d.emit(container_log_entry_with_metadata('notJSON' + json_string))
+ d.emit(container_log_entry_with_metadata("notJSON#{json_string}"))
d.run
end
verify_log_entries(1, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
# Only check for the existence of textPayload.
end
@@ -722,11 +735,11 @@
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
setup_logging_stubs do
d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
d.emit(container_log_entry_with_metadata(json_string))
- d.emit(container_log_entry_with_metadata(" \r\n \t" + json_string))
+ d.emit(container_log_entry_with_metadata(" \r\n \t#{json_string}"))
d.run
end
verify_log_entries(2, CONTAINER_FROM_METADATA_PARAMS, 'jsonPayload') \
do |entry|
fields = entry['jsonPayload']
@@ -747,18 +760,20 @@
'"data": 5000, "some_null_field": null}'
PRESERVED_KEYS_TIMESTAMP_FIELDS.each do |timestamp_fields|
setup_logging_stubs do
@logs_sent = []
d = create_driver(DETECT_JSON_CONFIG)
- %w(message log msg).each do |field|
+ %w[message log msg].each do |field|
d.emit(PRESERVED_KEYS_MAP.merge(
- field => json_string).merge(timestamp_fields))
+ field => json_string
+ ).merge(timestamp_fields))
end
d.run
end
expected_params = COMPUTE_PARAMS.merge(
- labels: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE))
+ labels: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)
+ )
verify_log_entries(3, expected_params, 'jsonPayload') do |entry|
fields = entry['jsonPayload']
assert_equal 4, fields.size, entry
assert_equal 'test log entry 0', fields['msg'], entry
assert_equal 'test', fields['tag2'], entry
@@ -770,11 +785,11 @@
# Verify that we drop the log entries when 'require_valid_tags' is true and
# any non-string tags or tags with non-utf8 characters are detected.
def test_reject_invalid_tags_with_require_valid_tags_true
setup_gce_metadata_stubs
- INVALID_TAGS.keys.each do |tag|
+ INVALID_TAGS.each_key do |tag|
setup_logging_stubs do
@logs_sent = []
d = create_driver(REQUIRE_VALID_TAGS_CONFIG, tag)
d.emit('msg' => log_entry(0))
d.run
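INVALID_TAGS.keys.each becomes INVALID_TAGS.each_key, which iterates the keys without first materializing them into an array (RuboCop's Style/HashEachMethods; inferred). With a hypothetical hash h:

    h = { 'tag1' => 1, 'tag2' => 2 }
    h.keys.each { |k| puts k } # allocates an intermediate Array of keys
    h.each_key  { |k| puts k } # same output, no intermediate allocation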
@@ -798,12 +813,15 @@
d.run
end
params = CONTAINER_FROM_METADATA_PARAMS.merge(
resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
- 'container_name' => container_name)),
- log_name: tag)
+ 'container_name' => container_name
+ )
+ ),
+ log_name: tag
+ )
verify_log_entries(1, params, 'textPayload')
end
# Verify that container names with non-utf8 characters should be rejected when
# 'require_valid_tags' is true.
@@ -822,12 +840,14 @@
d.run
end
params = CONTAINER_FROM_METADATA_PARAMS.merge(
labels: CONTAINER_FROM_METADATA_PARAMS[:labels].merge(
"#{GKE_CONSTANTS[:service]}/container_name" =>
- CGI.unescape(encoded_name)),
- log_name: encoded_name)
+ CGI.unescape(encoded_name)
+ ),
+ log_name: encoded_name
+ )
verify_log_entries(0, params, 'textPayload')
end
end
# Verify that tags are properly encoded. When 'require_valid_tags' is true, we
@@ -859,12 +879,15 @@
d.run
end
params = CONTAINER_FROM_METADATA_PARAMS.merge(
resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
- 'container_name' => tag)),
- log_name: encoded_tag)
+ 'container_name' => tag
+ )
+ ),
+ log_name: encoded_tag
+ )
verify_log_entries(1, params, 'textPayload')
end
end
# Verify that tags are properly encoded and sanitized. When
@@ -905,12 +928,15 @@
d.run
end
params = CONTAINER_FROM_METADATA_PARAMS.merge(
resource: CONTAINER_FROM_METADATA_PARAMS[:resource].merge(
labels: CONTAINER_FROM_METADATA_PARAMS[:resource][:labels].merge(
- 'container_name' => CGI.unescape(encoded_container_name))),
- log_name: encoded_container_name)
+ 'container_name' => CGI.unescape(encoded_container_name)
+ )
+ ),
+ log_name: encoded_container_name
+ )
verify_log_entries(1, params, 'textPayload')
end
end
def test_configure_split_logs_by_tag
@@ -958,17 +984,19 @@
assert_prometheus_metric_value(
:stackdriver_successful_requests_count,
request_count,
'agent.googleapis.com/agent',
OpenCensus::Stats::Aggregation::Sum, d,
- :aggregate)
+ :aggregate
+ )
assert_prometheus_metric_value(
:stackdriver_ingested_entries_count,
log_entry_count,
'agent.googleapis.com/agent',
OpenCensus::Stats::Aggregation::Sum, d,
- :aggregate)
+ :aggregate
+ )
end
end
end
def test_compute_timestamp
@@ -1042,17 +1070,17 @@
adjust_timestamp_if_invalid = lambda do |driver, timestamp, current_time|
driver.instance.send(:adjust_timestamp_if_invalid, timestamp,
current_time)
end
- december_29 = Time.new(2019, 12, 29, 10, 23, 35, '-08:00')
- december_31 = Time.new(2019, 12, 31, 10, 23, 35, '-08:00')
- january_1 = Time.new(2020, 1, 1, 10, 23, 35, '-08:00')
+ december29 = Time.new(2019, 12, 29, 10, 23, 35, '-08:00')
+ december31 = Time.new(2019, 12, 31, 10, 23, 35, '-08:00')
+ january1 = Time.new(2020, 1, 1, 10, 23, 35, '-08:00')
{
# December 29, 2019 (normal operation).
- december_29 => begin
+ december29 => begin
one_day_later = Time.new(2019, 12, 30, 10, 23, 35, '-08:00')
one_day_a_year_earlier = Time.new(2018, 12, 30, 10, 23, 35, '-08:00')
just_under_one_day_later = Time.new(2019, 12, 30, 10, 23, 34, '-08:00')
next_year = Time.new(2020, 1, 1, 0, 0, 0, '-08:00')
one_second_before_next_year =
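december_29, december_31 and january_1 lose the underscore before the digits here and in the hunks below, which is what RuboCop's Naming/VariableNumber enforces in its default normalcase style (inferred). Only the identifiers change:

    december29 = Time.new(2019, 12, 29, 10, 23, 35, '-08:00')
    # Same value the old december_29 held; every use site is renamed to match.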
@@ -1062,21 +1090,21 @@
one_second_into_next_year = Time.new(2020, 1, 1, 0, 0, 1, '-08:00')
one_day_into_next_year = Time.new(2020, 1, 2, 0, 0, 0, '-08:00')
{
Time.at(123_456.789) => Time.at(123_456.789),
Time.at(0) => Time.at(0),
- december_29 => december_29,
+ december29 => december29,
one_day_later => one_day_a_year_earlier,
just_under_one_day_later => just_under_one_day_later,
one_second_before_next_year => one_second_before_this_year,
next_year => Time.at(0),
one_second_into_next_year => Time.at(0),
one_day_into_next_year => Time.at(0)
}
end,
# January 1, 2020 (normal operation).
- january_1 => begin
+ january1 => begin
one_day_later = Time.new(2020, 1, 2, 10, 23, 35, '-08:00')
one_day_a_year_earlier = Time.new(2019, 1, 2, 10, 23, 35, '-08:00')
just_under_one_day_later = Time.new(2020, 1, 2, 10, 23, 34, '-08:00')
next_year = Time.new(2021, 1, 1, 0, 0, 0, '-08:00')
one_second_before_next_year =
@@ -1086,32 +1114,32 @@
one_second_into_next_year = Time.new(2021, 1, 1, 0, 0, 1, '-08:00')
one_day_into_next_year = Time.new(2021, 1, 2, 0, 0, 0, '-08:00')
{
Time.at(123_456.789) => Time.at(123_456.789),
Time.at(0) => Time.at(0),
- january_1 => january_1,
+ january1 => january1,
one_day_later => one_day_a_year_earlier,
just_under_one_day_later => just_under_one_day_later,
one_second_before_next_year => one_second_before_this_year,
next_year => Time.at(0),
one_second_into_next_year => Time.at(0),
one_day_into_next_year => Time.at(0)
}
end,
# December 31, 2019 (next day overlaps new year).
- december_31 => begin
+ december31 => begin
one_day_later = Time.new(2020, 1, 1, 10, 23, 35, '-08:00')
just_under_one_day_later = Time.new(2020, 1, 1, 10, 23, 34, '-08:00')
next_year = Time.new(2020, 1, 1, 0, 0, 0, '-08:00')
one_second_before_next_year =
Time.new(2019, 12, 31, 11, 59, 59, '-08:00')
one_second_into_next_year = Time.new(2020, 1, 1, 0, 0, 1, '-08:00')
one_day_into_next_year = Time.new(2020, 1, 2, 0, 0, 0, '-08:00')
{
Time.at(123_456.789) => Time.at(123_456.789),
Time.at(0) => Time.at(0),
- december_31 => december_31,
+ december31 => december31,
one_day_later => Time.at(0), # Falls into the next year.
just_under_one_day_later => just_under_one_day_later,
one_second_before_next_year => one_second_before_next_year,
next_year => next_year,
one_second_into_next_year => one_second_into_next_year,
@@ -1605,11 +1633,12 @@
verify_field_key('labels',
default_key: DEFAULT_LABELS_KEY,
custom_key: 'custom_labels_key',
custom_key_config: CONFIG_CUSTOM_LABELS_KEY_SPECIFIED,
sample_value: COMPUTE_PARAMS[:labels].merge(
- LABELS_MESSAGE),
+ LABELS_MESSAGE
+ ),
default_value: COMPUTE_PARAMS[:labels])
end
def test_log_entry_operation_field
verify_field_key('operation',
@@ -1657,59 +1686,67 @@
def test_cascading_json_detection_with_log_entry_insert_id_field
verify_cascading_json_detection_with_log_entry_fields(
'insertId', DEFAULT_INSERT_ID_KEY,
root_level_value: INSERT_ID,
- nested_level_value: INSERT_ID2)
+ nested_level_value: INSERT_ID2
+ )
end
def test_cascading_json_detection_with_log_entry_labels_field
verify_cascading_json_detection_with_log_entry_fields(
'labels', DEFAULT_LABELS_KEY,
root_level_value: LABELS_MESSAGE,
nested_level_value: LABELS_MESSAGE2,
expected_value_from_root: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE),
expected_value_from_nested: COMPUTE_PARAMS[:labels].merge(
- LABELS_MESSAGE2))
+ LABELS_MESSAGE2
+ )
+ )
end
def test_cascading_json_detection_with_log_entry_operation_field
verify_cascading_json_detection_with_log_entry_fields(
'operation', DEFAULT_OPERATION_KEY,
root_level_value: OPERATION_MESSAGE,
nested_level_value: OPERATION_MESSAGE2,
- expected_value_from_nested: expected_operation_message2)
+ expected_value_from_nested: expected_operation_message2
+ )
end
def test_cascading_json_detection_with_log_entry_source_location_field
verify_cascading_json_detection_with_log_entry_fields(
'sourceLocation', DEFAULT_SOURCE_LOCATION_KEY,
root_level_value: SOURCE_LOCATION_MESSAGE,
- nested_level_value: SOURCE_LOCATION_MESSAGE2)
+ nested_level_value: SOURCE_LOCATION_MESSAGE2
+ )
end
def test_cascading_json_detection_with_log_entry_span_id_field
verify_cascading_json_detection_with_log_entry_fields(
'spanId', DEFAULT_SPAN_ID_KEY,
root_level_value: SPAN_ID,
- nested_level_value: SPAN_ID2)
+ nested_level_value: SPAN_ID2
+ )
end
def test_cascading_json_detection_with_log_entry_trace_field
verify_cascading_json_detection_with_log_entry_fields(
'trace', DEFAULT_TRACE_KEY,
root_level_value: TRACE,
- nested_level_value: TRACE2)
+ nested_level_value: TRACE2
+ )
end
def test_cascading_json_detection_with_log_entry_trace_sampled_field
verify_cascading_json_detection_with_log_entry_fields(
'traceSampled', DEFAULT_TRACE_SAMPLED_KEY,
root_level_value: TRACE_SAMPLED,
nested_level_value: TRACE_SAMPLED2,
default_value_from_root: false,
- default_value_from_nested: false)
+ default_value_from_nested: false
+ )
end
# Verify that labels present in multiple inputs respect the expected priority
# order:
# 1. Labels from the field "logging.googleapis.com/labels" in payload.
@@ -1737,13 +1774,15 @@
},
# All three types of labels that do not conflict.
{
config: CONFIG_LABLES_AND_LABLE_MAP,
emitted_log: PAYLOAD_FOR_LABEL_MAP.merge(
- DEFAULT_LABELS_KEY => LABELS_MESSAGE),
+ DEFAULT_LABELS_KEY => LABELS_MESSAGE
+ ),
expected_labels: LABELS_MESSAGE.merge(LABELS_FROM_LABELS_CONFIG).merge(
- LABELS_FROM_LABEL_MAP_CONFIG)
+ LABELS_FROM_LABEL_MAP_CONFIG
+ )
},
# labels from the config "labels" and "label_map" conflict.
{
config: CONFIG_LABLES_AND_LABLE_MAP_CONFLICTING,
emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING,
@@ -1759,18 +1798,20 @@
# labels from the config "label_map" and labels from the field
# "logging.googleapis.com/labels" in payload conflict.
{
config: CONFIG_LABEL_MAP_CONFLICTING,
emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING.merge(
- DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING),
+ DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING
+ ),
expected_labels: LABELS_FROM_PAYLOAD_CONFLICTING
},
# All three types of labels conflict.
{
config: CONFIG_LABLES_AND_LABLE_MAP_CONFLICTING,
emitted_log: PAYLOAD_FOR_LABEL_MAP_CONFLICTING.merge(
- DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING),
+ DEFAULT_LABELS_KEY => LABELS_FROM_PAYLOAD_CONFLICTING
+ ),
expected_labels: LABELS_FROM_PAYLOAD_CONFLICTING
}
].each do |test_params|
new_stub_context do
setup_gce_metadata_stubs
@@ -1778,11 +1819,12 @@
d = create_driver(test_params[:config])
d.emit({ 'message' => log_entry(0) }.merge(test_params[:emitted_log]))
d.run
end
expected_params = COMPUTE_PARAMS.merge(
- labels: COMPUTE_PARAMS[:labels].merge(test_params[:expected_labels]))
+ labels: COMPUTE_PARAMS[:labels].merge(test_params[:expected_labels])
+ )
verify_log_entries(1, expected_params)
end
end
end
@@ -1850,19 +1892,21 @@
# When local_resource_id is not present or does not match k8s regexes.
{
config: APPLICATION_DEFAULT_CONFIG,
setup_k8s_stub: true,
log_entry: k8s_container_log_entry(
- log_entry(0)).reject { |k, _| k == LOCAL_RESOURCE_ID_KEY },
+ log_entry(0)
+ ).reject { |k, _| k == LOCAL_RESOURCE_ID_KEY },
expected_params: CONTAINER_FROM_TAG_PARAMS
},
{
config: APPLICATION_DEFAULT_CONFIG,
setup_k8s_stub: true,
log_entry: k8s_container_log_entry(
log_entry(0),
- local_resource_id: RANDOM_LOCAL_RESOURCE_ID),
+ local_resource_id: RANDOM_LOCAL_RESOURCE_ID
+ ),
expected_params: CONTAINER_FROM_TAG_PARAMS
}
].each do |test_params|
new_stub_context do
setup_gce_metadata_stubs
@@ -1977,11 +2021,12 @@
expected = Time.now.to_i - start_time
d.instance.update_uptime
assert_metric_value.call(
:uptime, expected, 'agent.googleapis.com/agent',
OpenCensus::Stats::Aggregation::Sum, d,
- version: Fluent::GoogleCloudOutput.version_string)
+ version: Fluent::GoogleCloudOutput.version_string
+ )
rescue Test::Unit::AssertionFailedError
retry if (retries += 1) < 3
end
assert_not_equal 3, retries
end
@@ -2011,16 +2056,16 @@
(1..request_count).each do |request_index|
d = create_driver(config)
(1..entry_count).each do |entry_index|
d.emit('message' => log_entry(entry_index.to_s))
end
- # rubocop:disable Lint/HandleExceptions
+ # rubocop:disable Lint/SuppressedException
begin
d.run
rescue mock_error_type
end
- # rubocop:enable Lint/HandleExceptions
+ # rubocop:enable Lint/SuppressedException
failed_requests_count, dropped_entries_count,
retried_entries_count = metric_values
successful_requests_count = \
if code != ok_status_code
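The rubocop:disable comment is updated because the cop was renamed from Lint/HandleExceptions to Lint/SuppressedException in newer RuboCop releases; the old name would no longer be recognized. The deliberately empty rescue it guards is unchanged (d.run and mock_error_type come from the surrounding test):

    # rubocop:disable Lint/SuppressedException
    begin
      d.run
    rescue mock_error_type
      # Swallowed on purpose: the test asserts on metrics afterwards.
    end
    # rubocop:enable Lint/SuppressedException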
@@ -2065,10 +2110,11 @@
grpc: use_grpc, code: code)
# Skip failure assertions when code indicates success, because the
# assertion will fail in the case when a single metric contains time
# series with success and failure events.
next if code == ok_status_code
+
assert_metric_value.call(:stackdriver_failed_requests_count,
failed_requests_count,
'agent.googleapis.com/agent',
OpenCensus::Stats::Aggregation::Sum, d,
grpc: use_grpc, code: code)
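The bare + line in the hunk above adds a blank line after the guard clause (next if code == ok_status_code), as RuboCop's Layout/EmptyLineAfterGuardClause requires (inferred): the early exit is visually separated from the main body. With a hypothetical body:

    codes.each do |code|
      next if code == ok_status_code # guard clause

      assert_failure_metrics(code)   # hypothetical follow-up work
    end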
@@ -2098,11 +2144,12 @@
def container_tag_with_container_name(container_name)
"kubernetes.#{K8S_POD_NAME}_#{K8S_NAMESPACE_NAME}_#{container_name}"
end
def container_log_entry_with_metadata(
- log, container_name = K8S_CONTAINER_NAME)
+ log, container_name = K8S_CONTAINER_NAME
+ )
{
log: log,
stream: K8S_STREAM,
time: K8S_TIMESTAMP,
kubernetes: {
@@ -2157,14 +2204,14 @@
"#{K8S_NODE_LOCAL_RESOURCE_ID_PREFIX}" \
".#{K8S_NODE_NAME}"
}
end
- def dataflow_log_entry(i)
+ def dataflow_log_entry(index)
{
step: DATAFLOW_STEP_ID,
- message: log_entry(i)
+ message: log_entry(index)
}
end
def dataproc_log_entry(message, source_class = 'com.example.Example',
filename = 'test.log')
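Single-letter parameters are renamed here and below (i to index, and later n to expected_count), in line with RuboCop's Naming/MethodParameterName minimum length (inferred). Behavior is untouched:

    def dataflow_log_entry(index) # was: def dataflow_log_entry(i)
      { step: DATAFLOW_STEP_ID, message: log_entry(index) }
    end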
@@ -2173,56 +2220,58 @@
class: source_class,
message: log_entry(message)
}
end
- def ml_log_entry(i)
+ def ml_log_entry(index)
{
name: ML_LOG_AREA,
- message: log_entry(i)
+ message: log_entry(index)
}
end
def structured_log_entry
{
'name' => 'test name',
'code' => 'test code'
}
end
- def log_entry(i)
- "test log entry #{i}"
+ def log_entry(index)
+ "test log entry #{index}"
end
# If check_exact_labels is true, assert 'labels' and 'expected_labels' match
# exactly. If check_exact_labels is false, assert 'labels' is a subset of
# 'expected_labels'.
def check_labels(expected_labels, labels, check_exact_labels = true)
return if expected_labels.empty? && labels.empty?
+
expected_labels.each do |expected_key, expected_value|
assert labels.key?(expected_key), "Expected label #{expected_key} not" \
" found. Got labels: #{labels}."
actual_value = labels[expected_key]
assert actual_value.is_a?(String), 'Value for label' \
" #{expected_key} is not a string: #{actual_value}."
assert_equal expected_value, actual_value, "Value for #{expected_key}" \
" mismatch. Expected #{expected_value}. Got #{actual_value}"
end
- if check_exact_labels
- assert_equal expected_labels.length, labels.length, 'Expected ' \
- "#{expected_labels.length} labels: #{expected_labels}, got " \
- "#{labels.length} labels: #{labels}"
- end
+ return unless check_exact_labels
+
+ assert_equal expected_labels.length, labels.length, 'Expected ' \
+ "#{expected_labels.length} labels: #{expected_labels}, got " \
+ "#{labels.length} labels: #{labels}"
end
- def verify_default_log_entry_text(text, i, entry)
- assert_equal "test log entry #{i}", text,
- "Entry ##{i} had unexpected text: #{entry}"
+ def verify_default_log_entry_text(text, index, entry)
+ assert_equal "test log entry #{index}", text,
+ "Entry ##{index} had unexpected text: #{entry}"
end
# The caller can optionally provide a block which is called for each entry.
- def verify_json_log_entries(n, params, payload_type = 'textPayload',
+ def verify_json_log_entries(expected_count, params,
+ payload_type = 'textPayload',
check_exact_entry_labels = true)
entry_count = 0
@logs_sent.each do |request|
request['entries'].each do |entry|
unless payload_type.empty?
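In the hunk above, check_labels also picks up two related idioms: a blank line after the return-if guard, and the trailing if check_exact_labels block inverted into an early return (RuboCop's Style/GuardClause; inferred). The transformation in isolation:

    # Before: final assertion wrapped in an if block.
    if check_exact_labels
      assert_equal expected_labels.length, labels.length
    end

    # After: return unless keeps the remaining body flat.
    return unless check_exact_labels

    assert_equal expected_labels.length, labels.length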
@@ -2256,15 +2305,15 @@
# Check the payload for textPayload, otherwise it's up to the caller.
verify_default_log_entry_text(entry['textPayload'], entry_count,
entry)
end
entry_count += 1
- assert entry_count <= n,
- "Number of entries #{entry_count} exceeds expected number #{n}."
+ assert entry_count <= expected_count,
+ "Number of entries #{entry_count} exceeds expected number #{expected_count}."
end
end
- assert_equal n, entry_count
+ assert_equal expected_count, entry_count
end
def verify_container_logs(log_entry_factory, expected_params)
setup_gce_metadata_stubs
setup_k8s_metadata_stubs
@@ -2289,17 +2338,21 @@
{
# The keys are the names of fields in the payload that we are extracting
# LogEntry info from. The values are lists of two elements: the name of
# the subfield in LogEntry object and the expected value of that field.
DEFAULT_HTTP_REQUEST_KEY => [
- 'httpRequest', HTTP_REQUEST_MESSAGE],
+ 'httpRequest', HTTP_REQUEST_MESSAGE
+ ],
DEFAULT_LABELS_KEY => [
- 'labels', COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)],
+ 'labels', COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)
+ ],
DEFAULT_OPERATION_KEY => [
- 'operation', OPERATION_MESSAGE],
+ 'operation', OPERATION_MESSAGE
+ ],
DEFAULT_SOURCE_LOCATION_KEY => [
- 'sourceLocation', SOURCE_LOCATION_MESSAGE]
+ 'sourceLocation', SOURCE_LOCATION_MESSAGE
+ ]
}
end
def verify_subfields_from_record(payload_key, check_exact_entry_labels = true)
destination_key, payload_value = log_entry_subfields_params[payload_key]
@@ -2395,21 +2448,26 @@
# Cascading JSON detection is only triggered when the record has one field
# left with name "log", "message" or "msg". This test verifies additional
# LogEntry fields like spanId and traceId do not disable that by accident.
def verify_cascading_json_detection_with_log_entry_fields(
- log_entry_field, default_key, expectation)
+ log_entry_field, default_key, expectation
+ )
root_level_value = expectation[:root_level_value]
nested_level_value = expectation[:nested_level_value]
expected_value_from_root = expectation.fetch(
- :expected_value_from_root, root_level_value)
+ :expected_value_from_root, root_level_value
+ )
expected_value_from_nested = expectation.fetch(
- :expected_value_from_nested, nested_level_value)
+ :expected_value_from_nested, nested_level_value
+ )
default_value_from_root = expectation.fetch(
- :default_value_from_root, nil)
+ :default_value_from_root, nil
+ )
default_value_from_nested = expectation.fetch(
- :default_value_from_nested, nil)
+ :default_value_from_nested, nil
+ )
setup_gce_metadata_stubs
# {
# "logging.googleapis.com/XXX' => 'sample value'
@@ -2441,11 +2499,12 @@
# "name": "test name",
# "code": "test code"
# }
# }
log_entry_with_both_level_fields = log_entry_with_nested_level_field.merge(
- default_key => root_level_value)
+ default_key => root_level_value
+ )
[
[
log_entry_with_root_level_field,
expected_value_from_root,
@@ -2592,10 +2651,11 @@
_undefined
end
# Verify the number and the content of the log entries match the expectation.
# The caller can optionally provide a block which is called for each entry.
- def verify_log_entries(_n, _params, _payload_type = 'textPayload',
+ def verify_log_entries(_expected_count, _params,
+ _payload_type = 'textPayload',
_check_exact_entry_labels = true, &_block)
_undefined
end
# Defined in specific gRPC or REST files.