test/plugin/base_test.rb in fluent-plugin-google-cloud-0.7.15 vs test/plugin/base_test.rb in fluent-plugin-google-cloud-0.7.16
- old
+ new
@@ -37,10 +37,18 @@
ENV.delete(CLIENT_SECRET_VAR)
ENV.delete(REFRESH_TOKEN_VAR)
# home var, which is used to find $HOME/.gcloud/...
ENV.delete('HOME')
+ # Unregister Prometheus metrics.
+ registry = Prometheus::Client.registry
+ registry.unregister(:stackdriver_successful_requests_count)
+ registry.unregister(:stackdriver_failed_requests_count)
+ registry.unregister(:stackdriver_ingested_entries_count)
+ registry.unregister(:stackdriver_dropped_entries_count)
+ registry.unregister(:stackdriver_retried_entries_count)
+
setup_auth_stubs
@logs_sent = []
end
# Shared tests.
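The unregister calls added to setup above are needed because Prometheus::Client.registry is a process-global singleton: counters registered by one test's driver survive into the next test and collide on re-registration. A minimal sketch of the failure mode (standalone and illustrative, assuming the prometheus-client version this gem uses, whose Registry responds to unregister as the new setup code relies on):

    require 'prometheus/client'

    registry = Prometheus::Client.registry
    # The first test's driver registers its counter.
    registry.counter(:stackdriver_successful_requests_count,
                     'A number of successful requests')
    begin
      # A later test's driver tries to register the same name again ...
      registry.counter(:stackdriver_successful_requests_count, 'duplicate')
    rescue Prometheus::Client::Registry::AlreadyRegisteredError => e
      puts "without the teardown, every later test dies here: #{e.message}"
    end
    # ... which is what the unregister calls in setup prevent.
    registry.unregister(:stackdriver_successful_requests_count)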
@@ -140,10 +148,28 @@
assert_equal expected_url, d.instance.metadata_agent_url
ENV.delete(METADATA_AGENT_URL_ENV_VAR)
end
end
+ def test_configure_ignores_unknown_monitoring_type
+ # Verify that driver creation succeeds when monitoring type is not
+ # "prometheus" (in which case, we simply don't record metrics),
+ # and that the counters are set to nil.
+ setup_gce_metadata_stubs
+ create_driver(CONFIG_UNKNOWN_MONITORING_TYPE)
+ assert_nil(Prometheus::Client.registry.get(
+ :stackdriver_successful_requests_count))
+ assert_nil(Prometheus::Client.registry.get(
+ :stackdriver_failed_requests_count))
+ assert_nil(Prometheus::Client.registry.get(
+ :stackdriver_ingested_entries_count))
+ assert_nil(Prometheus::Client.registry.get(
+ :stackdriver_dropped_entries_count))
+ assert_nil(Prometheus::Client.registry.get(
+ :stackdriver_retried_entries_count))
+ end
+
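The nil assertions imply that the plugin registers its Prometheus counters only when monitoring is enabled with monitoring_type 'prometheus'. A plausible shape of that guard, for orientation only (illustrative names, not copied from the plugin source):

    # Illustrative configure-time guard: with an unknown monitoring_type the
    # counters are simply never created, so the registry lookups return nil.
    def configure_metrics
      return unless @enable_monitoring && @monitoring_type == 'prometheus'
      registry = Prometheus::Client.registry
      @successful_requests_count = registry.counter(
        :stackdriver_successful_requests_count,
        'A number of successful requests to the Stackdriver Logging API')
      # ... and likewise for the failed/ingested/dropped/retried counters.
    end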
def test_metadata_loading
setup_gce_metadata_stubs
d = create_driver
d.run
assert_equal PROJECT_ID, d.instance.project_id
@@ -328,16 +354,16 @@
d.emit('msg' => log_entry(0), 'tag2' => 'test', 'data' => 5000,
'some_null_field' => nil)
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry, i|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 4, fields.size, entry
- verify_default_log_entry_text(get_string(fields['msg']), i, entry)
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal null_value, fields['some_null_field'], entry
+ verify_default_log_entry_text(fields['msg'], i, entry)
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ assert_nil fields['some_null_field'], entry
end
end
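This hunk starts the diff's main theme. Previously the shared assertions went through get_fields/get_string/get_number/get_struct indirections (their _undefined stubs are deleted near the bottom of this diff) so the same test could inspect either a REST JSON hash or a gRPC Google::Protobuf::Struct; in 0.7.16 both transports are normalized to plain Ruby hashes before verification, so payload fields are read directly and assert_nil replaces the transport-specific null_value. For orientation, the gRPC-side overrides presumably looked something like this (illustrative, following the protobuf Struct/Value JSON shape, not copied from the gem):

    # Illustrative gRPC overrides: a jsonPayload arrives as a protobuf Struct,
    # i.e. a 'fields' map whose values are Value oneofs.
    def get_fields(payload)
      payload['fields']
    end

    def get_string(field)
      field['stringValue']
    end

    def get_number(field)
      field['numberValue']
    end

    def get_struct(field)
      field['structValue']
    end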
def test_autoformat_enabled_with_stackdriver_trace_id_as_trace
[
@@ -446,26 +472,21 @@
'nil_key' => { nil => message }
)
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 7, fields.size, entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['int_key']))['1']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['int_array_key']))['[1, 2, 3, 4]']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['string_array_key']))['["a", "b", "c"]']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['hash_key']))['{"some_key"=>"some_value"}']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['mixed_key']))['{"some_key"=>["a", "b", "c"]}']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['symbol_key']))['some_symbol']), entry
- assert_equal message, get_string(get_fields(get_struct(fields \
- ['nil_key']))['']), entry
+ assert_equal message, fields['int_key']['1'], entry
+ assert_equal message, fields['int_array_key']['[1, 2, 3, 4]'], entry
+ assert_equal message, fields['string_array_key']['["a", "b", "c"]'], entry
+ assert_equal message, fields['hash_key']['{"some_key"=>"some_value"}'],
+ entry
+ assert_equal message,
+ fields['mixed_key']['{"some_key"=>["a", "b", "c"]}'], entry
+ assert_equal message, fields['symbol_key']['some_symbol'], entry
+ assert_equal message, fields['nil_key'][''], entry
end
end
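The expected inner keys above are simply Ruby's #to_s rendering of each non-string key, which is evidently how the plugin stringifies map keys before sending (note that Hash#to_s only gained spaces around => in Ruby 3.4):

    1.to_s                               # => "1"
    [1, 2, 3, 4].to_s                    # => "[1, 2, 3, 4]"
    { 'some_key' => 'some_value' }.to_s  # => '{"some_key"=>"some_value"}'
    :some_symbol.to_s                    # => "some_symbol"
    nil.to_s                             # => ""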
def test_structured_payload_json_log_default_not_parsed_text
setup_gce_metadata_stubs
@@ -495,11 +516,11 @@
d.emit(field => " \r\n \t" + json_string)
end
d.run
end
verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert !fields.key?('tag2'), 'Did not expect tag2'
assert !fields.key?('data'), 'Did not expect data'
assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
end
end
@@ -528,11 +549,11 @@
d.emit(field => 'notJSON ' + json_string)
end
d.run
end
verify_log_entries(2, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert !fields.key?('tag2'), 'Did not expect tag2'
assert !fields.key?('data'), 'Did not expect data'
assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
end
end
@@ -592,19 +613,18 @@
'"data"=>5000, "some_null_field"=>nil}',
text_payload, entry
end
else
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- json_payload = get_fields(entry['jsonPayload'])
+ json_payload = entry['jsonPayload']
assert_equal 1, json_payload.size, entry
- fields = get_fields(
- get_struct(json_payload[test_params[:field_name]]))
+ fields = json_payload[test_params[:field_name]]
assert_equal 4, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal null_value, fields['some_null_field'], entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ assert_nil fields['some_null_field'], entry
end
end
end
end
end
@@ -620,16 +640,16 @@
d.emit(field => " \r\n \t" + json_string)
end
d.run
end
verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 4, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal null_value, fields['some_null_field'], entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ assert_nil fields['some_null_field'], entry
end
end
def test_structured_payload_json_log_default_container_not_parsed
setup_gce_metadata_stubs
@@ -674,43 +694,47 @@
d.emit(container_log_entry_with_metadata(" \r\n \t" + json_string))
d.run
end
verify_log_entries(2, CONTAINER_FROM_METADATA_PARAMS, 'jsonPayload') \
do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 4, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal null_value, fields['some_null_field'], entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ assert_nil fields['some_null_field'], entry
end
end
# Verify that when the log has only one effective field (named 'log',
# 'message', or 'msg') and the field is in JSON format, the field is parsed as
# JSON and sent as jsonPayload.
def test_detect_json_auto_triggered_with_one_field
setup_gce_metadata_stubs
json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
'"data": 5000, "some_null_field": null}'
- setup_logging_stubs do
- d = create_driver(DETECT_JSON_CONFIG)
- %w(message log msg).each do |field|
- d.emit(PRESERVED_KEYS_MAP.merge(field => json_string))
+ PRESERVED_KEYS_TIMESTAMP_FIELDS.each do |timestamp_fields|
+ setup_logging_stubs do
+ @logs_sent = []
+ d = create_driver(DETECT_JSON_CONFIG)
+ %w(message log msg).each do |field|
+ d.emit(PRESERVED_KEYS_MAP.merge(
+ field => json_string).merge(timestamp_fields))
+ end
+ d.run
end
- d.run
+ expected_params = COMPUTE_PARAMS.merge(
+ labels: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE))
+ verify_log_entries(3, expected_params, 'jsonPayload') do |entry|
+ fields = entry['jsonPayload']
+ assert_equal 4, fields.size, entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ assert_nil fields['some_null_field'], entry
+ end
end
- expected_params = COMPUTE_PARAMS.merge(
- labels: COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE))
- verify_log_entries(3, expected_params, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
- assert_equal 4, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal null_value, fields['some_null_field'], entry
- end
end
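The comment above pins down the detect_json contract that the surrounding tests exercise: only when the record has a single effective field named 'log', 'message', or 'msg', and that field's value (after stripping whitespace) is a JSON object, is it promoted to jsonPayload; 'notJSON {...}' values stay text. The mechanics can be sketched roughly as (illustrative, not the plugin's actual code):

    require 'json'

    # Illustrative detect_json step for a lone 'log'/'message'/'msg' field.
    def maybe_parse_json(value)
      stripped = value.strip
      return nil unless stripped.start_with?('{') && stripped.end_with?('}')
      JSON.parse(stripped)
    rescue JSON::ParserError
      nil # e.g. 'notJSON {...}' falls back to a plain text payload
    end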
# Verify that we drop the log entries when 'require_valid_tags' is true and
# any non-string tags or tags with non-utf8 characters are detected.
def test_reject_invalid_tags_with_require_valid_tags_true
@@ -959,23 +983,19 @@
'timestampNanos' => ts.tv_nsec.to_s)
emit_index += 1
d.run
verify_log_entries(emit_index, COMPUTE_PARAMS) do |entry, i|
verify_default_log_entry_text(entry['textPayload'], i, entry)
- assert_equal_with_default entry['timestamp']['seconds'],
- expected_ts.tv_sec, 0, entry
- assert_equal_with_default \
- entry['timestamp']['nanos'],
- expected_ts.tv_nsec, 0, entry do
- # Fluentd v0.14 onwards supports nanosecond timestamp values.
- # Added a 600 ns delta to avoid flaky tests caused by
- # rounding error in double-precision floating-point numbers
- # (to account for the missing 9 bits of precision, ~512 ns).
- # See http://wikipedia.org/wiki/Double-precision_floating-point_format.
- assert_in_delta expected_ts.tv_nsec,
- entry['timestamp']['nanos'], 600, entry
- end
+ actual_timestamp = timestamp_parse(entry['timestamp'])
+ assert_equal actual_timestamp['seconds'], expected_ts.tv_sec, entry
+ # Fluentd v0.14 onwards supports nanosecond timestamp values.
+ # Added a 600 ns delta to avoid flaky tests caused by
+ # rounding error in double-precision floating-point numbers
+ # (to account for the missing 9 bits of precision, ~512 ns).
+ # See http://wikipedia.org/wiki/Double-precision_floating-point_format.
+ assert_in_delta expected_ts.tv_nsec, actual_timestamp['nanos'],
+ 600, entry
end
end
end
end
end
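The 600 ns tolerance follows from plain floating-point arithmetic: current epoch seconds sit just above 2^30, so a nanosecond-precise timestamp needs about 61 significand bits, 8 or 9 more than a double's 53, giving roughly 2^9 = 512 ns of granularity once a timestamp has passed through a float (as a pre-v0.14 Fluentd event time does). A quick self-contained check:

    # Round-tripping a nanosecond timestamp through a double loses a few
    # hundred nanoseconds, which is why the assertion above uses a delta.
    ts = Time.at(1_500_000_000, 123_456_789, :nanosecond)
    round_trip = Time.at(ts.to_f) # to_f packs sec + nsec into one double
    p ts.nsec                     # => 123456789
    p round_trip.nsec             # => off by up to a few hundred ns
    p (ts.nsec - round_trip.nsec).abs < 600 # => true, the bound used above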
@@ -987,13 +1007,13 @@
# if timestamp is not a hash it is passed through to the json payload.
d.emit('message' => log_entry(0), 'timestamp' => 'not-a-hash')
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 2, fields.size, entry
- assert_equal 'not-a-hash', get_string(fields['timestamp']), entry
+ assert_equal 'not-a-hash', fields['timestamp'], entry
end
end
# Make parse_severity public so we can test it.
class Fluent::GoogleCloudOutput # rubocop:disable Style/ClassAndModuleChildren
@@ -1080,14 +1100,14 @@
params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
params[:labels]['sent_label_1'] = 'value1'
params[:labels]['foo.googleapis.com/bar'] = 'value2'
params[:labels]['label3'] = 'value3'
verify_log_entries(1, params, 'jsonPayload') do |entry, i|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 2, fields.size, entry
- verify_default_log_entry_text(get_string(fields['message']), i, entry)
- assert_equal 'value4', get_string(fields['not_a_label']), entry
+ verify_default_log_entry_text(fields['message'], i, entry)
+ assert_equal 'value4', fields['not_a_label'], entry
end
end
def test_multiple_logs
setup_gce_metadata_stubs
@@ -1169,12 +1189,13 @@
expected_params = CONTAINER_FROM_TAG_PARAMS.merge(
labels: { "#{GKE_CONSTANTS[:service]}/stream" => 'stderr' }
) { |_, oldval, newval| oldval.merge(newval) }
verify_log_entries(1, expected_params) do |entry, i|
verify_default_log_entry_text(entry['textPayload'], i, entry)
- assert_equal K8S_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
- assert_equal K8S_NANOS, entry['timestamp']['nanos'], entry
+ actual_timestamp = timestamp_parse(entry['timestamp'])
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
assert_equal 'ERROR', entry['severity'], entry
end
end
def test_json_container_log_metadata_from_plugin
@@ -1187,17 +1208,18 @@
'5000, "severity": "WARNING"}'))
d.run
end
verify_log_entries(1, CONTAINER_FROM_METADATA_PARAMS,
'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 3, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal K8S_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
- assert_equal K8S_NANOS, entry['timestamp']['nanos'], entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ actual_timestamp = timestamp_parse(entry['timestamp'])
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
assert_equal 'WARNING', entry['severity'], entry
end
end
def test_json_container_log_metadata_from_tag
@@ -1210,17 +1232,18 @@
'"severity": "W"}'))
d.run
end
verify_log_entries(1, CONTAINER_FROM_TAG_PARAMS,
'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 3, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
- assert_equal K8S_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
- assert_equal K8S_NANOS, entry['timestamp']['nanos'], entry
+ assert_equal 'test log entry 0', fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
+ actual_timestamp = timestamp_parse(entry['timestamp'])
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
assert_equal 'WARNING', entry['severity'], entry
end
end
def test_dataproc_log
@@ -1354,28 +1377,28 @@
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
assert_equal http_request_message_with_absent_referer,
entry['httpRequest'], entry
- assert_nil get_fields(entry['jsonPayload'])['httpRequest'], entry
+ assert_nil entry['jsonPayload']['httpRequest'], entry
end
end
end
def test_http_request_with_latency
setup_gce_metadata_stubs
latency_conversion.each do |input, expected|
setup_logging_stubs do
d = create_driver
@logs_sent = []
- d.emit('httpRequest' => http_request_message.merge('latency' => input))
+ d.emit('httpRequest' => HTTP_REQUEST_MESSAGE.merge('latency' => input))
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
- assert_equal http_request_message.merge('latency' => expected),
+ assert_equal HTTP_REQUEST_MESSAGE.merge('latency' => expected),
entry['httpRequest'], entry
- assert_nil get_fields(entry['jsonPayload'])['httpRequest'], entry
+ assert_nil entry['jsonPayload']['httpRequest'], entry
end
end
end
# Skip setting latency when the field is null or has an invalid format.
@@ -1386,16 +1409,16 @@
' 123 s econds ', '1min', 'abc&^!$*('
].each do |input|
setup_logging_stubs do
d = create_driver
@logs_sent = []
- d.emit('httpRequest' => http_request_message.merge('latency' => input))
+ d.emit('httpRequest' => HTTP_REQUEST_MESSAGE.merge('latency' => input))
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'httpRequest') do |entry|
- assert_equal http_request_message, entry['httpRequest'], entry
- assert_nil get_fields(entry['jsonPayload'])['httpRequest'], entry
+ assert_equal HTTP_REQUEST_MESSAGE, entry['httpRequest'], entry
+ assert_nil entry['jsonPayload']['httpRequest'], entry
end
end
end
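Taken together, the accepted inputs here and in the previous test define the latency grammar: an optionally whitespace-padded decimal number followed by 's' and nothing else. A plausible reading of the parser these tests exercise (illustrative; the real implementation lives in the plugin, not in this test file):

    # Illustrative latency parse: '0.32s', ' 123 s ' and "\t123.5\ts\t" pass;
    # '1min', '123 s econds' and 'abc&^!$*(' are rejected (return nil).
    LATENCY_RE = /^\s*(?<sec>\d+(?:\.\d+)?)\s*s\s*$/

    def parse_latency(value)
      match = LATENCY_RE.match(value.to_s)
      return nil unless match
      total = Float(match[:sec])
      seconds = total.floor
      nanos = ((total - seconds) * 1_000_000_000).round
      # Zero components are omitted, matching the conversion table this
      # diff removes from latency_conversion below.
      { 'seconds' => seconds, 'nanos' => nanos }.reject { |_, v| v.zero? }
    end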
# Verify the default and customization of LogEntry field extraction key.
@@ -1430,11 +1453,11 @@
verify_field_key('sourceLocation',
default_key: DEFAULT_SOURCE_LOCATION_KEY,
custom_key: 'custom_source_location_key',
custom_key_config: \
CONFIG_CUSTOM_SOURCE_LOCATION_KEY_SPECIFIED,
- sample_value: source_location_message)
+ sample_value: SOURCE_LOCATION_MESSAGE)
end
def test_log_entry_span_id_field
verify_field_key('spanId',
default_key: DEFAULT_SPAN_ID_KEY,
@@ -1488,12 +1511,12 @@
end
def test_cascading_json_detection_with_log_entry_source_location_field
verify_cascading_json_detection_with_log_entry_fields(
'sourceLocation', DEFAULT_SOURCE_LOCATION_KEY,
- root_level_value: source_location_message,
- nested_level_value: source_location_message2)
+ root_level_value: SOURCE_LOCATION_MESSAGE,
+ nested_level_value: SOURCE_LOCATION_MESSAGE2)
end
def test_cascading_json_detection_with_log_entry_span_id_field
verify_cascading_json_detection_with_log_entry_fields(
'spanId', DEFAULT_SPAN_ID_KEY,
@@ -1650,15 +1673,15 @@
'5000, "severity": "WARNING"}'))
end
d.run
end
verify_log_entries(n, DOCKER_CONTAINER_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 3, fields.size, entry
- assert_equal "test log entry #{n}", get_string(fields['msg']), entry
- assert_equal 'test', get_string(fields['tag2']), entry
- assert_equal 5000, get_number(fields['data']), entry
+ assert_equal "test log entry #{n}", fields['msg'], entry
+ assert_equal 'test', fields['tag2'], entry
+ assert_equal 5000, fields['data'], entry
end
assert_requested_metadata_agent_stub("container.#{DOCKER_CONTAINER_ID}")
end
end
end
@@ -1769,14 +1792,14 @@
d.emit(test_params[:log_entry])
d.run
end
verify_log_entries(1, test_params[:expected_params],
'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 2, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['log']), entry
- assert_equal K8S_STREAM, get_string(fields['stream']), entry
+ assert_equal 'test log entry 0', fields['log'], entry
+ assert_equal K8S_STREAM, fields['stream'], entry
end
end
end
end
@@ -1866,14 +1889,14 @@
d.emit(test_params[:log_entry])
d.run
end
verify_log_entries(1, test_params[:expected_params],
'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 2, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['log']), entry
- assert_equal K8S_STREAM, get_string(fields['stream']), entry
+ assert_equal 'test log entry 0', fields['log'], entry
+ assert_equal K8S_STREAM, fields['stream'], entry
end
end
end
end
@@ -1926,14 +1949,14 @@
d.emit(test_params[:log_entry])
d.run
end
verify_log_entries(1, test_params[:expected_params],
'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_equal 2, fields.size, entry
- assert_equal 'test log entry 0', get_string(fields['log']), entry
- assert_equal K8S_STREAM, get_string(fields['stream']), entry
+ assert_equal 'test log entry 0', fields['log'], entry
+ assert_equal K8S_STREAM, fields['stream'], entry
end
end
end
end
@@ -1949,14 +1972,14 @@
d.run
end
verify_log_entries(1, DOCKER_CONTAINER_PARAMS_NO_STREAM) do |entry, i|
verify_default_log_entry_text(entry['textPayload'], i, entry)
# The timestamp from the log entry's 'time' field should be set properly.
+ actual_timestamp = timestamp_parse(entry['timestamp'])
assert_equal DOCKER_CONTAINER_SECONDS_EPOCH,
- entry['timestamp']['seconds'], entry
- assert_equal DOCKER_CONTAINER_NANOS,
- entry['timestamp']['nanos'], entry
+ actual_timestamp['seconds'], entry
+ assert_equal DOCKER_CONTAINER_NANOS, actual_timestamp['nanos'], entry
end
assert_requested_metadata_agent_stub(
"#{DOCKER_CONTAINER_LOCAL_RESOURCE_ID_PREFIX}.#{DOCKER_CONTAINER_NAME}")
end
end
@@ -2379,12 +2402,13 @@
n.times { |i| d.emit(log_entry_factory.call(log_entry(i))) }
d.run
end
verify_log_entries(n, expected_params) do |entry, i|
verify_default_log_entry_text(entry['textPayload'], i, entry)
- assert_equal K8S_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
- assert_equal K8S_NANOS, entry['timestamp']['nanos'], entry
+ actual_timestamp = timestamp_parse(entry['timestamp'])
+ assert_equal K8S_SECONDS_EPOCH, actual_timestamp['seconds'], entry
+ assert_equal K8S_NANOS, actual_timestamp['nanos'], entry
assert_equal CONTAINER_SEVERITY, entry['severity'], entry
end
end
end
@@ -2392,17 +2416,17 @@
{
# The keys are the names of fields in the payload that we are extracting
# LogEntry info from. The values are lists of two elements: the name of
# the subfield in the LogEntry object and the expected value of that field.
DEFAULT_HTTP_REQUEST_KEY => [
- 'httpRequest', http_request_message],
+ 'httpRequest', HTTP_REQUEST_MESSAGE],
DEFAULT_LABELS_KEY => [
'labels', COMPUTE_PARAMS[:labels].merge(LABELS_MESSAGE)],
DEFAULT_OPERATION_KEY => [
'operation', OPERATION_MESSAGE],
DEFAULT_SOURCE_LOCATION_KEY => [
- 'sourceLocation', source_location_message]
+ 'sourceLocation', SOURCE_LOCATION_MESSAGE]
}
end
def verify_subfields_from_record(payload_key, check_exact_entry_labels = true)
destination_key, payload_value = log_entry_subfields_params[payload_key]
@@ -2414,11 +2438,11 @@
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, destination_key,
check_exact_entry_labels) do |entry|
assert_equal payload_value, entry[destination_key], entry
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_nil fields[payload_key], entry
end
end
def verify_subfields_partial_from_record(payload_key)
@@ -2430,13 +2454,13 @@
d.emit(payload_key => payload_value.merge('otherKey' => 'value'))
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, destination_key) do |entry|
assert_equal payload_value, entry[destination_key], entry
- fields = get_fields(entry['jsonPayload'])
- request = get_fields(get_struct(fields[payload_key]))
- assert_equal 'value', get_string(request['otherKey']), entry
+ fields = entry['jsonPayload']
+ request = fields[payload_key]
+ assert_equal 'value', request['otherKey'], entry
end
end
def verify_subfields_removed_when_not_hash(payload_key)
destination_key = log_entry_subfields_params[payload_key][0]
@@ -2447,11 +2471,11 @@
d.emit(payload_key => 'a_string')
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
# The malformed field has been removed from the payload.
- assert_true get_fields(entry['jsonPayload']).empty?, entry
+ assert_true entry['jsonPayload'].empty?, entry
# No additional labels.
assert_equal COMPUTE_PARAMS[:labels].size,
entry[destination_key].size, entry
end
end
@@ -2465,12 +2489,12 @@
d.emit(payload_key => 'a_string')
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
# Verify that we leave the malformed field as it is.
- field = get_fields(entry['jsonPayload'])[payload_key]
- assert_equal 'a_string', get_string(field), entry
+ field = entry['jsonPayload'][payload_key]
+ assert_equal 'a_string', field, entry
assert_false entry.key?(destination_key), entry
end
end
def verify_subfields_when_nil(payload_key)
@@ -2482,11 +2506,11 @@
d.emit(payload_key => nil)
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
- fields = get_fields(entry['jsonPayload'])
+ fields = entry['jsonPayload']
assert_false fields.key?(payload_key), entry
if payload_key == DEFAULT_LABELS_KEY
# No additional labels.
assert_equal COMPUTE_PARAMS[:labels].size,
entry[destination_key].size, entry
@@ -2574,14 +2598,14 @@
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload', false) do |entry|
assert_equal_with_default \
entry[log_entry_field], expected_value, default_value,
"Index #{index} failed. #{expected_value} is expected for " \
"#{log_entry_field} field."
- payload_fields = get_fields(entry['jsonPayload'])
+ payload_fields = entry['jsonPayload']
assert_equal structured_log_entry.size, payload_fields.size
payload_fields.each do |key, value|
- assert_equal structured_log_entry[key], get_string(value)
+ assert_equal structured_log_entry[key], value
end
end
end
end
@@ -2630,50 +2654,34 @@
d.emit(input[:emitted_log])
d.run
end
verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload', false) do |entry|
assert_equal input[:expected_field_value], entry[log_entry_field], input
- payload_fields = get_fields(entry['jsonPayload'])
+ payload_fields = entry['jsonPayload']
assert_equal input[:expected_payload].size, payload_fields.size, input
payload_fields.each do |key, value|
- assert_hash_equal_json(input[:expected_payload][key], value)
+ assert_equal input[:expected_payload][key], value
end
end
end
end
# Replace the 'referer' field with nil.
def http_request_message_with_nil_referer
- http_request_message.merge('referer' => nil)
+ HTTP_REQUEST_MESSAGE.merge('referer' => nil)
end
# Unset the 'referer' field.
def http_request_message_with_absent_referer
- http_request_message.reject do |k, _|
+ HTTP_REQUEST_MESSAGE.reject do |k, _|
k == 'referer'
end
end
# The conversions from user input to output.
def latency_conversion
- {
- '32 s' => { 'seconds' => 32 },
- '32s' => { 'seconds' => 32 },
- '0.32s' => { 'nanos' => 320_000_000 },
- ' 123 s ' => { 'seconds' => 123 },
- '1.3442 s' => { 'seconds' => 1, 'nanos' => 344_200_000 },
-
- # Test whitespace.
- # \t: tab. \r: carriage return. \n: line break.
- # \v: vertical whitespace. \f: form feed.
- "\t123.5\ts\t" => { 'seconds' => 123, 'nanos' => 500_000_000 },
- "\r123.5\rs\r" => { 'seconds' => 123, 'nanos' => 500_000_000 },
- "\n123.5\ns\n" => { 'seconds' => 123, 'nanos' => 500_000_000 },
- "\v123.5\vs\v" => { 'seconds' => 123, 'nanos' => 500_000_000 },
- "\f123.5\fs\f" => { 'seconds' => 123, 'nanos' => 500_000_000 },
- "\r123.5\ts\f" => { 'seconds' => 123, 'nanos' => 500_000_000 }
- }
+ _undefined
end
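One plausible reason latency_conversion is gutted here is that the expected outputs are transport-specific, so the table has to live in the per-format files: in the protobuf JSON mapping used by REST, a Duration serializes as a decimal string with an 's' suffix, while in gRPC it stays a seconds/nanos message. Illustrative expected values for one input (assumed, not copied from the per-format test files):

    # '1.3442 s' as each transport would plausibly expect it:
    rest_expected = '1.3442s'                                  # JSON Duration
    grpc_expected = { 'seconds' => 1, 'nanos' => 344_200_000 } # Duration msg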
# This module expects the methods below to be overridden.
# Create a Fluentd output test driver with the Google Cloud Output plugin.
@@ -2713,56 +2721,18 @@
metric.get(labels)
end
assert_equal(expected_value, metric_value)
end
- # Get the fields of the payload.
- def get_fields(_payload)
- _undefined
- end
-
- # Get the value of a struct field.
- def get_struct(_field)
- _undefined
- end
-
- # Get the value of a string field.
- def get_string(_field)
- _undefined
- end
-
- # Get the value of a number field.
- def get_number(_field)
- _undefined
- end
-
- # The null value.
- def null_value(_field)
- _undefined
- end
-
# Defined in specific gRPC or REST files.
- def http_request_message
- _undefined
- end
-
- # Defined in specific gRPC or REST files.
- def source_location_message
- _undefined
- end
-
- # Defined in specific gRPC or REST files.
- def source_location_message2
- _undefined
- end
-
- # Defined in specific gRPC or REST files.
def expected_operation_message2
_undefined
end
+ # Parse timestamp and convert it to a hash with the "seconds" and "nanos" keys
+ # if necessary.
# Defined in specific gRPC or REST files.
- def assert_hash_equal_json(_expected, _actual)
+ def timestamp_parse(_timestamp)
_undefined
end
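timestamp_parse is the one transport hook this refactor still needs: the shared assertions throughout this diff compare against seconds/nanos pairs, while the REST mapping renders google.protobuf.Timestamp as an RFC3339 string. A plausible REST-side override (illustrative; the real one lives in the REST-specific test file):

    require 'time'

    # Illustrative REST override: turn '2018-09-25T11:00:00.123456789Z'
    # into the { 'seconds' => ..., 'nanos' => ... } hash the shared
    # assertions expect. The gRPC side presumably already has that shape
    # and can pass the value through.
    def timestamp_parse(timestamp)
      return timestamp if timestamp.is_a?(Hash)
      parsed = Time.parse(timestamp)
      { 'seconds' => parsed.to_i, 'nanos' => parsed.nsec }
    end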
def _undefined
raise "Method #{__callee__} is unimplemented and needs to be overridden."