test/plugin/base_test.rb in fluent-plugin-google-cloud-0.6.6 vs test/plugin/base_test.rb in fluent-plugin-google-cloud-0.6.7.pre.1

- old
+ new

@@ -254,14 +254,14 @@
       d = create_driver
       d.emit('msg' => log_entry(0), 'tag2' => 'test', 'data' => 5000,
              'some_null_field' => nil)
       d.run
     end
-    verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry|
+    verify_log_entries(1, COMPUTE_PARAMS, 'jsonPayload') do |entry, i|
       fields = get_fields(entry['jsonPayload'])
       assert_equal 4, fields.size, entry
-      assert_equal 'test log entry 0', get_string(fields['msg']), entry
+      verify_default_log_entry_text(get_string(fields['msg']), i, entry)
       assert_equal 'test', get_string(fields['tag2']), entry
       assert_equal 5000, get_number(fields['data']), entry
       assert_equal null_value, fields['some_null_field'], entry
     end
   end
@@ -300,54 +300,154 @@
       assert_equal message, get_string(get_fields(get_struct(fields \
                    ['nil_key']))['']), entry
     end
   end

-  def test_structured_payload_json_log
+  def test_structured_payload_json_log_default_not_parsed_text
     setup_gce_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
     setup_logging_stubs do
-      d = create_driver
-      d = create_driver(APPLICATION_DEFAULT_CONFIG)
-      json_string = '{"msg": "test log entry 0", "tag2": "test", "data": 5000}'
       d.emit('message' => 'notJSON ' + json_string)
       d.emit('message' => json_string)
-      d.emit('message' => "\t" + json_string)
-      d.emit('message' => ' ' + json_string)
+      d.emit('message' => " \r\n \t" + json_string)
       d.run
     end
-    verify_log_entries(4, COMPUTE_PARAMS, '') do |entry|
-      assert entry.key?('textPayload'), 'Entry did not have textPayload'
+    verify_log_entries(3, COMPUTE_PARAMS, 'textPayload') do
+      # Only check for the existence of textPayload.
     end
   end

-  def test_structured_payload_json_container_log
+  def test_structured_payload_json_log_default_not_parsed_json
     setup_gce_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(APPLICATION_DEFAULT_CONFIG)
+      %w(log msg).each do |field|
+        d.emit(field => 'notJSON ' + json_string)
+        d.emit(field => json_string)
+        d.emit(field => " \r\n \t" + json_string)
+      end
+      d.run
+    end
+    verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
+      fields = get_fields(entry['jsonPayload'])
+      assert !fields.key?('tag2'), 'Did not expect tag2'
+      assert !fields.key?('data'), 'Did not expect data'
+      assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
+    end
+  end
+
+  def test_structured_payload_json_log_detect_json_not_parsed_text
+    setup_gce_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(DETECT_JSON_CONFIG)
+      d.emit('message' => 'notJSON ' + json_string)
+      d.run
+    end
+    verify_log_entries(1, COMPUTE_PARAMS, 'textPayload') do
+      # Only check for the existence of textPayload.
+    end
+  end
+
+  def test_structured_payload_json_log_detect_json_not_parsed_json
+    setup_gce_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(DETECT_JSON_CONFIG)
+      %w(log msg).each do |field|
+        d.emit(field => 'notJSON ' + json_string)
+      end
+      d.run
+    end
+    verify_log_entries(2, COMPUTE_PARAMS, 'jsonPayload') do |entry|
+      fields = get_fields(entry['jsonPayload'])
+      assert !fields.key?('tag2'), 'Did not expect tag2'
+      assert !fields.key?('data'), 'Did not expect data'
+      assert !fields.key?('some_null_field'), 'Did not expect some_null_field'
+    end
+  end
+
+  def test_structured_payload_json_log_detect_json_parsed
+    setup_gce_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(DETECT_JSON_CONFIG)
+      %w(message log msg).each do |field|
+        d.emit(field => json_string)
+        d.emit(field => " \r\n \t" + json_string)
+      end
+      d.run
+    end
+    verify_log_entries(6, COMPUTE_PARAMS, 'jsonPayload') do |entry|
+      fields = get_fields(entry['jsonPayload'])
+      assert_equal 4, fields.size, entry
+      assert_equal 'test log entry 0', get_string(fields['msg']), entry
+      assert_equal 'test', get_string(fields['tag2']), entry
+      assert_equal 5000, get_number(fields['data']), entry
+      assert_equal null_value, fields['some_null_field'], entry
+    end
+  end
+
+  def test_structured_payload_json_log_default_container_not_parsed
+    setup_gce_metadata_stubs
     setup_container_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
     setup_logging_stubs do
       d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
-      json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
-                    '"data": 5000, "some_null_field": null}'
       d.emit(container_log_entry_with_metadata('notJSON' + json_string))
       d.emit(container_log_entry_with_metadata(json_string))
       d.emit(container_log_entry_with_metadata(" \r\n \t" + json_string))
       d.run
     end
-    log_index = 0
-    verify_log_entries(
-      3, CONTAINER_FROM_METADATA_PARAMS, '') do |entry|
-      log_index += 1
-      if log_index == 1
-        assert entry.key?('textPayload'), 'Entry did not have textPayload'
-      else
-        assert entry.key?('jsonPayload'), 'Entry did not have jsonPayload'
+    verify_log_entries(3, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
+      # Only check for the existence of textPayload.
+    end
+  end
+
+  def test_structured_payload_json_log_detect_json_container_not_parsed
+    setup_gce_metadata_stubs
+    setup_container_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
+      d.emit(container_log_entry_with_metadata('notJSON' + json_string))
+      d.run
+    end
+    verify_log_entries(1, CONTAINER_FROM_METADATA_PARAMS, 'textPayload') do
+      # Only check for the existence of textPayload.
+    end
+  end
+
+  def test_structured_payload_json_log_detect_json_container_parsed
+    setup_gce_metadata_stubs
+    setup_container_metadata_stubs
+    json_string = '{"msg": "test log entry 0", "tag2": "test", ' \
+                  '"data": 5000, "some_null_field": null}'
+    setup_logging_stubs do
+      d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
+      d.emit(container_log_entry_with_metadata(json_string))
+      d.emit(container_log_entry_with_metadata(" \r\n \t" + json_string))
+      d.run
+    end
+    verify_log_entries(2, CONTAINER_FROM_METADATA_PARAMS, 'jsonPayload') \
+      do |entry|
         fields = get_fields(entry['jsonPayload'])
         assert_equal 4, fields.size, entry
         assert_equal 'test log entry 0', get_string(fields['msg']), entry
         assert_equal 'test', get_string(fields['tag2']), entry
         assert_equal 5000, get_number(fields['data']), entry
         assert_equal null_value, fields['some_null_field'], entry
       end
-    end
   end

   # Verify that we drop the log entries when 'require_valid_tags' is true and
   # any non-string tags or tags with non-utf8 characters are detected.
   def test_reject_invalid_tags_with_require_valid_tags_true
@@ -401,11 +501,11 @@
         d.emit(container_log_entry_with_metadata(log_entry(0), container_name))
         d.run
       end
       params = CONTAINER_FROM_METADATA_PARAMS.merge(
         labels: CONTAINER_FROM_METADATA_PARAMS[:labels].merge(
-          "#{CONTAINER_CONSTANTS[:service]}/container_name" =>
+          "#{GKE_CONSTANTS[:service]}/container_name" =>
             URI.decode(encoded_name)),
         log_name: encoded_name)
       verify_log_entries(0, params, 'textPayload')
     end
   end
@@ -523,11 +623,12 @@
           emit_index += 1
           d.run
         end
       end
       verify_index = 0
-      verify_log_entries(emit_index, COMPUTE_PARAMS) do |entry|
+      verify_log_entries(emit_index, COMPUTE_PARAMS) do |entry, i|
+        verify_default_log_entry_text(entry['textPayload'], i, entry)
         assert_equal_with_default entry['timestamp']['seconds'],
                                   expected_ts[verify_index].tv_sec, 0, entry
         assert_equal_with_default entry['timestamp']['nanos'],
                                   expected_ts[verify_index].tv_nsec, 0, entry do
           # Fluentd v0.14 onwards supports nanosecond timestamp values.
@@ -641,14 +742,14 @@
     # make a deep copy of COMPUTE_PARAMS and add the parsed labels.
     params = Marshal.load(Marshal.dump(COMPUTE_PARAMS))
     params[:labels]['sent_label_1'] = 'value1'
     params[:labels]['foo.googleapis.com/bar'] = 'value2'
     params[:labels]['label3'] = 'value3'
-    verify_log_entries(1, params, 'jsonPayload') do |entry|
+    verify_log_entries(1, params, 'jsonPayload') do |entry, i|
       fields = get_fields(entry['jsonPayload'])
       assert_equal 2, fields.size, entry
-      assert_equal 'test log entry 0', get_string(fields['message']), entry
+      verify_default_log_entry_text(get_string(fields['message']), i, entry)
       assert_equal 'value4', get_string(fields['not_a_label']), entry
     end
   end

   def test_multiple_logs
@@ -727,24 +828,25 @@
       d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
       d.emit(container_log_entry(log_entry(0), 'stderr'))
       d.run
     end
     expected_params = CONTAINER_FROM_TAG_PARAMS.merge(
-      labels: { "#{CONTAINER_CONSTANTS[:service]}/stream" => 'stderr' }
+      labels: { "#{GKE_CONSTANTS[:service]}/stream" => 'stderr' }
     ) { |_, oldval, newval| oldval.merge(newval) }
-    verify_log_entries(1, expected_params) do |entry|
+    verify_log_entries(1, expected_params) do |entry, i|
+      verify_default_log_entry_text(entry['textPayload'], i, entry)
       assert_equal CONTAINER_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
       assert_equal CONTAINER_NANOS, entry['timestamp']['nanos'], entry
       assert_equal 'ERROR', entry['severity'], entry
     end
   end

   def test_json_container_log_metadata_from_plugin
     setup_gce_metadata_stubs
     setup_container_metadata_stubs
     setup_logging_stubs do
-      d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
+      d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
       d.emit(container_log_entry_with_metadata('{"msg": "test log entry 0", ' \
                                                '"tag2": "test", "data": ' \
                                                '5000, "severity": "WARNING"}'))
       d.run
     end
@@ -763,11 +865,11 @@

   def test_json_container_log_metadata_from_tag
     setup_gce_metadata_stubs
     setup_container_metadata_stubs
     setup_logging_stubs do
-      d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
+      d = create_driver(DETECT_JSON_CONFIG, CONTAINER_TAG)
       d.emit(container_log_entry('{"msg": "test log entry 0", ' \
                                  '"tag2": "test", "data": 5000, ' \
                                  '"severity": "W"}'))
       d.run
     end
@@ -795,11 +897,12 @@
         d.instance_variable_get('@entries').clear
         @logs_sent = []
         n.times { |i| d.emit(cloudfunctions_log_entry(i)) }
         d.run
       end
-      verify_log_entries(n, CLOUDFUNCTIONS_PARAMS) do |entry|
+      verify_log_entries(n, CLOUDFUNCTIONS_PARAMS) do |entry, i|
+        verify_default_log_entry_text(entry['textPayload'], i, entry)
         assert_equal 'DEBUG', entry['severity'],
                      "Test with #{n} logs failed. \n#{entry}"
       end
     end
   end
@@ -836,17 +939,16 @@
         # do it manually here.
         d.instance_variable_get('@entries').clear
         n.times { |i| d.emit(cloudfunctions_log_entry(i)) }
         d.run
       end
-      i = 0
-      verify_log_entries(n, CONTAINER_FROM_TAG_PARAMS, '') do |entry|
-        assert_equal '[D][2015-09-25T12:34:56.789Z][123-0] test log entry ' \
-                     "#{i}", entry['textPayload'],
-                     "Test with #{n} logs failed. \n#{entry}"
-        i += 1
-      end
+      verify_log_entries(n, CONTAINER_FROM_TAG_PARAMS, 'textPayload') \
+        do |entry, i|
+        assert_equal '[D][2015-09-25T12:34:56.789Z][123-0] test log entry ' \
+                     "#{i}", entry['textPayload'],
+                     "Test with #{n} logs failed. \n#{entry}"
+      end
     end
   end

   def test_dataproc_log
     setup_gce_metadata_stubs
@@ -1002,10 +1104,201 @@
         end
       end
     end
   end

+  # Metadata Agent related tests.
+
+  # Test enable_metadata_agent not set or set to false.
+  def test_configure_enable_metadata_agent_default_and_false
+    setup_gce_metadata_stubs
+    [create_driver, create_driver(DISABLE_METADATA_AGENT_CONFIG)].each do |d|
+      assert_false d.instance.instance_variable_get(:@enable_metadata_agent)
+    end
+  end
+
+  # Test enable_metadata_agent set to true.
+  def test_configure_enable_metadata_agent_true
+    new_stub_context do
+      setup_gce_metadata_stubs
+      setup_metadata_agent_stubs
+      d = create_driver(ENABLE_METADATA_AGENT_CONFIG)
+      assert_true d.instance.instance_variable_get(:@enable_metadata_agent)
+      assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+    end
+  end
+
+  # Test that an implicit monitored resource can be retrieved from Metadata
+  # Agent with an empty string as the local_resource_id.
+  def test_retrieve_implicit_monitored_resource
+    # GCE metadata stubs has VM_ID and ZONE, while the Metadata Agent stub has
+    # METADATA_VM_ID and METADATA_ZONE.
+    new_stub_context do
+      setup_gce_metadata_stubs
+      setup_metadata_agent_stubs
+      setup_logging_stubs do
+        d = create_driver(ENABLE_METADATA_AGENT_CONFIG)
+        d.emit('message' => log_entry(0))
+        d.run
+      end
+      verify_log_entries(1, COMPUTE_PARAMS_WITH_METADATA_VM_ID_AND_ZONE)
+      assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+    end
+  end
+
+  # Docker Container.
+
+  # Test textPayload logs from Docker container stdout / stderr.
+  def test_docker_container_stdout_stderr_logs_text_payload
+    [1, 2, 3, 5, 11, 50].each do |n|
+      new_stub_context do
+        setup_gce_metadata_stubs
+        setup_metadata_agent_stubs
+        setup_logging_stubs do
+          d = create_driver(DOCKER_CONTAINER_CONFIG)
+          n.times do |i|
+            d.emit(docker_container_stdout_stderr_log_entry(log_entry(i)))
+          end
+          d.run
+        end
+        verify_log_entries(n, DOCKER_CONTAINER_PARAMS)
+        assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+        assert_requested_metadata_agent_stub("container.#{DOCKER_CONTAINER_ID}")
+      end
+    end
+  end
+
+  # Test jsonPayload logs from Docker container stdout / stderr.
+  def test_docker_container_stdout_stderr_logs_json_payload
+    [1, 2, 3, 5, 11, 50].each do |n|
+      new_stub_context do
+        setup_gce_metadata_stubs
+        setup_metadata_agent_stubs
+        setup_logging_stubs do
+          d = create_driver(DOCKER_CONTAINER_CONFIG)
+          n.times do
+            d.emit(docker_container_stdout_stderr_log_entry(
+                     '{"msg": "test log entry ' \
+                     "#{n}" \
+                     '", "tag2": "test", "data": ' \
+                     '5000, "severity": "WARNING"}'))
+          end
+          d.run
+        end
+        verify_log_entries(n, DOCKER_CONTAINER_PARAMS, 'jsonPayload') do |entry|
+          fields = get_fields(entry['jsonPayload'])
+          assert_equal 3, fields.size, entry
+          assert_equal "test log entry #{n}", get_string(fields['msg']), entry
+          assert_equal 'test', get_string(fields['tag2']), entry
+          assert_equal 5000, get_number(fields['data']), entry
+        end
+        assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+        assert_requested_metadata_agent_stub("container.#{DOCKER_CONTAINER_ID}")
+      end
+    end
+  end
+
+  # Test logs from applications running in Docker containers. These logs have
+  # the label "logging.googleapis.com/local_resource_id" set in the format of
+  # "containerName.<container_name>".
+  def test_docker_container_application_logs
+    new_stub_context do
+      setup_gce_metadata_stubs
+      setup_metadata_agent_stubs
+      setup_logging_stubs do
+        # Metadata Agent is not enabled. Will call Docker Remote API for
+        # container info.
+        d = create_driver(ENABLE_METADATA_AGENT_CONFIG)
+        d.emit(docker_container_application_log_entry(log_entry(0)))
+        d.run
+      end
+      verify_log_entries(1, DOCKER_CONTAINER_PARAMS_WITH_NO_STREAM)
+      assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+      assert_requested_metadata_agent_stub(
+        "containerName.#{DOCKER_CONTAINER_NAME}")
+    end
+  end
+
+  # Test that the 'time' field from the json record is extracted and set to
+  # entry.timestamp for Docker container logs.
+  def test_time_field_extraction_for_docker_container_logs
+    new_stub_context do
+      setup_gce_metadata_stubs
+      setup_metadata_agent_stubs
+      setup_logging_stubs do
+        d = create_driver(ENABLE_METADATA_AGENT_CONFIG)
+        d.emit(docker_container_application_log_entry(log_entry(0)))
+        d.run
+      end
+      verify_log_entries(1, DOCKER_CONTAINER_PARAMS_WITH_NO_STREAM) do |entry|
+        # Timestamp in 'time' field from log entry should be set properly.
+        assert_equal DOCKER_CONTAINER_SECONDS_EPOCH,
+                     entry['timestamp']['seconds'], entry
+        assert_equal DOCKER_CONTAINER_NANOS,
+                     entry['timestamp']['nanos'], entry
+      end
+      assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+      assert_requested_metadata_agent_stub(
+        "containerName.#{DOCKER_CONTAINER_NAME}")
+    end
+  end
+
+  # Test that the 'source' field is properly extracted from the record json and
+  # set as a common label 'stream'. Also entry.severity is set accordingly for
+  # Docker container logs.
+  def test_source_and_severity_for_docker_container_logs
+    {
+      docker_container_stdout_stderr_log_entry(
+        log_entry(0), DOCKER_CONTAINER_STREAM_STDOUT) =>
+        DOCKER_CONTAINER_PARAMS,
+      docker_container_stdout_stderr_log_entry(
+        log_entry(0), DOCKER_CONTAINER_STREAM_STDERR) =>
+        DOCKER_CONTAINER_PARAMS_WITH_STREAM_STDERR,
+      docker_container_application_log_entry(log_entry(0)) =>
+        DOCKER_CONTAINER_PARAMS_WITH_NO_STREAM,
+      docker_container_application_log_entry(log_entry(0)) \
+        .merge('severity' => 'warning') =>
+        DOCKER_CONTAINER_PARAMS_WITH_NO_STREAM
+    }.each do |log_entry, expected_params|
+      new_stub_context do
+        setup_gce_metadata_stubs
+        setup_metadata_agent_stubs
+        setup_logging_stubs do
+          d = create_driver(DOCKER_CONTAINER_CONFIG)
+          d.emit(log_entry)
+          d.run
+        end
+        verify_log_entries(1, expected_params)
+      end
+    end
+  end
+
+  # Test GKE container logs. These logs have the label
+  # "logging.googleapis.com/local_resource_id" set in the format of
+  # "gke_containerName.<namespace_id>.<pod_name>.<container_name>".
+  def test_gke_container_logs
+    [1, 2, 3, 5, 11, 50].each do |n|
+      new_stub_context do
+        setup_gce_metadata_stubs
+        setup_container_metadata_stubs
+        setup_metadata_agent_stubs
+        setup_logging_stubs do
+          d = create_driver(ENABLE_METADATA_AGENT_CONFIG)
+          n.times do |i|
+            d.emit(gke_container_log_entry(log_entry(i)))
+          end
+          d.run
+        end
+        verify_log_entries(n, CONTAINER_FROM_APPLICATION_PARAMS)
+        assert_requested_metadata_agent_stub(IMPLICIT_LOCAL_RESOURCE_ID)
+        assert_requested_metadata_agent_stub(
+          "gke_containerName.#{CONTAINER_NAMESPACE_ID}.#{CONTAINER_POD_NAME}." \
+          "#{CONTAINER_CONTAINER_NAME}")
+      end
+    end
+  end
+
 private

   def stub_metadata_request(metadata_path, response_body)
     stub_request(:get, 'http://169.254.169.254/computeMetadata/v1/' +
                  metadata_path)
@@ -1117,10 +1410,37 @@

   def setup_prometheus
     Prometheus::Client.registry.instance_variable_set('@metrics', {})
   end

+  # Metadata Agent.
+
+  def metadata_request_url(local_resource_id)
+    "#{DEFAULT_METADATA_AGENT_URL}/monitoredResource/#{local_resource_id}"
+  end
+
+  # Provide a stub context that initializes @logs_sent, executes the block and
+  # resets WebMock at the end.
+  def new_stub_context
+    @logs_sent = []
+    yield
+    WebMock.reset!
+  end
+
+  def setup_metadata_agent_stubs
+    MONITORED_RESOURCE_STUBS.each do |local_resource_id, resource|
+      stub_request(:get, metadata_request_url(local_resource_id))
+        .to_return(status: 200, body: resource)
+    end
+  end
+
+  def assert_requested_metadata_agent_stub(local_resource_id)
+    assert_requested :get, metadata_request_url(local_resource_id)
+  end
+
+  # GKE Container.
+
   def container_tag_with_container_name(container_name)
     "kubernetes.#{CONTAINER_POD_NAME}_#{CONTAINER_NAMESPACE_NAME}_" \
       "#{container_name}"
   end
@@ -1149,10 +1469,44 @@
       stream: stream,
       time: CONTAINER_TIMESTAMP
     }
   end

+  def gke_container_log_entry(log)
+    {
+      log: log,
+      LOCAL_RESOURCE_ID_KEY =>
+        "gke_containerName.#{CONTAINER_NAMESPACE_ID}" \
+        ".#{CONTAINER_POD_NAME}.#{CONTAINER_CONTAINER_NAME}"
+    }
+  end
+
+  # Docker Container.
+
+  def docker_container_stdout_stderr_log_entry(
+      log, stream = DOCKER_CONTAINER_STREAM_STDOUT)
+    severity = if stream == DOCKER_CONTAINER_STREAM_STDOUT
+                 'INFO'
+               else
+                 'ERROR'
+               end
+    {
+      log: log,
+      source: stream,
+      severity: severity,
+      LOCAL_RESOURCE_ID_KEY => "container.#{DOCKER_CONTAINER_ID}"
+    }
+  end
+
+  def docker_container_application_log_entry(log)
+    {
+      log: log,
+      time: DOCKER_CONTAINER_TIMESTAMP,
+      LOCAL_RESOURCE_ID_KEY => "containerName.#{DOCKER_CONTAINER_NAME}"
+    }
+  end
+
   def cloudfunctions_log_entry(i)
     {
       stream: 'stdout',
       log: '[D][2015-09-25T12:34:56.789Z][123-0] ' + log_entry(i)
     }
@@ -1187,11 +1541,11 @@
       message: log_entry(i)
     }
   end

   def log_entry(i)
-    'test log entry ' + i.to_s
+    "test log entry #{i}"
   end

   def check_labels(labels, expected_labels)
     labels.each do |key, value|
       assert value.is_a?(String), "Value #{value} for label #{key} " \
@@ -1203,22 +1557,23 @@
     assert_equal expected_labels.length, labels.length, 'Expected ' \
       "#{expected_labels.length} labels: #{expected_labels}, got " \
       "#{labels.length} labels: #{labels}"
   end

+  def verify_default_log_entry_text(text, i, entry)
+    assert_equal "test log entry #{i}", text,
+                 "Entry ##{i} had unexpected text: #{entry}"
+  end
+
   # The caller can optionally provide a block which is called for each entry.
   def verify_json_log_entries(n, params, payload_type = 'textPayload')
     i = 0
     @logs_sent.each do |request|
       request['entries'].each do |entry|
         unless payload_type.empty?
-          assert entry.key?(payload_type), 'Entry did not contain expected ' \
-            "#{payload_type} key: " + entry.to_s
-          # Check the payload for textPayload, otherwise it's up to the caller.
-          if payload_type == 'textPayload'
-            assert_equal "test log entry #{i}", entry['textPayload'], request
-          end
+          assert entry.key?(payload_type), "Entry ##{i} did not contain " \
+            "expected #{payload_type} key: #{entry}"
         end
         # per-entry resource or log_name overrides the corresponding field
         # from the request. Labels are merged, with the per-entry label
         # taking precedence in case of overlap.
@@ -1231,11 +1586,16 @@
         assert_equal \
           "projects/#{params[:project_id]}/logs/#{params[:log_name]}", log_name
         assert_equal params[:resource][:type], resource['type']
         check_labels resource['labels'], params[:resource][:labels]
         check_labels labels, params[:labels]
-        yield(entry) if block_given?
+        if block_given?
+          yield(entry, i)
+        elsif payload_type == 'textPayload'
+          # Check the payload for textPayload, otherwise it's up to the caller.
+          verify_default_log_entry_text(entry['textPayload'], i, entry)
+        end
         i += 1
         assert i <= n, "Number of entries #{i} exceeds expected number #{n}"
       end
     end
     assert i == n, "Number of entries #{i} does not match expected number #{n}"
@@ -1249,10 +1609,11 @@
     setup_logging_stubs do
       d = create_driver(APPLICATION_DEFAULT_CONFIG, CONTAINER_TAG)
       n.times { |i| d.emit(log_entry_factory.call(log_entry(i))) }
       d.run
     end
-    verify_log_entries(n, expected_params) do |entry|
+    verify_log_entries(n, expected_params) do |entry, i|
+      verify_default_log_entry_text(entry['textPayload'], i, entry)
       assert_equal CONTAINER_SECONDS_EPOCH, entry['timestamp']['seconds'], entry
       assert_equal CONTAINER_NANOS, entry['timestamp']['nanos'], entry
       assert_equal CONTAINER_SEVERITY, entry['severity'], entry
     end