test/test_out_s3.rb in fluent-plugin-s3-0.6.6 vs test/test_out_s3.rb in fluent-plugin-s3-0.6.7
- old
+ new
@@ -2,10 +2,11 @@
require 'fluent/plugin/out_s3'
require 'test/unit/rr'
require 'zlib'
require 'fileutils'
+require 'timecop'
class S3OutputTest < Test::Unit::TestCase
def setup
require 'aws-sdk-resources'
Fluent::Test.setup
@@ -48,10 +49,11 @@
assert_equal 'log', d.instance.path
assert_equal 'gz', d.instance.instance_variable_get(:@compressor).ext
assert_equal 'application/x-gzip', d.instance.instance_variable_get(:@compressor).content_type
assert_equal false, d.instance.force_path_style
assert_equal nil, d.instance.compute_checksums
+ assert_equal nil, d.instance.signature_version
end
def test_s3_endpoint_with_valid_endpoint
d = create_driver(CONFIG + 's3_endpoint riak-cs.example.com')
assert_equal 'riak-cs.example.com', d.instance.s3_endpoint
@@ -272,62 +274,42 @@
end
def test_write_with_custom_s3_object_key_format
# Partial mock the S3Bucket, not to make an actual connection to Amazon S3
setup_mocks(true)
+ s3_local_file_path = "/tmp/s3-test.txt"
+ setup_s3_object_mocks(s3_local_file_path: s3_local_file_path)
- # Assert content of event logs which are being sent to S3
- s3obj = stub(Aws::S3::Object.new(:bucket_name => "test_bucket",
- :key => "test",
- :client => @s3_client))
- s3obj.exists? { false }
- s3_test_file_path = "/tmp/s3-test.txt"
- tempfile = File.new(s3_test_file_path, "w")
- mock(Tempfile).new("s3-") { tempfile }
- s3obj.put(:body => tempfile,
- :content_type => "application/x-gzip",
- :storage_class => "STANDARD")
- @s3_bucket.object("log/events/ts=20110102-13/events_0-testing.node.local.gz") { s3obj }
-
# We must use TimeSlicedOutputTestDriver instead of BufferedOutputTestDriver,
# to make assertions on chunks' keys
d = create_time_sliced_driver
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
d.emit({"a"=>1}, time)
d.emit({"a"=>2}, time)
# Finally, the instance of S3Output is initialized and then invoked
d.run
- Zlib::GzipReader.open(s3_test_file_path) do |gz|
+ Zlib::GzipReader.open(s3_local_file_path) do |gz|
data = gz.read
assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
%[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
data
end
- FileUtils.rm_f(s3_test_file_path)
+ FileUtils.rm_f(s3_local_file_path)
end
def test_write_with_custom_s3_object_key_format_containing_uuid_flush_placeholder
# Partial mock the S3Bucket, not to make an actual connection to Amazon S3
setup_mocks(true)
uuid = "5755e23f-9b54-42d8-8818-2ea38c6f279e"
stub(UUIDTools::UUID).random_create{ uuid }
- # Assert content of event logs which are being sent to S3
- s3obj = stub(Aws::S3::Object.new(:bucket_name => "test_bucket",
- :key => "test",
- :client => @s3_client))
- s3obj.exists? { false }
- s3_test_file_path = "/tmp/s3-test.txt"
- tempfile = File.new(s3_test_file_path, "w")
- mock(Tempfile).new("s3-") { tempfile }
- s3obj.put(:body => tempfile,
- :content_type => "application/x-gzip",
- :storage_class => "STANDARD")
- @s3_bucket.object("log/events/ts=20110102-13/events_0-#{uuid}.gz") { s3obj }
+ s3_local_file_path = "/tmp/s3-test.txt"
+ s3path = "log/events/ts=20110102-13/events_0-#{uuid}.gz"
+ setup_s3_object_mocks(s3_local_file_path: s3_local_file_path, s3path: s3path)
# We must use TimeSlicedOutputTestDriver instead of BufferedOutputTestDriver,
# to make assertions on chunks' keys
config = CONFIG_TIME_SLICE.gsub(/%{hostname}/,"%{uuid_flush}")
d = create_time_sliced_driver(config)
@@ -336,17 +318,17 @@
d.emit({"a"=>1}, time)
d.emit({"a"=>2}, time)
# Finally, the instance of S3Output is initialized and then invoked
d.run
- Zlib::GzipReader.open(s3_test_file_path) do |gz|
+ Zlib::GzipReader.open(s3_local_file_path) do |gz|
data = gz.read
assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
%[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
data
end
- FileUtils.rm_f(s3_test_file_path)
+ FileUtils.rm_f(s3_local_file_path)
Dir.glob('tmp/*').each {|file| FileUtils.rm_f(file) }
end
def test_write_with_custom_s3_object_key_format_containing_hex_random_placeholder_memory_buffer
hex = "012345"
@@ -373,38 +355,29 @@
# the current fluentd test helper because it does not provide a way to run with the same chunks
def write_with_custom_s3_object_key_format_containing_hex_random_placeholder(config, hex)
# Partial mock the S3Bucket, not to make an actual connection to Amazon S3
setup_mocks(true)
- # Assert content of event logs which are being sent to S3
- s3obj = stub(Aws::S3::Object.new(:bucket_name => "test_bucket",
- :key => "test",
- :client => @s3_client))
- s3obj.exists? { false }
- s3_test_file_path = "/tmp/s3-test.txt"
- tempfile = File.new(s3_test_file_path, "w")
- mock(Tempfile).new("s3-") { tempfile }
- s3obj.put(:body => tempfile,
- :content_type => "application/x-gzip",
- :storage_class => "STANDARD")
- @s3_bucket.object("log/events/ts=20110102-13/events_0-#{hex}.gz") { s3obj }
+ s3path = "log/events/ts=20110102-13/events_0-#{hex}.gz"
+ s3_local_file_path = "/tmp/s3-test.txt"
+ setup_s3_object_mocks(s3_local_file_path: s3_local_file_path, s3path: s3path)
d = create_time_sliced_driver(config)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
d.emit({"a"=>1}, time)
d.emit({"a"=>2}, time)
# Finally, the instance of S3Output is initialized and then invoked
d.run
- Zlib::GzipReader.open(s3_test_file_path) do |gz|
+ Zlib::GzipReader.open(s3_local_file_path) do |gz|
data = gz.read
assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
%[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
data
end
- FileUtils.rm_f(s3_test_file_path)
+ FileUtils.rm_f(s3_local_file_path)
end
def setup_mocks(exists_return = false)
@s3_client = stub(Aws::S3::Client.new(:stub_responses => true))
mock(Aws::S3::Client).new(anything).at_least(0) { @s3_client }
@@ -418,10 +391,29 @@
:client => @s3_client))
@s3_bucket.object(anything).at_least(0) { @s3_object }
@s3_resource.bucket(anything) { @s3_bucket }
end
# Stubs the whole S3 upload path for one object so tests never talk to AWS:
# the S3 object reports itself as absent, the plugin's Tempfile is redirected
# to a local file we can inspect, and the bucket hands back our stub.
#
# s3path:             bucket key the plugin is expected to write to
# s3_local_file_path: local file that stands in for the upload Tempfile;
#                     callers read it back (gzip) to assert the payload
#
# Keyword arguments (instead of an options hash) reject misspelled option
# names with an ArgumentError; existing symbol-keyed call sites still work.
def setup_s3_object_mocks(s3path: "log/events/ts=20110102-13/events_0-testing.node.local.gz",
                          s3_local_file_path: "/tmp/s3-test.txt")
  # Assert content of event logs which are being sent to S3
  s3obj = stub(Aws::S3::Object.new(:bucket_name => "test_bucket",
                                   :key => "test",
                                   :client => @s3_client))
  # Pretend the key does not exist yet so the plugin takes the "create" path.
  s3obj.exists? { false }

  # Redirect the plugin's internal Tempfile.new("s3-") to a known local file
  # so the test can open it afterwards and verify the gzipped contents.
  tempfile = File.new(s3_local_file_path, "w")
  stub(Tempfile).new("s3-") { tempfile }
  s3obj.put(:body => tempfile,
            :content_type => "application/x-gzip",
            :storage_class => "STANDARD")

  # Only a lookup with the expected key returns the stubbed object.
  @s3_bucket.object(s3path) { s3obj }
end
+
def test_auto_create_bucket_false_with_non_existence_bucket
setup_mocks
config = CONFIG_TIME_SLICE + 'auto_create_bucket false'
d = create_time_sliced_driver(config)
@@ -531,7 +523,39 @@
d = create_time_sliced_driver(config)
assert_nothing_raised{ d.run }
client = d.instance.instance_variable_get(:@s3).client
credentials = client.config.credentials
assert_equal(expected_credentials, credentials)
+ end
+
# Setting `signature_version s3` in the config must land on the plugin
# instance's @signature_version.
def test_signature_version
  driver = create_driver("#{CONFIG}\nsignature_version s3")
  assert_equal("s3", driver.instance.instance_variable_get(:@signature_version))
end
+
# When flushed events are older than `warn_for_delay` relative to the (frozen)
# wall clock, the plugin must log "[warn]: out_s3: delayed events were put".
#
# Fix vs. the original: Timecop.return and the temp-file removal were only
# reached on success — a failing assertion (or a raising emit/run) leaked the
# frozen clock into every later test. The Timecop block form restores time
# automatically, and `ensure` guarantees the file cleanup.
def test_warn_for_delay
  s3_local_file_path = "/tmp/s3-test.txt"
  setup_mocks(true)
  setup_s3_object_mocks(s3_local_file_path: s3_local_file_path)

  config = CONFIG_TIME_SLICE + 'warn_for_delay 1d'
  d = create_time_sliced_driver(config)

  delayed_time = Time.parse("2011-01-02 13:14:15 UTC")
  # NOTE(review): 86000 is not a full day (86400). The warning presumably
  # still fires because the chunk's time slice (ts=20110102-13 => 13:00:00)
  # is more than 1d behind the frozen clock — confirm the offset is intended.
  now = delayed_time + 86000 + 1

  # Block form: wall clock is frozen only for the duration of the block and
  # is restored even if an assertion inside raises.
  Timecop.freeze(now) do
    d.emit({"a"=>1}, delayed_time.to_i)
    d.emit({"a"=>2}, delayed_time.to_i)

    d.run

    logs = d.instance.log.logs
    assert_true logs.any? {|log| log.include?('[warn]: out_s3: delayed events were put') }
  end
ensure
  FileUtils.rm_f(s3_local_file_path)
end
end