spec/inputs/s3_spec.rb in logstash-input-s3-3.0.1 vs spec/inputs/s3_spec.rb in logstash-input-s3-3.1.1

- old
+ new

@@ -1,31 +1,34 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/s3"
 require "logstash/errors"
+require "aws-sdk-resources"
 require_relative "../support/helpers"
 require "stud/temporary"
 require "aws-sdk"
 require "fileutils"

 describe LogStash::Inputs::S3 do
   let(:temporary_directory) { Stud::Temporary.pathname }
   let(:sincedb_path) { Stud::Temporary.pathname }
   let(:day) { 3600 * 24 }
+  let(:creds) { Aws::Credentials.new('1234', 'secret') }
   let(:config) {
     {
       "access_key_id" => "1234",
       "secret_access_key" => "secret",
       "bucket" => "logstash-test",
       "temporary_directory" => temporary_directory,
       "sincedb_path" => File.join(sincedb_path, ".sincedb")
     }
   }
+
   before do
     FileUtils.mkdir_p(sincedb_path)
-    AWS.stub!
+    Aws.config[:stub_responses] = true
     Thread.abort_on_exception = true
   end

   context "when interrupting the plugin" do
     let(:config) { super.merge({ "interval" => 5 }) }
@@ -50,31 +53,10 @@
   end

   describe '#get_s3object' do
     subject { LogStash::Inputs::S3.new(settings) }

-    context 'with deprecated credentials option' do
-      let(:settings) {
-        {
-          "credentials" => ["1234", "secret"],
-          "proxy_uri" => "http://example.com",
-          "bucket" => "logstash-test",
-        }
-      }
-
-      it 'should instantiate AWS::S3 clients with a proxy set' do
-        expect(AWS::S3).to receive(:new).with({
-          :access_key_id => "1234",
-          :secret_access_key => "secret",
-          :proxy_uri => 'http://example.com',
-          :use_ssl => subject.use_ssl,
-        }.merge(subject.aws_service_endpoint(subject.region)))
-
-        subject.send(:get_s3object)
-      end
-    end
-
     context 'with modern access key options' do
       let(:settings) {
         {
           "access_key_id" => "1234",
           "secret_access_key" => "secret",
@@ -82,25 +64,23 @@
           "bucket" => "logstash-test",
         }
       }

       it 'should instantiate AWS::S3 clients with a proxy set' do
-        expect(AWS::S3).to receive(:new).with({
-          :access_key_id => "1234",
-          :secret_access_key => "secret",
-          :proxy_uri => 'http://example.com',
-          :use_ssl => subject.use_ssl,
-        }.merge(subject.aws_service_endpoint(subject.region)))
-
+        expect(Aws::S3::Resource).to receive(:new).with({
+          :credentials => kind_of(Aws::Credentials),
+          :http_proxy => 'http://example.com',
+          :region => subject.region
+        })
         subject.send(:get_s3object)
       end
     end
   end

   describe "#list_new_files" do
-    before { allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects_list } }
+    before { allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects_list } }

     let!(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now) }
     let(:objects_list) {
       [
         double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day),
@@ -126,11 +106,11 @@
       objects_list = [
         double(:key => 'mybackup-log-1', :last_modified => Time.now),
         present_object
       ]

-      allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects_list }
+      allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects_list }

       plugin = LogStash::Inputs::S3.new(config.merge({ 'backup_add_prefix' => 'mybackup', 'backup_to_bucket' => config['bucket']}))
       plugin.register
       expect(plugin.list_new_files).to eq([present_object.key])
@@ -152,11 +132,11 @@
       objects_list = [
         double(:key => prefix, :last_modified => Time.now),
         present_object
       ]

-      allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(prefix) { objects_list }
+      allow_any_instance_of(Aws::S3::Bucket).to receive(:objects).with(:prefix => prefix) { objects_list }

       plugin = LogStash::Inputs::S3.new(config.merge({ 'prefix' => prefix }))
       plugin.register
       expect(plugin.list_new_files).to eq([present_object.key])
     end
@@ -166,11 +146,11 @@
         double(:key => 'YESTERDAY', :last_modified => Time.now - day),
         double(:key => 'TODAY', :last_modified => Time.now),
         double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now - 2 * day)
       ]

-      allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects }
+      allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }

       plugin = LogStash::Inputs::S3.new(config)
       plugin.register
       expect(plugin.list_new_files).to eq(['TWO_DAYS_AGO', 'YESTERDAY', 'TODAY'])
@@ -179,35 +159,38 @@
   describe "when doing backup on the s3" do
     it 'should copy to another s3 bucket when keeping the original file' do
       plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup"}))
       plugin.register

-      s3object = double()
-      expect(s3object).to receive(:copy_to).with('test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+      s3object = Aws::S3::Object.new('mybucket', 'testkey')
+      expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+      expect(s3object).to_not receive(:delete)

-      plugin.backup_to_bucket(s3object, 'test-file')
+      plugin.backup_to_bucket(s3object)
     end

-    it 'should move to another s3 bucket when deleting the original file' do
+    it 'should copy to another s3 bucket when deleting the original file' do
       plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup", "delete" => true }))
       plugin.register

-      s3object = double()
-      expect(s3object).to receive(:move_to).with('test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+      s3object = Aws::S3::Object.new('mybucket', 'testkey')
+      expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+      expect(s3object).to receive(:delete)

-      plugin.backup_to_bucket(s3object, 'test-file')
+      plugin.backup_to_bucket(s3object)
     end

     it 'should add the specified prefix to the backup file' do
       plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup", "backup_add_prefix" => 'backup-' }))
       plugin.register

-      s3object = double()
-      expect(s3object).to receive(:copy_to).with('backup-test-file', :bucket => an_instance_of(AWS::S3::Bucket))
+      s3object = Aws::S3::Object.new('mybucket', 'testkey')
+      expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+      expect(s3object).to_not receive(:delete)

-      plugin.backup_to_bucket(s3object, 'test-file')
+      plugin.backup_to_bucket(s3object)
     end
   end

   it 'should support doing local backup of files' do
     Stud::Temporary.directory do |backup_dir|
@@ -220,23 +203,10 @@
         expect(File.exists?(backup_file)).to eq(true)
       end
     end
   end
-
-  it 'should accepts a list of credentials for the aws-sdk, this is deprecated' do
-    Stud::Temporary.directory do |tmp_directory|
-      old_credentials_config = {
-        "credentials" => ['1234', 'secret'],
-        "backup_to_dir" => tmp_directory,
-        "bucket" => "logstash-test"
-      }
-
-      plugin = LogStash::Inputs::S3.new(old_credentials_config)
-      expect{ plugin.register }.not_to raise_error
-    end
-  end
 end

 shared_examples "generated events" do
   it 'should process events' do
     events = fetch_events(config)
@@ -250,14 +220,22 @@
   end

   context 'when working with logs' do
     let(:objects) { [log] }
     let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day) }
+    let(:data) { File.read(log_file) }

     before do
-      allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:with_prefix).with(nil) { objects }
-      allow_any_instance_of(AWS::S3::ObjectCollection).to receive(:[]).with(log.key) { log }
-      expect(log).to receive(:read) { |&block| block.call(File.read(log_file)) }
+      Aws.config[:s3] = {
+        stub_responses: {
+          get_object: { body: data }
+        }
+      }
+      allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }
+      allow_any_instance_of(Aws::S3::Bucket).to receive(:object).with(log.key) { log }
+      expect(log).to receive(:get).with(instance_of(Hash)) do |arg|
+        File.open(arg[:response_target], 'wb') { |s3file| s3file.write(data) }
+      end
     end

     context "when event doesn't have a `message` field" do
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'json.log') }
       let(:config) {