spec/outputs/s3_spec.rb in logstash-output-s3-1.0.0 vs spec/outputs/s3_spec.rb in logstash-output-s3-1.0.1

- old
+ new

@@ -10,11 +10,10 @@ describe LogStash::Outputs::S3 do
   before do
     # We stub all the calls from S3, for more information see:
     # http://ruby.awsblog.com/post/Tx2SU6TYJWQQLC3/Stubbing-AWS-Responses
     AWS.stub!
-    Thread.abort_on_exception = true
   end

   let(:minimal_settings) { { "access_key_id" => "1234",
                              "secret_access_key" => "secret",
@@ -68,12 +67,12 @@
     end
   end

   describe "#generate_temporary_filename" do
     before do
-      Socket.stub(:gethostname) { "logstash.local" }
-      Time.stub(:now) { Time.new('2015-10-09-09:00') }
+      allow(Socket).to receive(:gethostname) { "logstash.local" }
+      allow(Time).to receive(:now) { Time.new('2015-10-09-09:00') }
     end

     it "should add tags to the filename if present" do
       config = minimal_settings.merge({ "tags" => ["elasticsearch", "logstash", "kibana"], "temporary_directory" => "/tmp/logstash"})
       s3 = LogStash::Outputs::S3.new(config)
@@ -95,11 +94,11 @@
   describe "#write_on_bucket" do
     let!(:fake_data) { Stud::Temporary.file }

     let(:fake_bucket) do
       s3 = double('S3Object')
-      s3.stub(:write)
+      allow(s3).to receive(:write)
       s3
     end

     it "should prefix the file on the bucket if a prefix is specified" do
       prefix = "my-prefix"
@@ -156,63 +155,86 @@
       end
     end
   end

   describe "#rotate_events_log" do
-    let(:s3) { LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 1024 })) }

-    it "returns true if the tempfile is over the file_size limit" do
-      Stud::Temporary.file do |tmp|
-        tmp.stub(:size) { 2024001 }
+    context "having a single worker" do
+      let(:s3) { LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 1024 })) }

-        s3.tempfile = tmp
-        expect(s3.rotate_events_log?).to be(true)
+      before(:each) do
+        s3.register
       end
+
+      it "returns true if the tempfile is over the file_size limit" do
+        Stud::Temporary.file do |tmp|
+          allow(tmp).to receive(:size) { 2024001 }
+
+          s3.tempfile = tmp
+          expect(s3.rotate_events_log?).to be(true)
+        end
+      end
+
+      it "returns false if the tempfile is under the file_size limit" do
+        Stud::Temporary.file do |tmp|
+          allow(tmp).to receive(:size) { 100 }
+
+          s3.tempfile = tmp
+          expect(s3.rotate_events_log?).to eq(false)
+        end
+      end
     end

-    it "returns false if the tempfile is under the file_size limit" do
-      Stud::Temporary.file do |tmp|
-        tmp.stub(:size) { 100 }
+    context "having periodic rotations" do
+      let(:s3) { LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 1024, "time_file" => 6e-10 })) }
+      let(:tmp) { Tempfile.new('s3_rotation_temp_file') }

+      before(:each) do
         s3.tempfile = tmp
-        expect(s3.rotate_events_log?).to eq(false)
+        s3.register
       end
+
+      after(:each) do
+        s3.teardown
+        tmp.close
+        tmp.unlink
+      end
+
+      it "raises no error when periodic rotation happen" do
+        1000.times do
+          expect { s3.rotate_events_log? }.not_to raise_error
+        end
+      end
     end
   end

   describe "#move_file_to_bucket" do
-    let!(:s3) { LogStash::Outputs::S3.new(minimal_settings) }
+    subject { LogStash::Outputs::S3.new(minimal_settings) }

-    before do
-      # Assume the AWS test credentials pass.
-      allow(s3).to receive(:test_s3_write)
-      s3.register
-    end
-
     it "should always delete the source file" do
       tmp = Stud::Temporary.file

       allow(File).to receive(:zero?).and_return(true)
       expect(File).to receive(:delete).with(tmp)

-      s3.move_file_to_bucket(tmp)
+      subject.move_file_to_bucket(tmp)
     end

     it 'should not upload the file if the size of the file is zero' do
       temp_file = Stud::Temporary.file
       allow(temp_file).to receive(:zero?).and_return(true)

-      expect(s3).not_to receive(:write_on_bucket)
-      s3.move_file_to_bucket(temp_file)
+      expect(subject).not_to receive(:write_on_bucket)
+      subject.move_file_to_bucket(temp_file)
     end

     it "should upload the file if the size > 0" do
       tmp = Stud::Temporary.file

       allow(File).to receive(:zero?).and_return(false)
-      expect(s3).to receive(:write_on_bucket)
+      expect(subject).to receive(:write_on_bucket)

-      s3.move_file_to_bucket(tmp)
+      subject.move_file_to_bucket(tmp)
     end
   end

   describe "#restore_from_crashes" do
     it "read the temp directory and upload the matching file to s3" do
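
Note: the change that recurs throughout this diff is the move from the older rspec-mocks monkey-patched stubbing syntax (object.stub(:method)) to the allow(object).to receive(:method) syntax, which is the non-deprecated form in RSpec 3. A minimal standalone sketch of the two styles, using a hypothetical Clock class that is not part of logstash-output-s3:

  # clock_spec.rb -- illustrative only, not part of the plugin.
  # Run with: ruby clock_spec.rb (requires the rspec gem).
  require "rspec/autorun"

  class Clock
    def self.now
      Time.now
    end
  end

  RSpec.describe Clock do
    it "can be stubbed with the allow/receive syntax" do
      # Old style (deprecated in RSpec 3): Clock.stub(:now) { Time.at(0) }
      # New style, as used in the 1.0.1 spec:
      allow(Clock).to receive(:now) { Time.at(0) }

      expect(Clock.now).to eq(Time.at(0))
    end
  end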