spec/inputs/jdbc_spec.rb in logstash-input-jdbc-4.3.13 vs spec/inputs/jdbc_spec.rb in logstash-input-jdbc-4.3.14
- old
+ new
@@ -9,15 +9,13 @@
require "timecop"
require "stud/temporary"
require "time"
require "date"
+# We no longer need to set the TZ env var because 'Sequel.application_timezone' can now default to UTC.
+
describe LogStash::Inputs::Jdbc do
- # This is a necessary change test-wide to guarantee that no local timezone
- # is picked up. It could be arbitrarily set to any timezone, but then the test
- # would have to compensate differently. That's why UTC is chosen.
- ENV["TZ"] = "Etc/UTC"
let(:mixin_settings) do
{ "jdbc_user" => ENV['USER'], "jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver",
"jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true"}
end
let(:settings) { {} }
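
Note: the deleted ENV["TZ"] pin and the new comment refer to a behaviour change in the plugin rather than in the specs: Sequel can be told to treat application-side times as UTC, so the host's local timezone no longer leaks into the tests. A minimal sketch of that setting, offered as an assumption for illustration (the actual call site in the mixin is not shown in this diff):

  require "sequel"

  # Return/accept Time values in UTC regardless of the machine's local TZ.
  Sequel.application_timezone = :utc
  # Optionally pin the database side as well, so round trips stay in UTC.
  Sequel.database_timezone = :utc
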
@@ -62,26 +60,10 @@
mixin_settings['jdbc_password'] = 'pass'
expect { plugin.register }.to_not raise_error
plugin.stop
end
- it "should load all drivers when passing an array" do
- mixin_settings['jdbc_driver_library'] = '/foo/bar,/bar/foo'
- expect(plugin).to receive(:load_drivers).with(['/foo/bar', '/bar/foo'])
- plugin.register
- plugin.run(queue) # load when first run
- plugin.stop
- end
-
- it "should load all drivers when using a single value" do
- mixin_settings['jdbc_driver_library'] = '/foo/bar'
- expect(plugin).to receive(:load_drivers).with(['/foo/bar'])
- plugin.register
- plugin.run(queue) # load when first run
- plugin.stop
- end
-
it "should stop without raising exception" do
plugin.register
expect { plugin.stop }.to_not raise_error
end
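
Note: the two deleted examples covered jdbc_driver_library accepting either a single path or a comma-separated list, with load_drivers always receiving an array. The behaviour they describe amounts to roughly the following (a sketch, not the mixin's actual code):

  jdbc_driver_library = '/foo/bar,/bar/foo'
  driver_jars = jdbc_driver_library.split(",")   # => ["/foo/bar", "/bar/foo"]
  # load_drivers(driver_jars) then loads each jar before connecting.
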
@@ -369,21 +351,22 @@
"statement" => "SELECT * from test_table WHERE custom_time > :sql_last_value",
"last_run_metadata_path" => Stud::Temporary.pathname
}
end
- it "should convert the time to reflect the timezone " do
+ before do
last_run_value = DateTime.iso8601("2000-01-01T12:00:00.000Z")
- File.write(settings["last_run_metadata_path"], YAML.dump(last_run_value))
-
- Timecop.travel(DateTime.iso8601("2015-01-01T15:50:00.000Z")) do
+ File.write(settings["last_run_metadata_path"], last_run_value)
+ Timecop.travel(DateTime.iso8601("2015-01-01T15:50:01.000Z")) do
# simulate earlier records written
hours.each do |i|
db[:test_table].insert(:num => i, :custom_time => "2015-01-01 #{i}:00:00", :created_at => Time.now.utc)
end
end
+ end
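
Note: the seed value for last_run_metadata_path is now written as the DateTime's plain string form instead of a YAML document (File.write calls to_s on non-String arguments). A quick worked illustration of what lands in the file:

  last_run_value = DateTime.iso8601("2000-01-01T12:00:00.000Z")
  File.write(settings["last_run_metadata_path"], last_run_value)
  # the file now contains "2000-01-01T12:00:00+00:00"
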
+ it "should convert the time to reflect the timezone " do
Timecop.travel(DateTime.iso8601("2015-01-02T02:10:00.000Z")) do
# simulate the first plugin run after the custom time of the last record
plugin.register
plugin.run(queue)
expected = hours.map{|hour| Time.iso8601("2015-01-01T06:00:00.000Z") + (hour * 3600) }# because Sequel converts the column values to Time instances.
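
Note: the expected values are anchored at 2015-01-01T06:00:00Z because the rows were inserted as wall-clock strings ("2015-01-01 #{i}:00:00") and the surrounding context (outside this hunk) configures jdbc_default_timezone to a zone six hours behind UTC, so local midnight on 2015-01-01 corresponds to 06:00 UTC. The arithmetic for one row, spelled out:

  base = Time.iso8601("2015-01-01T06:00:00.000Z")  # local 00:00 expressed in UTC
  base + (3 * 3600)   # an hour value of 3 maps to 2015-01-01T09:00:00Z (illustrative)
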
@@ -391,19 +374,21 @@
expect(actual).to eq(expected)
plugin.stop
end
Timecop.travel(DateTime.iso8601("2015-01-02T02:20:00.000Z")) do
# simulate a run 10 minutes later
+ plugin.register
plugin.run(queue)
expect(queue.size).to eq(0) # no new records
plugin.stop
# now add records
db[:test_table].insert(:num => 11, :custom_time => "2015-01-01 20:20:20", :created_at => Time.now.utc)
db[:test_table].insert(:num => 12, :custom_time => "2015-01-01 21:21:21", :created_at => Time.now.utc)
end
Timecop.travel(DateTime.iso8601("2015-01-02T03:30:00.000Z")) do
# simulate another run later than the custom time of the last record
+ plugin.register
plugin.run(queue)
expect(queue.size).to eq(2)
plugin.stop
end
event = queue.pop
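
Note: plugin.register is now called before every simulated run, not only the first, presumably because each register/run/stop cycle is treated as a self-contained lifecycle in 4.3.14. Roughly the shape the spec drives (the comments describe assumed internals, not lines from this diff):

  plugin.register    # reload sql_last_value from last_run_metadata_path, rebuild config
  plugin.run(queue)  # execute the statement once and push matching rows as events
  plugin.stop        # release the connection/resources until the next cycle
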
@@ -430,23 +415,24 @@
it "should convert the time to reflect the timezone " do
# Sequel only does the *correct* timezone calc on a DateTime instance
last_run_value = DateTime.iso8601("2000-01-01T00:00:00.987Z")
File.write(settings["last_run_metadata_path"], YAML.dump(last_run_value))
-
hours.each_with_index do |i, j|
- db[:test1_table].insert(:num => i, :custom_time => "2015-01-01 #{i}:00:00.#{msecs[j]}", :created_at => Time.now.utc)
+ time_value = Time.utc(2015, 1, 1, i, 0, 0, msecs[j] * 1000)
+ db[:test1_table].insert(:num => i, :custom_time => time_value, :created_at => Time.now.utc)
end
plugin.register
plugin.run(queue)
expected = hours.map.with_index {|hour, i| Time.iso8601("2015-01-01T06:00:00.000Z") + (hour * 3600 + (msecs[i] / 1000.0)) }
actual = queue.size.times.map { queue.pop.get("custom_time").time }
expect(actual).to eq(expected)
plugin.stop
- last_run_value = YAML.load(File.read(settings["last_run_metadata_path"]))
+ raw_last_run_value = File.read(settings["last_run_metadata_path"])
+ last_run_value = YAML.load(raw_last_run_value)
expect(last_run_value).to be_a(DateTime)
expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 02:00:00.722000000 +00:00")
plugin.run(queue)
plugin.stop
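
Note: the rows are now seeded with real Time values rather than string literals so the sub-second part survives the round trip, and Time.utc takes its seventh argument in microseconds, hence msecs[j] * 1000. A worked example (illustrative hour, 722 ms taken from the expectation above):

  t = Time.utc(2015, 1, 1, 20, 0, 0, 722 * 1000)  # 722 ms expressed as 722_000 usec
  t.iso8601(3)                                    # => "2015-01-01T20:00:00.722Z"
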
@@ -993,11 +979,11 @@
it "should fail" do
expect do
plugin.register
plugin.run(queue) # load when first run
- end.to raise_error(LogStash::ConfigurationError)
+ end.to raise_error(LogStash::PluginLoadingError)
end
end
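
Note: the expected error class tracks a change in the mixin: a driver that cannot be found or loaded now surfaces as LogStash::PluginLoadingError instead of a generic LogStash::ConfigurationError. The raising code is not part of this diff; a hedged sketch of its likely shape:

  begin
    Sequel::JDBC.load_driver(@jdbc_driver_class)
  rescue Sequel::AdapterNotFound => e
    # the error class is what the spec asserts; the message text here is illustrative
    raise LogStash::PluginLoadingError, "unable to load #{@jdbc_driver_class}: #{e.message}"
  end
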
context "when timing out on connection" do
let(:settings) do
@@ -1134,11 +1120,18 @@
"column2" => 3
}
end
before(:each) do
- allow_any_instance_of(Sequel::JDBC::Derby::Dataset).to receive(:each).and_yield(row)
+ dataset = double("Dataset")
+ allow(dataset).to receive(:each).and_yield(row)
+ allow(plugin).to receive(:jdbc_connect).and_wrap_original do |m, *args|
+ _db = m.call(*args)
+ allow(_db).to receive(:[]).and_return(dataset)
+ _db
+ end
+ # allow_any_instance_of(Sequel::JDBC::Derby::Dataset).to receive(:each).and_yield(row)
plugin.register
end
after(:each) do
plugin.stop
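
Note: the stubbing strategy moves from allow_any_instance_of on Sequel's Derby dataset class to wrapping the plugin's own jdbc_connect: the real connection is still established, but dataset lookups on it (db[...]) return a canned double. The key RSpec piece is and_wrap_original, which yields the original method plus its arguments to the block. A stripped-down illustration of that mechanism, to be run inside an example (names are hypothetical):

  class Greeter
    def greet(name)
      "hello #{name}"
    end
  end

  greeter = Greeter.new
  allow(greeter).to receive(:greet).and_wrap_original do |original, *args|
    original.call(*args).upcase   # call through to the real method, then decorate
  end
  greeter.greet("world")          # => "HELLO WORLD"
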
@@ -1189,10 +1182,10 @@
expect(LogStash::Event).to receive(:new) do |row|
row.each do |k, v|
next unless v.is_a?(String)
expect(row[k].encoding).to eq(encoded_row[k].encoding)
end
-
+
event
end
plugin.run(events)
end
end