# frozen_string_literal: true

require "spec_helpers"

describe Wayfarer::Base, redis: true do
  include Wayfarer::Redis::Connection

  let(:url) { "https://example.com" }
  let(:batch) { "batch" }
  let(:task) { build(:task, batch: batch, url: url) }
  let(:klass) { Class.new(Wayfarer::Base) }

  before { stub_const("DummyJob", klass) }

  describe "::crawl_later" do
    it "enqueues a task" do
      expect(DummyJob).to receive(:perform_later).with(task)
      DummyJob.crawl_later(url, batch: batch)
    end
  end

  describe "#perform" do
    it "assigns itself to the task" do
      job = DummyJob.new
      job.perform(task)
      expect(task.job).to be(job)
    end

    it "calls the middleware chain" do
      job = DummyJob.new
      allow(job).to receive(:chain).and_return(spy)
      expect(job.chain).to receive(:call).with(task)
      job.perform(task)
    end
  end

  describe "Callbacks" do
    let(:counter) { task.counter }

    describe "after enqueue" do
      it "increments the counter" do
        expect {
          DummyJob.crawl_later(url, batch: batch)
        }.to change { counter.value }.by(1)
      end
    end

    describe "after perform" do
      it "decrements the counter" do
        DummyJob.crawl_later(url, batch: batch)
        task.counter.increment

        expect {
          perform_enqueued_jobs
        }.to change { task.counter.value }.by(-1)
      end

      context "when counter reaches 0" do
        it "resets the barrier" do
          DummyJob.crawl_later(url, batch: batch)
          perform_enqueued_jobs

          redis do |conn|
            expect(conn.exists?(task.barrier.redis_key)).to be(false)
          end
        end

        it "resets the counter" do
          DummyJob.crawl_later(url, batch: batch)
          perform_enqueued_jobs

          redis do |conn|
            expect(conn.exists?(task.counter.redis_key)).to be(false)
          end
        end

        it "runs after batch callbacks" do
          expect { |spy|
            klass.after_batch(&spy)
            DummyJob.crawl_later(url, batch: batch)
            perform_enqueued_jobs
          }.to yield_control
        end
      end
    end
  end

  describe "Unhandled exceptions" do
    let(:klass) { Class.new(Wayfarer::Base) }

    before do
      allow_any_instance_of(DummyJob).to receive(:perform).and_raise(RuntimeError.new)
    end

    it "does not retry the job" do
      DummyJob.crawl_later(url, batch: batch)

      expect {
        begin
          perform_enqueued_jobs
        rescue StandardError
          nil
        end
      }.to change { enqueued_jobs.size }.by(-1)
    end

    it "decrements the counter" do
      3.times { task.counter.increment }
      DummyJob.crawl_later(url, batch: batch)

      begin
        perform_enqueued_jobs
      rescue StandardError
        nil
      end

      expect(task.counter.value).to be(3)
    end

    # it "runs after batch callbacks" do
    #   expect do |spy|
    #     klass.after_batch(&spy)
    #     DummyJob.crawl_later(url, batch: batch)
    #     3.times { perform_enqueued_jobs }
    #   end.to yield_control
    # end
  end

  describe "Retries" do
    let(:klass) do
      Class.new(Wayfarer::Base) do
        retry_on RuntimeError, attempts: 3 do |job, error|
          Spy.call(job, error)
        end
      end
    end

    before do
      allow_any_instance_of(DummyJob).to receive(:perform).and_raise(RuntimeError.new)
      stub_const("Spy", spy)
    end

    it "retries the job" do
      expect(Spy).to receive(:call).exactly(:once)
                                   .with(kind_of(DummyJob), kind_of(RuntimeError))

      DummyJob.crawl_later(url, batch: batch)

      expect { perform_enqueued_jobs }.to change { enqueued_jobs.last["executions"] }.by(1)
      expect { perform_enqueued_jobs }.to change { enqueued_jobs.last["executions"] }.by(1)
      expect { perform_enqueued_jobs }.to change { enqueued_jobs.size }.by(-1)
    end

    it "marks the URL seen" do
      task.counter.increment # otherwise barrier gets reset
      DummyJob.crawl_later(url, batch: batch)
      3.times { perform_enqueued_jobs }

      expect(task.barrier.seen?(task.url)).to be(true)
    end

    it "decrements the counter" do
      3.times { task.counter.increment }
      DummyJob.crawl_later(url, batch: batch)
      3.times { perform_enqueued_jobs }

      expect(task.counter.value).to be(3)
    end

    it "runs after batch callbacks" do
      expect { |spy|
        klass.after_batch(&spy)
        DummyJob.crawl_later(url, batch: batch)
        3.times { perform_enqueued_jobs }
      }.to yield_control
    end
  end

  describe "Discarding" do
    let(:klass) do
      Class.new(Wayfarer::Base) do
        discard_on RuntimeError do |job, error|
          Spy.call(job, error)
        end
      end
    end

    before do
      allow_any_instance_of(DummyJob).to receive(:perform).and_raise(RuntimeError.new)
      stub_const("Spy", spy)
    end

    it "discards the job" do
      expect(Spy).to receive(:call).exactly(:once)
                                   .with(kind_of(DummyJob), kind_of(RuntimeError))

      DummyJob.crawl_later(url, batch: batch)

      expect { perform_enqueued_jobs }.to change { enqueued_jobs.size }.by(-1)
    end

    it "marks the URL seen" do
      task.counter.increment # otherwise barrier gets reset
      DummyJob.crawl_later(url, batch: batch)
      perform_enqueued_jobs

      expect(task.barrier.seen?(task.url)).to be(true)
    end

    it "decrements the counter" do
      3.times { task.counter.increment }
      DummyJob.crawl_later(url, batch: batch)
      perform_enqueued_jobs

      expect(task.counter.value).to be(3)
    end

    it "runs after batch callbacks" do
      expect { |spy|
        klass.after_batch(&spy)
        DummyJob.crawl_later(url, batch: batch)
        perform_enqueued_jobs
      }.to yield_control
    end
  end
end