# frozen_string_literal: true

require_relative 'helper'
require 'sidekiq/api'
require 'active_job'
require 'action_mailer'
describe 'API' do
|
|
|
|
# Every example starts from an empty Redis database.
before do
  Sidekiq.redis { |redis| redis.flushdb }
end
|
|
|
|
|
|
|
|
describe 'RedisScanner' do
  it 'returns identical to smembers' do
    # SSCAN must yield exactly the same members as SMEMBERS, just
    # fetched incrementally instead of in one blocking call.
    scanner = Object.new
    scanner.extend(Sidekiq::RedisScanner)

    50.times do |i|
      Sidekiq.redis { |conn| conn.sadd('processes', "test-process-#{i}") }
    end

    sscan = Sidekiq.redis { |c| scanner.sscan(c, 'processes') }.sort!
    smembers = Sidekiq.redis { |c| c.smembers('processes') }.sort!

    # minitest's assert_equal takes (expected, actual); the original had
    # the arguments reversed, which garbles the failure message.
    assert_equal 50, sscan.size
    assert_equal smembers, sscan
  end
end
|
2012-12-04 08:11:25 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe "stats" do
|
|
|
|
# A fresh install reports zeroes across the board.
it "is initially zero" do
  stats = Sidekiq::Stats.new
  assert_equal 0, stats.processed
  assert_equal 0, stats.failed
  assert_equal 0, stats.enqueued
  assert_equal 0, stats.default_queue_latency
end

describe "processed" do
  it "returns number of processed jobs" do
    Sidekiq.redis { |redis| redis.set("stat:processed", 5) }
    assert_equal 5, Sidekiq::Stats.new.processed
  end
end

describe "failed" do
  it "returns number of failed jobs" do
    Sidekiq.redis { |redis| redis.set("stat:failed", 5) }
    assert_equal 5, Sidekiq::Stats.new.failed
  end
end
|
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe "reset" do
  # Seed both counters so each example can observe what reset touches.
  before do
    Sidekiq.redis do |redis|
      redis.set('stat:processed', 5)
      redis.set('stat:failed', 10)
    end
  end

  it 'will reset all stats by default' do
    Sidekiq::Stats.new.reset
    stats = Sidekiq::Stats.new
    assert_equal 0, stats.failed
    assert_equal 0, stats.processed
  end

  it 'can reset individual stats' do
    Sidekiq::Stats.new.reset('failed')
    stats = Sidekiq::Stats.new
    assert_equal 0, stats.failed
    assert_equal 5, stats.processed
  end

  it 'can accept anything that responds to #to_s' do
    Sidekiq::Stats.new.reset(:failed)
    stats = Sidekiq::Stats.new
    assert_equal 0, stats.failed
    assert_equal 5, stats.processed
  end

  it 'ignores anything other than "failed" or "processed"' do
    Sidekiq::Stats.new.reset((1..10).to_a, ['failed'])
    stats = Sidekiq::Stats.new
    assert_equal 0, stats.failed
    assert_equal 5, stats.processed
  end
end
|
2013-12-18 10:46:41 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe "queues" do
  it "is initially empty" do
    lengths = Sidekiq::Stats::Queues.new.lengths
    assert_equal 0, lengths.size
  end

  it "returns a hash of queue and size in order" do
    Sidekiq.redis do |redis|
      redis.rpush 'queue:foo', '{}'
      redis.sadd 'queues', 'foo'

      3.times { redis.rpush 'queue:bar', '{}' }
      redis.sadd 'queues', 'bar'
    end

    queues = Sidekiq::Stats::Queues.new
    assert_equal ({ "foo" => 1, "bar" => 3 }), queues.lengths
    # Largest queue comes first in the ordering.
    assert_equal "bar", queues.lengths.first.first

    assert_equal Sidekiq::Stats.new.queues, Sidekiq::Stats::Queues.new.lengths
  end
end
|
2015-01-28 11:49:29 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe "enqueued" do
  it 'handles latency for good jobs' do
    Sidekiq.redis do |redis|
      redis.rpush 'queue:default', "{\"enqueued_at\": #{Time.now.to_f}}"
      redis.sadd 'queues', 'default'
    end

    assert Sidekiq::Stats.new.default_queue_latency > 0
    assert Sidekiq::Queue.new.latency > 0
  end

  it 'handles latency for incomplete jobs' do
    # No enqueued_at in the payload, so latency falls back to zero.
    Sidekiq.redis do |redis|
      redis.rpush 'queue:default', '{}'
      redis.sadd 'queues', 'default'
    end

    assert_equal 0, Sidekiq::Stats.new.default_queue_latency
    assert_equal 0, Sidekiq::Queue.new.latency
  end

  it "returns total enqueued jobs" do
    Sidekiq.redis do |redis|
      redis.rpush 'queue:foo', '{}'
      redis.sadd 'queues', 'foo'

      3.times { redis.rpush 'queue:bar', '{}' }
      redis.sadd 'queues', 'bar'
    end

    assert_equal 4, Sidekiq::Stats.new.enqueued
  end
end
|
2012-12-04 08:11:25 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe "over time" do
  before do
    require 'active_support/core_ext/time/conversions'
    # Override the default Time string format to prove the History API
    # does not depend on Time#to_s formatting; restored in the after hook.
    @before = Time::DATE_FORMATS[:default]
    Time::DATE_FORMATS[:default] = "%d/%m/%Y %H:%M:%S"
  end

  after do
    Time::DATE_FORMATS[:default] = @before
  end

  describe "processed" do
    it 'retrieves hash of dates' do
      # Seed per-day processed counters.
      Sidekiq.redis do |c|
        c.incrby("stat:processed:2012-12-24", 4)
        c.incrby("stat:processed:2012-12-25", 1)
        c.incrby("stat:processed:2012-12-26", 6)
        c.incrby("stat:processed:2012-12-27", 2)
      end
      # Pin "now" so the history window is deterministic.
      Time.stub(:now, Time.parse("2012-12-26 1:00:00 -0500")) do
        s = Sidekiq::Stats::History.new(2)
        assert_equal({ "2012-12-26" => 6, "2012-12-25" => 1 }, s.processed)

        s = Sidekiq::Stats::History.new(3)
        assert_equal({ "2012-12-26" => 6, "2012-12-25" => 1, "2012-12-24" => 4 }, s.processed)

        # An explicit start date shifts the window back.
        s = Sidekiq::Stats::History.new(2, Date.parse("2012-12-25"))
        assert_equal({ "2012-12-25" => 1, "2012-12-24" => 4 }, s.processed)
      end
    end
  end

  describe "failed" do
    it 'retrieves hash of dates' do
      # Same window semantics as "processed", exercised via the failed counters.
      Sidekiq.redis do |c|
        c.incrby("stat:failed:2012-12-24", 4)
        c.incrby("stat:failed:2012-12-25", 1)
        c.incrby("stat:failed:2012-12-26", 6)
        c.incrby("stat:failed:2012-12-27", 2)
      end
      Time.stub(:now, Time.parse("2012-12-26 1:00:00 -0500")) do
        s = Sidekiq::Stats::History.new(2)
        assert_equal ({ "2012-12-26" => 6, "2012-12-25" => 1 }), s.failed

        s = Sidekiq::Stats::History.new(3)
        assert_equal ({ "2012-12-26" => 6, "2012-12-25" => 1, "2012-12-24" => 4 }), s.failed

        s = Sidekiq::Stats::History.new(2, Date.parse("2012-12-25"))
        assert_equal ({ "2012-12-25" => 1, "2012-12-24" => 4 }), s.failed
      end
    end
  end
end
|
2019-02-28 15:43:50 -05:00
|
|
|
end
|
2012-10-20 17:03:43 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
describe 'with an empty database' do
|
|
|
|
it 'shows queue as empty' do
  queue = Sidekiq::Queue.new
  assert_equal 0, queue.size
  assert_equal 0, queue.latency
end

# Route ActiveJob through Sidekiq and silence its logger for these specs.
before do
  ActiveJob::Base.queue_adapter = :sidekiq
  ActiveJob::Base.logger = nil
end
|
2015-12-14 16:29:02 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
# Minimal mailer used to verify that ActionMailer delivery jobs are
# unwrapped correctly by the API (see 'unwraps ActionMailer jobs').
class ApiMailer < ActionMailer::Base
  def test_email(*)
  end
end

# Minimal ActiveJob used to verify ActiveJob unwrapping.
class ApiJob < ActiveJob::Base
  def perform(*)
  end
end

# Plain Sidekiq worker used throughout these specs.
class ApiWorker
  include Sidekiq::Worker
end
|
2013-05-24 01:58:06 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can enumerate jobs' do
  queue = Sidekiq::Queue.new

  # Freeze time far in the past so latency is obviously large afterwards.
  Time.stub(:now, Time.new(2012, 12, 26)) do
    ApiWorker.perform_async(1, 'mike')
    assert_equal [ApiWorker.name], queue.map(&:klass)

    enqueued = queue.first
    assert_equal 24, enqueued.jid.size
    assert_equal [1, 'mike'], enqueued.args
    assert_equal Time.new(2012, 12, 26), enqueued.enqueued_at
  end

  assert queue.latency > 10_000_000

  other = Sidekiq::Queue.new('other')
  assert_equal 0, other.size
end
|
it 'enumerates jobs in descending score order' do
  # More than one page (50 items) must be scheduled so that both the
  # pages and the items within each page are verified to be sorted.
  51.times { ApiWorker.perform_in(100, 1, 'foo') }

  jobs = Sidekiq::ScheduledSet.new.to_a
  assert_equal jobs.sort_by { |job| -job.score }, jobs
end
|
2012-10-20 17:03:43 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'has no enqueued_at time for jobs enqueued in the future' do
  jid = ApiWorker.perform_in(100, 1, 'foo')
  scheduled = Sidekiq::ScheduledSet.new.find_job(jid)
  assert_nil scheduled.enqueued_at
end

it 'unwraps delayed jobs' do
  Sidekiq::Extensions.enable_delay!
  Sidekiq::Queue.delay.foo(1,2,3)

  # The wrapper payload should be unwrapped to the delayed call target.
  delayed = Sidekiq::Queue.new.first
  assert_equal "Sidekiq::Queue.foo", delayed.display_class
  assert_equal [1,2,3], delayed.display_args
end
|
2015-12-14 16:29:02 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'unwraps ActiveJob jobs' do
  ApiJob.perform_later(1, 2, 3)
  wrapped = Sidekiq::Queue.new.first
  assert_equal ApiJob.name, wrapped.display_class
  assert_equal [1,2,3], wrapped.display_args
end

it 'unwraps ActionMailer jobs' do
  ApiMailer.test_email(1, 2, 3).deliver_later
  # Mailer deliveries land on the 'mailers' queue.
  wrapped = Sidekiq::Queue.new('mailers').first
  assert_equal "#{ApiMailer.name}#test_email", wrapped.display_class
  assert_equal [1,2,3], wrapped.display_args
end
|
2014-06-11 23:31:39 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
# NOTE(review): this example is an exact duplicate of the
# 'has no enqueued_at time for jobs enqueued in the future' spec defined
# earlier in this file; one of the two can likely be removed.
it 'has no enqueued_at time for jobs enqueued in the future' do
  job_id = ApiWorker.perform_in(100, 1, 'foo')
  job = Sidekiq::ScheduledSet.new.find_job(job_id)
  assert_nil job.enqueued_at
end
|
2013-06-23 18:57:52 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can delete jobs' do
  queue = Sidekiq::Queue.new
  ApiWorker.perform_async(1, 'mike')
  assert_equal 1, queue.size

  entry = queue.first
  assert_equal ApiWorker.name, entry.display_class
  assert_equal [1,'mike'], entry.display_args

  assert_equal [true], queue.map(&:delete)
  assert_equal 0, queue.size
end

it "can move scheduled job to queue" do
  remain_id = ApiWorker.perform_in(100, 1, 'jason')
  job_id = ApiWorker.perform_in(100, 1, 'jason')

  queue = Sidekiq::Queue.new
  Sidekiq::ScheduledSet.new.find_job(job_id).add_to_queue

  queued_job = queue.find_job(job_id)
  refute_nil queued_job
  assert_equal queued_job.jid, job_id
  # Only the moved job leaves the scheduled set; the other remains.
  assert_nil Sidekiq::ScheduledSet.new.find_job(job_id)
  refute_nil Sidekiq::ScheduledSet.new.find_job(remain_id)
end
|
2012-12-02 23:32:16 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it "handles multiple scheduled jobs when moving to queue" do
  jids = Sidekiq::Client.push_bulk('class' => ApiWorker,
                                   'args' => [[1, 'jason'], [2, 'jason']],
                                   'at' => Time.now.to_f)
  assert_equal 2, jids.size
  remain_id, job_id = jids

  queue = Sidekiq::Queue.new
  Sidekiq::ScheduledSet.new.find_job(job_id).add_to_queue

  queued_job = queue.find_job(job_id)
  refute_nil queued_job
  assert_equal queued_job.jid, job_id
  assert_nil Sidekiq::ScheduledSet.new.find_job(job_id)
  refute_nil Sidekiq::ScheduledSet.new.find_job(remain_id)
end

it 'can kill a scheduled job' do
  jid = ApiWorker.perform_in(100, 1, '{"foo":123}')
  scheduled = Sidekiq::ScheduledSet.new.find_job(jid)

  dead = Sidekiq::DeadSet.new
  assert_equal 0, dead.size
  scheduled.kill
  assert_equal 1, dead.size
end
|
2012-10-20 17:03:43 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can remove jobs when iterating over a sorted set' do
  # Schedule more jobs than SortedSet#each's underlying page size (50)
  # so deletion is exercised across page boundaries.
  51.times do
    ApiWorker.perform_in(100, 'aaron')
  end

  set = Sidekiq::ScheduledSet.new
  set.map(&:delete)
  # minitest's assert_equal takes (expected, actual); the original had
  # the arguments reversed, which garbles the failure message.
  assert_equal 0, set.size
end

it 'can remove jobs when iterating over a queue' do
  # Initial queue size must exceed Queue#each's underlying page size.
  51.times do
    ApiWorker.perform_async(1, 'aaron')
  end

  q = Sidekiq::Queue.new
  q.map(&:delete)
  assert_equal 0, q.size
end
|
2012-10-20 17:03:43 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can find job by id in queues' do
  queue = Sidekiq::Queue.new
  jid = ApiWorker.perform_async(1, 'jason')

  found = queue.find_job(jid)
  refute_nil found
  assert_equal jid, found.jid
end

it 'can clear a queue' do
  queue = Sidekiq::Queue.new
  2.times { ApiWorker.perform_async(1, 'mike') }
  queue.clear

  # NOTE(review): the cleared queue is 'default' but the assertions check
  # 'foo' — presumably leftover from an earlier version; verify intent.
  Sidekiq.redis do |redis|
    refute redis.smembers('queues').include?('foo')
    refute redis.exists('queue:foo')
  end
end
|
2012-10-20 17:03:43 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can fetch by score' do
  same_time = Time.now.to_f
  add_retry('bob1', same_time)
  add_retry('bob2', same_time)

  assert_equal 2, Sidekiq::RetrySet.new.fetch(same_time).size
end

it 'can fetch by score and jid' do
  same_time = Time.now.to_f
  add_retry('bob1', same_time)
  add_retry('bob2', same_time)

  assert_equal 1, Sidekiq::RetrySet.new.fetch(same_time, 'bob1').size
end

it 'shows empty retries' do
  assert_equal 0, Sidekiq::RetrySet.new.size
end
|
2012-11-25 21:43:48 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can enumerate retries' do
  add_retry

  retries = Sidekiq::RetrySet.new
  assert_equal 1, retries.size

  entries = retries.to_a
  assert_equal 1, entries.size

  entry = entries.first
  assert_equal 'ApiWorker', entry.klass
  assert_equal 'default', entry.queue
  assert_equal 'bob', entry.jid
  assert_in_delta Time.now.to_f, entry.at.to_f, 0.02
end

it 'requires a jid to delete an entry' do
  start_time = Time.now.to_f
  add_retry('bob2', Time.now.to_f)

  # Deleting by score alone is ambiguous and must raise.
  assert_raises(ArgumentError) do
    Sidekiq::RetrySet.new.delete(start_time)
  end
end
|
2013-01-24 12:50:30 -05:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can delete a single retry from score and jid' do
  same_time = Time.now.to_f
  add_retry('bob1', same_time)
  add_retry('bob2', same_time)

  retries = Sidekiq::RetrySet.new
  assert_equal 2, retries.size
  Sidekiq::RetrySet.new.delete(same_time, 'bob1')
  assert_equal 1, retries.size
end

it 'can retry a retry' do
  add_retry
  retries = Sidekiq::RetrySet.new
  assert_equal 1, retries.size

  retries.first.retry
  assert_equal 0, retries.size

  # The job is re-enqueued with its retry count bumped.
  assert_equal 1, Sidekiq::Queue.new('default').size
  requeued = Sidekiq::Queue.new('default').first
  assert_equal 'bob', requeued.jid
  assert_equal 1, requeued['retry_count']
end

it 'can clear retries' do
  add_retry
  add_retry('test')

  retries = Sidekiq::RetrySet.new
  assert_equal 2, retries.size
  retries.clear
  assert_equal 0, retries.size
end
|
2015-10-21 01:04:36 -04:00
|
|
|
|
2019-02-28 15:43:50 -05:00
|
|
|
it 'can enumerate processes' do
  identity_string = "identity_string"
  # Heartbeat record mimicking what a live Sidekiq process publishes.
  odata = {
    'pid' => 123,
    'hostname' => Socket.gethostname,
    'key' => identity_string,
    'identity' => identity_string,
    'started_at' => Time.now.to_f - 15,
  }

  time = Time.now.to_f
  Sidekiq.redis do |conn|
    conn.multi do
      conn.sadd('processes', odata['key'])
      conn.hmset(odata['key'], 'info', Sidekiq.dump_json(odata), 'busy', 10, 'beat', time)
      # Registered key with no matching heartbeat hash; ProcessSet
      # should skip it (only 1 process is asserted below).
      conn.sadd('processes', 'fake:pid')
    end
  end

  ps = Sidekiq::ProcessSet.new.to_a
  assert_equal 1, ps.size
  data = ps.first
  assert_equal 10, data['busy']
  assert_equal time, data['beat']
  assert_equal 123, data['pid']
  # quiet!/stop! push signals onto the per-process signal list; the
  # LPOP order below shows TERM (from stop!) is popped first.
  data.quiet!
  data.stop!
  signals_string = "#{odata['key']}-signals"
  assert_equal "TERM", Sidekiq.redis{|c| c.lpop(signals_string) }
  assert_equal "TSTP", Sidekiq.redis{|c| c.lpop(signals_string) }
end
|
|
|
|
|
|
|
|
it 'can enumerate workers' do
  w = Sidekiq::Workers.new
  assert_equal 0, w.size
  # With nothing registered the enumerator must not yield at all.
  w.each do
    assert false
  end

  # Register this very process as a live Sidekiq process.
  hn = Socket.gethostname
  key = "#{hn}:#{$$}"
  pdata = { 'pid' => $$, 'hostname' => hn, 'started_at' => Time.now.to_i }
  Sidekiq.redis do |conn|
    conn.sadd('processes', key)
    conn.hmset(key, 'info', Sidekiq.dump_json(pdata), 'busy', 0, 'beat', Time.now.to_f)
  end

  # One in-progress unit of work keyed by thread id '1234'.
  s = "#{key}:workers"
  data = Sidekiq.dump_json({ 'payload' => {}, 'queue' => 'default', 'run_at' => Time.now.to_i })
  Sidekiq.redis do |c|
    c.hmset(s, '1234', data)
  end

  # Workers#each yields (process key, thread id, work hash) triples.
  w.each do |p, x, y|
    assert_equal key, p
    assert_equal "1234", x
    assert_equal 'default', y['queue']
    assert_equal Time.now.year, Time.at(y['run_at']).year
  end

  s = "#{key}:workers"
  data = Sidekiq.dump_json({ 'payload' => {}, 'queue' => 'default', 'run_at' => (Time.now.to_i - 2*60*60) })
  Sidekiq.redis do |c|
    c.multi do
      c.hmset(s, '5678', data)
      # "b#{s}" is not a registered process's workers hash — presumably
      # it must be ignored, since only '1234' and '5678' are expected.
      c.hmset("b#{s}", '5678', data)
    end
  end

  assert_equal ['1234', '5678'], w.map { |_, tid, _| tid }
end
|
|
|
|
|
|
|
|
it 'can reschedule jobs' do
  add_retry('foo1')
  add_retry('foo2')

  retries = Sidekiq::RetrySet.new
  assert_equal 2, retries.size
  # Nothing is scheduled 9+ seconds out yet.
  refute(retries.map { |r| r.score > (Time.now.to_f + 9) }.any?)

  retries.each do |entry|
    entry.reschedule(Time.now.to_f + 10) if entry.jid == 'foo2'
  end

  # Rescheduling moves the score without adding or removing entries.
  assert_equal 2, retries.size
  assert(retries.map { |r| r.score > (Time.now.to_f + 9) }.any?)
end

it 'prunes processes which have died' do
  data = { 'pid' => rand(10_000), 'hostname' => "app#{rand(1_000)}", 'started_at' => Time.now.to_f }
  key = "#{data['hostname']}:#{data['pid']}"
  Sidekiq.redis do |redis|
    redis.sadd('processes', key)
    redis.hmset(key, 'info', Sidekiq.dump_json(data), 'busy', 0, 'beat', Time.now.to_f)
  end

  ps = Sidekiq::ProcessSet.new
  assert_equal 1, ps.size
  assert_equal 1, ps.to_a.size

  # Register two process keys with no heartbeat hash; they should not
  # show up in the set.
  Sidekiq.redis do |redis|
    redis.sadd('processes', "bar:987")
    redis.sadd('processes', "bar:986")
  end

  ps = Sidekiq::ProcessSet.new
  assert_equal 1, ps.size
  assert_equal 1, ps.to_a.size
end
|
|
|
|
|
|
|
|
# Inserts a fake retry entry directly into the 'retry' sorted set,
# bypassing the normal failure path.
def add_retry(jid = 'bob', at = Time.now.to_f)
  payload = Sidekiq.dump_json('class' => 'ApiWorker', 'args' => [1, 'mike'], 'queue' => 'default', 'jid' => jid, 'retry_count' => 2, 'failed_at' => Time.now.to_f)
  Sidekiq.redis { |conn| conn.zadd('retry', at.to_s, payload) }
end
|
|
|
|
end
|
|
|
|
end
|