
Rename worker references to job (#5568)

* standardrb

* Rename all `*_worker` references to `*_job`

* Rename Worker references to Job in test suite

* Rename ApiJob to ApiAjJob to avoid collisions

* Rename remaining Worker to Job references

* Rename WorkController to JobController

Including routes

* fixup! Rename WorkController to JobController
Teo Ljungberg 2022-10-11 17:38:53 +02:00 committed by GitHub
parent e3df9e383d
commit c72f0d2e81
27 changed files with 341 additions and 341 deletions
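
To make the scope of the change concrete, here is a minimal before/after sketch of the rename pattern applied throughout the files below. It uses the HardWorker/HardJob class from the example app; the perform signature and arguments are illustrative, not copied verbatim from the repo.

require "sidekiq"

# Before: example and test classes carried a *Worker suffix.
class HardWorker
  include Sidekiq::Job            # the mixin was already Sidekiq::Job
  sidekiq_options backtrace: 5

  def perform(name, count, salt)
    # do the work...
  end
end
HardWorker.perform_async("bubba", 0.01, 1)

# After: only the constant and every reference to it are renamed.
class HardJob
  include Sidekiq::Job
  sidekiq_options backtrace: 5

  def perform(name, count, salt)
    # do the work...
  end
end
HardJob.perform_async("bubba", 0.01, 1)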

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true
require "sidekiq/job"
require 'rails'
require "rails"
module Sidekiq
class Rails < ::Rails::Engine

View file

@ -1,9 +1,9 @@
class WorkController < ApplicationController
class JobController < ApplicationController
def index
@count = rand(100)
puts "Adding #{@count} jobs"
@count.times do |x|
HardWorker.perform_async("bubba", 0.01, x)
HardJob.perform_async("bubba", 0.01, x)
end
end
@ -13,20 +13,20 @@ class WorkController < ApplicationController
end
def bulk
Sidekiq::Client.push_bulk("class" => HardWorker,
Sidekiq::Client.push_bulk("class" => HardJob,
"args" => [["bob", 1, 1], ["mike", 1, 2]])
render plain: "enbulked"
end
def long
50.times do |x|
HardWorker.perform_async("bob", 15, x)
HardJob.perform_async("bob", 15, x)
end
render plain: "enqueued"
end
def crash
HardWorker.perform_async("crash", 1, Time.now.to_f)
HardJob.perform_async("crash", 1, Time.now.to_f)
render plain: "enqueued"
end

View file

@ -1,4 +1,4 @@
class ExitWorker
class ExitJob
include Sidekiq::Job
def perform

View file

@ -1,4 +1,4 @@
class HardWorker
class HardJob
include Sidekiq::Job
sidekiq_options backtrace: 5

View file

@ -1,4 +1,4 @@
class LazyWorker
class LazyJob
include Sidekiq::Job
def perform

View file

@ -16,14 +16,14 @@ Sidekiq.configure_server do |config|
end
end
class EmptyWorker
class EmptyJob
include Sidekiq::Job
def perform
end
end
class TimedWorker
class TimedJob
include Sidekiq::Job
def perform(start)

View file

@ -6,10 +6,10 @@ Sidekiq::Web.app_url = "/"
Rails.application.routes.draw do
mount Sidekiq::Web => "/sidekiq"
get "work" => "work#index"
get "work/email" => "work#email"
get "work/post" => "work#delayed_post"
get "work/long" => "work#long"
get "work/crash" => "work#crash"
get "work/bulk" => "work#bulk"
get "job" => "job#index"
get "job/email" => "job#email"
get "job/post" => "job#delayed_post"
get "job/long" => "job#long"
get "job/crash" => "job#crash"
get "job/bulk" => "job#bulk"
end

View file

@ -8,7 +8,7 @@ Sidekiq.configure_client do |config|
config.redis = {url: "redis://localhost:6379/0", size: 1}
end
Sidekiq::Client.push("class" => "HardWorker", "args" => [])
Sidekiq::Client.push("class" => "HardJob", "args" => [])
# In a multi-process deployment, all Web UI instances should share
# this secret key so they can all decode the encrypted browser cookies

View file

@ -7,7 +7,7 @@ require "sidekiq/scheduled"
require "sidekiq/processor"
require "sidekiq/api"
class JoeWorker
class JoeJob
include Sidekiq::Job
def perform(slp)
raise "boom" if slp == "boom"
@ -33,7 +33,7 @@ describe "Actors" do
ss = Sidekiq::ScheduledSet.new
q = Sidekiq::Queue.new
JoeWorker.perform_in(0.01, 0)
JoeJob.perform_in(0.01, 0)
assert_equal 0, q.size
assert_equal 1, ss.size
@ -82,7 +82,7 @@ describe "Actors" do
pr = Sidekiq::Processor.new(@cap) do |prc, ex|
result(prc, ex)
end
jid = JoeWorker.perform_async("boom")
jid = JoeJob.perform_async("boom")
assert jid, jid
assert_equal 1, q.size
@ -106,7 +106,7 @@ describe "Actors" do
p = Sidekiq::Processor.new(@cap) do |pr, ex|
result(pr, ex)
end
jid = JoeWorker.perform_async(2)
jid = JoeJob.perform_async(2)
assert jid, jid
assert_equal 1, q.size

View file

@ -10,27 +10,27 @@ class ApiMailer < ActionMailer::Base
end
end
class ApiJob < ActiveJob::Base
class ApiAjJob < ActiveJob::Base
def perform(*)
end
end
class ApiWorker
class ApiJob
include Sidekiq::Job
end
class WorkerWithTags
class JobWithTags
include Sidekiq::Job
sidekiq_options tags: ["foo"]
end
SERIALIZED_JOBS = {
"5.x" => [
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ApiJob","queue":"default","args":[{"job_class":"ApiJob","job_id":"f1bde53f-3852-4ae4-a879-c12eacebbbb0","provider_job_id":null,"queue_name":"default","priority":null,"arguments":[1,2,3],"executions":0,"locale":"en"}],"retry":true,"jid":"099eee72911085a511d0e312","created_at":1568305542.339916,"enqueued_at":1568305542.339947}',
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ApiAjJob","queue":"default","args":[{"job_class":"ApiAjJob","job_id":"f1bde53f-3852-4ae4-a879-c12eacebbbb0","provider_job_id":null,"queue_name":"default","priority":null,"arguments":[1,2,3],"executions":0,"locale":"en"}],"retry":true,"jid":"099eee72911085a511d0e312","created_at":1568305542.339916,"enqueued_at":1568305542.339947}',
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ActionMailer::DeliveryJob","queue":"mailers","args":[{"job_class":"ActionMailer::DeliveryJob","job_id":"19cc0115-3d1c-4bbe-a51e-bfa1385895d1","provider_job_id":null,"queue_name":"mailers","priority":null,"arguments":["ApiMailer","test_email","deliver_now",1,2,3],"executions":0,"locale":"en"}],"retry":true,"jid":"37436e5504936400e8cf98db","created_at":1568305542.370133,"enqueued_at":1568305542.370241}'
],
"6.x" => [
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ApiJob","queue":"default","args":[{"job_class":"ApiJob","job_id":"ff2b48d4-bdce-4825-af6b-ef8c11ab651e","provider_job_id":null,"queue_name":"default","priority":null,"arguments":[1,2,3],"executions":0,"exception_executions":{},"locale":"en","timezone":"UTC","enqueued_at":"2019-09-12T16:28:37Z"}],"retry":true,"jid":"ce121bf77b37ae81fe61b6dc","created_at":1568305717.9469702,"enqueued_at":1568305717.947005}',
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ApiAjJob","queue":"default","args":[{"job_class":"ApiAjJob","job_id":"ff2b48d4-bdce-4825-af6b-ef8c11ab651e","provider_job_id":null,"queue_name":"default","priority":null,"arguments":[1,2,3],"executions":0,"exception_executions":{},"locale":"en","timezone":"UTC","enqueued_at":"2019-09-12T16:28:37Z"}],"retry":true,"jid":"ce121bf77b37ae81fe61b6dc","created_at":1568305717.9469702,"enqueued_at":1568305717.947005}',
'{"class":"ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper","wrapped":"ActionMailer::MailDeliveryJob","queue":"mailers","args":[{"job_class":"ActionMailer::MailDeliveryJob","job_id":"2f967da1-a389-479c-9a4e-5cc059e6d65c","provider_job_id":null,"queue_name":"mailers","priority":null,"arguments":["ApiMailer","test_email","deliver_now",{"args":[1,2,3],"_aj_symbol_keys":["args"]}],"executions":0,"exception_executions":{},"locale":"en","timezone":"UTC","enqueued_at":"2019-09-12T16:28:37Z"}],"retry":true,"jid":"469979df52bb9ef9f48b49e1","created_at":1568305717.9457421,"enqueued_at":1568305717.9457731}'
]
}
@ -250,8 +250,8 @@ describe "API" do
it "can enumerate jobs" do
q = Sidekiq::Queue.new
Time.stub(:now, Time.new(2012, 12, 26)) do
ApiWorker.perform_async(1, "mike")
assert_equal [ApiWorker.name], q.map(&:klass)
ApiJob.perform_async(1, "mike")
assert_equal [ApiJob.name], q.map(&:klass)
job = q.first
assert_equal 24, job.jid.size
@ -267,7 +267,7 @@ describe "API" do
it "enumerates jobs in descending score order" do
# We need to enqueue more than 50 items, which is the page size when retrieving
# from Redis to ensure everything is sorted: the pages and the items within them.
51.times { ApiWorker.perform_in(100, 1, "foo") }
51.times { ApiJob.perform_in(100, 1, "foo") }
set = Sidekiq::ScheduledSet.new.to_a
@ -275,7 +275,7 @@ describe "API" do
end
it "has no enqueued_at time for jobs enqueued in the future" do
job_id = ApiWorker.perform_in(100, 1, "foo")
job_id = ApiJob.perform_in(100, 1, "foo")
job = Sidekiq::ScheduledSet.new.find_job(job_id)
assert_nil job.enqueued_at
end
@ -283,10 +283,10 @@ describe "API" do
describe "Rails unwrapping" do
SERIALIZED_JOBS.each_pair do |ver, jobs|
it "unwraps ActiveJob #{ver} jobs" do
# ApiJob.perform_later(1,2,3)
# ApiAjJob.perform_later(1,2,3)
# puts Sidekiq::Queue.new.first.value
x = Sidekiq::JobRecord.new(jobs[0], "default")
assert_equal ApiJob.name, x.display_class
assert_equal ApiAjJob.name, x.display_class
assert_equal [1, 2, 3], x.display_args
end
@ -301,26 +301,26 @@ describe "API" do
end
it "has no enqueued_at time for jobs enqueued in the future" do
job_id = ApiWorker.perform_in(100, 1, "foo")
job_id = ApiJob.perform_in(100, 1, "foo")
job = Sidekiq::ScheduledSet.new.find_job(job_id)
assert_nil job.enqueued_at
end
it "returns tags field for jobs" do
job_id = ApiWorker.perform_async
job_id = ApiJob.perform_async
assert_equal [], Sidekiq::Queue.new.find_job(job_id).tags
job_id = WorkerWithTags.perform_async
job_id = JobWithTags.perform_async
assert_equal ["foo"], Sidekiq::Queue.new.find_job(job_id).tags
end
it "can delete jobs" do
q = Sidekiq::Queue.new
ApiWorker.perform_async(1, "mike")
ApiJob.perform_async(1, "mike")
assert_equal 1, q.size
x = q.first
assert_equal ApiWorker.name, x.display_class
assert_equal ApiJob.name, x.display_class
assert_equal [1, "mike"], x.display_args
assert_equal [true], q.map(&:delete)
@ -328,8 +328,8 @@ describe "API" do
end
it "can move scheduled job to queue" do
remain_id = ApiWorker.perform_in(100, 1, "jason")
job_id = ApiWorker.perform_in(100, 1, "jason")
remain_id = ApiJob.perform_in(100, 1, "jason")
job_id = ApiJob.perform_in(100, 1, "jason")
job = Sidekiq::ScheduledSet.new.find_job(job_id)
q = Sidekiq::Queue.new
job.add_to_queue
@ -341,7 +341,7 @@ describe "API" do
end
it "handles multiple scheduled jobs when moving to queue" do
jids = Sidekiq::Client.push_bulk("class" => ApiWorker,
jids = Sidekiq::Client.push_bulk("class" => ApiJob,
"args" => [[1, "jason"], [2, "jason"]],
"at" => Time.now.to_f)
assert_equal 2, jids.size
@ -357,7 +357,7 @@ describe "API" do
end
it "can kill a scheduled job" do
job_id = ApiWorker.perform_in(100, 1, '{"foo":123}')
job_id = ApiJob.perform_in(100, 1, '{"foo":123}')
job = Sidekiq::ScheduledSet.new.find_job(job_id)
ds = Sidekiq::DeadSet.new
assert_equal 0, ds.size
@ -367,21 +367,21 @@ describe "API" do
it "can find a scheduled job by jid" do
10.times do |idx|
ApiWorker.perform_in(idx, 1)
ApiJob.perform_in(idx, 1)
end
job_id = ApiWorker.perform_in(5, 1)
job_id = ApiJob.perform_in(5, 1)
job = Sidekiq::ScheduledSet.new.find_job(job_id)
assert_equal job_id, job.jid
ApiWorker.perform_in(100, 1, "jid" => "jid_in_args")
ApiJob.perform_in(100, 1, "jid" => "jid_in_args")
assert_nil Sidekiq::ScheduledSet.new.find_job("jid_in_args")
end
it "can remove jobs when iterating over a sorted set" do
# scheduled jobs must be greater than SortedSet#each underlying page size
51.times do
ApiWorker.perform_in(100, "aaron")
ApiJob.perform_in(100, "aaron")
end
set = Sidekiq::ScheduledSet.new
set.map(&:delete)
@ -391,7 +391,7 @@ describe "API" do
it "can remove jobs when iterating over a queue" do
# initial queue size must be greater than Queue#each underlying page size
51.times do
ApiWorker.perform_async(1, "aaron")
ApiJob.perform_async(1, "aaron")
end
q = Sidekiq::Queue.new
q.map(&:delete)
@ -400,7 +400,7 @@ describe "API" do
it "can find job by id in queues" do
q = Sidekiq::Queue.new
job_id = ApiWorker.perform_async(1, "jason")
job_id = ApiJob.perform_async(1, "jason")
job = q.find_job(job_id)
refute_nil job
assert_equal job_id, job.jid
@ -408,7 +408,7 @@ describe "API" do
it "can clear a queue" do
q = Sidekiq::Queue.new
2.times { ApiWorker.perform_async(1, "mike") }
2.times { ApiJob.perform_async(1, "mike") }
q.clear
Sidekiq.redis do |conn|
@ -469,7 +469,7 @@ describe "API" do
assert_equal 1, array.size
retri = array.first
assert_equal "ApiWorker", retri.klass
assert_equal "ApiJob", retri.klass
assert_equal "default", retri.queue
assert_equal "bob", retri.jid
assert_in_delta Time.now.to_f, retri.at.to_f, 0.02
@ -518,8 +518,8 @@ describe "API" do
add_retry
add_retry("test")
r = Sidekiq::RetrySet.new
assert_instance_of Enumerator, r.scan("Worker")
assert_equal 2, r.scan("ApiWorker").to_a.size
assert_instance_of Enumerator, r.scan("Job")
assert_equal 2, r.scan("ApiJob").to_a.size
assert_equal 1, r.scan("*test*").to_a.size
end
@ -637,7 +637,7 @@ describe "API" do
end
def add_retry(jid = "bob", at = Time.now.to_f)
payload = Sidekiq.dump_json("class" => "ApiWorker", "args" => [1, "mike"], "queue" => "default", "jid" => jid, "retry_count" => 2, "failed_at" => Time.now.to_f, "error_backtrace" => ["line1", "line2"])
payload = Sidekiq.dump_json("class" => "ApiJob", "args" => [1, "mike"], "queue" => "default", "jid" => jid, "retry_count" => 2, "failed_at" => Time.now.to_f, "error_backtrace" => ["line1", "line2"])
@cfg.redis do |conn|
conn.zadd("retry", at.to_s, payload)
end

View file

@ -5,16 +5,16 @@ require "active_job"
require "sidekiq/api"
require "sidekiq/rails"
class MyWorker
class MyJob
include Sidekiq::Job
end
class QueuedWorker
class QueuedJob
include Sidekiq::Job
sidekiq_options queue: :flimflam
end
class InterestingWorker
class InterestingJob
include Sidekiq::Job
def perform(an_argument)
@ -26,19 +26,19 @@ class TestActiveJob < ActiveJob::Base
end
end
class BaseWorker
class BaseJob
include Sidekiq::Job
sidekiq_options "retry" => "base"
end
class AWorker < BaseWorker
class AJob < BaseJob
end
class BWorker < BaseWorker
class BJob < BaseJob
sidekiq_options "retry" => "b"
end
class CWorker < BaseWorker
class CJob < BaseJob
sidekiq_options "retry" => 2
end
@ -59,7 +59,7 @@ class MiddlewareArguments
end
end
class DWorker < BaseWorker
class DJob < BaseJob
end
describe Sidekiq::Client do
@ -79,7 +79,7 @@ describe Sidekiq::Client do
end
assert_raises ArgumentError do
Sidekiq::Client.push("queue" => "foo", "class" => MyWorker, "noargs" => [1, 2])
Sidekiq::Client.push("queue" => "foo", "class" => MyJob, "noargs" => [1, 2])
end
assert_raises ArgumentError do
@ -87,15 +87,15 @@ describe Sidekiq::Client do
end
assert_raises ArgumentError do
Sidekiq::Client.push("queue" => "foo", "class" => MyWorker, "args" => :not_an_array)
Sidekiq::Client.push("queue" => "foo", "class" => MyJob, "args" => :not_an_array)
end
assert_raises ArgumentError do
Sidekiq::Client.push("queue" => "foo", "class" => MyWorker, "args" => [1], "at" => :not_a_numeric)
Sidekiq::Client.push("queue" => "foo", "class" => MyJob, "args" => [1], "at" => :not_a_numeric)
end
assert_raises ArgumentError do
Sidekiq::Client.push("queue" => "foo", "class" => MyWorker, "args" => [1], "tags" => :not_an_array)
Sidekiq::Client.push("queue" => "foo", "class" => MyJob, "args" => [1], "tags" => :not_an_array)
end
end
end
@ -150,7 +150,7 @@ describe Sidekiq::Client do
it "pushes messages to redis" do
q = Sidekiq::Queue.new("foo")
pre = q.size
jid = Sidekiq::Client.push("queue" => "foo", "class" => MyWorker, "args" => [1, 2])
jid = Sidekiq::Client.push("queue" => "foo", "class" => MyJob, "args" => [1, 2])
assert jid
assert_equal 24, jid.size
assert_equal pre + 1, q.size
@ -159,23 +159,23 @@ describe Sidekiq::Client do
it "pushes messages to redis using a String class" do
q = Sidekiq::Queue.new("foo")
pre = q.size
jid = Sidekiq::Client.push("queue" => "foo", "class" => "MyWorker", "args" => [1, 2])
jid = Sidekiq::Client.push("queue" => "foo", "class" => "MyJob", "args" => [1, 2])
assert jid
assert_equal 24, jid.size
assert_equal pre + 1, q.size
end
it "enqueues" do
assert_equal Sidekiq.default_job_options, MyWorker.get_sidekiq_options
assert MyWorker.perform_async(1, 2)
assert Sidekiq::Client.enqueue(MyWorker, 1, 2)
assert Sidekiq::Client.enqueue_to(:custom_queue, MyWorker, 1, 2)
assert_equal Sidekiq.default_job_options, MyJob.get_sidekiq_options
assert MyJob.perform_async(1, 2)
assert Sidekiq::Client.enqueue(MyJob, 1, 2)
assert Sidekiq::Client.enqueue_to(:custom_queue, MyJob, 1, 2)
assert_equal 1, Sidekiq::Queue.new("custom_queue").size
assert Sidekiq::Client.enqueue_to_in(:custom_queue, 3, MyWorker, 1, 2)
assert Sidekiq::Client.enqueue_to_in(:custom_queue, -3, MyWorker, 1, 2)
assert Sidekiq::Client.enqueue_to_in(:custom_queue, 3, MyJob, 1, 2)
assert Sidekiq::Client.enqueue_to_in(:custom_queue, -3, MyJob, 1, 2)
assert_equal 2, Sidekiq::Queue.new("custom_queue").size
assert Sidekiq::Client.enqueue_in(3, MyWorker, 1, 2)
assert QueuedWorker.perform_async(1, 2)
assert Sidekiq::Client.enqueue_in(3, MyJob, 1, 2)
assert QueuedJob.perform_async(1, 2)
assert_equal 1, Sidekiq::Queue.new("flimflam").size
end
@ -189,15 +189,15 @@ describe Sidekiq::Client do
end
it "enqueues jobs with a symbol as an argument" do
InterestingWorker.perform_async(:symbol)
InterestingJob.perform_async(:symbol)
end
it "enqueues jobs with a Date as an argument" do
InterestingWorker.perform_async(Date.new(2021, 1, 1))
InterestingJob.perform_async(Date.new(2021, 1, 1))
end
it "enqueues jobs with a Hash with symbols and string as keys as an argument" do
InterestingWorker.perform_async(
InterestingJob.perform_async(
{
:some => "hash",
"with" => "different_keys"
@ -206,13 +206,13 @@ describe Sidekiq::Client do
end
it "enqueues jobs with a Struct as an argument" do
InterestingWorker.perform_async(
InterestingJob.perform_async(
Struct.new(:x, :y).new(0, 0)
)
end
it "works with a JSON-friendly deep, nested structure" do
InterestingWorker.perform_async(
InterestingJob.perform_async(
{
"foo" => ["a", "b", "c"],
"bar" => ["x", "y", "z"]
@ -231,19 +231,19 @@ describe Sidekiq::Client do
it "raises an error when using a symbol as an argument" do
assert_raises ArgumentError do
InterestingWorker.perform_async(:symbol)
InterestingJob.perform_async(:symbol)
end
end
it "raises an error when using a Date as an argument" do
assert_raises ArgumentError do
InterestingWorker.perform_async(Date.new(2021, 1, 1))
InterestingJob.perform_async(Date.new(2021, 1, 1))
end
end
it "raises an error when using a Hash with symbols and string as keys as an argument" do
assert_raises ArgumentError do
InterestingWorker.perform_async(
InterestingJob.perform_async(
{
:some => "hash",
"with" => "different_keys"
@ -254,14 +254,14 @@ describe Sidekiq::Client do
it "raises an error when using a Struct as an argument" do
assert_raises ArgumentError do
InterestingWorker.perform_async(
InterestingJob.perform_async(
Struct.new(:x, :y).new(0, 0)
)
end
end
it "works with a JSON-friendly deep, nested structure" do
InterestingWorker.perform_async(
InterestingJob.perform_async(
{
"foo" => ["a", "b", "c"],
"bar" => ["x", "y", "z"]
@ -272,14 +272,14 @@ describe Sidekiq::Client do
describe "worker that takes deep, nested structures" do
it "raises an error on JSON-unfriendly structures" do
error = assert_raises ArgumentError do
InterestingWorker.perform_async(
InterestingJob.perform_async(
{
"foo" => [:a, :b, :c],
:bar => ["x", "y", "z"]
}
)
end
assert_match(/Job arguments to InterestingWorker/, error.message)
assert_match(/Job arguments to InterestingJob/, error.message)
end
end
@ -306,12 +306,12 @@ describe Sidekiq::Client do
end
it "can push a large set of jobs at once" do
jids = Sidekiq::Client.push_bulk("class" => QueuedWorker, "args" => (1..1_000).to_a.map { |x| Array(x) })
jids = Sidekiq::Client.push_bulk("class" => QueuedJob, "args" => (1..1_000).to_a.map { |x| Array(x) })
assert_equal 1_000, jids.size
end
it "can push a large set of jobs at once using a String class" do
jids = Sidekiq::Client.push_bulk("class" => "QueuedWorker", "args" => (1..1_000).to_a.map { |x| Array(x) })
jids = Sidekiq::Client.push_bulk("class" => "QueuedJob", "args" => (1..1_000).to_a.map { |x| Array(x) })
assert_equal 1_000, jids.size
end
@ -320,7 +320,7 @@ describe Sidekiq::Client do
times = job_count.times.map { |i| Time.new(2019, 1, i + 1) }
args = job_count.times.map { |i| [i] }
jids = Sidekiq::Client.push_bulk("class" => QueuedWorker, "args" => args, "at" => times.map(&:to_f))
jids = Sidekiq::Client.push_bulk("class" => QueuedJob, "args" => args, "at" => times.map(&:to_f))
assert_equal job_count, jids.size
assert_equal times, jids.map { |jid| Sidekiq::ScheduledSet.new.find_job(jid).at }
@ -329,61 +329,61 @@ describe Sidekiq::Client do
it "can push jobs scheduled using ActiveSupport::Duration" do
require "active_support/core_ext/integer/time"
jids = Sidekiq::Client.push_bulk("class" => QueuedWorker, "args" => [[1], [2]], "at" => [1.seconds, 111.seconds])
jids = Sidekiq::Client.push_bulk("class" => QueuedJob, "args" => [[1], [2]], "at" => [1.seconds, 111.seconds])
assert_equal 2, jids.size
end
it "returns the jids for the jobs" do
Sidekiq::Client.push_bulk("class" => "QueuedWorker", "args" => (1..2).to_a.map { |x| Array(x) }).each do |jid|
Sidekiq::Client.push_bulk("class" => "QueuedJob", "args" => (1..2).to_a.map { |x| Array(x) }).each do |jid|
assert_match(/[0-9a-f]{12}/, jid)
end
end
it "handles no jobs" do
result = Sidekiq::Client.push_bulk("class" => "QueuedWorker", "args" => [])
result = Sidekiq::Client.push_bulk("class" => "QueuedJob", "args" => [])
assert_equal 0, result.size
end
describe "errors" do
it "raises ArgumentError with invalid params" do
assert_raises ArgumentError do
Sidekiq::Client.push_bulk("class" => "QueuedWorker", "args" => [[1], 2])
Sidekiq::Client.push_bulk("class" => "QueuedJob", "args" => [[1], 2])
end
assert_raises ArgumentError do
Sidekiq::Client.push_bulk("class" => "QueuedWorker", "args" => [[1], [2]], "at" => [Time.now.to_f, :not_a_numeric])
Sidekiq::Client.push_bulk("class" => "QueuedJob", "args" => [[1], [2]], "at" => [Time.now.to_f, :not_a_numeric])
end
assert_raises ArgumentError do
Sidekiq::Client.push_bulk("class" => QueuedWorker, "args" => [[1], [2]], "at" => [Time.now.to_f])
Sidekiq::Client.push_bulk("class" => QueuedJob, "args" => [[1], [2]], "at" => [Time.now.to_f])
end
assert_raises ArgumentError do
Sidekiq::Client.push_bulk("class" => QueuedWorker, "args" => [[1]], "at" => [Time.now.to_f, Time.now.to_f])
Sidekiq::Client.push_bulk("class" => QueuedJob, "args" => [[1]], "at" => [Time.now.to_f, Time.now.to_f])
end
end
end
describe ".perform_bulk" do
it "pushes a large set of jobs" do
jids = MyWorker.perform_bulk((1..1_001).to_a.map { |x| Array(x) })
jids = MyJob.perform_bulk((1..1_001).to_a.map { |x| Array(x) })
assert_equal 1_001, jids.size
end
it "pushes a large set of jobs with a different batch size" do
jids = MyWorker.perform_bulk((1..1_001).to_a.map { |x| Array(x) }, batch_size: 100)
jids = MyJob.perform_bulk((1..1_001).to_a.map { |x| Array(x) }, batch_size: 100)
assert_equal 1_001, jids.size
end
it "handles no jobs" do
jids = MyWorker.perform_bulk([])
jids = MyJob.perform_bulk([])
assert_equal 0, jids.size
end
describe "errors" do
it "raises ArgumentError with invalid params" do
assert_raises ArgumentError do
Sidekiq::Client.push_bulk("class" => "MyWorker", "args" => [[1], 2])
Sidekiq::Client.push_bulk("class" => "MyJob", "args" => [[1], 2])
end
end
end
@ -391,7 +391,7 @@ describe Sidekiq::Client do
describe "lazy enumerator" do
it "enqueues the jobs by evaluating the enumerator" do
lazy_array = (1..1_001).to_a.map { |x| Array(x) }.lazy
jids = MyWorker.perform_bulk(lazy_array)
jids = MyJob.perform_bulk(lazy_array)
assert_equal 1_001, jids.size
end
end
@ -404,9 +404,9 @@ describe Sidekiq::Client do
@client.middleware do |chain|
chain.add MiddlewareArguments
end
@client.push("class" => MyWorker, "args" => [0])
@client.push("class" => MyJob, "args" => [0])
assert_equal($arguments_worker_class, MyWorker)
assert_equal($arguments_worker_class, MyJob)
assert((minimum_job_args & $arguments_job.keys) == minimum_job_args)
assert_instance_of(ConnectionPool, $arguments_redis)
end
@ -416,9 +416,9 @@ describe Sidekiq::Client do
@client.middleware do |chain|
chain.add MiddlewareArguments
end
@client.push_bulk("class" => MyWorker, "args" => [[0]])
@client.push_bulk("class" => MyJob, "args" => [[0]])
assert_equal($arguments_worker_class, MyWorker)
assert_equal($arguments_worker_class, MyJob)
assert((minimum_job_args & $arguments_job.keys) == minimum_job_args)
assert_instance_of(ConnectionPool, $arguments_redis)
end
@ -428,9 +428,9 @@ describe Sidekiq::Client do
chain.add Stopper
end
assert_nil @client.push("class" => MyWorker, "args" => [0])
assert_match(/[0-9a-f]{12}/, @client.push("class" => MyWorker, "args" => [1]))
@client.push_bulk("class" => MyWorker, "args" => [[0], [1]]).each do |jid|
assert_nil @client.push("class" => MyJob, "args" => [0])
assert_match(/[0-9a-f]{12}/, @client.push("class" => MyJob, "args" => [1]))
@client.push_bulk("class" => MyJob, "args" => [[0], [1]]).each do |jid|
assert_match(/[0-9a-f]{12}/, jid)
end
end
@ -438,8 +438,8 @@ describe Sidekiq::Client do
describe "inheritance" do
it "inherits sidekiq options" do
assert_equal "base", AWorker.get_sidekiq_options["retry"]
assert_equal "b", BWorker.get_sidekiq_options["retry"]
assert_equal "base", AJob.get_sidekiq_options["retry"]
assert_equal "b", BJob.get_sidekiq_options["retry"]
end
end
@ -447,8 +447,8 @@ describe Sidekiq::Client do
it "allows sidekiq_options to point to different Redi" do
conn = MiniTest::Mock.new
conn.expect(:pipelined, [0, 1])
DWorker.sidekiq_options("pool" => ConnectionPool.new(size: 1) { conn })
DWorker.perform_async(1, 2, 3)
DJob.sidekiq_options("pool" => ConnectionPool.new(size: 1) { conn })
DJob.perform_async(1, 2, 3)
conn.verify
end
@ -458,7 +458,7 @@ describe Sidekiq::Client do
sharded_pool = ConnectionPool.new(size: 1) { conn }
Sidekiq::Client.via(sharded_pool) do
Sidekiq::Client.via(sharded_pool) do
CWorker.perform_async(1, 2, 3)
CJob.perform_async(1, 2, 3)
end
end
conn.verify
@ -476,11 +476,11 @@ describe Sidekiq::Client do
pork = ConnectionPool.new(size: 1) { oink }
Sidekiq::Client.via(beef) do
CWorker.perform_async(1, 2, 3)
CJob.perform_async(1, 2, 3)
assert_equal beef, Sidekiq::Client.new.redis_pool
Sidekiq::Client.via(pork) do
assert_equal pork, Sidekiq::Client.new.redis_pool
CWorker.perform_async(1, 2, 3)
CJob.perform_async(1, 2, 3)
end
assert_equal beef, Sidekiq::Client.new.redis_pool
end
@ -493,8 +493,8 @@ describe Sidekiq::Client do
conn = MiniTest::Mock.new
conn.expect(:pipelined, []) { |*args, &block| block.call(conn) }
conn.expect(:zadd, 1, [String, Array])
DWorker.sidekiq_options("pool" => ConnectionPool.new(size: 1) { conn })
Sidekiq::Client.enqueue_in(10, DWorker, 3)
DJob.sidekiq_options("pool" => ConnectionPool.new(size: 1) { conn })
Sidekiq::Client.enqueue_in(10, DJob, 3)
conn.verify
end
end

View file

@ -9,7 +9,7 @@ describe "DeadSet" do
end
it 'should put passed serialized job to the "dead" sorted set' do
serialized_job = Sidekiq.dump_json(jid: "123123", class: "SomeWorker", args: [])
serialized_job = Sidekiq.dump_json(jid: "123123", class: "SomeJob", args: [])
dead_set.kill(serialized_job)
assert_equal dead_set.find_job("123123").value, serialized_job
@ -18,12 +18,12 @@ describe "DeadSet" do
it "should remove dead jobs older than Sidekiq::DeadSet.timeout" do
old, Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds] = Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds], 10
Time.stub(:now, Time.now - 11) do
dead_set.kill(Sidekiq.dump_json(jid: "000103", class: "MyWorker3", args: [])) # the oldest
dead_set.kill(Sidekiq.dump_json(jid: "000103", class: "MyJob3", args: [])) # the oldest
end
Time.stub(:now, Time.now - 9) do
dead_set.kill(Sidekiq.dump_json(jid: "000102", class: "MyWorker2", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000102", class: "MyJob2", args: []))
end
dead_set.kill(Sidekiq.dump_json(jid: "000101", class: "MyWorker1", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000101", class: "MyJob1", args: []))
assert_nil dead_set.find_job("000103")
assert dead_set.find_job("000102")
@ -34,9 +34,9 @@ describe "DeadSet" do
it "should remove all but last Sidekiq::DeadSet.max_jobs-1 jobs" do
old, Sidekiq::Config::DEFAULTS[:dead_max_jobs] = Sidekiq::Config::DEFAULTS[:dead_max_jobs], 3
dead_set.kill(Sidekiq.dump_json(jid: "000101", class: "MyWorker1", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000102", class: "MyWorker2", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000103", class: "MyWorker3", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000101", class: "MyJob1", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000102", class: "MyJob2", args: []))
dead_set.kill(Sidekiq.dump_json(jid: "000103", class: "MyJob3", args: []))
assert_nil dead_set.find_job("000101")
assert dead_set.find_job("000102")

View file

@ -26,7 +26,7 @@ describe "Job logger" do
# pretty
p = @logger.formatter = Sidekiq::Logger::Formatters::Pretty.new
job = {"jid" => "1234abc", "wrapped" => "FooWorker", "class" => "Wrapper", "tags" => ["bar", "baz"]}
job = {"jid" => "1234abc", "wrapped" => "FooJob", "class" => "Wrapper", "tags" => ["bar", "baz"]}
# this mocks what Processor does
jl.prepare(job) do
jl.call(job, "queue") {}
@ -36,7 +36,7 @@ describe "Job logger" do
assert a
assert b
expected = /pid=#{$$} tid=#{p.tid} class=FooWorker jid=1234abc tags=bar,baz/
expected = /pid=#{$$} tid=#{p.tid} class=FooJob jid=1234abc tags=bar,baz/
assert_match(expected, a)
assert_match(expected, b)
assert_match(/#{Time.now.utc.to_date}.+Z pid=#{$$} tid=#{p.tid} .+INFO: done/, b)
@ -46,7 +46,7 @@ describe "Job logger" do
# json
@logger.formatter = Sidekiq::Logger::Formatters::JSON.new
jl = Sidekiq::JobLogger.new(@logger)
job = {"jid" => "1234abc", "wrapped" => "Wrapper", "class" => "FooWorker", "bid" => "b-xyz", "tags" => ["bar", "baz"]}
job = {"jid" => "1234abc", "wrapped" => "Wrapper", "class" => "FooJob", "bid" => "b-xyz", "tags" => ["bar", "baz"]}
# this mocks what Processor does
jl.prepare(job) do
jl.call(job, "queue") {}
@ -63,7 +63,7 @@ describe "Job logger" do
it "tests custom log level" do
jl = Sidekiq::JobLogger.new(@logger)
job = {"class" => "FooWorker", "log_level" => "debug"}
job = {"class" => "FooJob", "log_level" => "debug"}
assert @logger.info?
jl.prepare(job) do
@ -85,7 +85,7 @@ describe "Job logger" do
@cfg.logger = @logger
jl = Sidekiq::JobLogger.new(@logger)
job = {"class" => "FooWorker", "log_level" => "debug"}
job = {"class" => "FooJob", "log_level" => "debug"}
assert @logger.info?
refute @logger.debug?

View file

@ -80,7 +80,7 @@ describe "logger" do
Sidekiq::Logger::Formatters::JSON]
formats.each do |fmt|
@logger.formatter = fmt.new
Sidekiq::Context.with(class: "HaikuWorker", bid: "b-1234abc") do
Sidekiq::Context.with(class: "HaikuJob", bid: "b-1234abc") do
@logger.info("hello context")
end
assert_match(/INFO/, @output.string)
@ -94,7 +94,7 @@ describe "logger" do
@logger.formatter = Sidekiq::Logger::Formatters::JSON.new
@logger.debug("boom")
Sidekiq::Context.with(class: "HaikuWorker", jid: "1234abc") do
Sidekiq::Context.with(class: "HaikuJob", jid: "1234abc") do
@logger.info("json format")
end
a, b = @output.string.lines

View file

@ -18,11 +18,11 @@ class CustomMiddleware
end
end
class CustomWorker
class CustomJob
$recorder = []
include Sidekiq::Job
def perform(recorder)
$recorder << ["work_performed"]
$recorder << ["job_performed"]
end
end
@ -90,7 +90,7 @@ describe Sidekiq::Middleware do
end
it "executes middleware in the proper order" do
msg = Sidekiq.dump_json({"class" => CustomWorker.to_s, "args" => [$recorder]})
msg = Sidekiq.dump_json({"class" => CustomJob.to_s, "args" => [$recorder]})
@config.server_middleware do |chain|
# should only add once, second should replace the first
@ -101,7 +101,7 @@ describe Sidekiq::Middleware do
processor = Sidekiq::Processor.new(@config.default_capsule) { |pr, ex| }
processor.process(Sidekiq::BasicFetch::UnitOfWork.new("queue:default", msg))
assert_equal %w[2 before 3 before 1 before work_performed 1 after 3 after 2 after], $recorder.flatten
assert_equal %w[2 before 3 before 1 before job_performed 1 after 3 after 2 after], $recorder.flatten
end
it "correctly replaces middleware when using middleware with options in the initializer" do

View file

@ -9,7 +9,7 @@ require "sidekiq/processor"
TestProcessorException = Class.new(StandardError)
TEST_PROC_EXCEPTION = TestProcessorException.new("kerboom!")
class MockWorker
class MockJob
include Sidekiq::Job
def perform(args)
raise TEST_PROC_EXCEPTION if args.to_s == "boom"
@ -71,7 +71,7 @@ describe Sidekiq::Processor do
end
it "processes as expected" do
msg = Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => ["myarg"]})
msg = Sidekiq.dump_json({"class" => MockJob.to_s, "args" => ["myarg"]})
@processor.process(work(msg))
assert_equal 1, $invokes
end
@ -83,7 +83,7 @@ describe Sidekiq::Processor do
end
it "re-raises exceptions after handling" do
msg = Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => ["boom"]})
msg = Sidekiq.dump_json({"class" => MockJob.to_s, "args" => ["boom"]})
re_raise = false
begin
@ -98,7 +98,7 @@ describe Sidekiq::Processor do
end
it "does not modify original arguments" do
msg = {"class" => MockWorker.to_s, "args" => [["myarg"]]}
msg = {"class" => MockJob.to_s, "args" => [["myarg"]]}
msgstr = Sidekiq.dump_json(msg)
@processor.process(work(msgstr))
assert_equal [["myarg"]], msg["args"]
@ -123,7 +123,7 @@ describe Sidekiq::Processor do
it "handles invalid JSON" do
ds = Sidekiq::DeadSet.new
ds.clear
job_hash = {"class" => MockWorker.to_s, "args" => ["boom"]}
job_hash = {"class" => MockJob.to_s, "args" => ["boom"]}
msg = Sidekiq.dump_json(job_hash)
job = work(msg[0...-2])
ds = Sidekiq::DeadSet.new
@ -137,7 +137,7 @@ describe Sidekiq::Processor do
end
it "handles exceptions raised by the job" do
job_hash = {"class" => MockWorker.to_s, "args" => ["boom"], "jid" => "123987123"}
job_hash = {"class" => MockJob.to_s, "args" => ["boom"], "jid" => "123987123"}
msg = Sidekiq.dump_json(job_hash)
job = work(msg)
begin
@ -151,7 +151,7 @@ describe Sidekiq::Processor do
end
it "handles exceptions raised by the reloader" do
job_hash = {"class" => MockWorker.to_s, "args" => ["boom"]}
job_hash = {"class" => MockJob.to_s, "args" => ["boom"]}
msg = Sidekiq.dump_json(job_hash)
@processor.instance_variable_set(:@reloader, proc { raise TEST_PROC_EXCEPTION })
job = work(msg)
@ -187,7 +187,7 @@ describe Sidekiq::Processor do
before do
work.expect(:queue_name, "queue:default")
work.expect(:job, Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => worker_args}))
work.expect(:job, Sidekiq.dump_json({"class" => MockJob.to_s, "args" => worker_args}))
@config.server_middleware do |chain|
chain.prepend ExceptionRaisingMiddleware, raise_before_yield, raise_after_yield, skip_job
end
@ -268,7 +268,7 @@ describe Sidekiq::Processor do
describe "middleware mutates the job args and then fails" do
it "requeues with original arguments" do
job_data = {"class" => MockWorker.to_s, "args" => ["boom"]}
job_data = {"class" => MockJob.to_s, "args" => ["boom"]}
retry_stub_called = false
retry_stub = lambda { |worker, msg, queue, exception|
@ -297,7 +297,7 @@ describe Sidekiq::Processor do
end
it "is called instead default Sidekiq::JobLogger" do
msg = Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => ["myarg"]})
msg = Sidekiq.dump_json({"class" => MockJob.to_s, "args" => ["myarg"]})
@processor.process(work(msg))
assert_equal 2, $invokes
end
@ -312,7 +312,7 @@ describe Sidekiq::Processor do
let(:processed_today_key) { "stat:processed:#{Time.now.utc.strftime("%Y-%m-%d")}" }
def successful_job
msg = Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => ["myarg"]})
msg = Sidekiq.dump_json({"class" => MockJob.to_s, "args" => ["myarg"]})
@processor.process(work(msg))
end
@ -330,7 +330,7 @@ describe Sidekiq::Processor do
end
def successful_job
msg = Sidekiq.dump_json({"class" => MockWorker.to_s, "args" => ["myarg"]})
msg = Sidekiq.dump_json({"class" => MockJob.to_s, "args" => ["myarg"]})
@processor.process(work(msg))
end

View file

@ -11,7 +11,7 @@ describe "ActiveJob" do
# need to force this since we aren't booting a Rails app
ActiveJob::Base.queue_adapter = :sidekiq
ActiveJob::Base.logger = nil
ActiveJob::Base.send(:include, ::Sidekiq::Worker::Options) unless ActiveJob::Base.respond_to?(:sidekiq_options)
ActiveJob::Base.send(:include, ::Sidekiq::Job::Options) unless ActiveJob::Base.respond_to?(:sidekiq_options)
end
it "does not allow Sidekiq::Job in AJ::Base classes" do

View file

@ -6,7 +6,7 @@ require "sidekiq/job_retry"
require "sidekiq/api"
require "sidekiq/capsule"
class SomeWorker
class SomeJob
include Sidekiq::Job
end
@ -16,7 +16,7 @@ class BadErrorMessage < StandardError
end
end
class CustomWorkerWithoutException
class CustomJobWithoutException
include Sidekiq::Job
sidekiq_retry_in do |count|
@ -27,7 +27,7 @@ end
class SpecialError < StandardError
end
class CustomWorkerWithException
class CustomJobWithException
include Sidekiq::Job
sidekiq_retry_in do |count, exception|
@ -46,7 +46,7 @@ class CustomWorkerWithException
end
end
class ErrorWorker
class ErrorJob
include Sidekiq::Job
sidekiq_retry_in do |count|
@ -61,7 +61,7 @@ describe Sidekiq::JobRetry do
describe "middleware" do
def worker
@worker ||= SomeWorker.new
@worker ||= SomeJob.new
end
def handler
@ -296,45 +296,45 @@ describe Sidekiq::JobRetry do
end
it "retries with a custom delay and exception 1" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithException, 2, ArgumentError.new)
strat, count = handler.__send__(:delay_for, CustomJobWithException, 2, ArgumentError.new)
assert_equal :default, strat
assert_includes 4..35, count
end
it "supports discard" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithException, 2, Interrupt.new)
strat, count = handler.__send__(:delay_for, CustomJobWithException, 2, Interrupt.new)
assert_equal :discard, strat
assert_nil count
end
it "supports kill" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithException, 2, RuntimeError.new)
strat, count = handler.__send__(:delay_for, CustomJobWithException, 2, RuntimeError.new)
assert_equal :kill, strat
assert_nil count
end
it "retries with a custom delay and exception 2" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithException, 2, StandardError.new)
strat, count = handler.__send__(:delay_for, CustomJobWithException, 2, StandardError.new)
assert_equal :default, strat
assert_includes 4..35, count
end
it "retries with a default delay and exception in case of configured with nil" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithException, 2, SpecialError.new)
strat, count = handler.__send__(:delay_for, CustomJobWithException, 2, SpecialError.new)
assert_equal :default, strat
refute_equal 8, count
refute_equal 4, count
end
it "retries with a custom delay without exception" do
strat, count = handler.__send__(:delay_for, CustomWorkerWithoutException, 2, StandardError.new)
strat, count = handler.__send__(:delay_for, CustomJobWithoutException, 2, StandardError.new)
assert_equal :default, strat
assert_includes 4..35, count
end
it "falls back to the default retry on exception" do
output = capture_logging(@config) do
strat, count = handler.__send__(:delay_for, ErrorWorker, 2, StandardError.new)
strat, count = handler.__send__(:delay_for, ErrorJob, 2, StandardError.new)
assert_equal :default, strat
refute_equal 4, count
end
@ -346,7 +346,7 @@ describe Sidekiq::JobRetry do
ds = Sidekiq::DeadSet.new
assert_equal 0, ds.size
assert_raises Sidekiq::JobRetry::Skip do
handler.local(CustomWorkerWithException, jobstr({"class" => "CustomWorkerWithException"}), "default") do
handler.local(CustomJobWithException, jobstr({"class" => "CustomJobWithException"}), "default") do
raise "oops"
end
end

View file

@ -2,7 +2,7 @@ require_relative "helper"
require "sidekiq/job_retry"
require "sidekiq/capsule"
class NewWorker
class NewJob
include Sidekiq::Job
sidekiq_class_attribute :exhausted_called, :exhausted_job, :exhausted_exception
@ -14,7 +14,7 @@ class NewWorker
end
end
class OldWorker
class OldJob
include Sidekiq::Job
sidekiq_class_attribute :exhausted_called, :exhausted_job, :exhausted_exception
@ -31,7 +31,7 @@ end
describe "sidekiq_retries_exhausted" do
def cleanup
[NewWorker, OldWorker].each do |worker_class|
[NewJob, OldJob].each do |worker_class|
worker_class.exhausted_called = nil
worker_class.exhausted_job = nil
worker_class.exhausted_exception = nil
@ -48,11 +48,11 @@ describe "sidekiq_retries_exhausted" do
end
def new_worker
@new_worker ||= NewWorker.new
@new_worker ||= NewJob.new
end
def old_worker
@old_worker ||= OldWorker.new
@old_worker ||= OldJob.new
end
def handler
@ -68,7 +68,7 @@ describe "sidekiq_retries_exhausted" do
# successful
end
refute NewWorker.exhausted_called
refute NewJob.exhausted_called
end
it "does not run exhausted block when job successful on last retry" do
@ -76,7 +76,7 @@ describe "sidekiq_retries_exhausted" do
# successful
end
refute NewWorker.exhausted_called
refute NewJob.exhausted_called
end
it "does not run exhausted block when retries not exhausted yet" do
@ -86,7 +86,7 @@ describe "sidekiq_retries_exhausted" do
end
end
refute NewWorker.exhausted_called
refute NewJob.exhausted_called
end
it "runs exhausted block when retries exhausted" do
@ -96,7 +96,7 @@ describe "sidekiq_retries_exhausted" do
end
end
assert NewWorker.exhausted_called
assert NewJob.exhausted_called
end
it "passes job and exception to retries exhausted block" do

View file

@ -4,7 +4,7 @@ require_relative "helper"
require "sidekiq/scheduled"
require "sidekiq/api"
class ScheduledWorker
class ScheduledJob
include Sidekiq::Job
def perform(x)
end
@ -20,12 +20,12 @@ describe Sidekiq::Scheduled do
describe "poller" do
before do
@config = reset!
@error_1 = {"class" => ScheduledWorker.name, "args" => [0], "queue" => "queue_1"}
@error_2 = {"class" => ScheduledWorker.name, "args" => [1], "queue" => "queue_2"}
@error_3 = {"class" => ScheduledWorker.name, "args" => [2], "queue" => "queue_3"}
@future_1 = {"class" => ScheduledWorker.name, "args" => [3], "queue" => "queue_4"}
@future_2 = {"class" => ScheduledWorker.name, "args" => [4], "queue" => "queue_5"}
@future_3 = {"class" => ScheduledWorker.name, "args" => [5], "queue" => "queue_6"}
@error_1 = {"class" => ScheduledJob.name, "args" => [0], "queue" => "queue_1"}
@error_2 = {"class" => ScheduledJob.name, "args" => [1], "queue" => "queue_2"}
@error_3 = {"class" => ScheduledJob.name, "args" => [2], "queue" => "queue_3"}
@future_1 = {"class" => ScheduledJob.name, "args" => [3], "queue" => "queue_4"}
@future_2 = {"class" => ScheduledJob.name, "args" => [4], "queue" => "queue_5"}
@future_3 = {"class" => ScheduledJob.name, "args" => [5], "queue" => "queue_6"}
@retry = Sidekiq::RetrySet.new
@scheduled = Sidekiq::ScheduledSet.new

View file

@ -4,7 +4,7 @@ require_relative "helper"
require "sidekiq/api"
require "active_support/core_ext/integer/time"
class SomeScheduledWorker
class SomeScheduledJob
include Sidekiq::Job
sidekiq_options queue: :custom_queue
def perform(x)
@ -26,25 +26,25 @@ describe "job scheduling" do
assert_equal 0, ss.size
assert SomeScheduledWorker.perform_in(600, "mike")
assert SomeScheduledJob.perform_in(600, "mike")
assert_equal 1, ss.size
assert SomeScheduledWorker.perform_in(1.month, "mike")
assert SomeScheduledJob.perform_in(1.month, "mike")
assert_equal 2, ss.size
assert SomeScheduledWorker.perform_in(5.days.from_now, "mike")
assert SomeScheduledJob.perform_in(5.days.from_now, "mike")
assert_equal 3, ss.size
q = Sidekiq::Queue.new("custom_queue")
qs = q.size
assert SomeScheduledWorker.perform_in(-300, "mike")
assert SomeScheduledJob.perform_in(-300, "mike")
assert_equal 3, ss.size
assert_equal qs + 1, q.size
assert Sidekiq::Client.push_bulk("class" => SomeScheduledWorker, "args" => [["mike"], ["mike"]], "at" => Time.now.to_f + 100)
assert Sidekiq::Client.push_bulk("class" => SomeScheduledJob, "args" => [["mike"], ["mike"]], "at" => Time.now.to_f + 100)
assert_equal 5, ss.size
assert SomeScheduledWorker.perform_in(TimeDuck.new, "samwise")
assert SomeScheduledJob.perform_in(TimeDuck.new, "samwise")
assert_equal 6, ss.size
end
@ -52,7 +52,7 @@ describe "job scheduling" do
ss = Sidekiq::ScheduledSet.new
ss.clear
assert SomeScheduledWorker.perform_in(1.month, "mike")
assert SomeScheduledJob.perform_in(1.month, "mike")
job = ss.first
assert job["created_at"]
refute job["enqueued_at"]
@ -62,7 +62,7 @@ describe "job scheduling" do
ss = Sidekiq::ScheduledSet.new
ss.clear
assert Sidekiq::Client.push_bulk("class" => SomeScheduledWorker, "args" => [["mike"], ["mike"]], "at" => Time.now.to_f + 100)
assert Sidekiq::Client.push_bulk("class" => SomeScheduledJob, "args" => [["mike"], ["mike"]], "at" => Time.now.to_f + 100)
job = ss.first
assert job["created_at"]
refute job["enqueued_at"]

View file

@ -4,7 +4,7 @@ require_relative "helper"
require "sidekiq"
require "sidekiq/api"
class ShardWorker
class ShardJob
include Sidekiq::Job
end
@ -31,8 +31,8 @@ describe "Sharding" do
Sidekiq::Client.via(@sh1) do
assert_equal 0, q.size
assert_equal 0, ss.size
ShardWorker.perform_async
ShardWorker.perform_in(3)
ShardJob.perform_async
ShardJob.perform_in(3)
assert_equal 1, q.size
assert_equal 1, ss.size
end
@ -43,8 +43,8 @@ describe "Sharding" do
end
# redirect jobs explicitly with pool attribute
ShardWorker.set(pool: @sh2).perform_async
ShardWorker.set(pool: @sh2).perform_in(4)
ShardJob.set(pool: @sh2).perform_async
ShardJob.set(pool: @sh2).perform_in(4)
Sidekiq::Client.via(@sh2) do
assert_equal 1, q.size
assert_equal 1, ss.size

View file

@ -2,7 +2,7 @@
require_relative "helper"
class AttributeWorker
class AttributeJob
include Sidekiq::Job
sidekiq_class_attribute :count
self.count = 0
@ -114,16 +114,16 @@ describe "Sidekiq::Testing" do
begin
Sidekiq::Testing.fake! do
AttributeWorker.perform_async
assert_equal 0, AttributeWorker.count
AttributeJob.perform_async
assert_equal 0, AttributeJob.count
end
AttributeWorker.perform_one
assert_equal 1, AttributeWorker.count
AttributeJob.perform_one
assert_equal 1, AttributeJob.count
Sidekiq::Testing.inline! do
AttributeWorker.perform_async
assert_equal 2, AttributeWorker.count
AttributeJob.perform_async
assert_equal 2, AttributeJob.count
end
ensure
Sidekiq::Testing.server_middleware.clear

View file

@ -3,28 +3,28 @@
require_relative "helper"
class PerformError < RuntimeError; end
class DirectWorker
class DirectJob
include Sidekiq::Job
def perform(a, b)
a + b
end
end
class EnqueuedWorker
class EnqueuedJob
include Sidekiq::Job
def perform(a, b)
a + b
end
end
class StoredWorker
class StoredJob
include Sidekiq::Job
def perform(error)
raise PerformError if error
end
end
class SpecificJidWorker
class SpecificJidJob
include Sidekiq::Job
sidekiq_class_attribute :count
self.count = 0
@ -34,7 +34,7 @@ class SpecificJidWorker
end
end
class FirstWorker
class FirstJob
include Sidekiq::Job
sidekiq_class_attribute :count
self.count = 0
@ -43,7 +43,7 @@ class FirstWorker
end
end
class SecondWorker
class SecondJob
include Sidekiq::Job
sidekiq_class_attribute :count
self.count = 0
@ -52,23 +52,23 @@ class SecondWorker
end
end
class ThirdWorker
class ThirdJob
include Sidekiq::Job
sidekiq_class_attribute :count
def perform
FirstWorker.perform_async
SecondWorker.perform_async
FirstJob.perform_async
SecondJob.perform_async
end
end
class QueueWorker
class QueueJob
include Sidekiq::Job
def perform(a, b)
a + b
end
end
class AltQueueWorker
class AltQueueJob
include Sidekiq::Job
sidekiq_options queue: :alt
def perform(a, b)
@ -80,8 +80,8 @@ describe "Sidekiq::Testing.fake" do
before do
require "sidekiq/testing"
Sidekiq::Testing.fake!
EnqueuedWorker.jobs.clear
DirectWorker.jobs.clear
EnqueuedJob.jobs.clear
DirectJob.jobs.clear
end
after do
@ -90,189 +90,189 @@ describe "Sidekiq::Testing.fake" do
end
it "stubs the async call" do
assert_equal 0, DirectWorker.jobs.size
assert DirectWorker.perform_async(1, 2)
assert_in_delta Time.now.to_f, DirectWorker.jobs.last["enqueued_at"], 0.1
assert_equal 1, DirectWorker.jobs.size
assert DirectWorker.perform_in(10, 1, 2)
refute DirectWorker.jobs.last["enqueued_at"]
assert_equal 2, DirectWorker.jobs.size
assert DirectWorker.perform_at(10, 1, 2)
assert_equal 3, DirectWorker.jobs.size
assert_equal 0, DirectJob.jobs.size
assert DirectJob.perform_async(1, 2)
assert_in_delta Time.now.to_f, DirectJob.jobs.last["enqueued_at"], 0.1
assert_equal 1, DirectJob.jobs.size
assert DirectJob.perform_in(10, 1, 2)
refute DirectJob.jobs.last["enqueued_at"]
assert_equal 2, DirectJob.jobs.size
assert DirectJob.perform_at(10, 1, 2)
assert_equal 3, DirectJob.jobs.size
soon = (Time.now.to_f + 10)
assert_in_delta soon, DirectWorker.jobs.last["at"], 0.1
assert_in_delta soon, DirectJob.jobs.last["at"], 0.1
end
it "stubs the enqueue call" do
assert_equal 0, EnqueuedWorker.jobs.size
assert Sidekiq::Client.enqueue(EnqueuedWorker, 1, 2)
assert_equal 1, EnqueuedWorker.jobs.size
assert_equal 0, EnqueuedJob.jobs.size
assert Sidekiq::Client.enqueue(EnqueuedJob, 1, 2)
assert_equal 1, EnqueuedJob.jobs.size
end
it "stubs the enqueue_to call" do
assert_equal 0, EnqueuedWorker.jobs.size
assert Sidekiq::Client.enqueue_to("someq", EnqueuedWorker, 1, 2)
assert_equal 0, EnqueuedJob.jobs.size
assert Sidekiq::Client.enqueue_to("someq", EnqueuedJob, 1, 2)
assert_equal 1, Sidekiq::Queues["someq"].size
end
it "executes all stored jobs" do
assert StoredWorker.perform_async(false)
assert StoredWorker.perform_async(true)
assert StoredJob.perform_async(false)
assert StoredJob.perform_async(true)
assert_equal 2, StoredWorker.jobs.size
assert_equal 2, StoredJob.jobs.size
assert_raises PerformError do
StoredWorker.drain
StoredJob.drain
end
assert_equal 0, StoredWorker.jobs.size
assert_equal 0, StoredJob.jobs.size
end
it "execute only jobs with assigned JID" do
4.times do |i|
jid = SpecificJidWorker.perform_async(nil)
SpecificJidWorker.jobs[-1]["args"] = if i % 2 == 0
jid = SpecificJidJob.perform_async(nil)
SpecificJidJob.jobs[-1]["args"] = if i % 2 == 0
["wrong_jid"]
else
[jid]
end
end
SpecificJidWorker.perform_one
assert_equal 0, SpecificJidWorker.count
SpecificJidJob.perform_one
assert_equal 0, SpecificJidJob.count
SpecificJidWorker.perform_one
assert_equal 1, SpecificJidWorker.count
SpecificJidJob.perform_one
assert_equal 1, SpecificJidJob.count
SpecificJidWorker.drain
assert_equal 2, SpecificJidWorker.count
SpecificJidJob.drain
assert_equal 2, SpecificJidJob.count
end
it "round trip serializes the job arguments" do
assert_raises ArgumentError do
StoredWorker.perform_async(:mike)
StoredJob.perform_async(:mike)
end
Sidekiq.strict_args!(false)
assert StoredWorker.perform_async(:mike)
job = StoredWorker.jobs.first
assert StoredJob.perform_async(:mike)
job = StoredJob.jobs.first
assert_equal "mike", job["args"].first
StoredWorker.clear
StoredJob.clear
ensure
Sidekiq.strict_args!(:raise)
end
it "perform_one runs only one job" do
DirectWorker.perform_async(1, 2)
DirectWorker.perform_async(3, 4)
assert_equal 2, DirectWorker.jobs.size
DirectJob.perform_async(1, 2)
DirectJob.perform_async(3, 4)
assert_equal 2, DirectJob.jobs.size
DirectWorker.perform_one
assert_equal 1, DirectWorker.jobs.size
DirectJob.perform_one
assert_equal 1, DirectJob.jobs.size
DirectWorker.clear
DirectJob.clear
end
it "perform_one raise error upon empty queue" do
DirectWorker.clear
DirectJob.clear
assert_raises Sidekiq::EmptyQueueError do
DirectWorker.perform_one
DirectJob.perform_one
end
end
it "clears jobs across all workers" do
Sidekiq::Job.jobs.clear
FirstWorker.count = 0
SecondWorker.count = 0
FirstJob.count = 0
SecondJob.count = 0
assert_equal 0, FirstWorker.jobs.size
assert_equal 0, SecondWorker.jobs.size
assert_equal 0, FirstJob.jobs.size
assert_equal 0, SecondJob.jobs.size
FirstWorker.perform_async
SecondWorker.perform_async
FirstJob.perform_async
SecondJob.perform_async
assert_equal 1, FirstWorker.jobs.size
assert_equal 1, SecondWorker.jobs.size
assert_equal 1, FirstJob.jobs.size
assert_equal 1, SecondJob.jobs.size
Sidekiq::Job.clear_all
assert_equal 0, FirstWorker.jobs.size
assert_equal 0, SecondWorker.jobs.size
assert_equal 0, FirstJob.jobs.size
assert_equal 0, SecondJob.jobs.size
assert_equal 0, FirstWorker.count
assert_equal 0, SecondWorker.count
assert_equal 0, FirstJob.count
assert_equal 0, SecondJob.count
end
it "drains jobs across all workers" do
Sidekiq::Job.jobs.clear
FirstWorker.count = 0
SecondWorker.count = 0
FirstJob.count = 0
SecondJob.count = 0
assert_equal 0, FirstWorker.jobs.size
assert_equal 0, SecondWorker.jobs.size
assert_equal 0, FirstJob.jobs.size
assert_equal 0, SecondJob.jobs.size
assert_equal 0, FirstWorker.count
assert_equal 0, SecondWorker.count
assert_equal 0, FirstJob.count
assert_equal 0, SecondJob.count
FirstWorker.perform_async
SecondWorker.perform_async
FirstJob.perform_async
SecondJob.perform_async
assert_equal 1, FirstWorker.jobs.size
assert_equal 1, SecondWorker.jobs.size
assert_equal 1, FirstJob.jobs.size
assert_equal 1, SecondJob.jobs.size
Sidekiq::Job.drain_all
assert_equal 0, FirstWorker.jobs.size
assert_equal 0, SecondWorker.jobs.size
assert_equal 0, FirstJob.jobs.size
assert_equal 0, SecondJob.jobs.size
assert_equal 1, FirstWorker.count
assert_equal 1, SecondWorker.count
assert_equal 1, FirstJob.count
assert_equal 1, SecondJob.count
end
it "clears the jobs of workers having their queue name defined as a symbol" do
assert_equal Symbol, AltQueueWorker.sidekiq_options["queue"].class
assert_equal Symbol, AltQueueJob.sidekiq_options["queue"].class
AltQueueWorker.perform_async
assert_equal 1, AltQueueWorker.jobs.size
assert_equal 1, Sidekiq::Queues[AltQueueWorker.sidekiq_options["queue"].to_s].size
AltQueueJob.perform_async
assert_equal 1, AltQueueJob.jobs.size
assert_equal 1, Sidekiq::Queues[AltQueueJob.sidekiq_options["queue"].to_s].size
AltQueueWorker.clear
assert_equal 0, AltQueueWorker.jobs.size
assert_equal 0, Sidekiq::Queues[AltQueueWorker.sidekiq_options["queue"].to_s].size
AltQueueJob.clear
assert_equal 0, AltQueueJob.jobs.size
assert_equal 0, Sidekiq::Queues[AltQueueJob.sidekiq_options["queue"].to_s].size
end
it "drains jobs across all workers even when workers create new jobs" do
Sidekiq::Job.jobs.clear
FirstWorker.count = 0
SecondWorker.count = 0
FirstJob.count = 0
SecondJob.count = 0
assert_equal 0, ThirdWorker.jobs.size
assert_equal 0, ThirdJob.jobs.size
assert_equal 0, FirstWorker.count
assert_equal 0, SecondWorker.count
assert_equal 0, FirstJob.count
assert_equal 0, SecondJob.count
ThirdWorker.perform_async
ThirdJob.perform_async
assert_equal 1, ThirdWorker.jobs.size
assert_equal 1, ThirdJob.jobs.size
Sidekiq::Job.drain_all
assert_equal 0, ThirdWorker.jobs.size
assert_equal 0, ThirdJob.jobs.size
assert_equal 1, FirstWorker.count
assert_equal 1, SecondWorker.count
assert_equal 1, FirstJob.count
assert_equal 1, SecondJob.count
end
it "drains jobs of workers with symbolized queue names" do
Sidekiq::Job.jobs.clear
AltQueueWorker.perform_async(5, 6)
assert_equal 1, AltQueueWorker.jobs.size
AltQueueJob.perform_async(5, 6)
assert_equal 1, AltQueueJob.jobs.size
Sidekiq::Job.drain_all
assert_equal 0, AltQueueWorker.jobs.size
assert_equal 0, AltQueueJob.jobs.size
end
it "can execute a job" do
DirectWorker.execute_job(DirectWorker.new, [2, 3])
DirectJob.execute_job(DirectJob.new, [2, 3])
end
describe "queue testing" do
@ -289,9 +289,9 @@ describe "Sidekiq::Testing.fake" do
it "finds enqueued jobs" do
assert_equal 0, Sidekiq::Queues["default"].size
QueueWorker.perform_async(1, 2)
QueueWorker.perform_async(1, 2)
AltQueueWorker.perform_async(1, 2)
QueueJob.perform_async(1, 2)
QueueJob.perform_async(1, 2)
AltQueueJob.perform_async(1, 2)
assert_equal 2, Sidekiq::Queues["default"].size
assert_equal [1, 2], Sidekiq::Queues["default"].first["args"]
@ -302,21 +302,21 @@ describe "Sidekiq::Testing.fake" do
it "clears out all queues" do
assert_equal 0, Sidekiq::Queues["default"].size
QueueWorker.perform_async(1, 2)
QueueWorker.perform_async(1, 2)
AltQueueWorker.perform_async(1, 2)
QueueJob.perform_async(1, 2)
QueueJob.perform_async(1, 2)
AltQueueJob.perform_async(1, 2)
Sidekiq::Queues.clear_all
assert_equal 0, Sidekiq::Queues["default"].size
assert_equal 0, QueueWorker.jobs.size
assert_equal 0, QueueJob.jobs.size
assert_equal 0, Sidekiq::Queues["alt"].size
assert_equal 0, AltQueueWorker.jobs.size
assert_equal 0, AltQueueJob.jobs.size
end
it "finds jobs enqueued by client" do
Sidekiq::Client.push(
"class" => "NonExistentWorker",
"class" => "NonExistentJob",
"queue" => "missing",
"args" => [1]
)
@ -327,13 +327,13 @@ describe "Sidekiq::Testing.fake" do
it "respects underlying array changes" do
# Rspec expect change() syntax saves a reference to
# an underlying array. When the array containing jobs is
# derived, Rspec test using `change(QueueWorker.jobs, :size).by(1)`
# derived, Rspec test using `change(QueueJob.jobs, :size).by(1)`
# won't pass. This attempts to recreate that scenario
# by saving a reference to the jobs array and ensuring
# it changes properly on enqueueing
jobs = QueueWorker.jobs
jobs = QueueJob.jobs
assert_equal 0, jobs.size
QueueWorker.perform_async(1, 2)
QueueJob.perform_async(1, 2)
assert_equal 1, jobs.size
end
end

View file

@ -6,7 +6,7 @@ class InlineError < RuntimeError; end
class ParameterIsNotString < RuntimeError; end
class InlineWorker
class InlineJob
include Sidekiq::Job
def perform(pass)
raise ArgumentError, "no jid" unless jid
@@ -14,7 +14,7 @@ class InlineWorker
end
end
class InlineWorkerWithTimeParam
class InlineJobWithTimeParam
include Sidekiq::Job
def perform(time)
raise ParameterIsNotString unless time.is_a?(String) || time.is_a?(Numeric)
@@ -32,30 +32,30 @@ describe "Sidekiq::Testing.inline" do
end
it "stubs the async call when in testing mode" do
assert InlineWorker.perform_async(true)
assert InlineJob.perform_async(true)
assert_raises InlineError do
InlineWorker.perform_async(false)
InlineJob.perform_async(false)
end
end
it "stubs the enqueue call when in testing mode" do
assert Sidekiq::Client.enqueue(InlineWorker, true)
assert Sidekiq::Client.enqueue(InlineJob, true)
assert_raises InlineError do
Sidekiq::Client.enqueue(InlineWorker, false)
Sidekiq::Client.enqueue(InlineJob, false)
end
end
it "stubs the push_bulk call when in testing mode" do
assert Sidekiq::Client.push_bulk({"class" => InlineWorker, "args" => [[true], [true]]})
assert Sidekiq::Client.push_bulk({"class" => InlineJob, "args" => [[true], [true]]})
assert_raises InlineError do
Sidekiq::Client.push_bulk({"class" => InlineWorker, "args" => [[true], [false]]})
Sidekiq::Client.push_bulk({"class" => InlineJob, "args" => [[true], [false]]})
end
end
it "should relay parameters through json" do
assert Sidekiq::Client.enqueue(InlineWorkerWithTimeParam, Time.now.to_f)
assert Sidekiq::Client.enqueue(InlineJobWithTimeParam, Time.now.to_f)
end
end

View file

@@ -4,7 +4,7 @@ require_relative "helper"
require "sidekiq/web"
require "rack/test"
class WebWorker
class WebJob
include Sidekiq::Job
def perform(a, b)
@@ -65,7 +65,7 @@ describe Sidekiq::Web do
conn.sadd("processes", ["foo:1234"])
conn.hmset("foo:1234", "info", Sidekiq.dump_json("hostname" => "foo", "started_at" => Time.now.to_f, "queues" => [], "concurrency" => 10), "at", Time.now.to_f, "busy", 4)
identity = "foo:1234:work"
hash = {queue: "critical", payload: {"class" => WebWorker.name, "args" => [1, "abc"]}, run_at: Time.now.to_i}
hash = {queue: "critical", payload: {"class" => WebJob.name, "args" => [1, "abc"]}, run_at: Time.now.to_i}
conn.hmset(identity, 1001, Sidekiq.dump_json(hash))
end
assert_equal ["1001"], Sidekiq::WorkSet.new.map { |pid, tid, data| tid }
@@ -74,7 +74,7 @@ describe Sidekiq::Web do
assert_equal 200, last_response.status
assert_match(/status-active/, last_response.body)
assert_match(/critical/, last_response.body)
assert_match(/WebWorker/, last_response.body)
assert_match(/WebJob/, last_response.body)
end
it "can quiet a process" do
@@ -99,24 +99,24 @@ describe Sidekiq::Web do
end
it "can display queues" do
assert Sidekiq::Client.push("queue" => :foo, "class" => WebWorker, "args" => [1, 3])
assert Sidekiq::Client.push("queue" => :foo, "class" => WebJob, "args" => [1, 3])
get "/queues"
assert_equal 200, last_response.status
assert_match(/foo/, last_response.body)
refute_match(/HardWorker/, last_response.body)
refute_match(/HardJob/, last_response.body)
assert_match(/0.0/, last_response.body)
refute_match(/datetime/, last_response.body)
Sidekiq::Queue.new("foo").clear
Time.stub(:now, Time.now - 65) do
assert Sidekiq::Client.push("queue" => :foo, "class" => WebWorker, "args" => [1, 3])
assert Sidekiq::Client.push("queue" => :foo, "class" => WebJob, "args" => [1, 3])
end
get "/queues"
assert_equal 200, last_response.status
assert_match(/foo/, last_response.body)
refute_match(/HardWorker/, last_response.body)
refute_match(/HardJob/, last_response.body)
assert_match(/65.0/, last_response.body)
assert_match(/datetime/, last_response.body)
end
@@ -261,14 +261,14 @@ describe Sidekiq::Web do
get "/retries"
assert_equal 200, last_response.status
assert_match(/found/, last_response.body)
refute_match(/HardWorker/, last_response.body)
refute_match(/HardJob/, last_response.body)
add_retry
get "/retries"
assert_equal 200, last_response.status
refute_match(/found/, last_response.body)
assert_match(/HardWorker/, last_response.body)
assert_match(/HardJob/, last_response.body)
end
it "can display a single retry" do
@@ -277,7 +277,7 @@ describe Sidekiq::Web do
assert_equal 302, last_response.status
get "/retries/#{job_params(*params)}"
assert_equal 200, last_response.status
assert_match(/HardWorker/, last_response.body)
assert_match(/HardJob/, last_response.body)
end
it "handles missing retry" do
@@ -331,14 +331,14 @@ describe Sidekiq::Web do
get "/scheduled"
assert_equal 200, last_response.status
assert_match(/found/, last_response.body)
refute_match(/HardWorker/, last_response.body)
refute_match(/HardJob/, last_response.body)
add_scheduled
get "/scheduled"
assert_equal 200, last_response.status
refute_match(/found/, last_response.body)
assert_match(/HardWorker/, last_response.body)
assert_match(/HardJob/, last_response.body)
end
it "can display a single scheduled job" do
@@ -347,7 +347,7 @@ describe Sidekiq::Web do
assert_equal 302, last_response.status
get "/scheduled/#{job_params(*params)}"
assert_equal 200, last_response.status
assert_match(/HardWorker/, last_response.body)
assert_match(/HardJob/, last_response.body)
end
it "can display a single scheduled job tags" do
@@ -431,7 +431,7 @@ describe Sidekiq::Web do
add_xss_retry
get "/retries"
assert_equal 200, last_response.status
assert_match(/FailWorker/, last_response.body)
assert_match(/FailJob/, last_response.body)
assert last_response.body.include?("fail message: &lt;a&gt;hello&lt;&#x2F;a&gt;")
assert !last_response.body.include?("fail message: <a>hello</a>")
@@ -445,14 +445,14 @@ describe Sidekiq::Web do
conn.sadd("processes", [pro])
conn.hmset(pro, "info", Sidekiq.dump_json("started_at" => Time.now.to_f, "labels" => ["frumduz"], "queues" => [], "concurrency" => 10), "busy", 1, "beat", Time.now.to_f)
identity = "#{pro}:work"
hash = {queue: "critical", payload: {"class" => "FailWorker", "args" => ["<a>hello</a>"]}, run_at: Time.now.to_i}
hash = {queue: "critical", payload: {"class" => "FailJob", "args" => ["<a>hello</a>"]}, run_at: Time.now.to_i}
conn.hmset(identity, 100001, Sidekiq.dump_json(hash))
conn.incr("busy")
end
get "/busy"
assert_equal 200, last_response.status
assert_match(/FailWorker/, last_response.body)
assert_match(/FailJob/, last_response.body)
assert_match(/frumduz/, last_response.body)
assert last_response.body.include?("&lt;a&gt;hello&lt;&#x2F;a&gt;")
assert !last_response.body.include?("<a>hello</a>")
@@ -636,7 +636,7 @@ describe Sidekiq::Web do
def add_scheduled
score = Time.now.to_f
msg = {"class" => "HardWorker",
msg = {"class" => "HardJob",
"args" => ["bob", 1, Time.now.to_f],
"jid" => SecureRandom.hex(12),
"tags" => ["tag1", "tag2"]}
@@ -647,7 +647,7 @@ describe Sidekiq::Web do
end
def add_retry
msg = {"class" => "HardWorker",
msg = {"class" => "HardJob",
"args" => ["bob", 1, Time.now.to_f],
"queue" => "default",
"error_message" => "Some fake message",
@@ -664,7 +664,7 @@ describe Sidekiq::Web do
end
def add_dead(jid = SecureRandom.hex(12))
msg = {"class" => "HardWorker",
msg = {"class" => "HardJob",
"args" => ["bob", 1, Time.now.to_f],
"queue" => "foo",
"error_message" => "Some fake message",
@@ -689,7 +689,7 @@ describe Sidekiq::Web do
end
def add_xss_retry(job_id = SecureRandom.hex(12))
msg = {"class" => "FailWorker",
msg = {"class" => "FailJob",
"args" => ["<a>hello</a>"],
"queue" => "foo",
"error_message" => "fail message: <a>hello</a>",
@@ -711,7 +711,7 @@ describe Sidekiq::Web do
def add_worker
key = "#{hostname}:#{$$}"
msg = "{\"queue\":\"default\",\"payload\":{\"retry\":true,\"queue\":\"default\",\"timeout\":20,\"backtrace\":5,\"class\":\"HardWorker\",\"args\":[\"bob\",10,5],\"jid\":\"2b5ad2b016f5e063a1c62872\"},\"run_at\":1361208995}"
msg = "{\"queue\":\"default\",\"payload\":{\"retry\":true,\"queue\":\"default\",\"timeout\":20,\"backtrace\":5,\"class\":\"HardJob\",\"args\":[\"bob\",10,5],\"jid\":\"2b5ad2b016f5e063a1c62872\"},\"run_at\":1361208995}"
@config.redis do |conn|
conn.multi do |transaction|
transaction.sadd("processes", [key])