Mirror of https://github.com/mperham/sidekiq.git (synced 2022-11-09 13:52:34 -05:00)
Redis-rb 4.8.0 compatibility, fixes #5484
commit 09dacfed8f (parent 2f1fd1e6b5)
10 changed files with 28 additions and 30 deletions
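Nearly all of the hunks below follow from two deprecations that redis-rb 4.8.0 began warning about on the way to redis-rb 5.0: set commands such as SADD/SREM want their members wrapped in an array (the bare-member form returns a boolean and triggers a deprecation notice), and blocking reads such as BRPOP want the timeout passed as a keyword/options hash rather than a trailing positional number. A minimal before/after sketch, assuming a plain redis-rb connection; the `redis` variable and key names are illustrative, not part of the commit:

require "redis"

redis = Redis.new

# Old calling style, deprecated by redis-rb 4.8.0:
redis.sadd("queues", "default")           # bare member, boolean return
redis.brpop("queue:default", 2)           # positional timeout

# Style this commit moves Sidekiq to:
redis.sadd("queues", ["default"])         # array of members, integer return
redis.brpop("queue:default", timeout: 2)  # keyword timeout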
Gemfile

@@ -3,6 +3,7 @@ source "https://rubygems.org"
 gemspec
 
 gem "rake"
+gem "redis"
 gem "redis-namespace"
 gem "redis-client"
 gem "rails", "~> 6.0"
@@ -316,7 +316,7 @@ module Sidekiq
   Sidekiq.redis do |conn|
     conn.multi do |transaction|
       transaction.unlink(@rname)
-      transaction.srem("queues", name)
+      transaction.srem("queues", [name])
     end
   end
   true
@@ -231,7 +231,7 @@ module Sidekiq
       entry["enqueued_at"] = now
       Sidekiq.dump_json(entry)
     }
-    conn.sadd("queues", queue)
+    conn.sadd("queues", [queue])
     conn.lpush("queue:#{queue}", to_push)
   end
 end
@@ -33,7 +33,7 @@ module Sidekiq # :nodoc:
   @queues = @config[:queues].map { |q| "queue:#{q}" }
   if @strictly_ordered_queues
     @queues.uniq!
-    @queues << TIMEOUT
+    @queues << {timeout: TIMEOUT}
   end
 end
 
@@ -83,7 +83,7 @@ module Sidekiq # :nodoc:
   else
     permute = @queues.shuffle
     permute.uniq!
-    permute << TIMEOUT
+    permute << {timeout: TIMEOUT}
     permute
   end
 end
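In both fetch hunks the array built by queues_cmd is later splatted into a blocking pop (the fetcher ultimately calls something like conn.brpop(*queues_cmd)), so replacing the bare TIMEOUT integer with {timeout: TIMEOUT} turns the positional timeout into the options-hash form redis-rb 4.8.0 expects. A reduced sketch of the shape, with illustrative queue names and a stand-in timeout value:

TIMEOUT = 2  # illustrative value standing in for Sidekiq::BasicFetch::TIMEOUT

queues = ["queue:default", "queue:low"]
cmd = queues + [{timeout: TIMEOUT}]

# redis-rb treats a trailing hash argument as its options, so this behaves
# like conn.brpop("queue:default", "queue:low", timeout: 2)
Sidekiq.redis { |conn| conn.brpop(*cmd) }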
@@ -86,7 +86,7 @@ module Sidekiq
   # doesn't actually exit, it'll reappear in the Web UI.
   redis do |conn|
     conn.pipelined do |pipeline|
-      pipeline.srem("processes", identity)
+      pipeline.srem("processes", [identity])
       pipeline.unlink("#{identity}:work")
     end
   end
@@ -165,7 +165,7 @@ module Sidekiq
 
   _, exists, _, _, msg = redis { |conn|
     conn.multi { |transaction|
-      transaction.sadd("processes", key)
+      transaction.sadd("processes", [key])
       transaction.exists?(key)
       transaction.hmset(key, "info", to_json,
         "busy", curstate.size,
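The surrounding heartbeat code destructures the replies that multi returns: each command queued inside the transaction contributes one element, in order, which is how the second slot ends up holding the boolean from exists?. A stripped-down sketch of that pattern, with illustrative key names:

added, present = Sidekiq.redis do |conn|
  conn.multi do |transaction|
    transaction.sadd("processes", ["host:1234"])  # reply 0: number of members added
    transaction.exists?("host:1234")              # reply 1: true/false for that key
  end
end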
@@ -76,8 +76,7 @@ describe "API" do
   describe "workers_size" do
     it "retrieves the number of busy workers" do
       Sidekiq.redis do |c|
-        c.sadd("processes", "process_1")
-        c.sadd("processes", "process_2")
+        c.sadd("processes", ["process_1", "process_2"])
         c.hset("process_1", "busy", 1)
         c.hset("process_2", "busy", 2)
       end
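SADD is variadic, so the two seed calls collapse into a single round trip once the members are passed as one array. A tiny illustrative sketch of the resulting behavior:

Sidekiq.redis do |c|
  c.sadd("processes", ["process_1", "process_2"])  # => 2 on a fresh set
  c.smembers("processes")                          # => ["process_1", "process_2"], order not guaranteed
end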
@@ -95,10 +94,10 @@ describe "API" do
   it "returns a hash of queue and size in order" do
     Sidekiq.redis do |conn|
       conn.rpush "queue:foo", "{}"
-      conn.sadd "queues", "foo"
+      conn.sadd "queues", ["foo"]
 
       3.times { conn.rpush "queue:bar", "{}" }
-      conn.sadd "queues", "bar"
+      conn.sadd "queues", ["bar"]
     end
 
     s = Sidekiq::Stats::Queues.new
@@ -113,7 +112,7 @@ describe "API" do
   it "handles latency for good jobs" do
     Sidekiq.redis do |conn|
       conn.rpush "queue:default", "{\"enqueued_at\": #{Time.now.to_f}}"
-      conn.sadd "queues", "default"
+      conn.sadd "queues", ["default"]
     end
     s = Sidekiq::Stats.new
     assert s.default_queue_latency > 0
@@ -124,7 +123,7 @@ describe "API" do
   it "handles latency for incomplete jobs" do
     Sidekiq.redis do |conn|
       conn.rpush "queue:default", "{}"
-      conn.sadd "queues", "default"
+      conn.sadd "queues", ["default"]
     end
     s = Sidekiq::Stats.new
     assert_equal 0, s.default_queue_latency
@@ -135,10 +134,10 @@ describe "API" do
   it "returns total enqueued jobs" do
     Sidekiq.redis do |conn|
       conn.rpush "queue:foo", "{}"
-      conn.sadd "queues", "foo"
+      conn.sadd "queues", ["foo"]
 
       3.times { conn.rpush "queue:bar", "{}" }
-      conn.sadd "queues", "bar"
+      conn.sadd "queues", ["bar"]
     end
 
     s = Sidekiq::Stats.new
@@ -570,9 +569,9 @@ describe "API" do
     time = Time.now.to_f
     Sidekiq.redis do |conn|
       conn.multi do |transaction|
-        transaction.sadd("processes", odata["key"])
+        transaction.sadd("processes", [odata["key"]])
         transaction.hmset(odata["key"], "info", Sidekiq.dump_json(odata), "busy", 10, "beat", time)
-        transaction.sadd("processes", "fake:pid")
+        transaction.sadd("processes", ["fake:pid"])
       end
     end
 
@@ -601,7 +600,7 @@ describe "API" do
     key = "#{hn}:#{$$}"
     pdata = {"pid" => $$, "hostname" => hn, "started_at" => Time.now.to_i}
     Sidekiq.redis do |conn|
-      conn.sadd("processes", key)
+      conn.sadd("processes", [key])
       conn.hmset(key, "info", Sidekiq.dump_json(pdata), "busy", 0, "beat", Time.now.to_f)
     end
 
@@ -653,7 +652,7 @@ describe "API" do
     data = {"pid" => rand(10_000), "hostname" => "app#{rand(1_000)}", "started_at" => Time.now.to_f}
     key = "#{data["hostname"]}:#{data["pid"]}"
     Sidekiq.redis do |conn|
-      conn.sadd("processes", key)
+      conn.sadd("processes", [key])
       conn.hmset(key, "info", Sidekiq.dump_json(data), "busy", 0, "beat", Time.now.to_f)
     end
 
@@ -662,8 +661,7 @@ describe "API" do
     assert_equal 1, ps.to_a.size
 
     Sidekiq.redis do |conn|
-      conn.sadd("processes", "bar:987")
-      conn.sadd("processes", "bar:986")
+      conn.sadd("processes", ["bar:987", "bar:986"])
       conn.del("process_cleanup")
     end
 
@@ -35,7 +35,7 @@ describe Sidekiq::BasicFetch do
   it "retrieves with strict setting" do
     fetch = fetcher(queues: ["basic", "bar", "bar"], strict: true)
     cmd = fetch.queues_cmd
-    assert_equal cmd, ["queue:basic", "queue:bar", Sidekiq::BasicFetch::TIMEOUT]
+    assert_equal cmd, ["queue:basic", "queue:bar", {timeout: Sidekiq::BasicFetch::TIMEOUT}]
   end
 
   it "bulk requeues" do
@@ -129,7 +129,7 @@ describe Sidekiq::Scheduled do
   with_sidekiq_option(:average_scheduled_poll_interval, 10) do
     3.times do |i|
       Sidekiq.redis do |conn|
-        conn.sadd("processes", "process-#{i}")
+        conn.sadd("processes", ["process-#{i}"])
         conn.hset("process-#{i}", "info", "")
       end
     end
@@ -62,7 +62,7 @@ describe Sidekiq::Web do
   it "can display workers" do
     Sidekiq.redis do |conn|
       conn.incr("busy")
-      conn.sadd("processes", "foo:1234")
+      conn.sadd("processes", ["foo:1234"])
       conn.hmset("foo:1234", "info", Sidekiq.dump_json("hostname" => "foo", "started_at" => Time.now.to_f, "queues" => [], "concurrency" => 10), "at", Time.now.to_f, "busy", 4)
       identity = "foo:1234:work"
       hash = {queue: "critical", payload: {"class" => WebWorker.name, "args" => [1, "abc"]}, run_at: Time.now.to_i}
@@ -152,7 +152,7 @@ describe Sidekiq::Web do
   it "can delete a queue" do
     Sidekiq.redis do |conn|
       conn.rpush("queue:foo", "{\"args\":[],\"enqueued_at\":1567894960}")
-      conn.sadd("queues", "foo")
+      conn.sadd("queues", ["foo"])
     end
 
     get "/queues/foo"
@@ -442,7 +442,7 @@ describe Sidekiq::Web do
     # on /workers page
     Sidekiq.redis do |conn|
       pro = "foo:1234"
-      conn.sadd("processes", pro)
+      conn.sadd("processes", [pro])
       conn.hmset(pro, "info", Sidekiq.dump_json("started_at" => Time.now.to_f, "labels" => ["frumduz"], "queues" => [], "concurrency" => 10), "busy", 1, "beat", Time.now.to_f)
       identity = "#{pro}:work"
       hash = {queue: "critical", payload: {"class" => "FailWorker", "args" => ["<a>hello</a>"]}, run_at: Time.now.to_i}
@@ -517,7 +517,7 @@ describe Sidekiq::Web do
     Sidekiq.redis do |conn|
       conn.set("stat:processed", 5)
       conn.set("stat:failed", 2)
-      conn.sadd("queues", "default")
+      conn.sadd("queues", ["default"])
     end
     2.times { add_retry }
     3.times { add_scheduled }
@@ -570,8 +570,7 @@ describe Sidekiq::Web do
     Sidekiq.redis do |conn|
       conn.set("stat:processed", 5)
       conn.set("stat:failed", 2)
-      conn.sadd("queues", "default")
-      conn.sadd("queues", "queue2")
+      conn.sadd("queues", ["default", "queue2"])
     end
     2.times { add_retry }
     3.times { add_scheduled }
@@ -714,7 +713,7 @@ describe Sidekiq::Web do
     msg = "{\"queue\":\"default\",\"payload\":{\"retry\":true,\"queue\":\"default\",\"timeout\":20,\"backtrace\":5,\"class\":\"HardWorker\",\"args\":[\"bob\",10,5],\"jid\":\"2b5ad2b016f5e063a1c62872\"},\"run_at\":1361208995}"
     Sidekiq.redis do |conn|
       conn.multi do |transaction|
-        transaction.sadd("processes", key)
+        transaction.sadd("processes", [key])
         transaction.hmset(key, "info", Sidekiq.dump_json("hostname" => "foo", "started_at" => Time.now.to_f, "queues" => []), "at", Time.now.to_f, "busy", 4)
         transaction.hmset("#{key}:work", Time.now.to_f, msg)
       end
@@ -136,7 +136,7 @@ describe "Web helpers" do
     key = "#{hostname}:123"
 
     Sidekiq.redis do |conn|
-      conn.sadd("processes", key)
+      conn.sadd("processes", [key])
       conn.hmset(key, "info", Sidekiq.dump_json(pdata), "busy", 0, "beat", Time.now.to_f)
     end
   end
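When running a suite like this against redis-rb ~> 4.8, it can help to surface any remaining deprecated calls rather than let them scroll by as warnings. The sketch below relies on class-level deprecation switches that redis-rb documents around 4.8; treat the setter names as an assumption to verify against the installed gem version:

require "redis"

# Assumption: these class-level switches exist in redis-rb ~> 4.8.
Redis.raise_deprecations = true      # turn deprecation warnings into exceptions
# Redis.silence_deprecations = true  # or suppress the warnings entirely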