1
0
Fork 0
mirror of https://github.com/mperham/sidekiq.git synced 2022-11-09 13:52:34 -05:00

Redis-rb 4.8.0 compatibility, fixes #5484

This commit is contained in:
Mike Perham 2022-08-22 13:27:37 -07:00
parent 2f1fd1e6b5
commit 09dacfed8f
10 changed files with 28 additions and 30 deletions

View file

@@ -3,6 +3,7 @@ source "https://rubygems.org"
gemspec
gem "rake"
gem "redis"
gem "redis-namespace"
gem "redis-client"
gem "rails", "~> 6.0"

View file

@@ -316,7 +316,7 @@ module Sidekiq
Sidekiq.redis do |conn|
conn.multi do |transaction|
transaction.unlink(@rname)
transaction.srem("queues", name)
transaction.srem("queues", [name])
end
end
true

View file

@@ -231,7 +231,7 @@ module Sidekiq
entry["enqueued_at"] = now
Sidekiq.dump_json(entry)
}
conn.sadd("queues", queue)
conn.sadd("queues", [queue])
conn.lpush("queue:#{queue}", to_push)
end
end

View file

@@ -33,7 +33,7 @@ module Sidekiq # :nodoc:
@queues = @config[:queues].map { |q| "queue:#{q}" }
if @strictly_ordered_queues
@queues.uniq!
@queues << TIMEOUT
@queues << {timeout: TIMEOUT}
end
end
@@ -83,7 +83,7 @@ module Sidekiq # :nodoc:
else
permute = @queues.shuffle
permute.uniq!
permute << TIMEOUT
permute << {timeout: TIMEOUT}
permute
end
end

View file

@@ -86,7 +86,7 @@ module Sidekiq
# doesn't actually exit, it'll reappear in the Web UI.
redis do |conn|
conn.pipelined do |pipeline|
pipeline.srem("processes", identity)
pipeline.srem("processes", [identity])
pipeline.unlink("#{identity}:work")
end
end
@@ -165,7 +165,7 @@ module Sidekiq
_, exists, _, _, msg = redis { |conn|
conn.multi { |transaction|
transaction.sadd("processes", key)
transaction.sadd("processes", [key])
transaction.exists?(key)
transaction.hmset(key, "info", to_json,
"busy", curstate.size,

View file

@@ -76,8 +76,7 @@ describe "API" do
describe "workers_size" do
it "retrieves the number of busy workers" do
Sidekiq.redis do |c|
c.sadd("processes", "process_1")
c.sadd("processes", "process_2")
c.sadd("processes", ["process_1", "process_2"])
c.hset("process_1", "busy", 1)
c.hset("process_2", "busy", 2)
end
@@ -95,10 +94,10 @@ describe "API" do
it "returns a hash of queue and size in order" do
Sidekiq.redis do |conn|
conn.rpush "queue:foo", "{}"
conn.sadd "queues", "foo"
conn.sadd "queues", ["foo"]
3.times { conn.rpush "queue:bar", "{}" }
conn.sadd "queues", "bar"
conn.sadd "queues", ["bar"]
end
s = Sidekiq::Stats::Queues.new
@@ -113,7 +112,7 @@ describe "API" do
it "handles latency for good jobs" do
Sidekiq.redis do |conn|
conn.rpush "queue:default", "{\"enqueued_at\": #{Time.now.to_f}}"
conn.sadd "queues", "default"
conn.sadd "queues", ["default"]
end
s = Sidekiq::Stats.new
assert s.default_queue_latency > 0
@@ -124,7 +123,7 @@ describe "API" do
it "handles latency for incomplete jobs" do
Sidekiq.redis do |conn|
conn.rpush "queue:default", "{}"
conn.sadd "queues", "default"
conn.sadd "queues", ["default"]
end
s = Sidekiq::Stats.new
assert_equal 0, s.default_queue_latency
@@ -135,10 +134,10 @@ describe "API" do
it "returns total enqueued jobs" do
Sidekiq.redis do |conn|
conn.rpush "queue:foo", "{}"
conn.sadd "queues", "foo"
conn.sadd "queues", ["foo"]
3.times { conn.rpush "queue:bar", "{}" }
conn.sadd "queues", "bar"
conn.sadd "queues", ["bar"]
end
s = Sidekiq::Stats.new
@@ -570,9 +569,9 @@ describe "API" do
time = Time.now.to_f
Sidekiq.redis do |conn|
conn.multi do |transaction|
transaction.sadd("processes", odata["key"])
transaction.sadd("processes", [odata["key"]])
transaction.hmset(odata["key"], "info", Sidekiq.dump_json(odata), "busy", 10, "beat", time)
transaction.sadd("processes", "fake:pid")
transaction.sadd("processes", ["fake:pid"])
end
end
@@ -601,7 +600,7 @@ describe "API" do
key = "#{hn}:#{$$}"
pdata = {"pid" => $$, "hostname" => hn, "started_at" => Time.now.to_i}
Sidekiq.redis do |conn|
conn.sadd("processes", key)
conn.sadd("processes", [key])
conn.hmset(key, "info", Sidekiq.dump_json(pdata), "busy", 0, "beat", Time.now.to_f)
end
@@ -653,7 +652,7 @@ describe "API" do
data = {"pid" => rand(10_000), "hostname" => "app#{rand(1_000)}", "started_at" => Time.now.to_f}
key = "#{data["hostname"]}:#{data["pid"]}"
Sidekiq.redis do |conn|
conn.sadd("processes", key)
conn.sadd("processes", [key])
conn.hmset(key, "info", Sidekiq.dump_json(data), "busy", 0, "beat", Time.now.to_f)
end
@@ -662,8 +661,7 @@ describe "API" do
assert_equal 1, ps.to_a.size
Sidekiq.redis do |conn|
conn.sadd("processes", "bar:987")
conn.sadd("processes", "bar:986")
conn.sadd("processes", ["bar:987", "bar:986"])
conn.del("process_cleanup")
end

View file

@@ -35,7 +35,7 @@ describe Sidekiq::BasicFetch do
it "retrieves with strict setting" do
fetch = fetcher(queues: ["basic", "bar", "bar"], strict: true)
cmd = fetch.queues_cmd
assert_equal cmd, ["queue:basic", "queue:bar", Sidekiq::BasicFetch::TIMEOUT]
assert_equal cmd, ["queue:basic", "queue:bar", {timeout: Sidekiq::BasicFetch::TIMEOUT}]
end
it "bulk requeues" do

View file

@@ -129,7 +129,7 @@ describe Sidekiq::Scheduled do
with_sidekiq_option(:average_scheduled_poll_interval, 10) do
3.times do |i|
Sidekiq.redis do |conn|
conn.sadd("processes", "process-#{i}")
conn.sadd("processes", ["process-#{i}"])
conn.hset("process-#{i}", "info", "")
end
end

View file

@@ -62,7 +62,7 @@ describe Sidekiq::Web do
it "can display workers" do
Sidekiq.redis do |conn|
conn.incr("busy")
conn.sadd("processes", "foo:1234")
conn.sadd("processes", ["foo:1234"])
conn.hmset("foo:1234", "info", Sidekiq.dump_json("hostname" => "foo", "started_at" => Time.now.to_f, "queues" => [], "concurrency" => 10), "at", Time.now.to_f, "busy", 4)
identity = "foo:1234:work"
hash = {queue: "critical", payload: {"class" => WebWorker.name, "args" => [1, "abc"]}, run_at: Time.now.to_i}
@@ -152,7 +152,7 @@ describe Sidekiq::Web do
it "can delete a queue" do
Sidekiq.redis do |conn|
conn.rpush("queue:foo", "{\"args\":[],\"enqueued_at\":1567894960}")
conn.sadd("queues", "foo")
conn.sadd("queues", ["foo"])
end
get "/queues/foo"
@@ -442,7 +442,7 @@ describe Sidekiq::Web do
# on /workers page
Sidekiq.redis do |conn|
pro = "foo:1234"
conn.sadd("processes", pro)
conn.sadd("processes", [pro])
conn.hmset(pro, "info", Sidekiq.dump_json("started_at" => Time.now.to_f, "labels" => ["frumduz"], "queues" => [], "concurrency" => 10), "busy", 1, "beat", Time.now.to_f)
identity = "#{pro}:work"
hash = {queue: "critical", payload: {"class" => "FailWorker", "args" => ["<a>hello</a>"]}, run_at: Time.now.to_i}
@@ -517,7 +517,7 @@ describe Sidekiq::Web do
Sidekiq.redis do |conn|
conn.set("stat:processed", 5)
conn.set("stat:failed", 2)
conn.sadd("queues", "default")
conn.sadd("queues", ["default"])
end
2.times { add_retry }
3.times { add_scheduled }
@@ -570,8 +570,7 @@ describe Sidekiq::Web do
Sidekiq.redis do |conn|
conn.set("stat:processed", 5)
conn.set("stat:failed", 2)
conn.sadd("queues", "default")
conn.sadd("queues", "queue2")
conn.sadd("queues", ["default", "queue2"])
end
2.times { add_retry }
3.times { add_scheduled }
@@ -714,7 +713,7 @@ describe Sidekiq::Web do
msg = "{\"queue\":\"default\",\"payload\":{\"retry\":true,\"queue\":\"default\",\"timeout\":20,\"backtrace\":5,\"class\":\"HardWorker\",\"args\":[\"bob\",10,5],\"jid\":\"2b5ad2b016f5e063a1c62872\"},\"run_at\":1361208995}"
Sidekiq.redis do |conn|
conn.multi do |transaction|
transaction.sadd("processes", key)
transaction.sadd("processes", [key])
transaction.hmset(key, "info", Sidekiq.dump_json("hostname" => "foo", "started_at" => Time.now.to_f, "queues" => []), "at", Time.now.to_f, "busy", 4)
transaction.hmset("#{key}:work", Time.now.to_f, msg)
end

View file

@@ -136,7 +136,7 @@ describe "Web helpers" do
key = "#{hostname}:123"
Sidekiq.redis do |conn|
conn.sadd("processes", key)
conn.sadd("processes", [key])
conn.hmset(key, "info", Sidekiq.dump_json(pdata), "busy", 0, "beat", Time.now.to_f)
end
end