# frozen_string_literal: true

require "sidekiq"
require "sidekiq/component"

module Sidekiq # :nodoc:
  class BasicFetch
    include Sidekiq::Component
    # We want the fetch operation to timeout every few seconds so the thread
    # can check if the process is shutting down.
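    # The timeout is honored by appending TIMEOUT as the final element of the
    # key list built in queues_cmd, so the blocking pop effectively runs as
    # something like conn.brpop("queue:critical", "queue:default", TIMEOUT)
    # (illustrative queue names) and wakes up after ~2 seconds even when every
    # queue is empty.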
    TIMEOUT = 2

    UnitOfWork = Struct.new(:queue, :job, :config) {
      def acknowledge
        # nothing to do
      end

      def queue_name
        queue.delete_prefix("queue:")
      end

      def requeue
        config.redis do |conn|
          conn.rpush(queue, job)
        end
      end
    }
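
    # Illustrative only: a fetched unit wraps the raw queue key and the JSON
    # job payload, e.g.
    #   UnitOfWork.new("queue:default", '{"class":"SomeJob","args":[]}', config)
    # #queue_name strips the "queue:" prefix ("default") and #requeue RPUSHes
    # the payload back onto the same Redis list.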

    def initialize(cap)
      raise ArgumentError, "missing queue list" unless cap.queues
      @config = cap
      @strictly_ordered_queues = !!@config.strict
      @queues = config.queues.map { |q| "queue:#{q}" }
      if @strictly_ordered_queues
        @queues.uniq!
        @queues << TIMEOUT
      end
    end

    def retrieve_work
      qs = queues_cmd
      # 4825 Sidekiq Pro with all queues paused will return an
      # empty set of queues with a trailing TIMEOUT value.
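      # In that case qs is just [TIMEOUT]: there is nothing to block on, so we
      # pause briefly and let the caller try again.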
      if qs.size <= 1
        sleep(TIMEOUT)
        return nil
      end
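
      # BRPOP returns a [queue key, raw payload] pair, or nil when the trailing
      # TIMEOUT expires with every listed queue empty.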
      queue, job = redis { |conn| conn.brpop(*qs) }
      UnitOfWork.new(queue, job, config) if queue
    end

    def bulk_requeue(inprogress, _)
      return if inprogress.empty?

      logger.debug { "Re-queueing terminated jobs" }
      jobs_to_requeue = {}
      inprogress.each do |unit_of_work|
        jobs_to_requeue[unit_of_work.queue] ||= []
        jobs_to_requeue[unit_of_work.queue] << unit_of_work.job
      end
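
      # At this point jobs_to_requeue is keyed by raw queue name, e.g.
      # (illustrative values)
      #   {"queue:default" => [job1, job2], "queue:low" => [job3]}
      # so each queue gets a single RPUSH and the whole batch goes out in one
      # pipelined round trip.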
      redis do |conn|
        conn.pipelined do |pipeline|
          jobs_to_requeue.each do |queue, jobs|
            pipeline.rpush(queue, jobs)
          end
        end
      end
      logger.info("Pushed #{inprogress.size} jobs back to Redis")
    rescue => ex
      logger.warn("Failed to requeue #{inprogress.size} jobs: #{ex.message}")
    end

    # Creating the Redis#brpop command takes into account any
    # configured queue weights. By default Redis#brpop returns
    # data from the first queue that has pending elements. We
    # recreate the queue command each time we invoke Redis#brpop
    # to honor weights and avoid queue starvation.
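    # For illustration (assuming weights are expanded into repeated queue
    # names before they reach this class): "default" weighted 2 plus "low"
    # weighted 1 arrives here as
    #   @queues == ["queue:default", "queue:default", "queue:low"]
    # A shuffle followed by uniq! then yields ["queue:default", "queue:low"]
    # roughly two thirds of the time and ["queue:low", "queue:default"]
    # otherwise, biasing BRPOP toward the heavier queue without ever starving
    # the lighter one. The trailing TIMEOUT is the BRPOP timeout argument, not
    # a queue key.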
    def queues_cmd
      if @strictly_ordered_queues
        @queues
      else
        permute = @queues.shuffle
        permute.uniq!
        permute << TIMEOUT
        permute
      end
    end
  end
end