Merge branch 'dm-application-worker' into 'master'

Add ApplicationWorker and make every worker include it

See merge request gitlab-org/gitlab-ce!15632
Sean McGivern 2017-12-05 13:06:22 +00:00
commit 29be9c1acc
117 changed files with 371 additions and 304 deletions
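The bulk of the diff below follows a single mechanical pattern; a representative before/after sketch for a typical worker (the class name is illustrative, not taken from this diff):

```ruby
# Before: each worker pulled in Sidekiq::Worker plus a queue concern
class ProcessSomethingWorker
  include Sidekiq::Worker
  include DedicatedSidekiqQueue
end

# After: a single include that also derives the queue name
class ProcessSomethingWorker
  include ApplicationWorker
end
```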

View File

@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
class Group < Namespace
include Gitlab::ConfigHelper
include AfterCommitQueue
include AccessRequestable
include Avatarable
include Referable

View File

@ -2,6 +2,7 @@ require 'digest/md5'
class Key < ActiveRecord::Base
include Gitlab::CurrentSettings
include AfterCommitQueue
include Sortable
belongs_to :user

View File

@ -1,4 +1,5 @@
class Member < ActiveRecord::Base
include AfterCommitQueue
include Sortable
include Importable
include Expirable

View File

@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
def async_execute(data)
return unless supported_events.include?(data[:object_kind])
Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
ProjectServiceWorker.perform_async(id, data)
end
def issue_tracker?

View File

@ -7,6 +7,7 @@ class User < ActiveRecord::Base
include Gitlab::ConfigHelper
include Gitlab::CurrentSettings
include Gitlab::SQL::Pattern
include AfterCommitQueue
include Avatarable
include Referable
include Sortable
@ -903,6 +904,7 @@ class User < ActiveRecord::Base
def post_destroy_hook
log_info("User \"#{name}\" (#{email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end

View File

@ -1,6 +1,10 @@
class SystemHooksService
def execute_hooks_for(model, event)
execute_hooks(build_event_data(model, event))
data = build_event_data(model, event)
model.run_after_commit_or_now do
SystemHooksService.new.execute_hooks(data)
end
end
def execute_hooks(data, hooks_scope = :all)

View File

@ -63,7 +63,7 @@ class WebHookService
end
def async_execute
Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
WebHookWorker.perform_async(hook.id, data, hook_name)
end
private

View File

@ -1,5 +1,5 @@
class AdminEmailWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class AuthorizedProjectsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Schedules multiple jobs and waits for them to be completed.
def self.bulk_perform_and_wait(args_list)
@ -17,11 +16,6 @@ class AuthorizedProjectsWorker
waiter.wait
end
# Schedules multiple jobs to run in sidekiq without waiting for completion
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
# Performs multiple jobs directly. Failed jobs will be put into sidekiq so
# they can benefit from retries
def self.bulk_perform_inline(args_list)

View File

@ -1,34 +1,5 @@
class BackgroundMigrationWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
# Enqueues a number of jobs in bulk.
#
# The `jobs` argument should be an Array of Arrays, each sub-array must be in
# the form:
#
# [migration-class, [arg1, arg2, ...]]
def self.perform_bulk(jobs)
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs)
end
# Schedules multiple jobs in bulk, with a delay.
#
def self.perform_bulk_in(delay, jobs)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs,
'at' => schedule)
end
include ApplicationWorker
# Performs the background migration.
#

View File

@ -1,5 +1,5 @@
class BuildCoverageWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)

View File

@ -1,5 +1,5 @@
class BuildFinishedWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,5 +1,5 @@
class BuildHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks

View File

@ -1,5 +1,5 @@
class BuildQueueWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,5 +1,5 @@
class BuildSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,5 +1,5 @@
class BuildTraceSectionsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)

View File

@ -1,5 +1,5 @@
class ClusterInstallAppWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications

View File

@ -1,5 +1,5 @@
class ClusterProvisionWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
def perform(cluster_id)

View File

@ -1,5 +1,5 @@
class ClusterWaitForAppInstallationWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications

View File

@ -0,0 +1,40 @@
Sidekiq::Worker.extend ActiveSupport::Concern

module ApplicationWorker
  extend ActiveSupport::Concern

  include Sidekiq::Worker

  included do
    sidekiq_options queue: base_queue_name
  end

  module ClassMethods
    def base_queue_name
      name
        .sub(/\AGitlab::/, '')
        .sub(/Worker\z/, '')
        .underscore
        .tr('/', '_')
    end

    def queue
      get_sidekiq_options['queue'].to_s
    end

    def bulk_perform_async(args_list)
      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
    end

    def bulk_perform_in(delay, args_list)
      now = Time.now.to_i
      schedule = now + delay.to_i

      if schedule <= now
        raise ArgumentError, 'The schedule time must be in the future!'
      end

      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
    end
  end
end
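A brief usage sketch of the class-level helpers this concern exposes; the worker name and job arguments below are illustrative only:

```ruby
# Queue name derived from the class name, e.g. "project_cache"
ProjectCacheWorker.queue

# Push several jobs in one Redis round trip; each sub-array is one job's arguments
ProjectCacheWorker.bulk_perform_async([[1], [2], [3]])

# Same, but scheduled to run after the given delay; raises ArgumentError
# when the delay is not positive
ProjectCacheWorker.bulk_perform_in(10.minutes, [[4], [5]])
```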

View File

@ -1,9 +0,0 @@
# Concern that sets the queue of a Sidekiq worker based on the worker's class
# name/namespace.
module DedicatedSidekiqQueue
extend ActiveSupport::Concern
included do
sidekiq_options queue: name.sub(/Worker\z/, '').underscore.tr('/', '_')
end
end

View File

@ -8,7 +8,7 @@ module Gitlab
extend ActiveSupport::Concern
included do
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include ReschedulingMethods
include NotifyUponDeath

View File

@ -1,6 +1,5 @@
class CreateGpgSignatureWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(commit_sha, project_id)
project = Project.find_by(id: project_id)

View File

@ -1,5 +1,5 @@
class CreatePipelineWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :creation

View File

@ -1,6 +1,5 @@
class DeleteMergedBranchesWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, user_id)
begin

View File

@ -1,6 +1,5 @@
class DeleteUserWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(current_user_id, delete_user_id, options = {})
delete_user = User.find(delete_user_id)

View File

@ -1,6 +1,5 @@
class EmailReceiverWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(raw)
return unless Gitlab::IncomingEmail.enabled?

View File

@ -1,6 +1,5 @@
class EmailsOnPushWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
attr_reader :email, :skip_premailer

View File

@ -1,5 +1,5 @@
class ExpireBuildArtifactsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
build_ids = build_ids.map { |build_id| [build_id] }
Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids )
ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
end
end

View File

@ -1,6 +1,5 @@
class ExpireBuildInstanceArtifactsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(build_id)
build = Ci::Build

View File

@ -1,5 +1,5 @@
class ExpireJobCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache

View File

@ -1,5 +1,5 @@
class ExpirePipelineCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache

View File

@ -1,6 +1,5 @@
class GitGarbageCollectWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include Gitlab::CurrentSettings
sidekiq_options retry: false

View File

@ -7,7 +7,7 @@ module Gitlab
# been completed this worker will advance the import process to the next
# stage.
class AdvanceStageWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: 'github_importer_advance_stage', dead: false

View File

@ -3,7 +3,7 @@
module Gitlab
module GithubImport
class RefreshImportJidWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
# The interval to schedule new instances of this job at.

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class FinishImportWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportBaseDataWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportIssuesAndDiffNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportPullRequestsWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods

View File

@ -1,7 +1,6 @@
class GitlabShellWorker
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
def perform(action, *arg)
gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend

View File

@ -1,7 +1,7 @@
class GitlabUsagePingWorker
LEASE_TIMEOUT = 86400
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class GroupDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(group_id, user_id)

View File

@ -1,5 +1,5 @@
class ImportExportProjectCleanupWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class InvalidGpgSignatureUpdateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)

View File

@ -2,8 +2,7 @@ require 'json'
require 'socket'
class IrkerWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, chans, colors, push_data, settings)
project = Project.find(project_id)

View File

@ -1,6 +1,5 @@
class MergeWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(merge_request_id, current_user_id, params)
params = params.with_indifferent_access

View File

@ -5,14 +5,9 @@
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
class NamespacelessProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
def perform(project_id)
begin
project = Project.unscoped.find(project_id)

View File

@ -1,6 +1,5 @@
class NewIssueWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(issue_id, user_id)

View File

@ -1,6 +1,5 @@
class NewMergeRequestWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(merge_request_id, user_id)

View File

@ -1,6 +1,5 @@
class NewNoteWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Keep extra parameter to preserve backwards compatibility with
# old `NewNoteWorker` jobs (can remove later)

View File

@ -1,5 +1,5 @@
class PagesWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: :pages, retry: false

View File

@ -1,5 +1,5 @@
class PipelineHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks

View File

@ -1,5 +1,5 @@
class PipelineMetricsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id)

View File

@ -1,5 +1,5 @@
class PipelineNotificationWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id, recipients = nil)

View File

@ -1,5 +1,5 @@
class PipelineProcessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,5 +1,5 @@
class PipelineScheduleWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,5 +1,5 @@
class PipelineSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,5 +1,5 @@
class PipelineUpdateWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,6 +1,5 @@
class PostReceive
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gl_repository, identifier, changes)
project, is_wiki = Gitlab::GlRepository.parse(gl_repository)

View File

@ -5,8 +5,7 @@
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
class ProcessCommitWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# project_id - The ID of the project this commit belongs to.
# user_id - The ID of the user that pushed the commit.

View File

@ -1,7 +1,6 @@
# Worker for updating any project specific caches.
class ProjectCacheWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 15.minutes.to_i

View File

@ -1,6 +1,5 @@
class ProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(project_id, user_id, params)

View File

@ -1,6 +1,5 @@
class ProjectExportWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
sidekiq_options retry: 3

View File

@ -1,6 +1,5 @@
class ProjectMigrateHashedStorageWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 30.seconds.to_i

View File

@ -1,6 +1,5 @@
class ProjectServiceWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
sidekiq_options dead: false

View File

@ -1,7 +1,6 @@
# Worker for updating any project specific caches.
class PropagateServiceTemplateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 4.hours.to_i

View File

@ -1,5 +1,5 @@
class PruneOldEventsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class ReactiveCachingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(class_name, id, *args)
klass = begin

View File

@ -1,5 +1,5 @@
class RemoveExpiredGroupLinksWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,5 +1,5 @@
class RemoveExpiredMembersWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,5 +1,5 @@
class RemoveOldWebHookLogsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
WEB_HOOK_LOG_LIFETIME = 2.days

View File

@ -1,5 +1,5 @@
class RemoveUnreferencedLfsObjectsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,5 +1,5 @@
class RepositoryArchiveCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,6 @@
module RepositoryCheck
class BatchWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
RUN_TIME = 3600

View File

@ -1,6 +1,6 @@
module RepositoryCheck
class ClearWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform

View File

@ -1,6 +1,6 @@
module RepositoryCheck
class SingleRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform(project_id)

View File

@ -1,9 +1,8 @@
class RepositoryForkWorker
ForkError = Class.new(StandardError)
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
include ProjectStartImport
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION

View File

@ -1,8 +1,7 @@
class RepositoryImportWorker
ImportError = Class.new(StandardError)
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
include ProjectStartImport

View File

@ -1,5 +1,5 @@
class RequestsProfilesWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,5 +1,5 @@
class ScheduleUpdateUserActivityWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform(batch_size = 500)

View File

@ -1,5 +1,5 @@
class StageUpdateWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing

View File

@ -1,6 +1,5 @@
class StorageMigratorWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
BATCH_SIZE = 100

View File

@ -1,5 +1,5 @@
class StuckCiJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
EXCLUSIVE_LEASE_KEY = 'stuck_ci_builds_worker_lease'.freeze

View File

@ -1,5 +1,5 @@
class StuckImportJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
IMPORT_JOBS_EXPIRATION = 15.hours.to_i

View File

@ -1,5 +1,5 @@
class StuckMergeJobsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class SystemHookPushWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(push_data, hook_id)
SystemHooksService.new.execute_hooks(push_data, hook_id)

View File

@ -1,5 +1,5 @@
class TrendingProjectsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform

View File

@ -1,6 +1,5 @@
class UpdateMergeRequestsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LOG_TIME_THRESHOLD = 90 # seconds

View File

@ -1,6 +1,5 @@
class UpdateUserActivityWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(pairs)
pairs = cast_data(pairs)

View File

@ -1,6 +1,5 @@
class UploadChecksumWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(upload_id)
upload = Upload.find(upload_id)

View File

@ -1,5 +1,5 @@
class WaitForClusterCreationWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
def perform(cluster_id)

View File

@ -1,6 +1,5 @@
class WebHookWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
sidekiq_options retry: 4, dead: false

View File

@ -13,20 +13,19 @@ module Sidekiq
module ClassMethods
module NoSchedulingFromTransactions
NESTING = ::Rails.env.test? ? 1 : 0
%i(perform_async perform_at perform_in).each do |name|
define_method(name) do |*args|
return super(*args) if Sidekiq::Worker.skip_transaction_check
return super(*args) unless ActiveRecord::Base.connection.open_transactions > NESTING
if !Sidekiq::Worker.skip_transaction_check && AfterCommitQueue.inside_transaction?
raise <<-MSG.strip_heredoc
`#{self}.#{name}` cannot be called inside a transaction as this can lead to
race conditions when the worker runs before the transaction is committed and
tries to access a model that has not been saved yet.
raise <<-MSG.strip_heredoc
`#{self}.#{name}` cannot be called inside a transaction as this can lead to
race conditions when the worker runs before the transaction is committed and
tries to access a model that has not been saved yet.
Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
MSG
end
Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
MSG
super(*args)
end
end
end
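A hedged sketch of the pattern this error message points to, assuming a model that already includes `AfterCommitQueue` (the model and worker names are illustrative):

```ruby
# Instead of calling SomeWorker.perform_async inside the transaction,
# defer it until the surrounding transaction commits.
member.run_after_commit do
  SomeWorker.perform_async(member.id)
end
```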

View File

@ -64,13 +64,13 @@ end
# The Sidekiq client API always adds the queue to the Sidekiq queue
# list, but mail_room and gitlab-shell do not. This is only necessary
# for monitoring.
config = YAML.load_file(Rails.root.join('config', 'sidekiq_queues.yml').to_s)
begin
queues = Gitlab::SidekiqConfig.worker_queues
Sidekiq.redis do |conn|
conn.pipelined do
config[:queues].each do |queue|
conn.sadd('queues', queue[0])
queues.each do |queue|
conn.sadd('queues', queue)
end
end
end

View File

@ -25,14 +25,14 @@ class ScheduleEventMigrations < ActiveRecord::Migration
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear
end
jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
end
BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end
def down

View File

@ -19,7 +19,7 @@ class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
[MIGRATION, [id]]
end
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
end
end

View File

@ -68,10 +68,10 @@ BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, a
```
Usually it's better to enqueue jobs in bulk, for this you can use
`BackgroundMigrationWorker.perform_bulk`:
`BackgroundMigrationWorker.bulk_perform_async`:
```ruby
BackgroundMigrationWorker.perform_bulk(
BackgroundMigrationWorker.bulk_perform_async(
[['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
)
@ -85,13 +85,13 @@ updates. Removals in turn can be handled by simply defining foreign keys with
cascading deletes.
If you would like to schedule jobs in bulk with a delay, you can use
`BackgroundMigrationWorker.perform_bulk_in`:
`BackgroundMigrationWorker.bulk_perform_in`:
```ruby
jobs = [['BackgroundMigrationClassName', [1]],
['BackgroundMigrationClassName', [2]]]
BackgroundMigrationWorker.perform_bulk_in(5.minutes, jobs)
BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)
```
## Cleaning Up
@ -201,7 +201,7 @@ class ScheduleExtractServicesUrl < ActiveRecord::Migration
['ExtractServicesUrl', [id]]
end
BackgroundMigrationWorker.perform_bulk(jobs)
BackgroundMigrationWorker.bulk_perform_async(jobs)
end
end

View File

@ -3,6 +3,12 @@
This document outlines various guidelines that should be followed when adding or
modifying Sidekiq workers.
## ApplicationWorker
All workers should include `ApplicationWorker` instead of `Sidekiq::Worker`.
`ApplicationWorker` adds some convenience methods and automatically sets the
queue based on the worker's name.
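For example, a minimal worker might look like the following; the class name and method body are illustrative:
```ruby
class ProcessSomethingWorker
  include ApplicationWorker

  def perform(something_id)
    # The queue is derived from the class name: `process_something`
  end
end
```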
## Default Queue
Use of the "default" queue is not allowed. Every worker should use a queue that
@ -13,19 +19,10 @@ A list of all available queues can be found in `config/sidekiq_queues.yml`.
## Dedicated Queues
Most workers should use their own queue. To ease this process a worker can
include the `DedicatedSidekiqQueue` concern as follows:
```ruby
class ProcessSomethingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
end
```
This will set the queue name based on the class' name, minus the `Worker`
suffix. In the above example this would lead to the queue being
`process_something`.
Most workers should use their own queue, which is automatically set based on the
worker class name. For a worker named `ProcessSomethingWorker`, the queue name
would be `process_something`. If you're not sure what a worker's queue name is,
you can find it using `SomeWorker.queue`.
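For instance, checking this in a Rails console would look roughly like (assuming a
`ProcessSomethingWorker` as above exists):
```ruby
ProcessSomethingWorker.queue # => "process_something"
```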
In some cases multiple workers do use the same queue. For example, the various
workers for updating CI pipelines all use the `pipeline` queue. Adding workers
@ -39,7 +36,7 @@ tests should be placed in `spec/workers`.
## Removing or renaming queues
Try to avoid renaming or removing queues in minor and patch releases.
During an online update, the instance can have pending jobs, and removing the queue
can leave those jobs stuck forever. If you can't write a migration for those
Sidekiq jobs, please consider renaming or removing the queue only in a major release.

Some files were not shown because too many files have changed in this diff.