Merge remote-tracking branch 'origin/master' into list-multiple-clusters

Matija Čupić 2017-12-05 19:56:50 +01:00
commit 2d9670b525
No known key found for this signature in database
GPG Key ID: 4BAF84FFACD2E5DE
188 changed files with 1570 additions and 694 deletions


@ -586,6 +586,7 @@ codequality:
paths: [codeclimate.json]
qa:internal:
<<: *except-docs
stage: test
variables:
SETUP_DB: "false"


@ -298,18 +298,21 @@ import ProjectVariables from './project_variables';
break;
case 'projects:snippets:show':
initNotes();
new ZenMode();
break;
case 'projects:snippets:new':
case 'projects:snippets:edit':
case 'projects:snippets:create':
case 'projects:snippets:update':
new GLForm($('.snippet-form'), true);
new ZenMode();
break;
case 'snippets:new':
case 'snippets:edit':
case 'snippets:create':
case 'snippets:update':
new GLForm($('.snippet-form'), false);
new ZenMode();
break;
case 'projects:releases:edit':
new ZenMode();
@ -546,6 +549,7 @@ import ProjectVariables from './project_variables';
new LineHighlighter();
new BlobViewer();
initNotes();
new ZenMode();
break;
case 'import:fogbugz:new_user_map':
new UsersSelect();


@ -4,8 +4,8 @@ class Admin::AppearancesController < Admin::ApplicationController
def show
end
def preview
render 'preview', layout: 'devise'
def preview_sign_in
render 'preview_sign_in', layout: 'devise'
end
def create
@ -52,7 +52,7 @@ class Admin::AppearancesController < Admin::ApplicationController
def appearance_params
params.require(:appearance).permit(
:title, :description, :logo, :logo_cache, :header_logo, :header_logo_cache,
:updated_by
:new_project_guidelines, :updated_by
)
end
end


@ -1,30 +1,26 @@
module AppearancesHelper
def brand_title
if brand_item && brand_item.title
brand_item.title
else
'GitLab Community Edition'
end
brand_item&.title.presence || 'GitLab Community Edition'
end
def brand_image
if brand_item.logo?
image_tag brand_item.logo
else
nil
end
image_tag(brand_item.logo) if brand_item&.logo?
end
def brand_text
markdown_field(brand_item, :description)
end
def brand_new_project_guidelines
markdown_field(brand_item, :new_project_guidelines)
end
def brand_item
@appearance ||= Appearance.current
end
def brand_header_logo
if brand_item && brand_item.header_logo?
if brand_item&.header_logo?
image_tag brand_item.header_logo
else
render 'shared/logo.svg'
@ -33,7 +29,7 @@ module AppearancesHelper
# Skip the 'GitLab' type logo when custom brand logo is set
def brand_header_logo_type
unless brand_item && brand_item.header_logo?
unless brand_item&.header_logo?
render 'shared/logo_type.svg'
end
end
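
A side note on the new brand_item&.title.presence || 'GitLab Community Edition' line above: the safe navigation operator only guards the .title call, and .presence is then invoked on whatever that returns. This still works when brand_item is nil because ActiveSupport defines Object#presence, so nil.presence is simply nil. A minimal sketch, assuming only ActiveSupport's core extensions (brand_item here is a stand-in local, not the real helper):

require 'active_support/core_ext/object/blank'

brand_item = nil                                        # stand-in for a missing Appearance record
puts(brand_item&.title.presence || 'GitLab Community Edition')
# => "GitLab Community Edition"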


@ -2,9 +2,8 @@ class Appearance < ActiveRecord::Base
include CacheMarkdownField
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
validates :title, presence: true
validates :description, presence: true
validates :logo, file_size: { maximum: 1.megabyte }
validates :header_logo, file_size: { maximum: 1.megabyte }


@ -1,5 +1,6 @@
module Ci
class Build < CommitStatus
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include Presentable
@ -10,9 +11,14 @@ module Ci
belongs_to :erased_by, class_name: 'User'
has_many :deployments, as: :deployable
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
@persisted_environment ||= Environment.find_by(
@ -31,15 +37,37 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts, ->() do
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id'))
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
scope :matches_tag_ids, -> (tag_ids) do
matcher = ::ActsAsTaggableOn::Tagging
.where(taggable_type: CommitStatus)
.where(context: 'tags')
.where('taggable_id = ci_builds.id')
.where.not(tag_id: tag_ids).select('1')
where("NOT EXISTS (?)", matcher)
end
scope :with_any_tags, -> do
matcher = ::ActsAsTaggableOn::Tagging
.where(taggable_type: CommitStatus)
.where(context: 'tags')
.where('taggable_id = ci_builds.id').select('1')
where("EXISTS (?)", matcher)
end
mount_uploader :legacy_artifacts_file, LegacyArtifactUploader, mount_on: :artifacts_file
mount_uploader :legacy_artifacts_metadata, LegacyArtifactUploader, mount_on: :artifacts_metadata
acts_as_taggable
@ -326,14 +354,6 @@ module Ci
project.running_or_pending_build_count(force: true)
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
artifacts_metadata.path,
@ -386,6 +406,7 @@ module Ci
def keep_artifacts!
self.update(artifacts_expire_at: nil)
self.job_artifacts.update_all(expire_at: nil)
end
def coverage_regex
@ -473,11 +494,7 @@ module Ci
private
def update_artifacts_size
self.artifacts_size = if artifacts_file.exists?
artifacts_file.size
else
nil
end
self.artifacts_size = legacy_artifacts_file&.size
end
def erase_trace!


@ -0,0 +1,36 @@
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
before_save :set_size, if: :file_changed?
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
def expire_in
expire_at - Time.now if expire_at
end
def expire_in=(value)
self.expire_at =
if value
ChronicDuration.parse(value)&.seconds&.from_now
end
end
end
end
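
The expire_in reader and writer convert between an absolute expire_at timestamp and a human-readable duration. A small sketch of that conversion, assuming the chronic_duration gem and ActiveSupport are loaded as they are in the application; the input value is hypothetical:

require 'active_support/all'
require 'chronic_duration'

value = '7 days'                                        # hypothetical user input
expire_at = ChronicDuration.parse(value)&.seconds&.from_now
puts expire_at                                          # => a Time roughly one week from now
puts expire_at - Time.now if expire_at                  # => remaining lifetime in seconds, as expire_in returns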


@ -112,7 +112,7 @@ module Ci
def can_pick?(build)
return false if self.ref_protected? && !build.protected?
assignable_for?(build.project) && accepting_tags?(build)
assignable_for?(build.project_id) && accepting_tags?(build)
end
def only_for?(project)
@ -171,8 +171,8 @@ module Ci
end
end
def assignable_for?(project)
is_shared? || projects.exists?(id: project.id)
def assignable_for?(project_id)
is_shared? || projects.exists?(id: project_id)
end
def accepting_tags?(build)


@ -0,0 +1,45 @@
# Adapter class to unify the interface between mounted uploaders and the
# Ci::Artifact model
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
def artifacts_file
job_artifacts_archive&.file || legacy_artifacts_file
end
def artifacts_metadata
job_artifacts_metadata&.file || legacy_artifacts_metadata
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_artifacts_archive&.file_changed? || attribute_changed?(:artifacts_file)
end
def remove_artifacts_file!
if job_artifacts_archive
job_artifacts_archive.destroy
else
remove_legacy_artifacts_file!
end
end
def remove_artifacts_metadata!
if job_artifacts_metadata
job_artifacts_metadata.destroy
else
remove_legacy_artifacts_metadata!
end
end
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_artifacts_archive&.size.to_i + job_artifacts_metadata&.size.to_i
end
end
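
To illustrate why the adapter is prepended rather than included: prepending places the module ahead of the class in the ancestor chain, so its readers win over the legacy accessors while callers keep the old method names. A minimal, self-contained sketch with hypothetical classes (not the real models):

module Migratable
  def artifacts_file
    new_artifact || super                 # prefer the new storage, fall back to the legacy reader
  end
end

class Build
  prepend Migratable
  attr_accessor :new_artifact, :legacy_artifact

  def artifacts_file
    legacy_artifact
  end
end

build = Build.new
build.legacy_artifact = 'old.zip'
puts build.artifacts_file                 # => "old.zip"
build.new_artifact = 'new.zip'
puts build.artifacts_file                 # => "new.zip"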


@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
class Group < Namespace
include Gitlab::ConfigHelper
include AfterCommitQueue
include AccessRequestable
include Avatarable
include Referable


@ -2,6 +2,7 @@ require 'digest/md5'
class Key < ActiveRecord::Base
include Gitlab::CurrentSettings
include AfterCommitQueue
include Sortable
belongs_to :user


@ -1,4 +1,5 @@
class Member < ActiveRecord::Base
include AfterCommitQueue
include Sortable
include Importable
include Expirable


@ -35,7 +35,9 @@ class ProjectStatistics < ActiveRecord::Base
end
def update_build_artifacts_size
self.build_artifacts_size = project.builds.sum(:artifacts_size)
self.build_artifacts_size =
project.builds.sum(:artifacts_size) +
Ci::JobArtifact.artifacts_size_for(self)
end
def update_storage_size


@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
def async_execute(data)
return unless supported_events.include?(data[:object_kind])
Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
ProjectServiceWorker.perform_async(id, data)
end
def issue_tracker?


@ -7,6 +7,7 @@ class User < ActiveRecord::Base
include Gitlab::ConfigHelper
include Gitlab::CurrentSettings
include Gitlab::SQL::Pattern
include AfterCommitQueue
include Avatarable
include Referable
include Sortable
@ -903,6 +904,7 @@ class User < ActiveRecord::Base
def post_destroy_hook
log_info("User \"#{name}\" (#{email}) was removed")
system_hook_service.execute_hooks_for(self, :destroy)
end


@ -22,6 +22,16 @@ module Ci
valid = true
if Feature.enabled?('ci_job_request_with_tags_matcher')
# pick builds that do not have tags other than the runner's ones
builds = builds.matches_tag_ids(runner.tags.ids)
# pick builds that have at least one tag
unless runner.run_untagged?
builds = builds.with_any_tags
end
end
builds.find do |build|
next unless runner.can_pick?(build)
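
In plain terms, the two scopes restrict the candidate builds to those whose tag set is a subset of the runner's tags, and, for runners that do not run untagged jobs, to builds carrying at least one tag. A plain-Ruby sketch of that rule with hypothetical data, leaving out the SQL:

def pickable?(build_tags, runner_tags, run_untagged:)
  return false unless (build_tags - runner_tags).empty?   # matches_tag_ids: no tag outside the runner's set
  return false if build_tags.empty? && !run_untagged       # with_any_tags: tagged-only runners skip untagged builds
  true
end

puts pickable?(%w[docker], %w[docker ruby], run_untagged: false)   # => true
puts pickable?(%w[docker osx], %w[docker], run_untagged: true)     # => false
puts pickable?([], %w[docker], run_untagged: false)                # => false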


@ -18,7 +18,7 @@ module Projects
@status.enqueue!
@status.run!
raise 'missing pages artifacts' unless build.artifacts_file?
raise 'missing pages artifacts' unless build.artifacts?
raise 'pages are outdated' unless latest?
# Create temporary directory in which we will extract the artifacts


@ -1,6 +1,10 @@
class SystemHooksService
def execute_hooks_for(model, event)
execute_hooks(build_event_data(model, event))
data = build_event_data(model, event)
model.run_after_commit_or_now do
SystemHooksService.new.execute_hooks(data)
end
end
def execute_hooks(data, hooks_scope = :all)


@ -63,7 +63,7 @@ class WebHookService
end
def async_execute
Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
WebHookWorker.perform_async(hook.id, data, hook_name)
end
private


@ -1,39 +0,0 @@
class ArtifactUploader < GitlabUploader
storage :file
attr_reader :job, :field
def self.local_artifacts_store
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_artifacts_store, 'tmp/uploads/')
end
def initialize(job, field)
@job, @field = job, field
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_artifacts_store, 'tmp/cache')
end
def work_dir
File.join(self.class.local_artifacts_store, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_artifacts_store, default_path)
end
def default_path
File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
end
end


@ -0,0 +1,46 @@
class JobArtifactUploader < GitlabUploader
storage :file
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def size
return super if model.size.nil?
model.size
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, model.job_id.to_s, model.id.to_s)
end
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(model.project_id.to_s)
end
end
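
The new default_path shards artifacts on disk by a SHA-256 of the project id, then by creation date, job id and artifact id. A standalone sketch with made-up ids showing the resulting layout:

require 'digest'

project_id, job_id, artifact_id = 409, 1821, 7            # hypothetical ids
disk_hash = Digest::SHA2.hexdigest(project_id.to_s)
creation_date = Time.now.utc.strftime('%Y_%m_%d')

puts File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
               creation_date, job_id.to_s, artifact_id.to_s)
# => "<first two hex chars>/<next two>/<full hash>/2017_12_05/1821/7"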


@ -0,0 +1,33 @@
class LegacyArtifactUploader < GitlabUploader
storage :file
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
def store_dir
default_local_path
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
def work_dir
File.join(self.class.local_store_path, 'tmp/work')
end
private
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end


@ -1,6 +1,23 @@
= form_for @appearance, url: admin_appearances_path, html: { class: 'form-horizontal'} do |f|
= form_errors(@appearance)
%fieldset.app_logo
%legend
Navigation bar:
.form-group
= f.label :header_logo, 'Header logo', class: 'control-label'
.col-sm-10
- if @appearance.header_logo?
= image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
- if @appearance.persisted?
%br
= link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
%hr
= f.hidden_field :header_logo_cache
= f.file_field :header_logo, class: ""
.hint
Maximum file size is 1MB. Pages are optimized for a 28px tall header logo
%fieldset.sign-in
%legend
Sign in/Sign up pages:
@ -28,27 +45,22 @@
.hint
Maximum file size is 1MB. Pages are optimized for a 640x360 px logo.
%fieldset.app_logo
%fieldset
%legend
Navigation bar:
New project pages:
.form-group
= f.label :header_logo, 'Header logo', class: 'control-label'
= f.label :new_project_guidelines, class: 'control-label'
.col-sm-10
- if @appearance.header_logo?
= image_tag @appearance.header_logo_url, class: 'appearance-light-logo-preview'
- if @appearance.persisted?
%br
= link_to 'Remove header logo', header_logos_admin_appearances_path, data: { confirm: "Header logo will be removed. Are you sure?"}, method: :delete, class: "btn btn-remove btn-sm remove-logo"
%hr
= f.hidden_field :header_logo_cache
= f.file_field :header_logo, class: ""
= f.text_area :new_project_guidelines, class: "form-control", rows: 10
.hint
Maximum file size is 1MB. Pages are optimized for a 28px tall header logo
Guidelines parsed with #{link_to "GitLab Flavored Markdown", help_page_path('user/markdown'), target: '_blank'}.
.form-actions
= f.submit 'Save', class: 'btn btn-save append-right-10'
- if @appearance.persisted?
= link_to 'Preview last save', preview_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
Preview last save:
= link_to 'Sign-in page', preview_sign_in_admin_appearances_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
= link_to 'New project page', new_project_path, class: 'btn', target: '_blank', rel: 'noopener noreferrer'
- if @appearance.updated_at
%span.pull-right


@ -15,8 +15,8 @@
.col-sm-7.brand-holder.pull-left
%h1
= brand_title
- if brand_item
= brand_image
- if brand_item&.description?
= brand_text
- else
%h3 Open source software to collaborate on code


@ -25,7 +25,7 @@
= markdown_toolbar_button({ icon: "list-bulleted", data: { "md-tag" => "* ", "md-prepend" => true }, title: "Add a bullet list" })
= markdown_toolbar_button({ icon: "list-numbered", data: { "md-tag" => "1. ", "md-prepend" => true }, title: "Add a numbered list" })
= markdown_toolbar_button({ icon: "task-done", data: { "md-tag" => "* [ ] ", "md-prepend" => true }, title: "Add a task list" })
%button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, aria: { label: "Go full screen" }, title: "Go full screen", data: { container: "body" } }
%button.toolbar-btn.toolbar-fullscreen-btn.js-zen-enter.has-tooltip{ type: "button", tabindex: -1, "aria-label": "Go full screen", title: "Go full screen", data: { container: "body" } }
= sprite_icon("screen-full")
.md-write-holder


@ -18,6 +18,7 @@
A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), #{link_to 'among other things', help_page_path("user/project/index.md", anchor: "projects-features"), target: '_blank'}.
%p
All features are enabled when you create a project, but you can disable the ones you don't need in the project settings.
= brand_new_project_guidelines
.col-lg-9.js-toggle-container
%ul.nav-links.gitlab-tabs{ role: 'tablist' }
%li.active{ role: 'presentation' }


@ -1,5 +1,5 @@
class AdminEmailWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,6 +1,5 @@
class AuthorizedProjectsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Schedules multiple jobs and waits for them to be completed.
def self.bulk_perform_and_wait(args_list)
@ -17,11 +16,6 @@ class AuthorizedProjectsWorker
waiter.wait
end
# Schedules multiple jobs to run in sidekiq without waiting for completion
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
# Performs multiple jobs directly. Failed jobs will be put into sidekiq so
# they can benefit from retries
def self.bulk_perform_inline(args_list)


@ -1,34 +1,5 @@
class BackgroundMigrationWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
# Enqueues a number of jobs in bulk.
#
# The `jobs` argument should be an Array of Arrays, each sub-array must be in
# the form:
#
# [migration-class, [arg1, arg2, ...]]
def self.perform_bulk(jobs)
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs)
end
# Schedules multiple jobs in bulk, with a delay.
#
def self.perform_bulk_in(delay, jobs)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self,
'queue' => sidekiq_options['queue'],
'args' => jobs,
'at' => schedule)
end
include ApplicationWorker
# Performs the background migration.
#
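
The removed perform_bulk and perform_bulk_in class methods are superseded by the generic bulk_perform_async and bulk_perform_in that the ApplicationWorker concern (further down in this diff) now provides. A hedged usage sketch, with a hypothetical migration class name and assuming ActiveSupport's 5.minutes helper as in the application:

jobs = [
  ['PopulateFooColumn', [1, 100]],      # [migration-class, [arg1, arg2, ...]]
  ['PopulateFooColumn', [101, 200]]
]
BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)   # raises ArgumentError unless the delay is in the future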


@ -1,5 +1,5 @@
class BuildCoverageWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)


@ -1,5 +1,5 @@
class BuildFinishedWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,5 +1,5 @@
class BuildHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks


@ -1,5 +1,5 @@
class BuildQueueWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,5 +1,5 @@
class BuildSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,5 +1,5 @@
class BuildTraceSectionsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(build_id)


@ -1,5 +1,5 @@
class ClusterInstallAppWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications


@ -1,5 +1,5 @@
class ClusterProvisionWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
def perform(cluster_id)


@ -1,5 +1,5 @@
class ClusterWaitForAppInstallationWorker
include Sidekiq::Worker
include ApplicationWorker
include ClusterQueue
include ClusterApplications


@ -0,0 +1,40 @@
Sidekiq::Worker.extend ActiveSupport::Concern
module ApplicationWorker
extend ActiveSupport::Concern
include Sidekiq::Worker
included do
sidekiq_options queue: base_queue_name
end
module ClassMethods
def base_queue_name
name
.sub(/\AGitlab::/, '')
.sub(/Worker\z/, '')
.underscore
.tr('/', '_')
end
def queue
get_sidekiq_options['queue'].to_s
end
def bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
end
def bulk_perform_in(delay, args_list)
now = Time.now.to_i
schedule = now + delay.to_i
if schedule <= now
raise ArgumentError, 'The schedule time must be in the future!'
end
Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
end
end
end
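
The queue name is now derived from the worker's class name: the Gitlab:: prefix and the Worker suffix are stripped, the rest is underscored, and namespace separators are flattened to underscores. A quick standalone check of that derivation (assuming ActiveSupport for String#underscore; the helper name is ours, not the concern's):

require 'active_support/core_ext/string/inflections'

def derive_queue(name)
  name.sub(/\AGitlab::/, '').sub(/Worker\z/, '').underscore.tr('/', '_')
end

puts derive_queue('PipelineProcessWorker')                            # => pipeline_process
puts derive_queue('Gitlab::GithubImport::Stage::FinishImportWorker')  # => github_import_stage_finish_import

Workers that need a different queue still override it with sidekiq_options, as AdvanceStageWorker and PagesWorker do later in this diff.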


@ -1,9 +0,0 @@
# Concern that sets the queue of a Sidekiq worker based on the worker's class
# name/namespace.
module DedicatedSidekiqQueue
extend ActiveSupport::Concern
included do
sidekiq_options queue: name.sub(/Worker\z/, '').underscore.tr('/', '_')
end
end


@ -8,7 +8,7 @@ module Gitlab
extend ActiveSupport::Concern
included do
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include ReschedulingMethods
include NotifyUponDeath


@ -1,6 +1,5 @@
class CreateGpgSignatureWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(commit_sha, project_id)
project = Project.find_by(id: project_id)


@ -1,5 +1,5 @@
class CreatePipelineWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :creation


@ -1,6 +1,5 @@
class DeleteMergedBranchesWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, user_id)
begin


@ -1,6 +1,5 @@
class DeleteUserWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(current_user_id, delete_user_id, options = {})
delete_user = User.find(delete_user_id)


@ -1,6 +1,5 @@
class EmailReceiverWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(raw)
return unless Gitlab::IncomingEmail.enabled?


@ -1,6 +1,5 @@
class EmailsOnPushWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
attr_reader :email, :skip_premailer


@ -1,5 +1,5 @@
class ExpireBuildArtifactsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform
@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
build_ids = build_ids.map { |build_id| [build_id] }
Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids )
ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
end
end


@ -1,6 +1,5 @@
class ExpireBuildInstanceArtifactsWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(build_id)
build = Ci::Build


@ -1,5 +1,5 @@
class ExpireJobCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache


@ -1,5 +1,5 @@
class ExpirePipelineCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :cache


@ -1,6 +1,5 @@
class GitGarbageCollectWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include Gitlab::CurrentSettings
sidekiq_options retry: false


@ -7,7 +7,7 @@ module Gitlab
# been completed this worker will advance the import process to the next
# stage.
class AdvanceStageWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: 'github_importer_advance_stage', dead: false


@ -3,7 +3,7 @@
module Gitlab
module GithubImport
class RefreshImportJidWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
# The interval to schedule new instances of this job at.


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class FinishImportWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportBaseDataWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportIssuesAndDiffNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportNotesWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportPullRequestsWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -4,7 +4,7 @@ module Gitlab
module GithubImport
module Stage
class ImportRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include GithubImport::Queue
include StageMethods


@ -1,7 +1,6 @@
class GitlabShellWorker
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
def perform(action, *arg)
gitlab_shell.__send__(action, *arg) # rubocop:disable GitlabSecurity/PublicSend


@ -1,7 +1,7 @@
class GitlabUsagePingWorker
LEASE_TIMEOUT = 86400
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,6 +1,5 @@
class GroupDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(group_id, user_id)


@ -1,5 +1,5 @@
class ImportExportProjectCleanupWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,6 +1,5 @@
class InvalidGpgSignatureUpdateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)


@ -2,8 +2,7 @@ require 'json'
require 'socket'
class IrkerWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(project_id, chans, colors, push_data, settings)
project = Project.find(project_id)


@ -1,6 +1,5 @@
class MergeWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(merge_request_id, current_user_id, params)
params = params.with_indifferent_access


@ -5,14 +5,9 @@
# The worker will reject doing anything for projects that *do* have a
# namespace. For those use ProjectDestroyWorker instead.
class NamespacelessProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def self.bulk_perform_async(args_list)
Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
end
def perform(project_id)
begin
project = Project.unscoped.find(project_id)


@ -1,6 +1,5 @@
class NewIssueWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(issue_id, user_id)


@ -1,6 +1,5 @@
class NewMergeRequestWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include NewIssuable
def perform(merge_request_id, user_id)


@ -1,6 +1,5 @@
class NewNoteWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# Keep extra parameter to preserve backwards compatibility with
# old `NewNoteWorker` jobs (can remove later)


@ -1,5 +1,5 @@
class PagesWorker
include Sidekiq::Worker
include ApplicationWorker
sidekiq_options queue: :pages, retry: false


@ -1,5 +1,5 @@
class PipelineHooksWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :hooks


@ -1,5 +1,5 @@
class PipelineMetricsWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id)


@ -1,5 +1,5 @@
class PipelineNotificationWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
def perform(pipeline_id, recipients = nil)


@ -1,5 +1,5 @@
class PipelineProcessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,5 +1,5 @@
class PipelineScheduleWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,5 +1,5 @@
class PipelineSuccessWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,5 +1,5 @@
class PipelineUpdateWorker
include Sidekiq::Worker
include ApplicationWorker
include PipelineQueue
enqueue_in group: :processing


@ -1,6 +1,5 @@
class PostReceive
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(gl_repository, identifier, changes)
project, is_wiki = Gitlab::GlRepository.parse(gl_repository)


@ -5,8 +5,7 @@
# Consider using an extra worker if you need to add any extra (and potentially
# slow) processing of commits.
class ProcessCommitWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
# project_id - The ID of the project this commit belongs to.
# user_id - The ID of the user that pushed the commit.


@ -1,7 +1,6 @@
# Worker for updating any project specific caches.
class ProjectCacheWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 15.minutes.to_i


@ -1,6 +1,5 @@
class ProjectDestroyWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
def perform(project_id, user_id, params)


@ -1,6 +1,5 @@
class ProjectExportWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
sidekiq_options retry: 3


@ -1,6 +1,5 @@
class ProjectMigrateHashedStorageWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 30.seconds.to_i


@ -1,6 +1,5 @@
class ProjectServiceWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
sidekiq_options dead: false


@ -1,7 +1,6 @@
# Worker for updating any project specific caches.
class PropagateServiceTemplateWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
LEASE_TIMEOUT = 4.hours.to_i


@ -1,5 +1,5 @@
class PruneOldEventsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,6 +1,5 @@
class ReactiveCachingWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
def perform(class_name, id, *args)
klass = begin


@ -1,5 +1,5 @@
class RemoveExpiredGroupLinksWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,5 +1,5 @@
class RemoveExpiredMembersWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,5 +1,5 @@
class RemoveOldWebHookLogsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
WEB_HOOK_LOG_LIFETIME = 2.days


@ -1,5 +1,5 @@
class RemoveUnreferencedLfsObjectsWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,5 +1,5 @@
class RepositoryArchiveCacheWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
def perform


@ -1,6 +1,6 @@
module RepositoryCheck
class BatchWorker
include Sidekiq::Worker
include ApplicationWorker
include CronjobQueue
RUN_TIME = 3600


@ -1,6 +1,6 @@
module RepositoryCheck
class ClearWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform


@ -1,6 +1,6 @@
module RepositoryCheck
class SingleRepositoryWorker
include Sidekiq::Worker
include ApplicationWorker
include RepositoryCheckQueue
def perform(project_id)


@ -1,9 +1,8 @@
class RepositoryForkWorker
ForkError = Class.new(StandardError)
include Sidekiq::Worker
include ApplicationWorker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
include ProjectStartImport
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION


@ -1,8 +1,7 @@
class RepositoryImportWorker
ImportError = Class.new(StandardError)
include Sidekiq::Worker
include DedicatedSidekiqQueue
include ApplicationWorker
include ExceptionBacktrace
include ProjectStartImport

Some files were not shown because too many files have changed in this diff.