Add latest changes from gitlab-org/gitlab@master
Parent: c7864d3d50
Commit: 234dc40a12
70 changed files with 659 additions and 263 deletions
@@ -717,3 +717,8 @@ Performance/ActiveRecordSubtransactions:
  Exclude:
    - 'spec/**/*.rb'
    - 'ee/spec/**/*.rb'

Performance/ActiveRecordSubtransactionMethods:
  Exclude:
    - 'spec/**/*.rb'
    - 'ee/spec/**/*.rb'
@@ -69,6 +69,7 @@ export default () => {
  const currentAction = $('.js-file-title').data('currentAction');
  const projectId = editBlobForm.data('project-id');
  const isMarkdown = editBlobForm.data('is-markdown');
  const previewMarkdownPath = editBlobForm.data('previewMarkdownPath');
  const commitButton = $('.js-commit-button');
  const cancelLink = $('.btn.btn-cancel');

@@ -80,6 +81,7 @@ export default () => {
    currentAction,
    projectId,
    isMarkdown,
    previewMarkdownPath,
  });
  initPopovers();
  initCodeQualityWalkthroughStep();
@@ -11,7 +11,7 @@ import { BLOB_EDITOR_ERROR, BLOB_PREVIEW_ERROR } from './constants';

export default class EditBlob {
  // The options object has:
  // assetsPath, filePath, currentAction, projectId, isMarkdown
  // assetsPath, filePath, currentAction, projectId, isMarkdown, previewMarkdownPath
  constructor(options) {
    this.options = options;
    this.configureMonacoEditor();

@@ -30,7 +30,10 @@ export default class EditBlob {
    import('~/editor/extensions/source_editor_markdown_ext')
      .then(({ EditorMarkdownExtension: MarkdownExtension } = {}) => {
        this.editor.use(
          new MarkdownExtension({ instance: this.editor, projectPath: this.options.projectPath }),
          new MarkdownExtension({
            instance: this.editor,
            previewMarkdownPath: this.options.previewMarkdownPath,
          }),
        );
        this.hasMarkdownExtension = true;
        addEditorMarkdownListeners(this.editor);
@@ -14,17 +14,9 @@ import {
} from '../constants';
import { SourceEditorExtension } from './source_editor_extension_base';

const getPreview = (text, projectPath = '') => {
  let url;

  if (projectPath) {
    url = `/${projectPath}/preview_markdown`;
  } else {
    const { group, project } = document.body.dataset;
    url = `/${group}/${project}/preview_markdown`;
  }
const getPreview = (text, previewMarkdownPath) => {
  return axios
    .post(url, {
    .post(previewMarkdownPath, {
      text,
    })
    .then(({ data }) => {

@@ -43,10 +35,10 @@ const setupDomElement = ({ injectToEl = null } = {}) => {
};

export class EditorMarkdownExtension extends SourceEditorExtension {
  constructor({ instance, projectPath, ...args } = {}) {
  constructor({ instance, previewMarkdownPath, ...args } = {}) {
    super({ instance, ...args });
    Object.assign(instance, {
      projectPath,
      previewMarkdownPath,
      preview: {
        el: undefined,
        action: undefined,

@@ -112,7 +104,7 @@ export class EditorMarkdownExtension extends SourceEditorExtension {

  fetchPreview() {
    const { el: previewEl } = this.preview;
    getPreview(this.getValue(), this.projectPath)
    getPreview(this.getValue(), this.previewMarkdownPath)
      .then((data) => {
        previewEl.innerHTML = sanitize(data);
        syntaxHighlight(previewEl.querySelectorAll('.js-syntax-highlight'));
@@ -79,6 +79,7 @@ export default {
      'editorTheme',
      'entries',
      'currentProjectId',
      'previewMarkdownPath',
    ]),
    ...mapGetters([
      'getAlert',

@@ -314,14 +315,15 @@ export default {

      if (
        this.fileType === MARKDOWN_FILE_TYPE &&
        this.editor?.getEditorType() === EDITOR_TYPE_CODE
        this.editor?.getEditorType() === EDITOR_TYPE_CODE &&
        this.previewMarkdownPath
      ) {
        import('~/editor/extensions/source_editor_markdown_ext')
          .then(({ EditorMarkdownExtension: MarkdownExtension } = {}) => {
            this.editor.use(
              new MarkdownExtension({
                instance: this.editor,
                projectPath: this.currentProjectId,
                previewMarkdownPath: this.previewMarkdownPath,
              }),
            );
          })
@@ -63,6 +63,7 @@ export function initIde(el, options = {}) {
        editorTheme: window.gon?.user_color_scheme || DEFAULT_THEME,
        codesandboxBundlerUrl: el.dataset.codesandboxBundlerUrl,
        environmentsGuidanceAlertDismissed: !parseBoolean(el.dataset.enableEnvironmentsGuidance),
        previewMarkdownPath: el.dataset.previewMarkdownPath,
      });
    },
    beforeDestroy() {

@@ -32,4 +32,5 @@ export default () => ({
  codesandboxBundlerUrl: null,
  environmentsGuidanceAlertDismissed: false,
  environmentsGuidanceAlertDetected: false,
  previewMarkdownPath: '',
});
@@ -38,7 +38,7 @@ class Clusters::ClustersController < Clusters::BaseController

  def new
    if params[:provider] == 'aws'
      @aws_role = Aws::Role.create_or_find_by!(user: current_user)
      @aws_role = Aws::Role.create_or_find_by!(user: current_user) # rubocop:disable Performance/ActiveRecordSubtransactionMethods
      @instance_types = load_instance_types.to_json

    elsif params[:provider] == 'gcp'
@@ -36,7 +36,7 @@ class Dashboard::ProjectsController < Dashboard::ApplicationController
  # rubocop: disable CodeReuse/ActiveRecord
  def starred
    @projects = load_projects(params.merge(starred: true))
      .includes(:forked_from_project, :topics)
      .includes(:forked_from_project, :topics, :topics_acts_as_taggable)

    @groups = []
@@ -180,7 +180,14 @@ class ProjectsFinder < UnionFinder
  # rubocop: enable CodeReuse/ActiveRecord

  def by_topics(items)
    params[:topic].present? ? items.tagged_with(params[:topic]) : items
    return items unless params[:topic].present?

    topics = params[:topic].instance_of?(String) ? params[:topic].strip.split(/\s*,\s*/) : params[:topic]
    topics.each do |topic|
      items = items.with_topic(topic)
    end

    items
  end

  def by_search(items)
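Aside (not part of the commit): a minimal sketch of how the reworked topic filter is exercised end to end, assuming the finder's usual keyword arguments and made-up topic names; each topic in the comma-separated value narrows the relation through the new `with_topic` scope added to `Project` further down.

    # Illustrative only: 'ruby' and 'ci' are hypothetical topic names.
    finder = ProjectsFinder.new(current_user: user, params: { topic: 'ruby, ci' })
    finder.execute # => only projects tagged with both 'ruby' and 'ci'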
@@ -406,6 +406,10 @@ module Types
      object.topic_list
    end

    def topics
      object.topic_list
    end

    private

    def project
@@ -220,7 +220,8 @@ module BlobHelper
      'assets-prefix' => Gitlab::Application.config.assets.prefix,
      'blob-filename' => @blob && @blob.path,
      'project-id' => project.id,
      'is-markdown' => @blob && @blob.path && Gitlab::MarkupHelper.gitlab_markdown?(@blob.path)
      'is-markdown' => @blob && @blob.path && Gitlab::MarkupHelper.gitlab_markdown?(@blob.path),
      'preview-markdown-path' => preview_markdown_path(project)
    }
  end

@@ -19,7 +19,8 @@ module IdeHelper
      'merge-request' => @merge_request,
      'fork-info' => @fork_info&.to_json,
      'project' => convert_to_project_entity_json(@project),
      'enable-environments-guidance' => enable_environments_guidance?.to_s
      'enable-environments-guidance' => enable_environments_guidance?.to_s,
      'preview-markdown-path' => @project && preview_markdown_path(@project)
    }
  end

@@ -66,7 +66,7 @@ class ApplicationRecord < ActiveRecord::Base
  def self.safe_find_or_create_by(*args, &block)
    return optimized_safe_find_or_create_by(*args, &block) if Feature.enabled?(:optimize_safe_find_or_create_by, default_enabled: :yaml)

    safe_ensure_unique(retries: 1) do
    safe_ensure_unique(retries: 1) do # rubocop:disable Performance/ActiveRecordSubtransactionMethods
      find_or_create_by(*args, &block)
    end
  end

@@ -104,19 +104,6 @@ class ApplicationRecord < ActiveRecord::Base
    enum(enum_mod.key => values)
  end

  def self.transaction(**options, &block)
    if options[:requires_new] && track_subtransactions?
      ::Gitlab::Database::Metrics.subtransactions_increment(self.name)
    end

    super(**options, &block)
  end

  def self.track_subtransactions?
    ::Feature.enabled?(:active_record_subtransactions_counter, type: :ops, default_enabled: :yaml) &&
      connection.transaction_open?
  end

  def self.cached_column_list
    self.column_names.map { |column_name| self.arel_table[column_name] }
  end
@@ -171,7 +171,7 @@ module CacheMarkdownField

    # One retry is enough as next time `model_user_mention` should return the existing mention record,
    # that threw the `ActiveRecord::RecordNotUnique` exception in first place.
    self.class.safe_ensure_unique(retries: 1) do
    self.class.safe_ensure_unique(retries: 1) do # rubocop:disable Performance/ActiveRecordSubtransactionMethods
      user_mention = model_user_mention

      # this may happen due to notes polymorphism, so noteable_id may point to a record

@@ -89,7 +89,7 @@ class ExternalPullRequest < ApplicationRecord
  end

  def self.safe_find_or_initialize_and_update(find:, update:)
    safe_ensure_unique(retries: 1) do
    safe_ensure_unique(retries: 1) do # rubocop:disable Performance/ActiveRecordSubtransactionMethods
      model = find_or_initialize_by(find)

      if model.update(update)
@@ -1835,13 +1835,6 @@ class MergeRequest < ApplicationRecord
    Ability.allowed?(user, :push_code, source_project)
  end

  def squash_in_progress?
    # The source project can be deleted
    return false unless source_project

    source_project.repository.squash_in_progress?(id)
  end

  def find_actual_head_pipeline
    all_pipelines.for_sha_or_source_sha(diff_head_sha).first
  end
@@ -103,6 +103,8 @@ class Project < ApplicationRecord

  after_save :create_import_state, if: ->(project) { project.import? && project.import_state.nil? }

  after_save :save_topics

  after_create -> { create_or_load_association(:project_feature) }

  after_create -> { create_or_load_association(:ci_cd_settings) }

@@ -127,12 +129,31 @@ class Project < ApplicationRecord
  after_initialize :use_hashed_storage
  after_create :check_repository_absence!

  # Required during the `ActsAsTaggableOn::Tag -> Topic` migration
  # TODO: remove 'acts_as_ordered_taggable_on' and ':topics_acts_as_taggable' in the further process of the migration
  # https://gitlab.com/gitlab-org/gitlab/-/issues/335946
  acts_as_ordered_taggable_on :topics
  has_many :topics_acts_as_taggable, -> { order("#{ActsAsTaggableOn::Tagging.table_name}.id") },
           class_name: 'ActsAsTaggableOn::Tag',
           through: :topic_taggings,
           source: :tag

  has_many :project_topics, -> { order(:id) }, class_name: 'Projects::ProjectTopic'
  has_many :topics, through: :project_topics, class_name: 'Projects::Topic'

  # Required during the `ActsAsTaggableOn::Tag -> Topic` migration
  # TODO: remove 'topics' in the further process of the migration
  # https://gitlab.com/gitlab-org/gitlab/-/issues/335946
  alias_method :topics_new, :topics
  def topics
    self.topics_acts_as_taggable + self.topics_new
  end

  attr_accessor :old_path_with_namespace
  attr_accessor :template_name
  attr_writer :pipeline_status
  attr_accessor :skip_disk_validation
  attr_writer :topic_list

  alias_attribute :title, :name

@@ -623,6 +644,19 @@ class Project < ApplicationRecord
    joins(:service_desk_setting).where('service_desk_settings.project_key' => key)
  end

  scope :with_topic, ->(topic_name) do
    topic = Projects::Topic.find_by_name(topic_name)
    acts_as_taggable_on_topic = ActsAsTaggableOn::Tag.find_by_name(topic_name)

    return none unless topic || acts_as_taggable_on_topic

    relations = []
    relations << where(id: topic.project_topics.select(:project_id)) if topic
    relations << where(id: acts_as_taggable_on_topic.taggings.select(:taggable_id)) if acts_as_taggable_on_topic

    Project.from_union(relations)
  end

  enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }

  chronic_duration_attr :build_timeout_human_readable, :build_timeout,

@@ -638,7 +672,7 @@ class Project < ApplicationRecord
  mount_uploader :bfg_object_map, AttachmentUploader

  def self.with_api_entity_associations
    preload(:project_feature, :route, :topics, :group, :timelogs, namespace: [:route, :owner])
    preload(:project_feature, :route, :topics, :topics_acts_as_taggable, :group, :timelogs, namespace: [:route, :owner])
  end

  def self.with_web_entity_associations

@@ -2673,8 +2707,32 @@ class Project < ApplicationRecord
    ci_cd_settings.group_runners_enabled?
  end

  def topic_list
    self.topics.map(&:name)
  end

  private

  def save_topics
    return if @topic_list.nil?

    @topic_list = @topic_list.split(',') if @topic_list.instance_of?(String)
    @topic_list = @topic_list.map(&:strip).uniq.reject(&:empty?)

    if @topic_list != self.topic_list || self.topics_acts_as_taggable.any?
      self.topics_new.delete_all
      self.topics = @topic_list.map { |topic| Projects::Topic.find_or_create_by(name: topic) }

      # Remove old topics (ActsAsTaggableOn::Tag)
      # Required during the `ActsAsTaggableOn::Tag -> Topic` migration
      # TODO: remove in the further process of the migration
      # https://gitlab.com/gitlab-org/gitlab/-/issues/335946
      self.topic_taggings.clear
    end

    @topic_list = nil
  end

  def find_integration(integrations, name)
    integrations.find { _1.to_param == name }
  end
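Aside (not part of the commit): based on the `topic_list` reader and the `save_topics` callback above, topic assignment is expected to behave roughly like this sketch; the project and topic names are made up.

    project.topic_list = ' gitlab, ruby, gitlab '  # string or array accepted
    project.save!                                  # save_topics strips, de-duplicates, and
                                                   # finds or creates Projects::Topic records
    project.topics.map(&:name)                     # => ["gitlab", "ruby"]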
app/models/projects/project_topic.rb (new file, +8 lines)
@@ -0,0 +1,8 @@
# frozen_string_literal: true

module Projects
  class ProjectTopic < ApplicationRecord
    belongs_to :project
    belongs_to :topic
  end
end

app/models/projects/topic.rb (new file, +10 lines)
@@ -0,0 +1,10 @@
# frozen_string_literal: true

module Projects
  class Topic < ApplicationRecord
    validates :name, presence: true, uniqueness: true, length: { maximum: 255 }

    has_many :project_topics, class_name: 'Projects::ProjectTopic'
    has_many :projects, through: :project_topics
  end
end
@@ -2007,19 +2007,19 @@ class User < ApplicationRecord
  end

  def set_notification_email
    if notification_email.blank? || verified_emails.exclude?(notification_email)
    if verified_emails.exclude?(notification_email)
      self.notification_email = nil
    end
  end

  def set_public_email
    if public_email.blank? || verified_emails.exclude?(public_email)
    if verified_emails.exclude?(public_email)
      self.public_email = ''
    end
  end

  def set_commit_email
    if commit_email.blank? || verified_emails.exclude?(commit_email)
    if verified_emails.exclude?(commit_email)
      self.commit_email = nil
    end
  end
@@ -35,7 +35,7 @@ module MergeRequests
    end

    def save_approval(approval)
      Approval.safe_ensure_unique do
      Approval.safe_ensure_unique do # rubocop:disable Performance/ActiveRecordSubtransactionMethods
        approval.save
      end
    end

@@ -2,8 +2,6 @@

module MergeRequests
  class SquashService < MergeRequests::BaseService
    SquashInProgressError = Class.new(RuntimeError)

    def execute
      # If performing a squash would result in no change, then
      # immediately return a success message without performing a squash

@@ -13,14 +11,7 @@ module MergeRequests

      return error(s_('MergeRequests|This project does not allow squashing commits when merge requests are accepted.')) if squash_forbidden?

      if squash_in_progress?
        return error(s_('MergeRequests|Squash task canceled: another squash is already in progress.'))
      end

      squash! || error(s_('MergeRequests|Failed to squash. Should be done manually.'))

    rescue SquashInProgressError
      error(s_('MergeRequests|An error occurred while checking whether another squash is in progress.'))
    end

    private

@@ -35,14 +26,6 @@ module MergeRequests
      false
    end

    def squash_in_progress?
      merge_request.squash_in_progress?
    rescue StandardError => e
      log_error(exception: e, message: 'Failed to check squash in progress')

      raise SquashInProgressError, e.message
    end

    def squash_forbidden?
      target_project.squash_never?
    end
@@ -70,7 +70,7 @@ module Terraform

      return find_state!(find_params) if find_only

      state = Terraform::State.create_or_find_by(find_params)
      state = Terraform::State.create_or_find_by(find_params) # rubocop:disable Performance/ActiveRecordSubtransactionMethods

      # https://github.com/rails/rails/issues/36027
      return state unless state.errors.of_kind? :name, :taken

@@ -28,4 +28,4 @@

= render "projects/pipelines/with_tabs", pipeline: @pipeline, stages: @stages, pipeline_has_errors: pipeline_has_errors

.js-pipeline-details-vue{ data: { endpoint: project_pipeline_path(@project, @pipeline, format: :json), metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: @project.namespace, project_id: @project, format: :json), pipeline_project_path: @project.full_path, pipeline_iid: @pipeline.iid, graphql_resource_etag: graphql_etag_pipeline_path(@pipeline) } }
.js-pipeline-details-vue{ data: { metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: @project.namespace, project_id: @project, format: :json), pipeline_project_path: @project.full_path, pipeline_iid: @pipeline.iid, graphql_resource_etag: graphql_etag_pipeline_path(@pipeline) } }
@@ -1459,7 +1459,7 @@
  :urgency: :high
  :resource_boundary: :unknown
  :weight: 3
  :idempotent: true
  :idempotent:
  :tags: []
- :name: pipeline_cache:expire_pipeline_cache
  :worker_name: ExpirePipelineCacheWorker
@@ -1,16 +1,20 @@
# frozen_string_literal: true

class ExpireJobCacheWorker
class ExpireJobCacheWorker # rubocop:disable Scalability/IdempotentWorker
  include ApplicationWorker

  data_consistency :always
  data_consistency :delayed, feature_flag: :load_balancing_for_expire_job_cache_worker

  sidekiq_options retry: 3
  include PipelineQueue

  queue_namespace :pipeline_cache
  urgency :high
  idempotent!
  # This worker should be idempotent, but we're switching to data_consistency
  # :sticky and there is an ongoing incompatibility, so it needs to be disabled for
  # now. The following line can be uncommented and this comment removed once
  # https://gitlab.com/gitlab-org/gitlab/-/issues/325291 is resolved.
  # idempotent!

  # rubocop: disable CodeReuse/ActiveRecord
  def perform(job_id)
@@ -20,7 +20,6 @@
- chatops
- cloud_native_installation
- cluster_cost_management
- code_analytics
- code_quality
- code_review
- code_testing

@@ -34,6 +33,7 @@
- continuous_integration_scaling
- database
- dataops
- delivery_management
- dependency_firewall
- dependency_proxy
- dependency_scanning

@@ -42,6 +42,7 @@
- disaster_recovery
- dynamic_application_security_testing
- editor_extension
- environment_management
- epics
- error_tracking
- experimentation_activation
@@ -1,8 +1,8 @@
---
name: other_storage_tab
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57121
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/325967
milestone: '13.11'
name: load_balancing_for_expire_job_cache_worker
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68791
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339137
milestone: '14.3'
type: development
group: group::fulfillment
group: group::pipeline authoring
default_enabled: false

@@ -1,8 +0,0 @@
---
name: active_record_subtransactions_counter
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66477
rollout_issue_url:
milestone: '14.1'
type: ops
group: group::pipeline execution
default_enabled: false
db/migrate/20210729081351_create_topics.rb (new file, +16 lines)
@@ -0,0 +1,16 @@
# frozen_string_literal: true

class CreateTopics < ActiveRecord::Migration[6.1]
  include Gitlab::Database::MigrationHelpers

  def change
    create_table_with_constraints :topics do |t|
      t.text :name, null: false
      t.text_limit :name, 255

      t.index :name, unique: true

      t.timestamps_with_timezone
    end
  end
end

db/migrate/20210729081739_create_project_topics.rb (new file, +16 lines)
@@ -0,0 +1,16 @@
# frozen_string_literal: true

class CreateProjectTopics < ActiveRecord::Migration[6.1]
  def change
    create_table :project_topics do |t|
      t.bigint :project_id, null: false
      t.bigint :topic_id, null: false

      t.index :project_id
      t.index :topic_id
      t.index [:project_id, :topic_id], unique: true

      t.timestamps_with_timezone
    end
  end
end

@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddForeignKeyToProjectOnProjectTopic < ActiveRecord::Migration[6.1]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :project_topics, :projects, column: :project_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :project_topics, column: :project_id
    end
  end
end

@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddForeignKeyToTopicOnProjectTopic < ActiveRecord::Migration[6.1]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :project_topics, :topics, column: :topic_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :project_topics, column: :topic_id
    end
  end
end

@@ -0,0 +1,37 @@
# frozen_string_literal: true

class ScheduleExtractProjectTopicsIntoSeparateTable < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  BATCH_SIZE = 1_000
  DELAY_INTERVAL = 2.minutes
  MIGRATION = 'ExtractProjectTopicsIntoSeparateTable'
  INDEX_NAME = 'tmp_index_taggings_on_id_where_taggable_type_project'
  INDEX_CONDITION = "taggable_type = 'Project'"

  disable_ddl_transaction!

  class Tagging < ActiveRecord::Base
    include ::EachBatch

    self.table_name = 'taggings'
  end

  def up
    # this index is used in 20210730104800_schedule_extract_project_topics_into_separate_table
    add_concurrent_index :taggings, :id, where: INDEX_CONDITION, name: INDEX_NAME

    queue_background_migration_jobs_by_range_at_intervals(
      Tagging.where(taggable_type: 'Project'),
      MIGRATION,
      DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      track_jobs: true
    )
  end

  def down
    remove_concurrent_index_by_name :taggings, INDEX_NAME
  end
end
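Aside (not part of the commit): each job queued above later runs the `ExtractProjectTopicsIntoSeparateTable` background migration (added under `lib/gitlab/background_migration/` further down in this diff) against one ID range of `taggings` rows, roughly as in this sketch with placeholder IDs.

    # Hypothetical batch covering tagging IDs 1..1_000.
    Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable.new.perform(1, 1_000)
    # For every 'Project' tagging in the range it finds or creates the matching topic and
    # project_topics row, deletes the old tagging, and then marks the job as succeeded.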
db/schema_migrations/20210729081351 (new file, +1 line)
@@ -0,0 +1 @@
17463867a8c14981386256dc90169fb879e1921d65eccca53eae576d49fba49d

db/schema_migrations/20210729081739 (new file, +1 line)
@@ -0,0 +1 @@
af7963d27bda6ef85fb5b5a06ecf1de14f21829eecdaf13e763aa9a6ffc2e83c

db/schema_migrations/20210729125641 (new file, +1 line)
@@ -0,0 +1 @@
b7bc495d010e0640b1145ca55f47696047fd4360d2dfc9a3da7941ab62840132

db/schema_migrations/20210729125659 (new file, +1 line)
@@ -0,0 +1 @@
5826e87b2ce13d4951e9b8e774c87c29c6e0a0954a85d60ec68155f2c5cf3ccc

db/schema_migrations/20210730104800 (new file, +1 line)
@@ -0,0 +1 @@
7764c058665015707aff6e25ccbf60d4a329c67c16106b2ef523862ef82298b7
|
@ -17379,6 +17379,23 @@ CREATE SEQUENCE project_statistics_id_seq
|
|||
|
||||
ALTER SEQUENCE project_statistics_id_seq OWNED BY project_statistics.id;
|
||||
|
||||
CREATE TABLE project_topics (
|
||||
id bigint NOT NULL,
|
||||
project_id bigint NOT NULL,
|
||||
topic_id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL
|
||||
);
|
||||
|
||||
CREATE SEQUENCE project_topics_id_seq
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
ALTER SEQUENCE project_topics_id_seq OWNED BY project_topics.id;
|
||||
|
||||
CREATE TABLE project_tracing_settings (
|
||||
id bigint NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
|
@ -18835,6 +18852,23 @@ CREATE SEQUENCE token_with_ivs_id_seq
|
|||
|
||||
ALTER SEQUENCE token_with_ivs_id_seq OWNED BY token_with_ivs.id;
|
||||
|
||||
CREATE TABLE topics (
|
||||
id bigint NOT NULL,
|
||||
name text NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
CONSTRAINT check_7a90d4c757 CHECK ((char_length(name) <= 255))
|
||||
);
|
||||
|
||||
CREATE SEQUENCE topics_id_seq
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
ALTER SEQUENCE topics_id_seq OWNED BY topics.id;
|
||||
|
||||
CREATE TABLE trending_projects (
|
||||
id integer NOT NULL,
|
||||
project_id integer NOT NULL
|
||||
|
@ -20777,6 +20811,8 @@ ALTER TABLE ONLY project_security_settings ALTER COLUMN project_id SET DEFAULT n
|
|||
|
||||
ALTER TABLE ONLY project_statistics ALTER COLUMN id SET DEFAULT nextval('project_statistics_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY project_topics ALTER COLUMN id SET DEFAULT nextval('project_topics_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY project_tracing_settings ALTER COLUMN id SET DEFAULT nextval('project_tracing_settings_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
|
||||
|
@ -20905,6 +20941,8 @@ ALTER TABLE ONLY todos ALTER COLUMN id SET DEFAULT nextval('todos_id_seq'::regcl
|
|||
|
||||
ALTER TABLE ONLY token_with_ivs ALTER COLUMN id SET DEFAULT nextval('token_with_ivs_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY topics ALTER COLUMN id SET DEFAULT nextval('topics_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY trending_projects ALTER COLUMN id SET DEFAULT nextval('trending_projects_id_seq'::regclass);
|
||||
|
||||
ALTER TABLE ONLY u2f_registrations ALTER COLUMN id SET DEFAULT nextval('u2f_registrations_id_seq'::regclass);
|
||||
|
@ -22412,6 +22450,9 @@ ALTER TABLE ONLY project_settings
|
|||
ALTER TABLE ONLY project_statistics
|
||||
ADD CONSTRAINT project_statistics_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY project_topics
|
||||
ADD CONSTRAINT project_topics_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY project_tracing_settings
|
||||
ADD CONSTRAINT project_tracing_settings_pkey PRIMARY KEY (id);
|
||||
|
||||
|
@ -22625,6 +22666,9 @@ ALTER TABLE ONLY todos
|
|||
ALTER TABLE ONLY token_with_ivs
|
||||
ADD CONSTRAINT token_with_ivs_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY topics
|
||||
ADD CONSTRAINT topics_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY trending_projects
|
||||
ADD CONSTRAINT trending_projects_pkey PRIMARY KEY (id);
|
||||
|
||||
|
@ -25023,6 +25067,12 @@ CREATE INDEX index_project_statistics_on_storage_size_and_project_id ON project_
|
|||
|
||||
CREATE INDEX index_project_statistics_on_wiki_size_and_project_id ON project_statistics USING btree (wiki_size, project_id);
|
||||
|
||||
CREATE INDEX index_project_topics_on_project_id ON project_topics USING btree (project_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_project_topics_on_project_id_and_topic_id ON project_topics USING btree (project_id, topic_id);
|
||||
|
||||
CREATE INDEX index_project_topics_on_topic_id ON project_topics USING btree (topic_id);
|
||||
|
||||
CREATE UNIQUE INDEX index_project_tracing_settings_on_project_id ON project_tracing_settings USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_projects_aimed_for_deletion ON projects USING btree (marked_for_deletion_at) WHERE ((marked_for_deletion_at IS NOT NULL) AND (pending_delete = false));
|
||||
|
@ -25527,6 +25577,8 @@ CREATE UNIQUE INDEX index_token_with_ivs_on_hashed_plaintext_token ON token_with
|
|||
|
||||
CREATE UNIQUE INDEX index_token_with_ivs_on_hashed_token ON token_with_ivs USING btree (hashed_token);
|
||||
|
||||
CREATE UNIQUE INDEX index_topics_on_name ON topics USING btree (name);
|
||||
|
||||
CREATE UNIQUE INDEX index_trending_projects_on_project_id ON trending_projects USING btree (project_id);
|
||||
|
||||
CREATE INDEX index_u2f_registrations_on_key_handle ON u2f_registrations USING btree (key_handle);
|
||||
|
@ -25861,6 +25913,8 @@ CREATE INDEX tmp_index_namespaces_empty_traversal_ids_with_root_namespaces ON na
|
|||
|
||||
CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2);
|
||||
|
||||
CREATE INDEX tmp_index_taggings_on_id_where_taggable_type_project ON taggings USING btree (id) WHERE ((taggable_type)::text = 'Project'::text);
|
||||
|
||||
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_architectures_on_distribution_id_and_name ON packages_debian_group_architectures USING btree (distribution_id, name);
|
||||
|
||||
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_components_on_distribution_id_and_name ON packages_debian_group_components USING btree (distribution_id, name);
|
||||
|
@ -26354,6 +26408,9 @@ ALTER TABLE ONLY ci_group_variables
|
|||
ALTER TABLE ONLY namespaces
|
||||
ADD CONSTRAINT fk_3448c97865 FOREIGN KEY (push_rule_id) REFERENCES push_rules(id) ON DELETE SET NULL;
|
||||
|
||||
ALTER TABLE ONLY project_topics
|
||||
ADD CONSTRAINT fk_34af9ab07a FOREIGN KEY (topic_id) REFERENCES topics(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY in_product_marketing_emails
|
||||
ADD CONSTRAINT fk_35c9101b63 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
|
||||
|
@ -26873,6 +26930,9 @@ ALTER TABLE ONLY label_links
|
|||
ALTER TABLE ONLY project_group_links
|
||||
ADD CONSTRAINT fk_daa8cee94c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY project_topics
|
||||
ADD CONSTRAINT fk_db13576296 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY security_scans
|
||||
ADD CONSTRAINT fk_dbc89265b9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
|
||||
|
||||
|
|
|
@@ -244,8 +244,8 @@ Graph API instead of IMAP. Follow the [documentation in the incoming email secti
   gitlab_rails['service_desk_email_email'] = "project_contact@example.onmicrosoft.com"
   gitlab_rails['service_desk_email_mailbox_name'] = "inbox"
   gitlab_rails['service_desk_email_log_file'] = "/var/log/gitlab/mailroom/mail_room_json.log"
   gitlab_rails['service_desk_inbox_method'] = 'microsoft_graph'
   gitlab_rails['service_desk_inbox_options'] = {
   gitlab_rails['service_desk_email_inbox_method'] = 'microsoft_graph'
   gitlab_rails['service_desk_email_inbox_options'] = {
     'tenant_id': '<YOUR-TENANT-ID>',
     'client_id': '<YOUR-CLIENT-ID>',
     'client_secret': '<YOUR-CLIENT-SECRET>',
|
|
@@ -43,7 +43,7 @@ module API
        # N+1 is solved then by using `subject.topics.map(&:name)`
        # MR describing the solution: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/20555
        projects_relation.preload(:project_feature, :route)
                         .preload(:import_state, :topics)
                         .preload(:import_state, :topics, :topics_acts_as_taggable)
                         .preload(:auto_devops)
                         .preload(namespace: [:route, :owner])
      end

@@ -140,7 +140,7 @@ module API
        .preload(project_group_links: { group: :route },
                 fork_network: :root_project,
                 fork_network_member: :forked_from_project,
                 forked_from_project: [:route, :topics, :group, :project_feature, namespace: [:route, :owner]])
                 forked_from_project: [:route, :topics, :topics_acts_as_taggable, :group, :project_feature, namespace: [:route, :owner]])
      end
      # rubocop: enable CodeReuse/ActiveRecord
|
@ -0,0 +1,50 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
# The class to extract the project topics into a separate `topics` table
|
||||
class ExtractProjectTopicsIntoSeparateTable
|
||||
# Temporary AR table for tags
|
||||
class Tag < ActiveRecord::Base
|
||||
self.table_name = 'tags'
|
||||
end
|
||||
|
||||
# Temporary AR table for taggings
|
||||
class Tagging < ActiveRecord::Base
|
||||
self.table_name = 'taggings'
|
||||
belongs_to :tag
|
||||
end
|
||||
|
||||
# Temporary AR table for topics
|
||||
class Topic < ActiveRecord::Base
|
||||
self.table_name = 'topics'
|
||||
end
|
||||
|
||||
# Temporary AR table for project topics
|
||||
class ProjectTopic < ActiveRecord::Base
|
||||
self.table_name = 'project_topics'
|
||||
belongs_to :topic
|
||||
end
|
||||
|
||||
def perform(start_id, stop_id)
|
||||
Tagging.includes(:tag).where(taggable_type: 'Project', id: start_id..stop_id).each do |tagging|
|
||||
topic = Topic.find_or_create_by(name: tagging.tag.name)
|
||||
project_topic = ProjectTopic.find_or_create_by(project_id: tagging.taggable_id, topic: topic)
|
||||
|
||||
tagging.delete if project_topic.persisted?
|
||||
end
|
||||
|
||||
mark_job_as_succeeded(start_id, stop_id)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def mark_job_as_succeeded(*arguments)
|
||||
Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
|
||||
self.class.name.demodulize,
|
||||
arguments
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -214,9 +214,13 @@ module Gitlab
|
|||
extend ActiveSupport::Concern
|
||||
|
||||
class_methods do
|
||||
# A monkeypatch over ActiveRecord::Base.transaction.
|
||||
# It provides observability into transactional methods.
|
||||
# A patch over ActiveRecord::Base.transaction that provides
|
||||
# observability into transactional methods.
|
||||
def transaction(**options, &block)
|
||||
if options[:requires_new] && connection.transaction_open?
|
||||
::Gitlab::Database::Metrics.subtransactions_increment(self.name)
|
||||
end
|
||||
|
||||
ActiveSupport::Notifications.instrument('transaction.active_record', { connection: connection }) do
|
||||
super(**options, &block)
|
||||
end
|
||||
|
|
|
@ -869,12 +869,6 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
|
||||
def squash_in_progress?(squash_id)
|
||||
wrapped_gitaly_errors do
|
||||
gitaly_repository_client.squash_in_progress?(squash_id)
|
||||
end
|
||||
end
|
||||
|
||||
def bundle_to_disk(save_path)
|
||||
wrapped_gitaly_errors do
|
||||
gitaly_repository_client.create_bundle(save_path)
|
||||
|
|
|
@ -155,23 +155,6 @@ module Gitlab
|
|||
)
|
||||
end
|
||||
|
||||
def squash_in_progress?(squash_id)
|
||||
request = Gitaly::IsSquashInProgressRequest.new(
|
||||
repository: @gitaly_repo,
|
||||
squash_id: squash_id.to_s
|
||||
)
|
||||
|
||||
response = GitalyClient.call(
|
||||
@storage,
|
||||
:repository_service,
|
||||
:is_squash_in_progress,
|
||||
request,
|
||||
timeout: GitalyClient.fast_timeout
|
||||
)
|
||||
|
||||
response.in_progress
|
||||
end
|
||||
|
||||
def fetch_source_branch(source_repository, source_branch, local_ref)
|
||||
request = Gitaly::FetchSourceBranchRequest.new(
|
||||
repository: @gitaly_repo,
|
||||
|
|
|
@ -21072,9 +21072,6 @@ msgstr ""
|
|||
msgid "MergeRequestDiffs|Select comment starting line"
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeRequests|An error occurred while checking whether another squash is in progress."
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeRequests|An error occurred while saving the draft comment."
|
||||
msgstr ""
|
||||
|
||||
|
@ -21087,9 +21084,6 @@ msgstr ""
|
|||
msgid "MergeRequests|Saving the comment failed"
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeRequests|Squash task canceled: another squash is already in progress."
|
||||
msgstr ""
|
||||
|
||||
msgid "MergeRequests|This project does not allow squashing commits when merge requests are accepted."
|
||||
msgstr ""
|
||||
|
||||
|
@ -36028,9 +36022,6 @@ msgstr ""
|
|||
msgid "UsageQuota|Learn more about usage quotas"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Other Storage"
|
||||
msgstr ""
|
||||
|
||||
msgid "UsageQuota|Packages"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@@ -0,0 +1,26 @@
# frozen_string_literal: true

module RuboCop
  module Cop
    module Performance
      # Cop that disallows certain methods that rely on subtransactions in their implementation.
      # Companion to Performance/ActiveRecordSubtransactions, which bans direct usage of subtransactions.
      class ActiveRecordSubtransactionMethods < RuboCop::Cop::Cop
        MSG = 'Methods that rely on subtransactions should not be used. ' \
          'For more information see: https://gitlab.com/gitlab-org/gitlab/-/issues/338346'

        DISALLOWED_METHODS = %i[
          safe_ensure_unique
          create_or_find_by
          create_or_find_by!
        ].freeze

        def on_send(node)
          return unless DISALLOWED_METHODS.include?(node.method_name)

          add_offense(node, location: :selector)
        end
      end
    end
  end
end
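Aside (not part of the commit): an example of the call shape the new cop flags, next to the inline opt-out used elsewhere in this commit when a subtransaction-based helper is still required.

    # Flagged: create_or_find_by relies on a subtransaction internally.
    state = Terraform::State.create_or_find_by(find_params)

    # Accepted, mirroring the hunks above:
    state = Terraform::State.create_or_find_by(find_params) # rubocop:disable Performance/ActiveRecordSubtransactionMethods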
spec/factories/project_topics.rb (new file, +8 lines)
@@ -0,0 +1,8 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :project_topic, class: 'Projects::ProjectTopic' do
    association :project, factory: :project
    association :topic, factory: :topic
  end
end

spec/factories/topics.rb (new file, +7 lines)
@@ -0,0 +1,7 @@
# frozen_string_literal: true

FactoryBot.define do
  factory :topic, class: 'Projects::Topic' do
    name { generate(:name) }
  end
end
|
@ -135,6 +135,7 @@ RSpec.describe ProjectsFinder do
|
|||
|
||||
describe 'filter by tags (deprecated)' do
|
||||
before do
|
||||
public_project.reload
|
||||
public_project.topic_list = 'foo'
|
||||
public_project.save!
|
||||
end
|
||||
|
@ -146,6 +147,7 @@ RSpec.describe ProjectsFinder do
|
|||
|
||||
describe 'filter by topics' do
|
||||
before do
|
||||
public_project.reload
|
||||
public_project.topic_list = 'foo, bar'
|
||||
public_project.save!
|
||||
end
|
||||
|
|
|
@ -8,6 +8,8 @@ jest.mock('~/editor/source_editor');
|
|||
jest.mock('~/editor/extensions/source_editor_markdown_ext');
|
||||
jest.mock('~/editor/extensions/source_editor_file_template_ext');
|
||||
|
||||
const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown';
|
||||
|
||||
describe('Blob Editing', () => {
|
||||
const useMock = jest.fn();
|
||||
const mockInstance = {
|
||||
|
@ -34,6 +36,7 @@ describe('Blob Editing', () => {
|
|||
const editorInst = (isMarkdown) => {
|
||||
return new EditBlob({
|
||||
isMarkdown,
|
||||
previewMarkdownPath: PREVIEW_MARKDOWN_PATH,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -44,6 +47,7 @@ describe('Blob Editing', () => {
|
|||
|
||||
it('loads FileTemplateExtension by default', async () => {
|
||||
await initEditor();
|
||||
expect(useMock).toHaveBeenCalledWith(expect.any(FileTemplateExtension));
|
||||
expect(FileTemplateExtension).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
|
@ -55,9 +59,12 @@ describe('Blob Editing', () => {
|
|||
|
||||
it('loads MarkdownExtension only for the markdown files', async () => {
|
||||
await initEditor(true);
|
||||
expect(useMock).toHaveBeenCalledTimes(2);
|
||||
expect(FileTemplateExtension).toHaveBeenCalledTimes(1);
|
||||
expect(useMock).toHaveBeenCalledWith(expect.any(EditorMarkdownExtension));
|
||||
expect(EditorMarkdownExtension).toHaveBeenCalledTimes(1);
|
||||
expect(EditorMarkdownExtension).toHaveBeenCalledWith({
|
||||
instance: mockInstance,
|
||||
previewMarkdownPath: PREVIEW_MARKDOWN_PATH,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ describe('Markdown Extension for Source Editor', () => {
|
|||
let editorEl;
|
||||
let panelSpy;
|
||||
let mockAxios;
|
||||
const projectPath = 'fooGroup/barProj';
|
||||
const previewMarkdownPath = '/gitlab/fooGroup/barProj/preview_markdown';
|
||||
const firstLine = 'This is a';
|
||||
const secondLine = 'multiline';
|
||||
const thirdLine = 'string with some **markup**';
|
||||
|
@ -57,7 +57,7 @@ describe('Markdown Extension for Source Editor', () => {
|
|||
blobPath: markdownPath,
|
||||
blobContent: text,
|
||||
});
|
||||
editor.use(new EditorMarkdownExtension({ instance, projectPath }));
|
||||
editor.use(new EditorMarkdownExtension({ instance, previewMarkdownPath }));
|
||||
panelSpy = jest.spyOn(EditorMarkdownExtension, 'togglePreviewPanel');
|
||||
});
|
||||
|
||||
|
@ -74,7 +74,7 @@ describe('Markdown Extension for Source Editor', () => {
|
|||
shown: false,
|
||||
modelChangeListener: undefined,
|
||||
});
|
||||
expect(instance.projectPath).toBe(projectPath);
|
||||
expect(instance.previewMarkdownPath).toBe(previewMarkdownPath);
|
||||
});
|
||||
|
||||
describe('model language changes listener', () => {
|
||||
|
@ -223,34 +223,24 @@ describe('Markdown Extension for Source Editor', () => {
|
|||
});
|
||||
|
||||
describe('fetchPreview', () => {
|
||||
const group = 'foo';
|
||||
const project = 'bar';
|
||||
const setData = (path, g, p) => {
|
||||
instance.projectPath = path;
|
||||
document.body.setAttribute('data-group', g);
|
||||
document.body.setAttribute('data-project', p);
|
||||
};
|
||||
const fetchPreview = async () => {
|
||||
instance.fetchPreview();
|
||||
await waitForPromises();
|
||||
};
|
||||
|
||||
let previewMarkdownSpy;
|
||||
|
||||
beforeEach(() => {
|
||||
mockAxios.onPost().reply(200, { body: responseData });
|
||||
previewMarkdownSpy = jest.fn().mockImplementation(() => [200, { body: responseData }]);
|
||||
mockAxios.onPost(previewMarkdownPath).replyOnce((req) => previewMarkdownSpy(req));
|
||||
});
|
||||
|
||||
it('correctly fetches preview based on projectPath', async () => {
|
||||
setData(projectPath, group, project);
|
||||
it('correctly fetches preview based on previewMarkdownPath', async () => {
|
||||
await fetchPreview();
|
||||
expect(mockAxios.history.post[0].url).toBe(`/${projectPath}/preview_markdown`);
|
||||
expect(mockAxios.history.post[0].data).toEqual(JSON.stringify({ text }));
|
||||
});
|
||||
|
||||
it('correctly fetches preview based on group and project data attributes', async () => {
|
||||
setData(undefined, group, project);
|
||||
await fetchPreview();
|
||||
expect(mockAxios.history.post[0].url).toBe(`/${group}/${project}/preview_markdown`);
|
||||
expect(mockAxios.history.post[0].data).toEqual(JSON.stringify({ text }));
|
||||
expect(previewMarkdownSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ data: JSON.stringify({ text }) }),
|
||||
);
|
||||
});
|
||||
|
||||
it('puts the fetched content into the preview DOM element', async () => {
|
||||
|
|
|
@ -24,6 +24,8 @@ import axios from '~/lib/utils/axios_utils';
|
|||
import ContentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue';
|
||||
import { file } from '../helpers';
|
||||
|
||||
const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown';
|
||||
|
||||
const defaultFileProps = {
|
||||
...file('file.txt'),
|
||||
content: 'hello world',
|
||||
|
@ -77,6 +79,7 @@ const prepareStore = (state, activeFile) => {
|
|||
entries: {
|
||||
[activeFile.path]: activeFile,
|
||||
},
|
||||
previewMarkdownPath: PREVIEW_MARKDOWN_PATH,
|
||||
};
|
||||
const storeOptions = createStoreOptions();
|
||||
return new Vuex.Store({
|
||||
|
@ -278,10 +281,10 @@ describe('RepoEditor', () => {
|
|||
async ({ activeFile, viewer, shouldHaveMarkdownExtension } = {}) => {
|
||||
await createComponent({ state: { viewer }, activeFile });
|
||||
if (shouldHaveMarkdownExtension) {
|
||||
expect(vm.editor.projectPath).toBe(vm.currentProjectId);
|
||||
expect(vm.editor.previewMarkdownPath).toBe(PREVIEW_MARKDOWN_PATH);
|
||||
expect(vm.editor.togglePreview).toBeDefined();
|
||||
} else {
|
||||
expect(vm.editor.projectPath).toBeUndefined();
|
||||
expect(vm.editor.previewMarkdownPath).toBeUndefined();
|
||||
expect(vm.editor.togglePreview).toBeUndefined();
|
||||
}
|
||||
},
|
||||
|
|
|
@ -18,7 +18,8 @@ RSpec.describe IdeHelper do
|
|||
'file-path' => nil,
|
||||
'merge-request' => nil,
|
||||
'fork-info' => nil,
|
||||
'project' => nil
|
||||
'project' => nil,
|
||||
'preview-markdown-path' => nil
|
||||
)
|
||||
end
|
||||
end
|
||||
|
@ -41,7 +42,8 @@ RSpec.describe IdeHelper do
|
|||
'file-path' => 'foo/bar',
|
||||
'merge-request' => '1',
|
||||
'fork-info' => fork_info.to_json,
|
||||
'project' => serialized_project
|
||||
'project' => serialized_project,
|
||||
'preview-markdown-path' => Gitlab::Routing.url_helpers.preview_markdown_project_path(project)
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,41 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable, schema: 20210730104800 do
|
||||
it 'correctly extracts project topics into separate table' do
|
||||
namespaces = table(:namespaces)
|
||||
projects = table(:projects)
|
||||
taggings = table(:taggings)
|
||||
tags = table(:tags)
|
||||
project_topics = table(:project_topics)
|
||||
topics = table(:topics)
|
||||
|
||||
namespace = namespaces.create!(name: 'foo', path: 'foo')
|
||||
project = projects.create!(namespace_id: namespace.id)
|
||||
tag_1 = tags.create!(name: 'Topic1')
|
||||
tag_2 = tags.create!(name: 'Topic2')
|
||||
tag_3 = tags.create!(name: 'Topic3')
|
||||
topic_3 = topics.create!(name: 'Topic3')
|
||||
tagging_1 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_1.id)
|
||||
tagging_2 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_2.id)
|
||||
other_tagging = taggings.create!(taggable_type: 'Other', taggable_id: project.id, context: 'topics', tag_id: tag_1.id)
|
||||
tagging_3 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_3.id)
|
||||
|
||||
subject.perform(tagging_1.id, tagging_3.id)
|
||||
|
||||
# Tagging records
|
||||
expect { tagging_1.reload }.to raise_error(ActiveRecord::RecordNotFound)
|
||||
expect { tagging_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
|
||||
expect { other_tagging.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
|
||||
expect { tagging_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
|
||||
|
||||
# Topic records
|
||||
topic_1 = topics.find_by(name: 'Topic1')
|
||||
topic_2 = topics.find_by(name: 'Topic2')
|
||||
expect(topics.all).to contain_exactly(topic_1, topic_2, topic_3)
|
||||
|
||||
# ProjectTopic records
|
||||
expect(project_topics.all.map(&:topic_id)).to contain_exactly(topic_1.id, topic_2.id, topic_3.id)
|
||||
end
|
||||
end
|
|
@ -195,19 +195,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#squash_in_progress?' do
|
||||
let(:squash_id) { 1 }
|
||||
|
||||
it 'sends a repository_squash_in_progress message' do
|
||||
expect_any_instance_of(Gitaly::RepositoryService::Stub)
|
||||
.to receive(:is_squash_in_progress)
|
||||
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
|
||||
.and_return(double(in_progress: true))
|
||||
|
||||
client.squash_in_progress?(squash_id)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_checksum' do
|
||||
it 'sends a calculate_checksum message' do
|
||||
expect_any_instance_of(Gitaly::RepositoryService::Stub)
|
||||
|
|
|
@ -354,6 +354,8 @@ project:
|
|||
- taggings
|
||||
- base_tags
|
||||
- topic_taggings
|
||||
- topics_acts_as_taggable
|
||||
- project_topics
|
||||
- topics
|
||||
- chat_services
|
||||
- cluster
|
||||
|
|
|
@ -164,6 +164,23 @@ RSpec.describe ApplicationRecord do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
# rubocop:disable Database/MultipleDatabases
|
||||
it 'increments a counter when a transaction is created in ActiveRecord' do
|
||||
expect(described_class.connection.transaction_open?).to be false
|
||||
|
||||
expect(::Gitlab::Database::Metrics)
|
||||
.to receive(:subtransactions_increment)
|
||||
.with('ActiveRecord::Base')
|
||||
.once
|
||||
|
||||
ActiveRecord::Base.transaction do
|
||||
ActiveRecord::Base.transaction(requires_new: true) do
|
||||
expect(ActiveRecord::Base.connection.transaction_open?).to be true
|
||||
end
|
||||
end
|
||||
end
|
||||
# rubocop:enable Database/MultipleDatabases
|
||||
end
|
||||
|
||||
describe '.with_fast_read_statement_timeout' do
|
||||
|
|
|
@ -151,43 +151,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#squash_in_progress?' do
|
||||
let(:repo_path) do
|
||||
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
|
||||
subject.source_project.repository.path
|
||||
end
|
||||
end
|
||||
|
||||
let(:squash_path) { File.join(repo_path, "gitlab-worktree", "squash-#{subject.id}") }
|
||||
|
||||
before do
|
||||
system(*%W(#{Gitlab.config.git.bin_path} -C #{repo_path} worktree add --detach #{squash_path} master))
|
||||
end
|
||||
|
||||
it 'returns true when there is a current squash directory' do
|
||||
expect(subject.squash_in_progress?).to be_truthy
|
||||
end
|
||||
|
||||
it 'returns false when there is no squash directory' do
|
||||
FileUtils.rm_rf(squash_path)
|
||||
|
||||
expect(subject.squash_in_progress?).to be_falsey
|
||||
end
|
||||
|
||||
it 'returns false when the squash directory has expired' do
|
||||
time = 20.minutes.ago.to_time
|
||||
File.utime(time, time, squash_path)
|
||||
|
||||
expect(subject.squash_in_progress?).to be_falsey
|
||||
end
|
||||
|
||||
it 'returns false when the source project has been removed' do
|
||||
allow(subject).to receive(:source_project).and_return(nil)
|
||||
|
||||
expect(subject.squash_in_progress?).to be_falsey
|
||||
end
|
||||
end
|
||||
|
||||
describe '#squash?' do
|
||||
let(:merge_request) { build(:merge_request, squash: squash) }
|
||||
|
||||
|
|
|
@ -7135,15 +7135,96 @@ RSpec.describe Project, factory_default: :keep do
|
|||
end
|
||||
|
||||
describe 'topics' do
|
||||
let_it_be(:project) { create(:project, topic_list: 'topic1, topic2, topic3') }
|
||||
let_it_be(:project) { create(:project, name: 'topic-project', topic_list: 'topic1, topic2, topic3') }
|
||||
|
||||
it 'topic_list returns correct string array' do
|
||||
expect(project.topic_list).to match_array(%w[topic1 topic2 topic3])
|
||||
expect(project.topic_list).to eq(%w[topic1 topic2 topic3])
|
||||
end
|
||||
|
||||
it 'topics returns correct tag records' do
|
||||
expect(project.topics.first.class.name).to eq('ActsAsTaggableOn::Tag')
|
||||
expect(project.topics.map(&:name)).to match_array(%w[topic1 topic2 topic3])
|
||||
it 'topics returns correct topic records' do
|
||||
expect(project.topics.first.class.name).to eq('Projects::Topic')
|
||||
expect(project.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
|
||||
end
|
||||
|
||||
context 'topic_list=' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:topic_list, :expected_result) do
|
||||
['topicA', 'topicB'] | %w[topicA topicB] # rubocop:disable Style/WordArray, Lint/BinaryOperatorWithIdenticalOperands
|
||||
['topicB', 'topicA'] | %w[topicB topicA] # rubocop:disable Style/WordArray, Lint/BinaryOperatorWithIdenticalOperands
|
||||
[' topicC ', ' topicD '] | %w[topicC topicD]
|
||||
['topicE', 'topicF', 'topicE'] | %w[topicE topicF] # rubocop:disable Style/WordArray
|
||||
['topicE ', 'topicF', ' topicE'] | %w[topicE topicF]
|
||||
'topicA, topicB' | %w[topicA topicB]
|
||||
'topicB, topicA' | %w[topicB topicA]
|
||||
' topicC , topicD ' | %w[topicC topicD]
|
||||
'topicE, topicF, topicE' | %w[topicE topicF]
|
||||
'topicE , topicF, topicE' | %w[topicE topicF]
|
||||
end
|
||||
|
||||
with_them do
|
||||
it 'set topics' do
|
||||
project.topic_list = topic_list
|
||||
project.save!
|
||||
|
||||
expect(project.topics.map(&:name)).to eq(expected_result)
|
||||
end
|
||||
end
|
||||
|
||||
it 'set topics if only the order is changed' do
|
||||
project.topic_list = 'topicA, topicB'
|
||||
project.save!
|
||||
|
||||
expect(project.reload.topics.map(&:name)).to eq(%w[topicA topicB])
|
||||
|
||||
project.topic_list = 'topicB, topicA'
|
||||
project.save!
|
||||
|
||||
expect(project.reload.topics.map(&:name)).to eq(%w[topicB topicA])
|
||||
end
|
||||
|
||||
it 'does not persist topics before project is saved' do
|
||||
project.topic_list = 'topicA, topicB'
|
||||
|
||||
expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
|
||||
end
|
||||
|
||||
it 'does not update topics if project is not valid' do
|
||||
project.name = nil
|
||||
project.topic_list = 'topicA, topicB'
|
||||
|
||||
expect(project.save).to be_falsy
|
||||
expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
|
||||
end
|
||||
end
|
||||
|
||||
context 'during ExtractProjectTopicsIntoSeparateTable migration' do
|
||||
before do
|
||||
topic_a = ActsAsTaggableOn::Tag.find_or_create_by!(name: 'topicA')
|
||||
topic_b = ActsAsTaggableOn::Tag.find_or_create_by!(name: 'topicB')
|
||||
|
||||
project.reload.topics_acts_as_taggable = [topic_a, topic_b]
|
||||
project.save!
|
||||
project.reload
|
||||
end
|
||||
|
||||
it 'topic_list returns correct string array' do
|
||||
expect(project.topic_list).to eq(%w[topicA topicB topic1 topic2 topic3])
|
||||
end
|
||||
|
||||
it 'topics returns correct topic records' do
|
||||
expect(project.topics.map(&:class)).to eq([ActsAsTaggableOn::Tag, ActsAsTaggableOn::Tag, Projects::Topic, Projects::Topic, Projects::Topic])
|
||||
expect(project.topics.map(&:name)).to eq(%w[topicA topicB topic1 topic2 topic3])
|
||||
end
|
||||
|
||||
it 'topic_list= sets new topics and removes old topics' do
|
||||
project.topic_list = 'new-topic1, new-topic2'
|
||||
project.save!
|
||||
project.reload
|
||||
|
||||
expect(project.topics.map(&:class)).to eq([Projects::Topic, Projects::Topic])
|
||||
expect(project.topics.map(&:name)).to eq(%w[new-topic1 new-topic2])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
spec/models/projects/project_topic_spec.rb (new file, +16 lines)
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Projects::ProjectTopic do
|
||||
let_it_be(:project_topic, reload: true) { create(:project_topic) }
|
||||
|
||||
subject { project_topic }
|
||||
|
||||
it { expect(subject).to be_valid }
|
||||
|
||||
describe 'associations' do
|
||||
it { is_expected.to belong_to(:project) }
|
||||
it { is_expected.to belong_to(:topic) }
|
||||
end
|
||||
end
|
spec/models/projects/topic_spec.rb (new file, +22 lines)
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Projects::Topic do
|
||||
let_it_be(:topic, reload: true) { create(:topic) }
|
||||
|
||||
subject { topic }
|
||||
|
||||
it { expect(subject).to be_valid }
|
||||
|
||||
describe 'associations' do
|
||||
it { is_expected.to have_many(:project_topics) }
|
||||
it { is_expected.to have_many(:projects) }
|
||||
end
|
||||
|
||||
describe 'validations' do
|
||||
it { is_expected.to validate_presence_of(:name) }
|
||||
it { is_expected.to validate_uniqueness_of(:name) }
|
||||
it { is_expected.to validate_length_of(:name).is_at_most(255) }
|
||||
end
|
||||
end
|
|
@ -0,0 +1,25 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'fast_spec_helper'
|
||||
require_relative '../../../../rubocop/cop/performance/active_record_subtransaction_methods'
|
||||
|
||||
RSpec.describe RuboCop::Cop::Performance::ActiveRecordSubtransactionMethods do
|
||||
subject(:cop) { described_class.new }
|
||||
|
||||
let(:message) { described_class::MSG }
|
||||
|
||||
shared_examples 'a method that uses a subtransaction' do |method_name|
|
||||
it 'registers an offense' do
|
||||
expect_offense(<<~RUBY)
|
||||
Project.#{method_name}
|
||||
#{'^' * method_name.length} #{message}
|
||||
RUBY
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the method uses a subtransaction' do
|
||||
described_class::DISALLOWED_METHODS.each do |method|
|
||||
it_behaves_like 'a method that uses a subtransaction', method
|
||||
end
|
||||
end
|
||||
end
|
|
@ -402,21 +402,6 @@ RSpec.describe MergeRequests::MergeService do
|
|||
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
|
||||
end
|
||||
|
||||
it 'logs and saves error if there is a squash in progress' do
|
||||
error_message = 'another squash is already in progress'
|
||||
|
||||
allow_any_instance_of(MergeRequest).to receive(:squash_in_progress?).and_return(true)
|
||||
merge_request.update!(squash: true)
|
||||
|
||||
service.execute(merge_request)
|
||||
|
||||
expect(merge_request).to be_open
|
||||
expect(merge_request.merge_commit_sha).to be_nil
|
||||
expect(merge_request.squash_commit_sha).to be_nil
|
||||
expect(merge_request.merge_error).to include(error_message)
|
||||
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
|
||||
end
|
||||
|
||||
it 'logs and saves error if there is an PreReceiveError exception' do
|
||||
error_message = 'error message'
|
||||
|
||||
|
|
|
@ -194,23 +194,6 @@ RSpec.describe MergeRequests::SquashService do
|
|||
expect(service.execute).to match(status: :error, message: a_string_including('squash'))
|
||||
end
|
||||
end
|
||||
|
||||
context 'with an error in squash in progress check' do
|
||||
before do
|
||||
allow(repository).to receive(:squash_in_progress?)
|
||||
.and_raise(Gitlab::Git::Repository::GitError, error)
|
||||
end
|
||||
|
||||
it 'logs the stage and output' do
|
||||
expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to check squash in progress')
|
||||
|
||||
service.execute
|
||||
end
|
||||
|
||||
it 'returns an error' do
|
||||
expect(service.execute).to match(status: :error, message: 'An error occurred while checking whether another squash is in progress.')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when any other exception is thrown' do
|
||||
|
|
|
@ -86,7 +86,7 @@ RSpec.describe Projects::CreateService, '#execute' do
|
|||
subject(:project) { create_project(user, opts) }
|
||||
|
||||
context "with 'topics' parameter" do
|
||||
let(:opts) { { topics: 'topics' } }
|
||||
let(:opts) { { name: 'topic-project', topics: 'topics' } }
|
||||
|
||||
it 'keeps them as specified' do
|
||||
expect(project.topic_list).to eq(%w[topics])
|
||||
|
@ -94,7 +94,7 @@ RSpec.describe Projects::CreateService, '#execute' do
|
|||
end
|
||||
|
||||
context "with 'topic_list' parameter" do
|
||||
let(:opts) { { topic_list: 'topic_list' } }
|
||||
let(:opts) { { name: 'topic-project', topic_list: 'topic_list' } }
|
||||
|
||||
it 'keeps them as specified' do
|
||||
expect(project.topic_list).to eq(%w[topic_list])
|
||||
|
@ -102,7 +102,7 @@ RSpec.describe Projects::CreateService, '#execute' do
|
|||
end
|
||||
|
||||
context "with 'tag_list' parameter (deprecated)" do
|
||||
let(:opts) { { tag_list: 'tag_list' } }
|
||||
let(:opts) { { name: 'topic-project', tag_list: 'tag_list' } }
|
||||
|
||||
it 'keeps them as specified' do
|
||||
expect(project.topic_list).to eq(%w[tag_list])
|
||||
|
|
|
@ -13,27 +13,6 @@ RSpec.describe ExpireJobCacheWorker do
|
|||
|
||||
let(:job_args) { job.id }
|
||||
|
||||
include_examples 'an idempotent worker' do
|
||||
it 'invalidates Etag caching for the job path' do
|
||||
job_path = "/#{project.full_path}/builds/#{job.id}.json"
|
||||
|
||||
spy_store = Gitlab::EtagCaching::Store.new
|
||||
|
||||
allow(Gitlab::EtagCaching::Store).to receive(:new) { spy_store }
|
||||
|
||||
expect(spy_store).to receive(:touch)
|
||||
.exactly(worker_exec_times).times
|
||||
.with(job_path)
|
||||
.and_call_original
|
||||
|
||||
expect(ExpirePipelineCacheWorker).to receive(:perform_async)
|
||||
.with(pipeline.id)
|
||||
.exactly(worker_exec_times).times
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
it 'does not perform extra queries', :aggregate_failures do
|
||||
worker = described_class.new
|
||||
recorder = ActiveRecord::QueryRecorder.new { worker.perform(job.id) }
|
||||
|
@ -51,6 +30,11 @@ RSpec.describe ExpireJobCacheWorker do
|
|||
expect(namespace_queries.size).to eq(0)
|
||||
expect(route_queries.size).to eq(0)
|
||||
end
|
||||
|
||||
it_behaves_like 'worker with data consistency',
|
||||
described_class,
|
||||
feature_flag: :load_balancing_for_expire_job_cache_worker,
|
||||
data_consistency: :delayed
|
||||
end
|
||||
|
||||
context 'when there is no job in the pipeline' do
|
||||
|
|