Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-11-26 12:09:48 +00:00
parent bbede1e22c
commit 142890d5bb
111 changed files with 1276 additions and 1712 deletions

View File

@ -1 +1 @@
8.55.0
8.56.0

View File

@ -209,7 +209,7 @@ gem 'httparty', '~> 0.16.4'
gem 'rainbow', '~> 3.0'
# Progress bar
gem 'ruby-progressbar'
gem 'ruby-progressbar', '~> 1.10'
# GitLab settings
gem 'settingslogic', '~> 2.0.9'

View File

@ -1480,7 +1480,7 @@ DEPENDENCIES
rspec_profiling (~> 0.0.6)
ruby-fogbugz (~> 0.2.1)
ruby-prof (~> 1.3.0)
ruby-progressbar
ruby-progressbar (~> 1.10)
ruby_parser (~> 3.15)
rubyzip (~> 2.0.0)
rugged (~> 0.28)

View File

@ -169,12 +169,16 @@ export default {
:a-mode="diffFile.a_mode"
:b-mode="diffFile.b_mode"
>
<image-diff-overlay
slot="image-overlay"
:discussions="imageDiscussions"
:file-hash="diffFileHash"
:can-comment="getNoteableData.current_user.can_create_note && !diffFile.brokenSymlink"
/>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<image-diff-overlay
v-if="renderedWidth"
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
:discussions="imageDiscussions"
:file-hash="diffFileHash"
:can-comment="getNoteableData.current_user.can_create_note && !diffFile.brokenSymlink"
/>
</template>
<div v-if="showNotesContainer" class="note-container">
<user-avatar-link
v-if="diffFileCommentForm && author"

View File

@ -4,6 +4,10 @@ import { isArray } from 'lodash';
import imageDiffMixin from 'ee_else_ce/diffs/mixins/image_diff';
import { GlIcon } from '@gitlab/ui';
function calcPercent(pos, size, renderedSize) {
return (((pos / size) * 100) / ((renderedSize / size) * 100)) * 100;
}
export default {
name: 'ImageDiffOverlay',
components: {
@ -39,6 +43,14 @@ export default {
required: false,
default: true,
},
renderedWidth: {
type: Number,
required: true,
},
renderedHeight: {
type: Number,
required: true,
},
},
computed: {
...mapGetters('diffs', ['getDiffFileByHash', 'getCommentFormForDiffFile']),
@ -59,33 +71,33 @@ export default {
},
getPositionForObject(meta) {
const { x, y, width, height } = meta;
const imageWidth = this.getImageDimensions().width;
const imageHeight = this.getImageDimensions().height;
const widthRatio = imageWidth / width;
const heightRatio = imageHeight / height;
return {
x: Math.round(x * widthRatio),
y: Math.round(y * heightRatio),
x: (x / width) * 100,
y: (y / height) * 100,
};
},
getPosition(discussion) {
const { x, y } = this.getPositionForObject(discussion.position);
return {
left: `${x}px`,
top: `${y}px`,
left: `${x}%`,
top: `${y}%`,
};
},
clickedImage(x, y) {
const { width, height } = this.getImageDimensions();
const xPercent = calcPercent(x, width, this.renderedWidth);
const yPercent = calcPercent(y, height, this.renderedHeight);
this.openDiffFileCommentForm({
fileHash: this.fileHash,
width,
height,
x,
y,
x: width * (xPercent / 100),
y: height * (yPercent / 100),
xPercent,
yPercent,
});
},
},
@ -112,22 +124,19 @@ export default {
type="button"
@click="clickedToggle(discussion)"
>
<gl-icon v-if="showCommentIcon" name="image-comment-dark" />
<gl-icon v-if="showCommentIcon" name="image-comment-dark" :size="24" />
<template v-else>
{{ toggleText(discussion, index) }}
</template>
</button>
<button
v-if="currentCommentForm"
:style="{
left: `${currentCommentForm.x}px`,
top: `${currentCommentForm.y}px`,
}"
v-if="canComment && currentCommentForm"
:style="{ left: `${currentCommentForm.xPercent}%`, top: `${currentCommentForm.yPercent}%` }"
:aria-label="__('Comment form position')"
class="btn-transparent comment-indicator"
class="btn-transparent comment-indicator position-absolute"
type="button"
>
<gl-icon name="image-comment-dark" />
<gl-icon name="image-comment-dark" :size="24" />
</button>
</div>
</template>
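This hunk is the heart of the image-diff positioning fix: comment badges and the pending-comment indicator used to be placed with absolute pixel offsets computed from the image's natural size, so they drifted whenever the browser rendered the image at a different size. The overlay now receives the rendered dimensions from the viewer and stores and renders positions as percentages. A rough worked example of the round trip (Ruby arithmetic, hypothetical numbers):

```ruby
# Hypothetical numbers: a 1000px-wide image rendered at 500px, clicked at x = 200.
natural_width  = 1000.0 # contentImg.naturalWidth
rendered_width = 500.0  # contentImg.clientWidth, now exposed as renderedWidth
click_x        = 200.0  # click position, in rendered pixels

# calcPercent(pos, size, renderedSize) reduces algebraically to pos / renderedSize * 100.
x_percent = (((click_x / natural_width) * 100) / ((rendered_width / natural_width) * 100)) * 100
# => 40.0

# clickedImage stores the natural-pixel coordinate alongside the percentage ...
x_natural = natural_width * (x_percent / 100.0)
# => 400.0

# ... and getPositionForObject later converts stored natural-pixel coordinates back
# into percentages, so the badge is positioned with `left: 40%` at any rendered size.
(x_natural / natural_width) * 100
# => 40.0
```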

View File

@ -1,149 +0,0 @@
import $ from 'jquery';
import { escape } from 'lodash';
import { __, sprintf } from './locale';
import axios from './lib/utils/axios_utils';
import { deprecatedCreateFlash as flash } from './flash';
import { parseBoolean, spriteIcon } from './lib/utils/common_utils';
class ImporterStatus {
constructor({ jobsUrl, importUrl, ciCdOnly }) {
this.jobsUrl = jobsUrl;
this.importUrl = importUrl;
this.ciCdOnly = ciCdOnly;
this.initStatusPage();
this.setAutoUpdate();
}
initStatusPage() {
$('.js-add-to-import')
.off('click')
.on('click', this.addToImport.bind(this));
$('.js-import-all')
.off('click')
.on('click', function onClickImportAll() {
const $btn = $(this);
$btn.disable().addClass('is-loading');
return $('.js-add-to-import').each(function triggerAddImport() {
return $(this).trigger('click');
});
});
}
addToImport(event) {
const $btn = $(event.currentTarget);
const $tr = $btn.closest('tr');
const $targetField = $tr.find('.import-target');
const $namespaceInput = $targetField.find('.js-select-namespace option:selected');
const repoData = $tr.data();
const id = repoData.id || $tr.attr('id').replace('repo_', '');
let targetNamespace;
let newName;
if ($namespaceInput.length > 0) {
targetNamespace = $namespaceInput[0].innerHTML;
newName = $targetField.find('#path').prop('value');
$targetField.empty().append(`${targetNamespace}/${newName}`);
}
$btn.disable().addClass('is-loading');
this.id = id;
let attributes = {
repo_id: id,
target_namespace: targetNamespace,
new_name: newName,
ci_cd_only: this.ciCdOnly,
};
if (repoData) {
attributes = Object.assign(repoData, attributes);
}
return axios
.post(this.importUrl, attributes)
.then(({ data }) => {
const job = $tr;
job.attr('id', `project_${data.id}`);
job.find('.import-target').html(`<a href="${data.full_path}">${data.full_path}</a>`);
$('table.import-jobs tbody').prepend(job);
job.addClass('table-active');
const connectingVerb = this.ciCdOnly ? __('connecting') : __('importing');
job.find('.import-actions').html(
sprintf(
escape(__('%{loadingIcon} Started')),
{
loadingIcon: `<i class="fa fa-spinner fa-spin" aria-label="${escape(
connectingVerb,
)}"></i>`,
},
false,
),
);
})
.catch(error => {
let details = error;
const $statusField = $tr.find('.job-status');
$statusField.text(__('Failed'));
if (error.response && error.response.data && error.response.data.errors) {
details = error.response.data.errors;
}
flash(sprintf(__('An error occurred while importing project: %{details}'), { details }));
});
}
autoUpdate() {
return axios.get(this.jobsUrl).then(({ data = [] }) => {
data.forEach(job => {
const jobItem = $(`#project_${job.id}`);
const statusField = jobItem.find('.job-status');
const spinner = '<i class="fa fa-spinner fa-spin"></i>';
switch (job.import_status) {
case 'finished':
jobItem.removeClass('table-active').addClass('table-success');
statusField.html(`<span>${spriteIcon('check', 's16')} ${__('Done')}</span>`);
break;
case 'scheduled':
statusField.html(`${spinner} ${__('Scheduled')}`);
break;
case 'started':
statusField.html(`${spinner} ${__('Started')}`);
break;
case 'failed':
statusField.html(__('Failed'));
break;
default:
statusField.html(job.import_status);
break;
}
});
});
}
setAutoUpdate() {
setInterval(this.autoUpdate.bind(this), 4000);
}
}
// eslint-disable-next-line consistent-return
function initImporterStatus() {
const importerStatus = document.querySelector('.js-importer-status');
if (importerStatus) {
const data = importerStatus.dataset;
return new ImporterStatus({
jobsUrl: data.jobsImportPath,
importUrl: data.importPath,
ciCdOnly: parseBoolean(data.ciCdOnly),
});
}
}
export { initImporterStatus as default, ImporterStatus };

View File

@ -23,7 +23,6 @@ import { getLocationHash, visitUrl } from './lib/utils/url_utility';
// everything else
import { deprecatedCreateFlash as Flash, removeFlashClickListener } from './flash';
import initTodoToggle from './header';
import initImporterStatus from './importer_status';
import initLayoutNav from './layout_nav';
import initAlertHandler from './alert_handler';
import './feature_highlight/feature_highlight_options';
@ -107,7 +106,6 @@ function deferredInitialisation() {
const $body = $('body');
initBreadcrumbs();
initImporterStatus();
initTodoToggle();
initLogoAnimation();
initUsagePingConsent();

View File

@ -131,14 +131,18 @@ export default {
:file-hash="discussion.diff_file.file_hash"
:project-path="projectPath"
>
<image-diff-overlay
slot="image-overlay"
:discussions="discussion"
:file-hash="discussion.diff_file.file_hash"
:show-comment-icon="true"
:should-toggle-discussion="false"
badge-class="image-comment-badge"
/>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<image-diff-overlay
v-if="renderedWidth"
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
:discussions="discussion"
:file-hash="discussion.diff_file.file_hash"
:show-comment-icon="true"
:should-toggle-discussion="false"
badge-class="image-comment-badge gl-text-gray-500"
/>
</template>
</diff-viewer>
<slot></slot>
</div>

View File

@ -2,6 +2,9 @@
import { GlIcon, GlTooltipDirective } from '@gitlab/ui';
import { sprintf, s__ } from '~/locale';
import statusIcon from '../mr_widget_status_icon.vue';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import mergeRequestQueryVariablesMixin from '../../mixins/merge_request_query_variables';
import missingBranchQuery from '../../queries/states/missing_branch.query.graphql';
export default {
name: 'MRWidgetMissingBranch',
@ -12,15 +15,38 @@ export default {
GlIcon,
statusIcon,
},
mixins: [glFeatureFlagMixin(), mergeRequestQueryVariablesMixin],
apollo: {
state: {
query: missingBranchQuery,
skip() {
return !this.glFeatures.mergeRequestWidgetGraphql;
},
variables() {
return this.mergeRequestQueryVariables;
},
update: data => data.project.mergeRequest,
},
},
props: {
mr: {
type: Object,
required: true,
},
},
data() {
return { state: {} };
},
computed: {
sourceBranchRemoved() {
if (this.glFeatures.mergeRequestWidgetGraphql) {
return !this.state.sourceBranchExists;
}
return this.mr.sourceBranchRemoved;
},
missingBranchName() {
return this.mr.sourceBranchRemoved ? 'source' : 'target';
return this.sourceBranchRemoved ? 'source' : 'target';
},
missingBranchNameMessage() {
return sprintf(
@ -49,7 +75,7 @@ export default {
<div class="media-body space-children">
<span class="bold js-branch-text">
<span class="capitalize"> {{ missingBranchName }} </span>
<span class="capitalize" data-testid="missingBranchName"> {{ missingBranchName }} </span>
{{ s__('mrWidget|branch does not exist.') }} {{ missingBranchNameMessage }}
<gl-icon v-gl-tooltip :title="message" :aria-label="message" name="question-o" />
</span>
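The widget only issues the Apollo query when the `mergeRequestWidgetGraphql` frontend feature flag is enabled, and otherwise keeps reading `mr.sourceBranchRemoved` from the old store. A minimal sketch for toggling the behaviour from a Rails console, assuming the backend flag uses the usual snake_case counterpart `merge_request_widget_graphql`:

```ruby
# Assumed flag name; toggling it switches the widget between the GraphQL-backed
# `state.sourceBranchExists` and the legacy `mr.sourceBranchRemoved` code path.
Feature.enable(:merge_request_widget_graphql)
Feature.disable(:merge_request_widget_graphql)
```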

View File

@ -0,0 +1,7 @@
query missingBranchQuery($projectPath: ID!, $iid: String!) {
project(fullPath: $projectPath) {
mergeRequest(iid: $iid) {
sourceBranchExists
}
}
}

View File

@ -28,6 +28,8 @@ export default {
return {
width: 0,
height: 0,
renderedWidth: 0,
renderedHeight: 0,
};
},
computed: {
@ -63,11 +65,14 @@ export default {
this.height = contentImg.naturalHeight;
this.$nextTick(() => {
this.renderedWidth = contentImg.clientWidth;
this.renderedHeight = contentImg.clientHeight;
this.$emit('imgLoaded', {
width: this.width,
height: this.height,
renderedWidth: contentImg.clientWidth,
renderedHeight: contentImg.clientHeight,
renderedWidth: this.renderedWidth,
renderedHeight: this.renderedHeight,
});
});
}
@ -79,7 +84,12 @@ export default {
<template>
<div data-testid="image-viewer">
<div :class="innerCssClasses" class="position-relative">
<img ref="contentImg" :src="path" @load="onImgLoad" /> <slot name="image-overlay"></slot>
<img ref="contentImg" :src="path" @load="onImgLoad" />
<slot
name="image-overlay"
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
></slot>
</div>
<p v-if="renderInfo" class="image-info">
<template v-if="hasFileSize">

View File

@ -106,7 +106,13 @@ export default {
:a-mode="aMode"
:b-mode="bMode"
>
<slot slot="image-overlay" name="image-overlay"></slot>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</component>
<slot></slot>
</div>

View File

@ -141,7 +141,13 @@ export default {
:path="newPath"
@imgLoaded="onionNewImgLoaded"
>
<slot slot="image-overlay" name="image-overlay"> </slot>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</image-viewer>
</div>
<div class="controls">

View File

@ -143,7 +143,13 @@ export default {
class="frame added"
@imgLoaded="swipeNewImgLoaded"
>
<slot slot="image-overlay" name="image-overlay"> </slot>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</image-viewer>
</div>
<span

View File

@ -44,7 +44,13 @@ export default {
:inner-css-classes="['frame', 'added']"
class="wrap w-50"
>
<slot slot="image-overlay" name="image-overlay"> </slot>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</image-viewer>
</div>
</template>

View File

@ -76,7 +76,13 @@ export default {
<div v-if="diffMode === $options.diffModes.replaced" class="diff-viewer">
<div class="image js-replaced-image">
<component :is="imageViewComponent" v-bind="$props">
<slot slot="image-overlay" name="image-overlay"> </slot>
<template #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</component>
</div>
<div class="view-modes">
@ -121,7 +127,13 @@ export default {
},
]"
>
<slot v-if="isNew || isRenamed" slot="image-overlay" name="image-overlay"> </slot>
<template v-if="isNew || isRenamed" #image-overlay="{ renderedWidth, renderedHeight }">
<slot
:rendered-width="renderedWidth"
:rendered-height="renderedHeight"
name="image-overlay"
></slot>
</template>
</image-viewer>
</div>
</div>

View File

@ -39,15 +39,3 @@
.import-projects-loading-icon {
margin-top: $gl-padding-32;
}
.btn-import {
.loading-icon {
display: none;
}
&.is-loading {
.loading-icon {
display: inline-block;
}
}
}

View File

@ -61,8 +61,7 @@ class ApplicationController < ActionController::Base
:gitea_import_enabled?, :github_import_configured?,
:gitlab_import_enabled?, :gitlab_import_configured?,
:bitbucket_import_enabled?, :bitbucket_import_configured?,
:bitbucket_server_import_enabled?,
:google_code_import_enabled?, :fogbugz_import_enabled?,
:bitbucket_server_import_enabled?, :fogbugz_import_enabled?,
:git_import_enabled?, :gitlab_project_import_enabled?,
:manifest_import_enabled?, :phabricator_import_enabled?
@ -434,10 +433,6 @@ class ApplicationController < ActionController::Base
Gitlab::Auth::OAuth::Provider.enabled?(:bitbucket)
end
def google_code_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('google_code')
end
def fogbugz_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('fogbugz')
end

View File

@ -1,123 +0,0 @@
# frozen_string_literal: true
class Import::GoogleCodeController < Import::BaseController
before_action :verify_google_code_import_enabled
before_action :user_map, only: [:new_user_map, :create_user_map]
def new
end
def callback
dump_file = params[:dump_file]
unless dump_file.respond_to?(:read)
return redirect_back_or_default(options: { alert: _("You need to upload a Google Takeout archive.") })
end
begin
dump = Gitlab::Json.parse(dump_file.read)
rescue
return redirect_back_or_default(options: { alert: _("The uploaded file is not a valid Google Takeout archive.") })
end
client = Gitlab::GoogleCodeImport::Client.new(dump)
unless client.valid?
return redirect_back_or_default(options: { alert: _("The uploaded file is not a valid Google Takeout archive.") })
end
session[:google_code_dump] = dump
if params[:create_user_map] == "1"
redirect_to new_user_map_import_google_code_path
else
redirect_to status_import_google_code_path
end
end
def new_user_map
end
def create_user_map
user_map_json = params[:user_map]
user_map_json = "{}" if user_map_json.blank?
begin
user_map = Gitlab::Json.parse(user_map_json)
rescue
flash.now[:alert] = _("The entered user map is not a valid JSON user map.")
return render "new_user_map"
end
unless user_map.is_a?(Hash) && user_map.all? { |k, v| k.is_a?(String) && v.is_a?(String) }
flash.now[:alert] = _("The entered user map is not a valid JSON user map.")
return render "new_user_map"
end
# This is the default, so let's not save it into the database.
user_map.reject! do |key, value|
value == Gitlab::GoogleCodeImport::Client.mask_email(key)
end
session[:google_code_user_map] = user_map
flash[:notice] = _("The user map has been saved. Continue by selecting the projects you want to import.")
redirect_to status_import_google_code_path
end
# rubocop: disable CodeReuse/ActiveRecord
def status
unless client.valid?
return redirect_to new_import_google_code_path
end
@repos = client.repos
@incompatible_repos = client.incompatible_repos
@already_added_projects = find_already_added_projects('google_code')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.reject! { |repo| already_added_projects_names.include? repo.name }
end
# rubocop: enable CodeReuse/ActiveRecord
def jobs
render json: find_jobs('google_code')
end
def create
repo = client.repo(params[:repo_id])
user_map = session[:google_code_user_map]
project = Gitlab::GoogleCodeImport::ProjectCreator.new(repo, current_user.namespace, current_user, user_map).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project)
else
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
end
private
def client
@client ||= Gitlab::GoogleCodeImport::Client.new(session[:google_code_dump])
end
def verify_google_code_import_enabled
render_404 unless google_code_import_enabled?
end
def user_map
@user_map ||= begin
user_map = client.user_map
stored_user_map = session[:google_code_user_map]
user_map.update(stored_user_map) if stored_user_map
Hash[user_map.sort]
end
end
end

View File

@ -178,6 +178,15 @@ module UsersHelper
header + list
end
def user_display_name(user)
return s_('UserProfile|Blocked user') if user.blocked?
can_read_profile = can?(user, :read_user_profile, current_user)
return s_('UserProfile|Unconfirmed user') unless user.confirmed? || can_read_profile
user.name
end
private
def blocked_user_badge(user)

View File

@ -9,7 +9,7 @@ module Ci
end
def all
(local + cross_pipeline).uniq
(local + cross_project).uniq
end
# Dependencies local to the given pipeline
@ -23,8 +23,8 @@ module Ci
deps
end
# Dependencies that are defined in other pipelines
def cross_pipeline
# Dependencies that are defined by project and ref
def cross_project
[]
end
@ -33,7 +33,7 @@ module Ci
end
def valid?
valid_local? && valid_cross_pipeline?
valid_local? && valid_cross_project?
end
private
@ -50,7 +50,7 @@ module Ci
local.all?(&:valid_dependency?)
end
def valid_cross_pipeline?
def valid_cross_project?
true
end

View File

@ -8,6 +8,7 @@ module Shardable
scope :for_repository_storage, -> (repository_storage) { joins(:shard).where(shards: { name: repository_storage }) }
scope :excluding_repository_storage, -> (repository_storage) { joins(:shard).where.not(shards: { name: repository_storage }) }
scope :for_shard, -> (shard) { where(shard_id: shard) }
validates :shard, presence: true
end

View File

@ -2099,10 +2099,10 @@ class Project < ApplicationRecord
# already in that state.
#
# @return nil. Failures will raise an exception
def set_repository_read_only!
def set_repository_read_only!(skip_git_transfer_check: false)
with_lock do
raise RepositoryReadOnlyError, _('Git transfer in progress') if
git_transfer_in_progress?
!skip_git_transfer_check && git_transfer_in_progress?
raise RepositoryReadOnlyError, _('Repository already read-only') if
self.class.where(id: id).pick(:repository_read_only)
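The new keyword argument exists so the repository-storage-move flow (next file) can flag a project read-only even though the move itself already counts as a Git transfer in progress. A minimal usage sketch:

```ruby
# Default behaviour: refuse while any Git transfer is running.
project.set_repository_read_only!
# raises RepositoryReadOnlyError, "Git transfer in progress", if a transfer is underway

# The storage-move transition opts out of that check, since the move is the transfer:
project.set_repository_read_only!(skip_git_transfer_check: true)
```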

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
class ProjectRepository < ApplicationRecord
include EachBatch
include Shardable
belongs_to :project, inverse_of: :project_repository

View File

@ -50,7 +50,7 @@ class ProjectRepositoryStorageMove < ApplicationRecord
block.call
begin
storage_move.project.set_repository_read_only!
storage_move.project.set_repository_read_only!(skip_git_transfer_check: true)
rescue => err
errors.add(:project, err.message)
next false

View File

@ -13,6 +13,9 @@ class UserPolicy < BasePolicy
desc "The user is blocked"
condition(:blocked_user, scope: :subject, score: 0) { @subject.blocked? }
desc "The user is unconfirmed"
condition(:unconfirmed_user, scope: :subject, score: 0) { !@subject.confirmed? }
rule { ~restricted_public_level }.enable :read_user
rule { ~anonymous }.enable :read_user
@ -25,7 +28,7 @@ class UserPolicy < BasePolicy
end
rule { default }.enable :read_user_profile
rule { (private_profile | blocked_user) & ~(user_is_self | admin) }.prevent :read_user_profile
rule { (private_profile | blocked_user | unconfirmed_user) & ~(user_is_self | admin) }.prevent :read_user_profile
rule { user_is_self | admin }.enable :disable_two_factor
rule { (user_is_self | admin) & ~blocked }.enable :create_user_personal_access_token
end
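With the added condition, an unconfirmed user's profile is treated like a blocked or private one: only the user themselves and admins may read it. An illustrative sketch through the standard policy check (the records named here are hypothetical):

```ruby
# Illustrative only; `unconfirmed_user`, `other_user` and `admin` are made-up records.
Ability.allowed?(other_user, :read_user_profile, unconfirmed_user)       # => false
Ability.allowed?(unconfirmed_user, :read_user_profile, unconfirmed_user) # => true (user_is_self)
Ability.allowed?(admin, :read_user_profile, unconfirmed_user)            # => true (admin)
```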

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
module Projects
# Tries to schedule a move for every project with repositories on the source shard
class ScheduleBulkRepositoryShardMovesService
include BaseServiceUtility
def execute(source_storage_name, destination_storage_name = nil)
shard = Shard.find_by_name!(source_storage_name)
ProjectRepository.for_shard(shard).each_batch(column: :project_id) do |relation|
Project.id_in(relation.select(:project_id)).each do |project|
project.with_lock do
next if project.repository_storage != source_storage_name
storage_move = project.repository_storage_moves.build(
source_storage_name: source_storage_name,
destination_storage_name: destination_storage_name
)
unless storage_move.schedule
log_info("Project #{project.full_path} (#{project.id}) was skipped: #{storage_move.errors.full_messages.to_sentence}")
end
end
end
end
success
end
def self.enqueue(source_storage_name, destination_storage_name = nil)
::ProjectScheduleBulkRepositoryShardMovesWorker.perform_async(source_storage_name, destination_storage_name)
end
end
end
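The service batches over `ProjectRepository.for_shard` and builds a `ProjectRepositoryStorageMove` for every project still on the source shard, logging and skipping any that fail to schedule. A minimal usage sketch, assuming shards named `default` and `praefect` are configured:

```ruby
# Run synchronously, e.g. from a Rails console; the destination is optional and
# chosen automatically when omitted.
Projects::ScheduleBulkRepositoryShardMovesService.new.execute('default', 'praefect')
```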

View File

@ -1,63 +0,0 @@
- page_title _("Google Code import")
- header_title _("Projects"), root_path
%h3.page-title.gl-display-flex
.gl-display-flex.gl-align-items-center.gl-justify-content-center
= sprite_icon('google', css_class: 'gl-mr-2')
= _('Import projects from Google Code')
%hr
= form_tag callback_import_google_code_path, multipart: true do
%p
= _('Follow the steps below to export your Google Code project data.')
= _("In the next step, you'll be able to select the projects you want to import.")
%ol
%li
%p
- link_to_google_takeout = link_to(_("Google Takeout"), "https://www.google.com/settings/takeout", target: '_blank', rel: 'noopener noreferrer')
= _("Go to %{link_to_google_takeout}.").html_safe % { link_to_google_takeout: link_to_google_takeout }
%li
%p
= _("Make sure you're logged into the account that owns the projects you'd like to import.")
%li
%p
= html_escape(_('Click the %{strong_open}Select none%{strong_close} button on the right, since we only need "Google Code Project Hosting".')) % { strong_open: '<strong>'.html_safe, strong_close: '</strong>'.html_safe }
%li
%p
= html_escape(_('Scroll down to %{strong_open}Google Code Project Hosting%{strong_close} and enable the switch on the right.')) % { strong_open: '<strong>'.html_safe, strong_close: '</strong>'.html_safe }
%li
%p
= html_escape(_('Choose %{strong_open}Next%{strong_close} at the bottom of the page.')) % { strong_open: '<strong>'.html_safe, strong_close: '</strong>'.html_safe }
%li
%p
= _('Leave the "File type" and "Delivery method" options on their default values.')
%li
%p
= html_escape(_('Choose %{strong_open}Create archive%{strong_close} and wait for archiving to complete.')) % { strong_open: '<strong>'.html_safe, strong_close: '</strong>'.html_safe }
%li
%p
= html_escape(_('Click the %{strong_open}Download%{strong_close} button and wait for downloading to complete.')) % { strong_open: '<strong>'.html_safe, strong_close: '</strong>'.html_safe }
%li
%p
= _('Find the downloaded ZIP file and decompress it.')
%li
%p
= html_escape(_('Find the newly extracted %{code_open}Takeout/Google Code Project Hosting/GoogleCodeProjectHosting.json%{code_close} file.')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe }
%li
%p
= html_escape(_('Upload %{code_open}GoogleCodeProjectHosting.json%{code_close} here:')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe }
%p
%input{ type: "file", name: "dump_file", id: "dump_file" }
%li
%p
= _('Do you want to customize how Google Code email addresses and usernames are imported into GitLab?')
%p
= label_tag :create_user_map_0 do
= radio_button_tag :create_user_map, 0, true
= _('No, directly import the existing email addresses and usernames.')
%p
= label_tag :create_user_map_1 do
= radio_button_tag :create_user_map, 1, false
= _('Yes, let me map Google Code users to full names or GitLab users.')
%span
= submit_tag _('Continue to the next step'), class: "btn btn-success"

View File

@ -1,37 +0,0 @@
- page_title _("User map"), _("Google Code import")
- header_title _("Projects"), root_path
%h3.page-title.gl-display-flex
.gl-display-flex.gl-align-items-center.gl-justify-content-center
= sprite_icon('google', css_class: 'gl-mr-2')
= _('Import projects from Google Code')
%hr
= form_tag create_user_map_import_google_code_path do
%p
= _("Customize how Google Code email addresses and usernames are imported into GitLab. In the next step, you'll be able to select the projects you want to import.")
%p
= html_escape(_("The user map is a JSON document mapping the Google Code users that participated on your projects to the way their email addresses and usernames will be imported into GitLab. You can change this by changing the value on the right hand side of %{code_open}:%{code_close}. Be sure to preserve the surrounding double quotes, other punctuation and the email address or username on the left hand side.")) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe }
%ul
%li
%strong= _("Default: Directly import the Google Code email address or username")
%p
= html_escape(_('%{code_open}"johnsmith@example.com": "johnsm...@example.com"%{code_close} will add "By johnsm...@example.com" to all issues and comments originally created by johnsmith@example.com. The email address or username is masked to ensure the user\'s privacy.')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe }
%li
%strong= _("Map a Google Code user to a GitLab user")
%p
= html_escape(_('%{code_open}"johnsmith@example.com": "@johnsmith"%{code_close} will add "By %{link_open}@johnsmith%{link_close}" to all issues and comments originally created by johnsmith@example.com, and will set %{link_open}@johnsmith%{link_close} as the assignee on all issues originally assigned to johnsmith@example.com.')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe, link_open: '<a href="#">'.html_safe, link_close: '</a>'.html_safe }
%li
%strong= _("Map a Google Code user to a full name")
%p
= html_escape(_('%{code_open}"johnsmith@example.com": "John Smith"%{code_close} will add "By John Smith" to all issues and comments originally created by johnsmith@example.com.')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe }
%li
%strong= _("Map a Google Code user to a full email address")
%p
= html_escape(_('%{code_open}"johnsmith@example.com": "johnsmith@example.com"%{code_close} will add "By %{link_open}johnsmith@example.com%{link_close}" to all issues and comments originally created by johnsmith@example.com. By default, the email address or username is masked to ensure the user\'s privacy. Use this option if you want to show the full email address.')) % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe, link_open: '<a href="#">'.html_safe, link_close: '</a>'.html_safe }
.form-group.row
.col-sm-12
= text_area_tag :user_map, Gitlab::Json.pretty_generate(@user_map), class: 'form-control', rows: 15
.form-actions
= submit_tag _('Continue to the next step'), class: "btn btn-success"

View File

@ -1,78 +0,0 @@
- page_title _("Google Code import")
- header_title _("Projects"), root_path
%h3.page-title.gl-display-flex
.gl-display-flex.gl-align-items-center.gl-justify-content-center
= sprite_icon('google', css_class: 'gl-mr-2')
= _('Import projects from Google Code')
- if @repos.any?
%p.light
= _('Select projects you want to import.')
%p.light
- link_to_customize = link_to(_("customize"), new_user_map_import_google_code_path)
= _("Optionally, you can %{link_to_customize} how Google Code email addresses and usernames are imported into GitLab.").html_safe % { link_to_customize: link_to_customize }
%hr
%p
- if @incompatible_repos.any?
= button_tag class: "btn btn-import btn-success js-import-all" do
= _("Import all compatible projects")
= loading_icon(css_class: 'loading-icon')
- else
= button_tag class: "btn btn-import btn-success js-import-all" do
= _("Import all projects")
= loading_icon(css_class: 'loading-icon')
.table-responsive
%table.table.import-jobs
%colgroup.import-jobs-from-col
%colgroup.import-jobs-to-col
%colgroup.import-jobs-status-col
%thead
%tr
%th= _("From Google Code")
%th= _("To GitLab")
%th= _("Status")
%tbody
- @already_added_projects.each do |project|
%tr{ id: "project_#{project.id}", class: "#{project_status_css_class(project.import_status)}" }
%td
= link_to project.import_source, "https://code.google.com/p/#{project.import_source}", target: "_blank", rel: 'noopener noreferrer'
%td
= link_to project.full_path, project
%td.job-status
- case project.import_status
- when 'finished'
%span
= sprite_icon('check')
= _("done")
- when 'started'
= loading_icon
= _("started")
- else
= project.human_import_status_name
- @repos.each do |repo|
%tr{ id: "repo_#{repo.id}" }
%td
= link_to repo.name, "https://code.google.com/p/#{repo.name}", target: "_blank", rel: 'noopener noreferrer'
%td.import-target
#{current_user.username}/#{repo.name}
%td.import-actions.job-status
= button_tag class: "btn btn-import js-add-to-import" do
= _("Import")
= loading_icon(css_class: 'loading-icon')
- @incompatible_repos.each do |repo|
%tr{ id: "repo_#{repo.id}" }
%td
= link_to repo.name, "https://code.google.com/p/#{repo.name}", target: "_blank", rel: 'noopener noreferrer'
%td.import-target
%td.import-actions-job-status
= label_tag _("Incompatible Project"), nil, class: "label badge-danger"
- if @incompatible_repos.any?
%p
= _("One or more of your Google Code projects cannot be imported into GitLab directly because they use Subversion or Mercurial for version control, rather than Git.")
- link_to_import_flow = link_to(_("import flow"), new_import_google_code_path)
= _("Please convert them to Git on Google Code, and go through the %{link_to_import_flow} again.").html_safe % { link_to_import_flow: link_to_import_flow }
.js-importer-status{ data: { jobs_import_path: "#{jobs_import_google_code_path}", import_path: "#{import_google_code_path}" } }

View File

@ -41,12 +41,6 @@
- unless gitlab_import_configured?
= render 'projects/gitlab_import_modal'
- if google_code_import_enabled?
%div
= link_to new_import_google_code_path, class: 'btn import_google_code', **tracking_attrs(track_label, 'click_button', 'google_code') do
= sprite_icon('google')
Google Code
- if fogbugz_import_enabled?
%div
= link_to new_import_fogbugz_path, class: 'btn import_fogbugz', **tracking_attrs(track_label, 'click_button', 'fogbugz') do

View File

@ -1,7 +1,7 @@
- @hide_top_links = true
- @hide_breadcrumbs = true
- @no_container = true
- page_title @user.blocked? ? s_('UserProfile|Blocked user') : @user.name
- page_title user_display_name(@user)
- page_description @user.bio_html
- header_title @user.name, user_path(@user)
- page_itemtype 'http://schema.org/Person'
@ -38,10 +38,10 @@
= link_to avatar_icon_for_user(@user, 400), target: '_blank', rel: 'noopener noreferrer' do
= image_tag avatar_icon_for_user(@user, 90), class: "avatar s90", alt: '', itemprop: 'image'
- if @user.blocked?
- if @user.blocked? || !@user.confirmed?
.user-info
.cover-title
= s_('UserProfile|Blocked user')
= user_display_name(@user)
= render "users/profile_basic_info"
- else
.user-info

View File

@ -1847,6 +1847,14 @@
:weight: 1
:idempotent:
:tags: []
- :name: project_schedule_bulk_repository_shard_moves
:feature_category: :gitaly
:has_external_dependencies:
:urgency: :throttled
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: project_service
:feature_category: :integrations
:has_external_dependencies: true

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class ProjectScheduleBulkRepositoryShardMovesWorker
include ApplicationWorker
idempotent!
feature_category :gitaly
urgency :throttled
def perform(source_storage_name, destination_storage_name = nil)
Projects::ScheduleBulkRepositoryShardMovesService.new.execute(source_storage_name, destination_storage_name)
end
end
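The worker is a thin asynchronous wrapper around the service above; it is declared idempotent and runs at throttled urgency, so duplicate scheduling and retries are safe. A minimal sketch of the two equivalent ways to enqueue it (shard names assumed):

```ruby
# What the new bulk-move API endpoint calls:
Projects::ScheduleBulkRepositoryShardMovesService.enqueue('default', 'praefect')

# ... which boils down to:
ProjectScheduleBulkRepositoryShardMovesWorker.perform_async('default', 'praefect')
```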

View File

@ -0,0 +1,5 @@
---
title: "Remove Google Code importer"
merge_request: 48139
author: Getulio Valentin Sánchez
type: removed

View File

@ -0,0 +1,5 @@
---
title: Rename "Cycle Analytics" with "Value Stream Analytics" under /spec
merge_request: 48531
author: Takuya Noguchi
type: other

View File

@ -0,0 +1,5 @@
---
title: Adds bulk project repository storage move API
merge_request: 47142
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Update GitLab Workhorse to v8.56.0
merge_request: 48592
author:
type: other

View File

@ -0,0 +1,5 @@
---
title: Obfuscate user profile for unconfirmed users
merge_request: 48271
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Fixed image diff comments positioning
merge_request: 48132
author:
type: fixed

View File

@ -42,15 +42,6 @@ namespace :import do
get :realtime_changes
end
resource :google_code, only: [:create, :new], controller: :google_code do
get :status
post :callback
get :jobs
get :new_user_map, path: :user_map
post :create_user_map, path: :user_map
end
resource :fogbugz, only: [:create, :new], controller: :fogbugz do
get :status
post :callback

View File

@ -250,6 +250,8 @@
- 1
- - project_import_schedule
- 1
- - project_schedule_bulk_repository_shard_moves
- 1
- - project_service
- 1
- - project_template_export

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddIndexToProjectRepositoriesShardIdProjectId < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :project_repositories, [:shard_id, :project_id]
end
def down
remove_concurrent_index :project_repositories, [:shard_id, :project_id], name: 'index_project_repositories_on_shard_id_and_project_id'
end
end

View File

@ -0,0 +1 @@
7988d01be5fac0f2a28cc97e309bfa16450d2e376888401fc2ad521aa0082020

View File

@ -21771,6 +21771,8 @@ CREATE UNIQUE INDEX index_project_repositories_on_project_id ON project_reposito
CREATE INDEX index_project_repositories_on_shard_id ON project_repositories USING btree (shard_id);
CREATE INDEX index_project_repositories_on_shard_id_and_project_id ON project_repositories USING btree (shard_id, project_id);
CREATE UNIQUE INDEX index_project_repository_states_on_project_id ON project_repository_states USING btree (project_id);
CREATE INDEX index_project_repository_storage_moves_on_project_id ON project_repository_storage_moves USING btree (project_id);

View File

@ -1255,23 +1255,27 @@ Gitaly Cluster automatically.
Repositories may be moved from one storage location using the [Project repository storage moves API](../../api/project_repository_storage_moves.md):
NOTE: **Note:**
The Project repository storage moves API [cannot move all repository types](../../api/project_repository_storage_moves.md#limitations).
To move repositories to Gitaly Cluster:
1. [Schedule a move](../../api/project_repository_storage_moves.md#schedule-a-repository-storage-move-for-a-project)
for the first repository using the API. For example:
1. [Schedule repository storage moves for all projects on a storage shard](../../api/project_repository_storage_moves.md#schedule-repository-storage-moves-for-all-projects-on-a-storage-shard) using the API. For example:
```shell
curl --request POST --header "Private-Token: <your_access_token>" --header "Content-Type: application/json" \
--data '{"destination_storage_name":"praefect"}' "https://gitlab.example.com/api/v4/projects/123/repository_storage_moves"
--data '{"source_storage_name":"gitaly","destination_storage_name":"praefect"}' "https://gitlab.example.com/api/v4/project_repository_storage_moves"
```
1. Using the ID that is returned, [query the repository move](../../api/project_repository_storage_moves.md#get-a-single-repository-storage-move-for-a-project)
1. [Query the most recent repository moves](../../api/project_repository_storage_moves.md#retrieve-all-project-repository-storage-moves)
using the API. The query indicates either:
- The move has completed successfully. The `state` field is `finished`.
- The move is in progress. Re-query the repository move until it completes successfully.
- The move has failed. Most failures are temporary and are solved by rescheduling the move.
- The moves have completed successfully. The `state` field is `finished`.
- The moves are in progress. Re-query the repository move until it completes successfully.
- The moves have failed. Most failures are temporary and are solved by rescheduling the move.
1. Once the move is successful, repeat these steps for all repositories for your projects.
1. Once the moves are complete, [query projects](../../api/projects.md#list-all-projects)
using the API to confirm that all projects have moved. No projects should be returned
with `repository_storage` field set to the old storage.
## Debugging Praefect

View File

@ -51,7 +51,7 @@ master process has PID 56227 below.
The main tunable options for Unicorn are the number of worker processes and the
request timeout after which the Unicorn master terminates a worker process.
See the [Omnibus GitLab Unicorn settings
documentation](https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/doc/settings/unicorn.html)
documentation](https://docs.gitlab.com/omnibus/settings/unicorn.html)
if you want to adjust these settings.
## unicorn-worker-killer

View File

@ -239,3 +239,35 @@ Example response:
"created_at": "2020-05-07T04:27:17.016Z"
}
```
## Schedule repository storage moves for all projects on a storage shard
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47142) in GitLab 13.7.
Schedules repository storage moves for each project repository stored on the source storage shard.
```plaintext
POST /project_repository_storage_moves
```
Parameters:
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `source_storage_name` | string | yes | Name of the source storage shard. |
| `destination_storage_name` | string | no | Name of the destination storage shard. The storage is selected automatically if not provided. |
Example request:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" \
--data '{"source_storage_name":"default"}' "https://gitlab.example.com/api/v4/project_repository_storage_moves"
```
Example response:
```json
{
"message": "202 Accepted"
}
```

View File

@ -292,7 +292,7 @@ listed in the descriptions of the relevant settings.
| `housekeeping_gc_period` | integer | required by: `housekeeping_enabled` | Number of Git pushes after which `git gc` is run. |
| `housekeeping_incremental_repack_period` | integer | required by: `housekeeping_enabled` | Number of Git pushes after which an incremental `git repack` is run. |
| `html_emails_enabled` | boolean | no | Enable HTML emails. |
| `import_sources` | array of strings | no | Sources to allow project import from, possible values: `github`, `bitbucket`, `bitbucket_server`, `gitlab`, `google_code`, `fogbugz`, `git`, `gitlab_project`, `gitea`, `manifest`, and `phabricator`. |
| `import_sources` | array of strings | no | Sources to allow project import from, possible values: `github`, `bitbucket`, `bitbucket_server`, `gitlab`, `fogbugz`, `git`, `gitlab_project`, `gitea`, `manifest`, and `phabricator`. |
| `issues_create_limit` | integer | no | Max number of issue creation requests per minute per user. Disabled by default.|
| `local_markdown_version` | integer | no | Increase this value when any cached Markdown should be invalidated. |
| `maintenance_mode_message` | string | no | **(PREMIUM)** Message displayed when instance is in maintenance mode |

View File

@ -40,7 +40,7 @@ Kubernetes-specific environment variables are detailed in the
| `CI_COMMIT_REF_SLUG` | 9.0 | all | `$CI_COMMIT_REF_NAME` lowercased, shortened to 63 bytes, and with everything except `0-9` and `a-z` replaced with `-`. No leading / trailing `-`. Use in URLs, host names and domain names. |
| `CI_COMMIT_SHA` | 9.0 | all | The commit revision for which project is built |
| `CI_COMMIT_SHORT_SHA` | 11.7 | all | The first eight characters of `CI_COMMIT_SHA` |
| `CI_COMMIT_BRANCH` | 12.6 | 0.5 | The commit branch name. Present in branch pipelines, including pipelines for the default branch. Not present in merge request pipelines. |
| `CI_COMMIT_BRANCH` | 12.6 | 0.5 | The commit branch name. Present in branch pipelines, including pipelines for the default branch. Not present in merge request pipelines or tag pipelines. |
| `CI_COMMIT_TAG` | 9.0 | 0.5 | The commit tag name. Present only when building tags. |
| `CI_COMMIT_TITLE` | 10.8 | all | The title of the commit - the full first line of the message |
| `CI_COMMIT_TIMESTAMP` | 13.4 | all | The timestamp of the commit in the ISO 8601 format. |

View File

@ -48,7 +48,7 @@ this needs to happen when the stable branches for all products have been created
committed.
1. Push the newly created branch, but **don't create a merge request**.
After you push, the `image:docker-singe` job creates a new Docker image
After you push, the `image:docs-single` job creates a new Docker image
tagged with the branch name you created in the first step. In the end, the
image is uploaded in the [Container Registry](https://gitlab.com/gitlab-org/gitlab-docs/container_registry)
and it is listed under the `registry` environment folder at
@ -114,15 +114,15 @@ version and rotates the old one:
The versions dropdown is in a way "hardcoded". When the site is built, it looks
at the contents of `content/_data/versions.yaml` and based on that, the dropdown
is populated. So, older branches have different content, which means the
dropdown list one or more releases behind. Remember that the new changes of
is populated. Older branches have different content, which means the
dropdown list is one or more releases behind. Remember that the new changes of
the dropdown are included in the unmerged `release-X-Y` branch.
The content of `content/_data/versions.yaml` needs to change for all online
versions (stable branches `X.Y` of the `gitlab-docs` project):
1. Run the Rake task that creates all the respective merge requests needed to
update the dropdowns and are set to automatically be merged when their
update the dropdowns. Set these to automatically be merged when their
pipelines succeed:
NOTE: **Note:**

View File

@ -435,7 +435,7 @@ We are using a custom mapping between source file to test files, maintained in t
We follow the [PostgreSQL versions shipped with Omnibus GitLab](https://docs.gitlab.com/omnibus/package-information/postgresql_versions.html):
| PostgreSQL version | 13.0 (May 2020) | 13.1 (June 2020) | 13.2 (July 2020) | 13.3 (August 2020) | 13.4, 13.5 | 13.6 (November 2020) | 14.0 (May 2021?) |
| PostgreSQL version | 13.0 (May 2020) | 13.1 (June 2020) | 13.2 (July 2020) | 13.3 (August 2020) | 13.4, 13.5 | [13.7 (December 2020)](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5722) | 14.0 (May 2021?) |
| ------ | --------------- | ---------------- | ---------------- | ------------------ | ------------ | -------------------- | ---------------- |
| PG11 | MRs/`master`/`2-hour`/`nightly` | MRs/`master`/`2-hour`/`nightly` | MRs/`master`/`2-hour`/`nightly` | MRs/`master`/`2-hour`/`nightly` | MRs/`master`/`2-hour`/`nightly` | `nightly` | - |
| PG12 | - | - | `nightly` | `2-hour`/`nightly` | `2-hour`/`nightly` | MRs/`2-hour`/`nightly` | `2-hour`/`nightly` |

View File

@ -75,8 +75,9 @@ require_migration!('populate_foo_column')
Use the `table` helper to create a temporary `ActiveRecord::Base`-derived model
for a table. [FactoryBot](best_practices.md#factories)
**should not** be used to create data for migration specs. For example, to
create a record in the `projects` table:
**should not** be used to create data for migration specs because it relies on
application code which can change after the migration has run, and cause the test
to fail. For example, to create a record in the `projects` table:
```ruby
project = table(:projects).create!(id: 1, name: 'gitlab1', path: 'gitlab1')

View File

@ -154,7 +154,6 @@ GitLab version | Minimum PostgreSQL version
-|-
10.0 | 9.6
13.0 | 11
13.6 | 12
You must also ensure the `pg_trgm` and `btree_gist` extensions are [loaded into every
GitLab database](postgresql_extensions.html).

View File

@ -555,7 +555,7 @@ For basic guidance on choosing a cluster configuration you may refer to [Elastic
- Generally, you will want to use at least a 2-node cluster configuration with one replica, which will allow you to have resilience. If your storage usage is growing quickly, you may want to plan horizontal scaling (adding more nodes) beforehand.
- It's not recommended to use HDD storage with the search cluster, because it will take a hit on performance. It's better to use SSD storage (NVMe or SATA SSD drives for example).
- You can use the [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance) to benchmark search performance with different search cluster sizes and configurations.
- `Heap size` should be set to no more than 50% of your physical RAM. Additionally, it shouldn't be set to more than the threshold for zero-based compressed oops. The exact threshold varies, but 26 GB is safe on most systems, but can also be as large as 30 GB on some systems. See [Setting the heap size](https://www.elastic.co/guide/en/elasticsearch/reference/current/heap-size.html#heap-size) for more details.
- `Heap size` should be set to no more than 50% of your physical RAM. Additionally, it shouldn't be set to more than the threshold for zero-based compressed oops. The exact threshold varies, but 26 GB is safe on most systems, but can also be as large as 30 GB on some systems. See [Heap size settings](https://www.elastic.co/guide/en/elasticsearch/reference/current/important-settings.html#heap-size-settings) and [Setting JVM options](https://www.elastic.co/guide/en/elasticsearch/reference/current/jvm-options.html) for more details.
- Number of CPUs (CPU cores) per node usually corresponds to the `Number of Elasticsearch shards` setting described below.
- A good guideline is to ensure you keep the number of shards per node below 20 per GB heap it has configured. A node with a 30GB heap should therefore have a maximum of 600 shards, but the further below this limit you can keep it the better. This will generally help the cluster stay in good health.
- Small shards result in small segments, which increases overhead. Aim to keep the average shard size between at least a few GB and a few tens of GB. Another consideration is the number of documents, you should aim for this simple formula for the number of shards: `number of expected documents / 5M +1`.

View File

@ -18,6 +18,7 @@ on the GitLab project's home page.
To better understand GitLab's Jenkins integration, watch the following video:
- [GitLab workflow with Jira issues and Jenkins pipelines](https://youtu.be/Jn-_fyra7xQ)
Use the Jenkins integration with GitLab when:
- You plan to migrate your CI from Jenkins to [GitLab CI/CD](../ci/README.md) in the future, but

View File

@ -34,6 +34,22 @@ module API
present storage_move, with: Entities::ProjectRepositoryStorageMove, current_user: current_user
end
desc 'Schedule bulk project repository storage moves' do
detail 'This feature was introduced in GitLab 13.7.'
end
params do
requires :source_storage_name, type: String, desc: 'The source storage shard', values: -> { Gitlab.config.repositories.storages.keys }
optional :destination_storage_name, type: String, desc: 'The destination storage shard', values: -> { Gitlab.config.repositories.storages.keys }
end
post do
::Projects::ScheduleBulkRepositoryShardMovesService.enqueue(
declared_params[:source_storage_name],
declared_params[:destination_storage_name]
)
accepted!
end
end
params do

View File

@ -88,7 +88,7 @@ module API
end
optional :html_emails_enabled, type: Boolean, desc: 'By default GitLab sends emails in HTML and plain text formats so mail clients can choose what format to use. Disable this option if you only want to send emails in plain text format.'
optional :import_sources, type: Array[String], coerce_with: Validations::Types::CommaSeparatedToArray.coerce,
values: %w[github bitbucket bitbucket_server gitlab google_code fogbugz git gitlab_project gitea manifest phabricator],
values: %w[github bitbucket bitbucket_server gitlab fogbugz git gitlab_project gitea manifest phabricator],
desc: 'Enabled sources for code import during project creation. OmniAuth must be configured for GitHub, Bitbucket, and GitLab.com'
optional :max_artifacts_size, type: Integer, desc: "Set the maximum file size for each job's artifacts"
optional :max_attachment_size, type: Integer, desc: 'Maximum attachment size in MB'

View File

@ -8,7 +8,19 @@ module Gitlab
strategy :JobString, if: -> (config) { config.is_a?(String) }
strategy :JobHash,
if: -> (config) { config.is_a?(Hash) && config.key?(:job) && !(config.key?(:project) || config.key?(:ref)) }
if: -> (config) { config.is_a?(Hash) && same_pipeline_need?(config) }
strategy :CrossPipelineDependency,
if: -> (config) { config.is_a?(Hash) && cross_pipeline_need?(config) }
def self.same_pipeline_need?(config)
config.key?(:job) &&
!(config.key?(:project) || config.key?(:ref) || config.key?(:pipeline))
end
def self.cross_pipeline_need?(config)
config.key?(:job) && config.key?(:pipeline) && !config.key?(:project)
end
class JobString < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
@ -50,6 +62,30 @@ module Gitlab
end
end
class CrossPipelineDependency < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[pipeline job artifacts].freeze
attributes :pipeline, :job, :artifacts
validations do
validates :config, presence: true
validates :config, allowed_keys: ALLOWED_KEYS
validates :pipeline, type: String, presence: true
validates :job, type: String, presence: true
validates :artifacts, boolean: true, allow_nil: true
end
def type
:cross_dependency
end
def value
super.merge(artifacts: artifacts || artifacts.nil?)
end
end
class UnknownStrategy < ::Gitlab::Config::Entry::Node
def type
end

View File

@ -10,6 +10,8 @@ module Gitlab
class Needs < ::Gitlab::Config::Entry::ComposableArray
include ::Gitlab::Config::Entry::Validatable
NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT = 5
validations do
validate do
unless config.is_a?(Hash) || config.is_a?(Array)
@ -27,6 +29,15 @@ module Gitlab
errors.add(:config, "uses invalid types: #{extra_keys.join(', ')}")
end
end
validate on: :composed do
cross_dependencies = value[:cross_dependency].to_a
cross_pipeline_dependencies = cross_dependencies.select { |dep| dep[:pipeline] }
if cross_pipeline_dependencies.size > NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT
errors.add(:config, "must be less than or equal to #{NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT}")
end
end
end
def value
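Taken together with the new `CrossPipelineDependency` strategy above, a `needs` entry containing both `job` and `pipeline` (and no `project`) is now parsed as a cross-pipeline dependency, and a job may declare at most five of them. A rough sketch of the hash shapes the strategies distinguish (values are illustrative):

```ruby
# Config hashes as the entry classes receive them after parsing (values illustrative):
same_pipeline  = { job: 'build' }                                    # JobHash strategy
with_artifacts = { job: 'build', artifacts: true }                   # JobHash strategy
cross_pipeline = { job: 'build', pipeline: '$PARENT_PIPELINE_ID' }   # CrossPipelineDependency
cross_project  = { job: 'build', project: 'group/app', ref: 'main' } # not matched by either strategy above

# On composition, more than NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT (5) pipeline-qualified
# needs produces the error "must be less than or equal to 5".
```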

View File

@ -1,54 +0,0 @@
# frozen_string_literal: true
module Gitlab
module GoogleCodeImport
class Client
attr_reader :raw_data
def self.mask_email(author)
parts = author.split("@", 2)
parts[0] = "#{parts[0][0...-3]}..."
parts.join("@")
end
def initialize(raw_data)
@raw_data = raw_data
end
def valid?
raw_data.is_a?(Hash) && raw_data["kind"] == "projecthosting#user" && raw_data.key?("projects")
end
def repos
@repos ||= raw_data["projects"].map { |raw_repo| GoogleCodeImport::Repository.new(raw_repo) }.select(&:git?)
end
def incompatible_repos
@incompatible_repos ||= raw_data["projects"].map { |raw_repo| GoogleCodeImport::Repository.new(raw_repo) }.reject(&:git?)
end
def repo(id)
repos.find { |repo| repo.id == id }
end
def user_map
user_map = Hash.new { |hash, user| hash[user] = self.class.mask_email(user) }
repos.each do |repo|
next unless repo.valid? && repo.issues
repo.issues.each do |raw_issue|
# Touching is enough to add the entry and masked email.
user_map[raw_issue["author"]["name"]]
raw_issue["comments"]["items"].each do |raw_comment|
user_map[raw_comment["author"]["name"]]
end
end
end
Hash[user_map.sort]
end
end
end
end

View File

@ -1,373 +0,0 @@
# frozen_string_literal: true
module Gitlab
module GoogleCodeImport
class Importer
attr_reader :project, :repo, :closed_statuses
NICE_LABEL_COLOR_HASH =
{
'Status: New' => '#428bca',
'Status: Accepted' => '#5cb85c',
'Status: Started' => '#8e44ad',
'Priority: Critical' => '#ffcfcf',
'Priority: High' => '#deffcf',
'Priority: Medium' => '#fff5cc',
'Priority: Low' => '#cfe9ff',
'Type: Defect' => '#d9534f',
'Type: Enhancement' => '#44ad8e',
'Type: Task' => '#4b6dd0',
'Type: Review' => '#8e44ad',
'Type: Other' => '#7f8c8d'
}.freeze
def initialize(project)
@project = project
import_data = project.import_data.try(:data)
repo_data = import_data["repo"] if import_data
@repo = GoogleCodeImport::Repository.new(repo_data)
@closed_statuses = []
@known_labels = Set.new
end
def execute
return true unless repo.valid?
import_status_labels
import_labels
import_issues
true
end
private
def user_map
@user_map ||= begin
user_map = Hash.new do |hash, user|
# Replace ... by \.\.\., so `johnsm...@gmail.com` isn't autolinked.
Client.mask_email(user).sub("...", "\\.\\.\\.")
end
import_data = project.import_data.try(:data)
stored_user_map = import_data["user_map"] if import_data
user_map.update(stored_user_map) if stored_user_map
user_map
end
end
def import_status_labels
repo.raw_data["issuesConfig"]["statuses"].each do |status|
closed = !status["meansOpen"]
@closed_statuses << status["status"] if closed
name = nice_status_name(status["status"])
create_label(name)
@known_labels << name
end
end
def import_labels
repo.raw_data["issuesConfig"]["labels"].each do |label|
name = nice_label_name(label["label"])
create_label(name)
@known_labels << name
end
end
# rubocop: disable CodeReuse/ActiveRecord
def import_issues
return unless repo.issues
while raw_issue = repo.issues.shift
author = user_map[raw_issue["author"]["name"]]
date = DateTime.parse(raw_issue["published"]).to_formatted_s(:long)
comments = raw_issue["comments"]["items"]
issue_comment = comments.shift
content = format_content(issue_comment["content"])
attachments = format_attachments(raw_issue["id"], 0, issue_comment["attachments"])
body = format_issue_body(author, date, content, attachments)
labels = import_issue_labels(raw_issue)
assignee_id = nil
if raw_issue.key?("owner")
username = user_map[raw_issue["owner"]["name"]]
if username.start_with?("@")
username = username[1..-1]
if user = UserFinder.new(username).find_by_username
assignee_id = user.id
end
end
end
issue = Issue.create!(
iid: raw_issue['id'],
project_id: project.id,
title: raw_issue['title'],
description: body,
author_id: project.creator_id,
assignee_ids: [assignee_id],
state_id: raw_issue['state'] == 'closed' ? Issue.available_states[:closed] : Issue.available_states[:opened]
)
issue_labels = ::LabelsFinder.new(nil, project_id: project.id, title: labels).execute(skip_authorization: true)
issue.update_attribute(:label_ids, issue_labels.pluck(:id))
import_issue_comments(issue, comments)
end
end
# rubocop: enable CodeReuse/ActiveRecord
def import_issue_labels(raw_issue)
labels = []
raw_issue["labels"].each do |label|
name = nice_label_name(label)
labels << name
unless @known_labels.include?(name)
create_label(name)
@known_labels << name
end
end
labels << nice_status_name(raw_issue["status"])
labels
end
def import_issue_comments(issue, comments)
Note.transaction do
while raw_comment = comments.shift
next if raw_comment.key?("deletedBy")
content = format_content(raw_comment["content"])
updates = format_updates(raw_comment["updates"])
attachments = format_attachments(issue.iid, raw_comment["id"], raw_comment["attachments"])
next if content.blank? && updates.blank? && attachments.blank?
author = user_map[raw_comment["author"]["name"]]
date = DateTime.parse(raw_comment["published"]).to_formatted_s(:long)
body = format_issue_comment_body(
raw_comment["id"],
author,
date,
content,
updates,
attachments
)
# Create a note on the imported issue for this comment.
Note.create!(
project_id: project.id,
noteable_type: "Issue",
noteable_id: issue.id,
author_id: project.creator_id,
note: body
)
end
end
end
def nice_label_color(name)
NICE_LABEL_COLOR_HASH[name] ||
case name
when /\AComponent:/
'#fff39e'
when /\AOpSys:/
'#e2e2e2'
when /\AMilestone:/
'#fee3ff'
when *closed_statuses.map { |s| nice_status_name(s) }
'#cfcfcf'
else
'#e2e2e2'
end
end
def nice_label_name(name)
name.sub("-", ": ")
end
def nice_status_name(name)
"Status: #{name}"
end
def linkify_issues(str)
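# "Fixed by issue 123" becomes "Fixed by issue #123" so GitLab autolinks it;
# "see comment #2" becomes "see comment 2" so it is not read as an issue reference.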
str = str.gsub(/([Ii]ssue) ([0-9]+)/, '\1 #\2')
str = str.gsub(/([Cc]omment) #([0-9]+)/, '\1 \2')
str
end
def escape_for_markdown(str)
# No headings and lists
str = str.gsub(/^#/, "\\#")
str = str.gsub(/^-/, "\\-")
# No inline code
str = str.gsub("`", "\\`")
# Carriage returns make me sad
str = str.delete("\r")
# Markdown ignores single newlines, but we need them as <br />.
str = str.gsub("\n", " \n")
str
end
def create_label(name)
params = { name: name, color: nice_label_color(name) }
::Labels::FindOrCreateService.new(nil, project, params).execute(skip_authorization: true)
end
def format_content(raw_content)
linkify_issues(escape_for_markdown(raw_content))
end
def format_updates(raw_updates)
updates = []
if raw_updates.key?("status")
updates << "*Status: #{raw_updates["status"]}*"
end
if raw_updates.key?("owner")
updates << "*Owner: #{user_map[raw_updates["owner"]]}*"
end
if raw_updates.key?("cc")
cc = raw_updates["cc"].map do |l|
deleted = l.start_with?("-")
l = l[1..-1] if deleted
l = user_map[l]
l = "~~#{l}~~" if deleted
l
end
updates << "*Cc: #{cc.join(", ")}*"
end
if raw_updates.key?("labels")
labels = raw_updates["labels"].map do |l|
deleted = l.start_with?("-")
l = l[1..-1] if deleted
l = nice_label_name(l)
l = "~~#{l}~~" if deleted
l
end
updates << "*Labels: #{labels.join(", ")}*"
end
if raw_updates.key?("mergedInto")
updates << "*Merged into: ##{raw_updates["mergedInto"]}*"
end
if raw_updates.key?("blockedOn")
blocked_ons = raw_updates["blockedOn"].map do |raw_blocked_on|
format_blocking_updates(raw_blocked_on)
end
updates << "*Blocked on: #{blocked_ons.join(", ")}*"
end
if raw_updates.key?("blocking")
blockings = raw_updates["blocking"].map do |raw_blocked_on|
format_blocking_updates(raw_blocked_on)
end
updates << "*Blocking: #{blockings.join(", ")}*"
end
updates
end
def format_blocking_updates(raw_blocked_on)
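# Entries look like "project-name:123"; a leading "-" marks a removed link,
# which is rendered struck through below.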
name, id = raw_blocked_on.split(":", 2)
deleted = name.start_with?("-")
name = name[1..-1] if deleted
text =
if name == project.import_source
"##{id}"
else
"#{project.namespace.full_path}/#{name}##{id}"
end
text = "~~#{text}~~" if deleted
text
end
def format_attachments(issue_id, comment_id, raw_attachments)
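# Attachments are linked to the public google-code-attachments storage bucket;
# image attachments get a leading "!" so they render inline in Markdown.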
return [] unless raw_attachments
raw_attachments.map do |attachment|
next if attachment["isDeleted"]
filename = attachment["fileName"]
link = "https://storage.googleapis.com/google-code-attachments/#{@repo.name}/issue-#{issue_id}/comment-#{comment_id}/#{filename}"
text = "[#{filename}](#{link})"
text = "!#{text}" if filename =~ /\.(png|jpg|jpeg|gif|bmp|tiff)\z/i
text
end.compact
end
def format_issue_comment_body(id, author, date, content, updates, attachments)
body = []
body << "*Comment #{id} by #{author} on #{date}*"
body << "---"
if content.blank?
content = "*(No comment has been entered for this change)*"
end
body << content
if updates.any?
body << "---"
body += updates
end
if attachments.any?
body << "---"
body += attachments
end
body.join("\n\n")
end
def format_issue_body(author, date, content, attachments)
body = []
body << "*By #{author} on #{date} (imported from Google Code)*"
body << "---"
if content.blank?
content = "*(No description has been entered for this issue)*"
end
body << content
if attachments.any?
body << "---"
body += attachments
end
body.join("\n\n")
end
end
end
end

View File

@ -1,32 +0,0 @@
# frozen_string_literal: true
module Gitlab
module GoogleCodeImport
class ProjectCreator
attr_reader :repo, :namespace, :current_user, :user_map
def initialize(repo, namespace, current_user, user_map = nil)
@repo = repo
@namespace = namespace
@current_user = current_user
@user_map = user_map
end
def execute
::Projects::CreateService.new(
current_user,
name: repo.name,
path: repo.name,
description: repo.summary,
namespace: namespace,
creator: current_user,
visibility_level: Gitlab::VisibilityLevel::PUBLIC,
import_type: "google_code",
import_source: repo.name,
import_url: repo.import_url,
import_data: { data: { 'repo' => repo.raw_data, 'user_map' => user_map } }
).execute
end
end
end
end

View File

@ -1,45 +0,0 @@
# frozen_string_literal: true
module Gitlab
module GoogleCodeImport
class Repository
attr_accessor :raw_data
def initialize(raw_data)
@raw_data = raw_data
end
def valid?
raw_data.is_a?(Hash) && raw_data["kind"] == "projecthosting#project"
end
def id
raw_data["externalId"]
end
def name
raw_data["name"]
end
def summary
raw_data["summary"]
end
def description
raw_data["description"]
end
def git?
raw_data["versionControlSystem"] == "git"
end
def import_url
raw_data["repositoryUrls"].first
end
def issues
raw_data["issues"] && raw_data["issues"]["items"]
end
end
end
end

View File

@ -15,7 +15,6 @@ module Gitlab
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repo by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),

View File

@ -370,18 +370,6 @@ msgstr ""
msgid "%{authorsName}'s thread"
msgstr ""
msgid "%{code_open}\"johnsmith@example.com\": \"@johnsmith\"%{code_close} will add \"By %{link_open}@johnsmith%{link_close}\" to all issues and comments originally created by johnsmith@example.com, and will set %{link_open}@johnsmith%{link_close} as the assignee on all issues originally assigned to johnsmith@example.com."
msgstr ""
msgid "%{code_open}\"johnsmith@example.com\": \"John Smith\"%{code_close} will add \"By John Smith\" to all issues and comments originally created by johnsmith@example.com."
msgstr ""
msgid "%{code_open}\"johnsmith@example.com\": \"johnsm...@example.com\"%{code_close} will add \"By johnsm...@example.com\" to all issues and comments originally created by johnsmith@example.com. The email address or username is masked to ensure the user's privacy."
msgstr ""
msgid "%{code_open}\"johnsmith@example.com\": \"johnsmith@example.com\"%{code_close} will add \"By %{link_open}johnsmith@example.com%{link_close}\" to all issues and comments originally created by johnsmith@example.com. By default, the email address or username is masked to ensure the user's privacy. Use this option if you want to show the full email address."
msgstr ""
msgid "%{code_open}Masked%{code_close} variables are hidden in job logs (though they must match certain regexp requirements to do so)."
msgstr ""
@ -623,9 +611,6 @@ msgstr ""
msgid "%{listToShow}, and %{awardsListLength} more."
msgstr ""
msgid "%{loadingIcon} Started"
msgstr ""
msgid "%{location} is missing required keys: %{keys}"
msgstr ""
@ -3128,9 +3113,6 @@ msgstr ""
msgid "An error occurred while getting projects"
msgstr ""
msgid "An error occurred while importing project: %{details}"
msgstr ""
msgid "An error occurred while initializing path locks"
msgstr ""
@ -5361,12 +5343,6 @@ msgstr ""
msgid "Chinese language support using"
msgstr ""
msgid "Choose %{strong_open}Create archive%{strong_close} and wait for archiving to complete."
msgstr ""
msgid "Choose %{strong_open}Next%{strong_close} at the bottom of the page."
msgstr ""
msgid "Choose a branch/tag (e.g. %{master}) or enter a commit (e.g. %{sha}) to see what's changed or to create a merge request."
msgstr ""
@ -5619,12 +5595,6 @@ msgstr ""
msgid "Click %{link_to} to view the request."
msgstr ""
msgid "Click the %{strong_open}Download%{strong_close} button and wait for downloading to complete."
msgstr ""
msgid "Click the %{strong_open}Select none%{strong_close} button on the right, since we only need \"Google Code Project Hosting\"."
msgstr ""
msgid "Click the button below to begin the install process by navigating to the Kubernetes page"
msgstr ""
@ -8260,9 +8230,6 @@ msgstr ""
msgid "Customize how FogBugz email addresses and usernames are imported into GitLab. In the next step, you'll be able to select the projects you want to import."
msgstr ""
msgid "Customize how Google Code email addresses and usernames are imported into GitLab. In the next step, you'll be able to select the projects you want to import."
msgstr ""
msgid "Customize icon"
msgstr ""
@ -8823,9 +8790,6 @@ msgstr ""
msgid "Default stages"
msgstr ""
msgid "Default: Directly import the Google Code email address or username"
msgstr ""
msgid "Default: Map a FogBugz account ID to a full name"
msgstr ""
@ -9803,9 +9767,6 @@ msgstr ""
msgid "Do not display offers from third parties within GitLab"
msgstr ""
msgid "Do you want to customize how Google Code email addresses and usernames are imported into GitLab?"
msgstr ""
msgid "Dockerfile"
msgstr ""
@ -12073,12 +12034,6 @@ msgstr ""
msgid "Find file"
msgstr ""
msgid "Find the downloaded ZIP file and decompress it."
msgstr ""
msgid "Find the newly extracted %{code_open}Takeout/Google Code Project Hosting/GoogleCodeProjectHosting.json%{code_close} file."
msgstr ""
msgid "Fingerprint"
msgstr ""
@ -12148,9 +12103,6 @@ msgstr ""
msgid "Folder/%{name}"
msgstr ""
msgid "Follow the steps below to export your Google Code project data."
msgstr ""
msgid "Font Color"
msgstr ""
@ -12262,9 +12214,6 @@ msgstr ""
msgid "From %{providerTitle}"
msgstr ""
msgid "From Google Code"
msgstr ""
msgid "From issue creation until deploy to production"
msgstr ""
@ -13003,9 +12952,6 @@ msgstr ""
msgid "Go full screen"
msgstr ""
msgid "Go to %{link_to_google_takeout}."
msgstr ""
msgid "Go to %{strongStart}Issues%{strongEnd} &gt; %{strongStart}Boards%{strongEnd} to access your personalized learning issue board."
msgstr ""
@ -13120,12 +13066,6 @@ msgstr ""
msgid "Google Cloud Platform"
msgstr ""
msgid "Google Code import"
msgstr ""
msgid "Google Takeout"
msgstr ""
msgid "Google authentication is not %{link_start}properly configured%{link_end}. Ask your GitLab administrator if you want to use this service."
msgstr ""
@ -14205,12 +14145,6 @@ msgstr ""
msgid "Import Projects from Gitea"
msgstr ""
msgid "Import all compatible projects"
msgstr ""
msgid "Import all projects"
msgstr ""
msgid "Import an exported GitLab project"
msgstr ""
@ -14262,9 +14196,6 @@ msgstr ""
msgid "Import projects from GitLab.com"
msgstr ""
msgid "Import projects from Google Code"
msgstr ""
msgid "Import repositories from Bitbucket Server"
msgstr ""
@ -14364,9 +14295,6 @@ msgstr ""
msgid "In progress"
msgstr ""
msgid "In the next step, you'll be able to select the projects you want to import."
msgstr ""
msgid "Incident"
msgstr ""
@ -14544,9 +14472,6 @@ msgstr ""
msgid "Incoming!"
msgstr ""
msgid "Incompatible Project"
msgstr ""
msgid "Incompatible options set!"
msgstr ""
@ -16030,9 +15955,6 @@ msgstr ""
msgid "Leave project"
msgstr ""
msgid "Leave the \"File type\" and \"Delivery method\" options on their default values."
msgstr ""
msgid "Leave zen mode"
msgstr ""
@ -16488,9 +16410,6 @@ msgstr ""
msgid "Make sure you save it - you won't be able to access it again."
msgstr ""
msgid "Make sure you're logged into the account that owns the projects you'd like to import."
msgstr ""
msgid "Make this epic confidential"
msgstr ""
@ -16554,15 +16473,6 @@ msgstr ""
msgid "Map a FogBugz account ID to a GitLab user"
msgstr ""
msgid "Map a Google Code user to a GitLab user"
msgstr ""
msgid "Map a Google Code user to a full email address"
msgstr ""
msgid "Map a Google Code user to a full name"
msgstr ""
msgid "Mar"
msgstr ""
@ -18736,9 +18646,6 @@ msgstr ""
msgid "No worries, you can still use all the %{strong}%{plan_name}%{strong_close} features for now. You have %{remaining_days} to renew your subscription."
msgstr ""
msgid "No, directly import the existing email addresses and usernames."
msgstr ""
msgid "No. of commits"
msgstr ""
@ -19125,9 +19032,6 @@ msgstr ""
msgid "One or more of your %{provider} projects cannot be imported into GitLab directly because they use Subversion or Mercurial for version control, rather than Git."
msgstr ""
msgid "One or more of your Google Code projects cannot be imported into GitLab directly because they use Subversion or Mercurial for version control, rather than Git."
msgstr ""
msgid "One or more of your dependency files are not supported, and the dependency list may be incomplete. Below is a list of supported file types."
msgstr ""
@ -19278,9 +19182,6 @@ msgstr ""
msgid "Optionally, you can %{link_to_customize} how FogBugz email addresses and usernames are imported into GitLab."
msgstr ""
msgid "Optionally, you can %{link_to_customize} how Google Code email addresses and usernames are imported into GitLab."
msgstr ""
msgid "Options"
msgstr ""
@ -20313,9 +20214,6 @@ msgstr ""
msgid "Please convert %{linkStart}them to Git%{linkEnd}, and go through the %{linkToImportFlow} again."
msgstr ""
msgid "Please convert them to Git on Google Code, and go through the %{link_to_import_flow} again."
msgstr ""
msgid "Please create a password for your new account."
msgstr ""
@ -23799,9 +23697,6 @@ msgstr ""
msgid "Scroll down"
msgstr ""
msgid "Scroll down to %{strong_open}Google Code Project Hosting%{strong_close} and enable the switch on the right."
msgstr ""
msgid "Scroll left"
msgstr ""
@ -24504,9 +24399,6 @@ msgstr ""
msgid "Select projects"
msgstr ""
msgid "Select projects you want to import."
msgstr ""
msgid "Select required regulatory standard"
msgstr ""
@ -27057,9 +26949,6 @@ msgstr ""
msgid "The download link will expire in 24 hours."
msgstr ""
msgid "The entered user map is not a valid JSON user map."
msgstr ""
msgid "The errors we encountered were:"
msgstr ""
@ -27329,9 +27218,6 @@ msgstr ""
msgid "The update action will time out after %{number_of_minutes} minutes. For big repositories, use a clone/push combination."
msgstr ""
msgid "The uploaded file is not a valid Google Takeout archive."
msgstr ""
msgid "The usage ping is disabled, and cannot be configured through this form."
msgstr ""
@ -27341,9 +27227,6 @@ msgstr ""
msgid "The user map has been saved. Continue by selecting the projects you want to import."
msgstr ""
msgid "The user map is a JSON document mapping the Google Code users that participated on your projects to the way their email addresses and usernames will be imported into GitLab. You can change this by changing the value on the right hand side of %{code_open}:%{code_close}. Be sure to preserve the surrounding double quotes, other punctuation and the email address or username on the left hand side."
msgstr ""
msgid "The user map is a mapping of the FogBugz users that participated on your projects to the way their email address and usernames will be imported into GitLab. You can change this by populating the table below."
msgstr ""
@ -29388,9 +29271,6 @@ msgstr ""
msgid "Upgrade your plan to improve Merge Requests."
msgstr ""
msgid "Upload %{code_open}GoogleCodeProjectHosting.json%{code_close} here:"
msgstr ""
msgid "Upload CSV file"
msgstr ""
@ -29835,6 +29715,9 @@ msgstr ""
msgid "UserProfile|This user is blocked"
msgstr ""
msgid "UserProfile|Unconfirmed user"
msgstr ""
msgid "UserProfile|View all"
msgstr ""
@ -30972,9 +30855,6 @@ msgstr ""
msgid "Yes, delete project"
msgstr ""
msgid "Yes, let me map Google Code users to full names or GitLab users."
msgstr ""
msgid "Yesterday"
msgstr ""
@ -31404,9 +31284,6 @@ msgstr ""
msgid "You need to upload a GitLab project export archive (ending in .gz)."
msgstr ""
msgid "You need to upload a Google Takeout archive."
msgstr ""
msgid "You successfully declined the invitation"
msgstr ""
@ -32122,9 +31999,6 @@ msgstr ""
msgid "committed"
msgstr ""
msgid "connecting"
msgstr ""
msgid "container_name can contain only lowercase letters, digits, '-', and '.' and must start and end with an alphanumeric character"
msgstr ""
@ -32140,9 +32014,6 @@ msgstr ""
msgid "created %{timeAgo}"
msgstr ""
msgid "customize"
msgstr ""
msgid "data"
msgstr ""
@ -32181,9 +32052,6 @@ msgstr ""
msgid "does not have a supported extension. Only %{extension_list} are supported"
msgstr ""
msgid "done"
msgstr ""
msgid "download it"
msgstr ""
@ -32322,9 +32190,6 @@ msgstr ""
msgid "import flow"
msgstr ""
msgid "importing"
msgstr ""
msgid "in group %{link_to_group}"
msgstr ""
@ -33098,9 +32963,6 @@ msgstr ""
msgid "ssh:"
msgstr ""
msgid "started"
msgstr ""
msgid "started a discussion on %{design_link}"
msgstr ""

View File

@ -47,6 +47,8 @@ module RuboCop
def known_match?(file_path, line_number, method_name)
file_path_from_root = file_path.sub(File.expand_path('../../..', __dir__), '')
method_name = 'initialize' if method_name == 'new'
self.class.keyword_warnings.any? do |warning|
warning.include?("#{file_path_from_root}:#{line_number}") && warning.include?("called method `#{method_name}'")
end

View File

@ -1,65 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Import::GoogleCodeController do
include ImportSpecHelper
let(:user) { create(:user) }
let(:dump_file) { fixture_file_upload('spec/fixtures/GoogleCodeProjectHosting.json', 'application/json') }
before do
sign_in(user)
end
describe "POST callback" do
it "stores Google Takeout dump list in session" do
post :callback, params: { dump_file: dump_file }
expect(session[:google_code_dump]).to be_a(Hash)
expect(session[:google_code_dump]["kind"]).to eq("projecthosting#user")
expect(session[:google_code_dump]).to have_key("projects")
end
end
describe "GET status" do
before do
@repo = OpenStruct.new(name: 'vim')
stub_client(valid?: true)
end
it "assigns variables" do
@project = create(:project, import_type: 'google_code', creator_id: user.id)
stub_client(repos: [@repo], incompatible_repos: [])
get :status
expect(assigns(:already_added_projects)).to eq([@project])
expect(assigns(:repos)).to eq([@repo])
expect(assigns(:incompatible_repos)).to eq([])
end
it "does not show already added project" do
@project = create(:project, import_type: 'google_code', creator_id: user.id, import_source: 'vim')
stub_client(repos: [@repo], incompatible_repos: [])
get :status
expect(assigns(:already_added_projects)).to eq([@project])
expect(assigns(:repos)).to eq([])
end
it "does not show any invalid projects" do
stub_client(repos: [], incompatible_repos: [@repo])
get :status
expect(assigns(:repos)).to be_empty
expect(assigns(:incompatible_repos)).to eq([@repo])
end
end
describe "POST create" do
it_behaves_like 'project import rate limiter'
end
end

View File

@ -13,4 +13,5 @@ FactoryBot.define do
sequence(:past_time) { |n| 4.hours.ago + (2 * n).seconds }
sequence(:iid)
sequence(:sha) { |n| Digest::SHA1.hexdigest("commit-like-#{n}") }
sequence(:variable) { |n| "var#{n}" }
end

View File

@ -124,15 +124,15 @@ RSpec.describe 'Project active tab' do
context 'on project Analytics' do
before do
visit charts_project_graph_path(project, 'master')
visit project_cycle_analytics_path(project)
end
context 'on project Analytics/Repository Analytics' do
context 'on project Analytics/Value Stream Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('Repository')
it_behaves_like 'page has active sub tab', _('Value Stream')
end
context 'on project Analytics/Cycle Analytics' do
context 'on project Analytics/"CI / CD"' do
before do
click_tab(_('CI / CD'))
end

View File

@ -56,7 +56,6 @@ RSpec.describe 'New project', :js do
expect(page).to have_link('GitHub')
expect(page).to have_link('Bitbucket')
expect(page).to have_link('GitLab.com')
expect(page).to have_link('Google Code')
expect(page).to have_button('Repo by URL')
expect(page).to have_link('GitLab export')
end
@ -292,17 +291,6 @@ RSpec.describe 'New project', :js do
end
end
context 'from Google Code' do
before do
first('.import_google_code').click
end
it 'shows import instructions' do
expect(page).to have_content('Import projects from Google Code')
expect(current_path).to eq new_import_google_code_path
end
end
context 'from manifest file' do
before do
first('.import_manifest').click

View File

@ -7,7 +7,7 @@ RSpec.describe 'User page' do
let_it_be(:user) { create(:user, bio: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
subject { visit(user_path(user)) }
subject(:visit_profile) { visit(user_path(user)) }
context 'with public profile' do
it 'shows all the tabs' do
@ -123,6 +123,32 @@ RSpec.describe 'User page' do
end
end
context 'with unconfirmed user' do
let_it_be(:user) { create(:user, :unconfirmed) }
before do
visit_profile
end
it 'shows user name as unconfirmed' do
expect(page).to have_css(".cover-title", text: 'Unconfirmed user')
end
it 'shows no tab' do
expect(page).to have_css("div.profile-header")
expect(page).not_to have_css("ul.nav-links")
end
it 'shows no additional fields' do
expect(page).not_to have_css(".profile-user-bio")
expect(page).not_to have_css(".profile-link-holder")
end
it 'shows private profile message' do
expect(page).to have_content("This user has a private profile")
end
end
it 'shows the status if there was one' do
create(:user_status, user: user, message: "Working hard!")

View File

@ -2,7 +2,7 @@ import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import banner from '~/cycle_analytics/components/banner.vue';
describe('Cycle analytics banner', () => {
describe('Value Stream Analytics banner', () => {
let vm;
beforeEach(() => {

View File

@ -6,7 +6,6 @@ import InlineDiffView from '~/diffs/components/inline_diff_view.vue';
import NotDiffableViewer from '~/vue_shared/components/diff_viewer/viewers/not_diffable.vue';
import NoPreviewViewer from '~/vue_shared/components/diff_viewer/viewers/no_preview.vue';
import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
import ImageDiffOverlay from '~/diffs/components/image_diff_overlay.vue';
import NoteForm from '~/notes/components/note_form.vue';
import DiffDiscussions from '~/diffs/components/diff_discussions.vue';
import { IMAGE_DIFF_POSITION_TYPE } from '~/diffs/constants';
@ -166,14 +165,6 @@ describe('DiffContent', () => {
describe('with image files', () => {
const imageDiffFile = { ...defaultProps.diffFile, viewer: { name: diffViewerModes.image } };
it('should have image diff view in place', () => {
getCommentFormForDiffFileGetterMock.mockReturnValue(() => true);
createComponent({ props: { diffFile: imageDiffFile } });
expect(wrapper.find(InlineDiffView).exists()).toBe(false);
expect(wrapper.find(ImageDiffOverlay).exists()).toBe(true);
});
it('renders diff file discussions', () => {
getCommentFormForDiffFileGetterMock.mockReturnValue(() => true);
createComponent({

View File

@ -24,6 +24,8 @@ describe('Diffs image diff overlay component', () => {
propsData: {
discussions: [...imageDiffDiscussions],
fileHash: 'ABC',
renderedWidth: 200,
renderedHeight: 200,
...props,
},
methods: {
@ -71,8 +73,8 @@ describe('Diffs image diff overlay component', () => {
createComponent();
const imageBadges = getAllImageBadges();
expect(imageBadges.at(0).attributes('style')).toBe('left: 10px; top: 10px;');
expect(imageBadges.at(1).attributes('style')).toBe('left: 5px; top: 5px;');
expect(imageBadges.at(0).attributes('style')).toBe('left: 10%; top: 5%;');
expect(imageBadges.at(1).attributes('style')).toBe('left: 5%; top: 2.5%;');
});
it('renders single badge for discussion object', () => {
@ -95,6 +97,8 @@ describe('Diffs image diff overlay component', () => {
y: 0,
width: 100,
height: 200,
xPercent: 0,
yPercent: 0,
});
});
@ -120,11 +124,13 @@ describe('Diffs image diff overlay component', () => {
describe('comment form', () => {
const getCommentIndicator = () => wrapper.find('.comment-indicator');
beforeEach(() => {
createComponent({}, store => {
createComponent({ canComment: true }, store => {
store.state.diffs.commentForms.push({
fileHash: 'ABC',
x: 20,
y: 10,
xPercent: 10,
yPercent: 10,
});
});
});
@ -134,7 +140,7 @@ describe('Diffs image diff overlay component', () => {
});
it('sets comment form badge position', () => {
expect(getCommentIndicator().attributes('style')).toBe('left: 20px; top: 10px;');
expect(getCommentIndicator().attributes('style')).toBe('left: 10%; top: 10%;');
});
});
});

View File

@ -1,141 +0,0 @@
import MockAdapter from 'axios-mock-adapter';
import { ImporterStatus } from '~/importer_status';
import axios from '~/lib/utils/axios_utils';
describe('Importer Status', () => {
let instance;
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
describe('addToImport', () => {
const importUrl = '/import_url';
const fixtures = `
<table>
<tr id="repo_123">
<td class="import-target"></td>
<td class="import-actions job-status">
<button name="button" type="submit" class="btn btn-import js-add-to-import">
</button>
</td>
</tr>
</table>
`;
beforeEach(() => {
setFixtures(fixtures);
jest.spyOn(ImporterStatus.prototype, 'initStatusPage').mockImplementation(() => {});
jest.spyOn(ImporterStatus.prototype, 'setAutoUpdate').mockImplementation(() => {});
instance = new ImporterStatus({
jobsUrl: '',
importUrl,
});
});
it('sets table row to active after post request', done => {
mock.onPost(importUrl).reply(200, {
id: 1,
full_path: '/full_path',
});
instance
.addToImport({
currentTarget: document.querySelector('.js-add-to-import'),
})
.then(() => {
expect(document.querySelector('tr').classList.contains('table-active')).toEqual(true);
done();
})
.catch(done.fail);
});
it('shows error message after failed POST request', done => {
setFixtures(`${fixtures}<div class="flash-container"></div>`);
mock.onPost(importUrl).reply(422, {
errors: 'You forgot your lunch',
});
instance
.addToImport({
currentTarget: document.querySelector('.js-add-to-import'),
})
.then(() => {
const flashMessage = document.querySelector('.flash-text');
expect(flashMessage.textContent.trim()).toEqual(
'An error occurred while importing project: You forgot your lunch',
);
done();
})
.catch(done.fail);
});
});
describe('autoUpdate', () => {
const jobsUrl = '/jobs_url';
beforeEach(() => {
const div = document.createElement('div');
div.innerHTML = `
<div id="project_1">
<div class="job-status">
</div>
</div>
`;
document.body.appendChild(div);
jest.spyOn(ImporterStatus.prototype, 'initStatusPage').mockImplementation(() => {});
jest.spyOn(ImporterStatus.prototype, 'setAutoUpdate').mockImplementation(() => {});
instance = new ImporterStatus({
jobsUrl,
});
});
function setupMock(importStatus) {
mock.onGet(jobsUrl).reply(200, [
{
id: 1,
import_status: importStatus,
},
]);
}
function expectJobStatus(done, status) {
instance
.autoUpdate()
.then(() => {
expect(document.querySelector('#project_1').innerText.trim()).toEqual(status);
done();
})
.catch(done.fail);
}
it('sets the job status to done', done => {
setupMock('finished');
expectJobStatus(done, 'Done');
});
it('sets the job status to scheduled', done => {
setupMock('scheduled');
expectJobStatus(done, 'Scheduled');
});
it('sets the job status to started', done => {
setupMock('started');
expectJobStatus(done, 'Started');
});
it('sets the job status to custom status', done => {
setupMock('custom status');
expectJobStatus(done, 'custom status');
});
});
});

View File

@ -1,40 +1,46 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import missingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch.vue';
import { shallowMount } from '@vue/test-utils';
import MissingBranchComponent from '~/vue_merge_request_widget/components/states/mr_widget_missing_branch.vue';
let wrapper;
function factory(sourceBranchRemoved, mergeRequestWidgetGraphql) {
wrapper = shallowMount(MissingBranchComponent, {
propsData: {
mr: { sourceBranchRemoved },
},
provide: {
glFeatures: { mergeRequestWidgetGraphql },
},
});
if (mergeRequestWidgetGraphql) {
wrapper.setData({ state: { sourceBranchExists: !sourceBranchRemoved } });
}
return wrapper.vm.$nextTick();
}
describe('MRWidgetMissingBranch', () => {
let vm;
beforeEach(() => {
const Component = Vue.extend(missingBranchComponent);
vm = mountComponent(Component, { mr: { sourceBranchRemoved: true } });
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
describe('computed', () => {
describe('missingBranchName', () => {
it('should return proper branch name', () => {
expect(vm.missingBranchName).toEqual('source');
[true, false].forEach(mergeRequestWidgetGraphql => {
describe(`widget GraphQL feature flag is ${
mergeRequestWidgetGraphql ? 'enabled' : 'disabled'
}`, () => {
it.each`
sourceBranchRemoved | branchName
${true} | ${'source'}
${false} | ${'target'}
`(
'should set missing branch name as $branchName when sourceBranchRemoved is $sourceBranchRemoved',
async ({ sourceBranchRemoved, branchName }) => {
await factory(sourceBranchRemoved, mergeRequestWidgetGraphql);
vm.mr.sourceBranchRemoved = false;
expect(vm.missingBranchName).toEqual('target');
});
});
});
describe('template', () => {
it('should have correct elements', () => {
const el = vm.$el;
const content = el.textContent.replace(/\n(\s)+/g, ' ').trim();
expect(el.classList.contains('mr-widget-body')).toBeTruthy();
expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
expect(content.replace(/\s\s+/g, ' ')).toContain('source branch does not exist.');
expect(content).toContain('Please restore it or use a different source branch');
expect(wrapper.find('[data-testid="missingBranchName"]').text()).toContain(branchName);
},
);
});
});
});

View File

@ -272,4 +272,65 @@ RSpec.describe UsersHelper do
end
end
end
describe '#user_display_name' do
subject { helper.user_display_name(user) }
before do
stub_current_user(nil)
end
context 'for a confirmed user' do
let(:user) { create(:user) }
before do
stub_profile_permission_allowed(true)
end
it { is_expected.to eq(user.name) }
end
context 'for an unconfirmed user' do
let(:user) { create(:user, :unconfirmed) }
before do
stub_profile_permission_allowed(false)
end
it { is_expected.to eq('Unconfirmed user') }
context 'when current user is an admin' do
before do
admin_user = create(:admin)
stub_current_user(admin_user)
stub_profile_permission_allowed(true, admin_user)
end
it { is_expected.to eq(user.name) }
end
context 'when the current user is self' do
before do
stub_current_user(user)
stub_profile_permission_allowed(true, user)
end
it { is_expected.to eq(user.name) }
end
end
context 'for a blocked user' do
let(:user) { create(:user, :blocked) }
it { is_expected.to eq('Blocked user') }
end
def stub_current_user(user)
allow(helper).to receive(:current_user).and_return(user)
end
def stub_profile_permission_allowed(allowed, current_user = nil)
allow(helper).to receive(:can?).with(user, :read_user_profile, current_user).and_return(allowed)
end
end
end

View File

@ -10,7 +10,7 @@ RSpec.describe Gitlab::Ci::Ansi2json::Result do
{ lines: [], state: state, append: false, truncated: false, offset: offset, stream: stream }
end
subject { described_class.new(params) }
subject { described_class.new(**params) }
describe '#size' do
before do

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Ansi2json::Style do
describe '#set?' do
subject { described_class.new(params).set? }
subject { described_class.new(**params).set? }
context 'when fg color is set' do
let(:params) { { fg: 'term-fg-black' } }
@ -44,7 +44,7 @@ RSpec.describe Gitlab::Ci::Ansi2json::Style do
end
describe 'update formats to mimic terminals' do
subject { described_class.new(params) }
subject { described_class.new(**params) }
context 'when fg color present' do
let(:params) { { fg: 'term-fg-black', mask: mask } }

View File

@ -142,7 +142,7 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata do
it 'reads expected number of entries' do
stream = File.open(tmpfile.path)
metadata = described_class.new(stream, 'public', { recursive: true })
metadata = described_class.new(stream, 'public', recursive: true)
expect(metadata.find_entries!.count).to eq entry_count
end

View File

@ -81,7 +81,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
context 'when configuration has ports' do
let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] }
let(:config) { { name: 'ruby:2.7', entrypoint: %w(/bin/sh run), ports: ports } }
let(:entry) { described_class.new(config, { with_image_ports: image_ports }) }
let(:entry) { described_class.new(config, with_image_ports: image_ports) }
let(:image_ports) { false }
context 'when with_image_ports metadata is not enabled' do

View File

@ -165,6 +165,45 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Need do
end
end
context 'with cross pipeline artifacts needs' do
context 'when pipeline is provided' do
context 'when job is provided' do
let(:config) { { job: 'job_name', pipeline: '$THE_PIPELINE_ID' } }
it { is_expected.to be_valid }
it 'sets artifacts:true by default' do
expect(need.value).to eq(job: 'job_name', pipeline: '$THE_PIPELINE_ID', artifacts: true)
end
it 'sets the type as cross_dependency' do
expect(need.type).to eq(:cross_dependency)
end
end
context 'when artifacts is provided' do
let(:config) { { job: 'job_name', pipeline: '$THE_PIPELINE_ID', artifacts: false } }
it { is_expected.to be_valid }
it 'returns the correct value' do
expect(need.value).to eq(job: 'job_name', pipeline: '$THE_PIPELINE_ID', artifacts: false)
end
end
end
context 'when config contains not allowed keys' do
let(:config) { { job: 'job_name', pipeline: '$THE_PIPELINE_ID', something: 'else' } }
it { is_expected.not_to be_valid }
it 'returns an error' do
expect(need.errors)
.to contain_exactly('cross pipeline dependency config contains unknown keys: something')
end
end
end
context 'when need config is not a string or a hash' do
let(:config) { :job_name }

View File

@ -6,7 +6,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
subject(:needs) { described_class.new(config) }
before do
needs.metadata[:allowed_needs] = %i[job]
needs.metadata[:allowed_needs] = %i[job cross_dependency]
end
describe 'validations' do
@ -66,6 +66,27 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs do
end
end
end
context 'with too many cross pipeline dependencies' do
let(:limit) { described_class::NEEDS_CROSS_PIPELINE_DEPENDENCIES_LIMIT }
let(:config) do
Array.new(limit.next) do |index|
{ pipeline: "$UPSTREAM_PIPELINE_#{index}", job: 'job-1' }
end
end
describe '#valid?' do
it { is_expected.not_to be_valid }
end
describe '#errors' do
it 'returns error about incorrect type' do
expect(needs.errors).to contain_exactly(
"needs config must be less than or equal to #{limit}")
end
end
end
end
describe '.compose!' do

View File

@ -96,7 +96,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
{ name: 'postgresql:9.5', alias: 'db', command: %w(cmd run), entrypoint: %w(/bin/sh run), ports: ports }
end
let(:entry) { described_class.new(config, { with_image_ports: image_ports }) }
let(:entry) { described_class.new(config, with_image_ports: image_ports) }
let(:image_ports) { false }
context 'when with_image_ports metadata is not enabled' do

View File

@ -38,7 +38,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Services do
context 'when configuration has ports' do
let(:ports) { [{ number: 80, protocol: 'http', name: 'foobar' }] }
let(:config) { ['postgresql:9.5', { name: 'postgresql:9.1', alias: 'postgres_old', ports: ports }] }
let(:entry) { described_class.new(config, { with_image_ports: image_ports }) }
let(:entry) { described_class.new(config, with_image_ports: image_ports) }
let(:image_ports) { false }
context 'when with_image_ports metadata is not enabled' do

View File

@ -2111,6 +2111,71 @@ module Gitlab
end
end
describe 'cross pipeline needs' do
context 'when configuration is valid' do
let(:config) do
<<~YAML
rspec:
stage: test
script: rspec
needs:
- pipeline: $THE_PIPELINE_ID
job: dependency-job
YAML
end
it 'returns a valid configuration and sets artifacts: true by default' do
expect(subject).to be_valid
rspec = subject.build_attributes(:rspec)
expect(rspec.dig(:options, :cross_dependencies)).to eq(
[{ pipeline: '$THE_PIPELINE_ID', job: 'dependency-job', artifacts: true }]
)
end
context 'when pipeline ID is hard-coded' do
let(:config) do
<<~YAML
rspec:
stage: test
script: rspec
needs:
- pipeline: "123"
job: dependency-job
YAML
end
it 'returns a valid configuration and sets artifacts: true by default' do
expect(subject).to be_valid
rspec = subject.build_attributes(:rspec)
expect(rspec.dig(:options, :cross_dependencies)).to eq(
[{ pipeline: '123', job: 'dependency-job', artifacts: true }]
)
end
end
end
context 'when configuration is not valid' do
let(:config) do
<<~YAML
rspec:
stage: test
script: rspec
needs:
- pipeline: $THE_PIPELINE_ID
job: dependency-job
something: else
YAML
end
it 'returns an error' do
expect(subject).not_to be_valid
expect(subject.errors).to include(/:need config contains unknown keys: something/)
end
end
end
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }

View File

@ -11,7 +11,7 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
project.add_maintainer(user)
end
let(:stage_summary) { described_class.new(project, options).data }
let(:stage_summary) { described_class.new(project, **options).data }
describe "#new_issues" do
subject { stage_summary.first }
@ -121,7 +121,7 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
it 'does not include commit stats' do
data = described_class.new(project, options).data
data = described_class.new(project, **options).data
expect(includes_commits?(data)).to be_falsy
end

View File

@ -6,7 +6,7 @@ require "spec_helper"
RSpec.describe Gitlab::Email::ReplyParser do
describe '#execute' do
def test_parse_body(mail_string, params = {})
described_class.new(Mail::Message.new(mail_string), params).execute
described_class.new(Mail::Message.new(mail_string), **params).execute
end
it "returns an empty string if the message is blank" do

View File

@ -1,38 +0,0 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Gitlab::GoogleCodeImport::Client do
let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) }
subject { described_class.new(raw_data) }
describe "#valid?" do
context "when the data is valid" do
it "returns true" do
expect(subject).to be_valid
end
end
context "when the data is invalid" do
let(:raw_data) { "No clue" }
it "returns true" do
expect(subject).not_to be_valid
end
end
end
describe "#repos" do
it "returns only Git repositories" do
expect(subject.repos.length).to eq(1)
expect(subject.incompatible_repos.length).to eq(1)
end
end
describe "#repo" do
it "returns the referenced repository" do
expect(subject.repo("tint2").name).to eq("tint2")
end
end
end

View File

@ -1,88 +0,0 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Gitlab::GoogleCodeImport::Importer do
let(:mapped_user) { create(:user, username: "thilo123") }
let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) }
let(:client) { Gitlab::GoogleCodeImport::Client.new(raw_data) }
let(:import_data) do
{
'repo' => client.repo('tint2').raw_data,
'user_map' => { 'thilo...' => "@#{mapped_user.username}" }
}
end
let(:project) { create(:project) }
subject { described_class.new(project) }
before do
project.add_maintainer(project.creator)
project.create_import_data(data: import_data)
end
describe "#execute" do
it "imports status labels" do
subject.execute
%w(New NeedInfo Accepted Wishlist Started Fixed Invalid Duplicate WontFix Incomplete).each do |status|
expect(project.labels.find_by(name: "Status: #{status}")).not_to be_nil
end
end
it "imports labels" do
subject.execute
%w(
Type-Defect Type-Enhancement Type-Task Type-Review Type-Other Milestone-0.12 Priority-Critical
Priority-High Priority-Medium Priority-Low OpSys-All OpSys-Windows OpSys-Linux OpSys-OSX Security
Performance Usability Maintainability Component-Panel Component-Taskbar Component-Battery
Component-Systray Component-Clock Component-Launcher Component-Tint2conf Component-Docs Component-New
).each do |label|
label = label.sub("-", ": ")
expect(project.labels.find_by(name: label)).not_to be_nil
end
end
it "imports issues" do
subject.execute
issue = project.issues.first
expect(issue).not_to be_nil
expect(issue.iid).to eq(169)
expect(issue.author).to eq(project.creator)
expect(issue.assignees).to eq([mapped_user])
expect(issue.state).to eq("closed")
expect(issue.label_names).to include("Priority: Medium")
expect(issue.label_names).to include("Status: Fixed")
expect(issue.label_names).to include("Type: Enhancement")
expect(issue.title).to eq("Scrolling through tasks")
expect(issue.state).to eq("closed")
expect(issue.description).to include("schattenpr\\.\\.\\.")
expect(issue.description).to include("November 18, 2009 00:20")
expect(issue.description).to include("Google Code")
expect(issue.description).to include('I like to scroll through the tasks with my scrollwheel (like in fluxbox).')
expect(issue.description).to include('Patch is attached that adds two new mouse-actions (next_task+prev_task)')
expect(issue.description).to include('that can be used for exactly that purpose.')
expect(issue.description).to include('all the best!')
expect(issue.description).to include('[tint2_task_scrolling.diff](https://storage.googleapis.com/google-code-attachments/tint2/issue-169/comment-0/tint2_task_scrolling.diff)')
expect(issue.description).to include('![screenshot.png](https://storage.googleapis.com/google-code-attachments/tint2/issue-169/comment-0/screenshot.png)')
expect(issue.description).to include('![screenshot1.PNG](https://storage.googleapis.com/google-code-attachments/tint2/issue-169/comment-0/screenshot1.PNG)')
end
it "imports issue comments" do
subject.execute
note = project.issues.first.notes.first
expect(note).not_to be_nil
expect(note.note).to include("Comment 1")
expect(note.note).to include("@#{mapped_user.username}")
expect(note.note).to include("November 18, 2009 05:14")
expect(note.note).to include("applied, thanks.")
expect(note.note).to include("Status: Fixed")
expect(note.note).to include("~~Type: Defect~~")
expect(note.note).to include("Type: Enhancement")
end
end
end

View File

@ -1,32 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GoogleCodeImport::ProjectCreator do
let(:user) { create(:user) }
let(:repo) do
Gitlab::GoogleCodeImport::Repository.new(
"name" => 'vim',
"summary" => 'VI Improved',
"repositoryUrls" => ["https://vim.googlecode.com/git/"]
)
end
let(:namespace) { create(:group) }
before do
namespace.add_owner(user)
end
it 'creates project' do
expect_next_instance_of(Project) do |project|
expect(project).to receive(:add_import_job)
end
project_creator = described_class.new(repo, namespace, user)
project = project_creator.execute
expect(project.import_url).to eq("https://vim.googlecode.com/git/")
expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
end

View File

@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::Pagination::ExternallyPaginatedArrayConnection d
let(:arguments) { {} }
subject(:connection) do
described_class.new(all_nodes, { max_page_size: values.size }.merge(arguments))
described_class.new(all_nodes, **{ max_page_size: values.size }.merge(arguments))
end
it_behaves_like 'a connection with collection methods'

View File

@ -10,11 +10,11 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
subject(:connection) do
described_class.new(nodes, { context: context, max_page_size: 3 }.merge(arguments))
described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
end
def encoded_cursor(node)
described_class.new(nodes, { context: context }).cursor_for(node)
described_class.new(nodes, context: context).cursor_for(node)
end
def decoded_cursor(cursor)

View File

@ -11,7 +11,6 @@ RSpec.describe Gitlab::ImportSources do
'Bitbucket Cloud' => 'bitbucket',
'Bitbucket Server' => 'bitbucket_server',
'GitLab.com' => 'gitlab',
'Google Code' => 'google_code',
'FogBugz' => 'fogbugz',
'Repo by URL' => 'git',
'GitLab export' => 'gitlab_project',
@ -32,7 +31,6 @@ RSpec.describe Gitlab::ImportSources do
bitbucket
bitbucket_server
gitlab
google_code
fogbugz
git
gitlab_project
@ -53,7 +51,6 @@ RSpec.describe Gitlab::ImportSources do
bitbucket
bitbucket_server
gitlab
google_code
fogbugz
gitlab_project
gitea
@ -70,7 +67,6 @@ RSpec.describe Gitlab::ImportSources do
'bitbucket' => Gitlab::BitbucketImport::Importer,
'bitbucket_server' => Gitlab::BitbucketServerImport::Importer,
'gitlab' => Gitlab::GitlabImport::Importer,
'google_code' => Gitlab::GoogleCodeImport::Importer,
'fogbugz' => Gitlab::FogbugzImport::Importer,
'git' => nil,
'gitlab_project' => Gitlab::ImportExport::Importer,
@ -92,7 +88,6 @@ RSpec.describe Gitlab::ImportSources do
'bitbucket' => 'Bitbucket Cloud',
'bitbucket_server' => 'Bitbucket Server',
'gitlab' => 'GitLab.com',
'google_code' => 'Google Code',
'fogbugz' => 'FogBugz',
'git' => 'Repo by URL',
'gitlab_project' => 'GitLab export',

View File

@ -155,9 +155,9 @@ RSpec.describe Ci::BuildDependencies do
subject { dependencies.all }
it 'returns the union of all local dependencies and any cross pipeline dependencies' do
it 'returns the union of all local dependencies and any cross project dependencies' do
expect(dependencies).to receive(:local).and_return([1, 2, 3])
expect(dependencies).to receive(:cross_pipeline).and_return([3, 4])
expect(dependencies).to receive(:cross_project).and_return([3, 4])
expect(subject).to contain_exactly(1, 2, 3, 4)
end

View File

@ -3024,6 +3024,17 @@ RSpec.describe Project, factory_default: :keep do
expect { project.set_repository_read_only! }.to raise_error(described_class::RepositoryReadOnlyError, /in progress/)
end
context 'skip_git_transfer_check is true' do
it 'makes the project read-only when git transfers are in progress' do
allow(project).to receive(:git_transfer_in_progress?) { true }
expect { project.set_repository_read_only!(skip_git_transfer_check: true) }
.to change(project, :repository_read_only?)
.from(false)
.to(true)
end
end
end
describe '#set_repository_writable!' do

View File

@ -160,4 +160,16 @@ RSpec.describe UserPolicy do
it { is_expected.not_to be_allowed(:read_group_count) }
end
end
describe ':read_user_profile' do
context 'when the user is unconfirmed' do
let(:user) { create(:user, :unconfirmed) }
it { is_expected.not_to be_allowed(:read_user_profile) }
end
context 'when the user is confirmed' do
it { is_expected.to be_allowed(:read_user_profile) }
end
end
end

View File

@ -6,7 +6,7 @@ RSpec.describe API::ProjectRepositoryStorageMoves do
include AccessMatchersForRequest
let_it_be(:user) { create(:admin) }
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :repository).tap { |project| project.track_project_repository } }
let_it_be(:storage_move) { create(:project_repository_storage_move, :scheduled, project: project) }
shared_examples 'get single project repository storage move' do
@ -159,4 +159,64 @@ RSpec.describe API::ProjectRepositoryStorageMoves do
end
end
end
describe 'POST /project_repository_storage_moves' do
let(:source_storage_name) { 'default' }
let(:destination_storage_name) { 'test_second_storage' }
def create_project_repository_storage_moves
post api('/project_repository_storage_moves', user), params: {
source_storage_name: source_storage_name,
destination_storage_name: destination_storage_name
}
end
before do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
end
it 'schedules the worker' do
expect(ProjectScheduleBulkRepositoryShardMovesWorker).to receive(:perform_async).with(source_storage_name, destination_storage_name)
create_project_repository_storage_moves
expect(response).to have_gitlab_http_status(:accepted)
end
context 'source_storage_name is invalid' do
let(:source_storage_name) { 'not-a-real-storage' }
it 'gives an error' do
create_project_repository_storage_moves
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'destination_storage_name is missing' do
let(:destination_storage_name) { nil }
it 'schedules the worker' do
expect(ProjectScheduleBulkRepositoryShardMovesWorker).to receive(:perform_async).with(source_storage_name, destination_storage_name)
create_project_repository_storage_moves
expect(response).to have_gitlab_http_status(:accepted)
end
end
context 'destination_storage_name is invalid' do
let(:destination_storage_name) { 'not-a-real-storage' }
it 'gives an error' do
create_project_repository_storage_moves
expect(response).to have_gitlab_http_status(:bad_request)
end
end
describe 'normal user' do
it { expect { create_project_repository_storage_moves }.to be_denied_for(:user) }
end
end
end

View File

@ -126,32 +126,6 @@ RSpec.describe Import::BitbucketServerController, 'routing' do
end
end
# status_import_google_code GET /import/google_code/status(.:format) import/google_code#status
# callback_import_google_code POST /import/google_code/callback(.:format) import/google_code#callback
# jobs_import_google_code GET /import/google_code/jobs(.:format) import/google_code#jobs
# new_user_map_import_google_code GET /import/google_code/user_map(.:format) import/google_code#new_user_map
# create_user_map_import_google_code POST /import/google_code/user_map(.:format) import/google_code#create_user_map
# import_google_code POST /import/google_code(.:format) import/google_code#create
# new_import_google_code GET /import/google_code/new(.:format) import/google_code#new
RSpec.describe Import::GoogleCodeController, 'routing' do
it_behaves_like 'importer routing' do
let(:except_actions) { [:callback] }
let(:provider) { 'google_code' }
end
it 'to #callback' do
expect(post("/import/google_code/callback")).to route_to("import/google_code#callback")
end
it 'to #new_user_map' do
expect(get('/import/google_code/user_map')).to route_to('import/google_code#new_user_map')
end
it 'to #create_user_map' do
expect(post('/import/google_code/user_map')).to route_to('import/google_code#create_user_map')
end
end
# status_import_fogbugz GET /import/fogbugz/status(.:format) import/fogbugz#status
# callback_import_fogbugz POST /import/fogbugz/callback(.:format) import/fogbugz#callback
# realtime_changes_import_fogbugz GET /import/fogbugz/realtime_changes(.:format) import/fogbugz#realtime_changes

Some files were not shown because too many files have changed in this diff.