Merge remote-tracking branch 'origin/master'

John T Skarbek committed on 2019-04-30 18:58:48 -04:00
commit b1e7c4c6f1
GPG Key ID: B698C3146A236712 (no known key found for this signature in database)
213 changed files with 2960 additions and 849 deletions

View File

@ -1 +1 @@
1.35.0
1.36.0

View File

@ -285,7 +285,7 @@ gem 'gettext_i18n_rails', '~> 1.8.0'
gem 'gettext_i18n_rails_js', '~> 1.3'
gem 'gettext', '~> 3.2.2', require: false, group: :development
gem 'batch-loader', '~> 1.2.2'
gem 'batch-loader', '~> 1.4.0'
# Perf bar
gem 'peek', '~> 1.0.1'
@ -309,7 +309,7 @@ group :development do
gem 'foreman', '~> 0.84.0'
gem 'brakeman', '~> 4.2', require: false
gem 'letter_opener_web', '~> 1.3.0'
gem 'letter_opener_web', '~> 1.3.4'
gem 'rblineprof', '~> 0.3.6', platform: :mri, require: false
# Better errors handler
@ -417,7 +417,7 @@ group :ed25519 do
end
# Gitaly GRPC client
gem 'gitaly-proto', '~> 1.22.0', require: 'gitaly'
gem 'gitaly-proto', '~> 1.26.0', require: 'gitaly'
gem 'grpc', '~> 1.19.0'

View File

@ -76,7 +76,7 @@ GEM
thread_safe (~> 0.3, >= 0.3.1)
babosa (1.0.2)
base32 (0.3.2)
batch-loader (1.2.2)
batch-loader (1.4.0)
bcrypt (3.1.12)
bcrypt_pbkdf (1.0.0)
benchmark-ips (2.3.0)
@ -283,7 +283,7 @@ GEM
gettext_i18n_rails (>= 0.7.1)
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gitaly-proto (1.22.0)
gitaly-proto (1.26.0)
grpc (~> 1.0)
github-markup (1.7.0)
gitlab-default_value_for (3.1.1)
@ -444,9 +444,9 @@ GEM
rest-client (~> 2.0)
launchy (2.4.3)
addressable (~> 2.3)
letter_opener (1.4.1)
letter_opener (1.7.0)
launchy (~> 2.2)
letter_opener_web (1.3.0)
letter_opener_web (1.3.4)
actionmailer (>= 3.2)
letter_opener (~> 1.0)
railties (>= 3.2)
@ -999,7 +999,7 @@ DEPENDENCIES
awesome_print
babosa (~> 1.0.2)
base32 (~> 0.3.0)
batch-loader (~> 1.2.2)
batch-loader (~> 1.4.0)
bcrypt_pbkdf (~> 1.0)
benchmark-ips (~> 2.3.0)
better_errors (~> 2.5.0)
@ -1056,7 +1056,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.3)
gitaly-proto (~> 1.22.0)
gitaly-proto (~> 1.26.0)
github-markup (~> 1.7.0)
gitlab-default_value_for (~> 3.1.1)
gitlab-labkit (~> 0.1.2)
@ -1093,7 +1093,7 @@ DEPENDENCIES
kaminari (~> 1.0)
knapsack (~> 1.17)
kubeclient (~> 4.2.2)
letter_opener_web (~> 1.3.0)
letter_opener_web (~> 1.3.4)
license_finder (~> 5.4)
licensee (~> 8.9)
lograge (~> 0.5)

View File

@ -1,8 +1,9 @@
import Flash from '../flash';
import BalsamiqViewer from './balsamiq/balsamiq_viewer';
import { __ } from '~/locale';
function onError() {
const flash = new Flash('Balsamiq file could not be loaded.');
const flash = new Flash(__('Balsamiq file could not be loaded.'));
return flash;
}

View File

@ -5,6 +5,7 @@ import Dropzone from 'dropzone';
import { visitUrl } from '../lib/utils/url_utility';
import { HIDDEN_CLASS } from '../lib/utils/constants';
import csrf from '../lib/utils/csrf';
import { sprintf, __ } from '~/locale';
Dropzone.autoDiscover = false;
@ -73,7 +74,7 @@ export default class BlobFileDropzone {
.html(errorMessage)
.text();
$('.dropzone-alerts')
.html(`Error uploading file: "${stripped}"`)
.html(sprintf(__('Error uploading file: %{stripped}'), { stripped }))
.show();
this.removeFile(file);
},
@ -84,7 +85,7 @@ export default class BlobFileDropzone {
e.stopPropagation();
if (dropzone[0].dropzone.getQueuedFiles().length === 0) {
// eslint-disable-next-line no-alert
alert('Please select a file');
alert(__('Please select a file'));
return false;
}
toggleLoading(submitButton, submitButtonLoadingIcon, true);

View File

@ -1,5 +1,6 @@
import JSZip from 'jszip';
import JSZipUtils from 'jszip-utils';
import { __ } from '~/locale';
export default class SketchLoader {
constructor(container) {
@ -56,10 +57,10 @@ export default class SketchLoader {
const errorMsg = document.createElement('p');
errorMsg.className = 'prepend-top-default append-bottom-default text-center';
errorMsg.textContent = `
errorMsg.textContent = __(`
Cannot show preview. For previews on sketch files, they must have the file format
introduced by Sketch version 43 and above.
`;
`);
this.container.appendChild(errorMsg);
this.removeLoadingIcon();

View File

@ -1,11 +1,12 @@
import FileTemplateSelector from '../file_template_selector';
import { __ } from '~/locale';
export default class DockerfileSelector extends FileTemplateSelector {
constructor({ mediator }) {
super(mediator);
this.config = {
key: 'dockerfile',
name: 'Dockerfile',
name: __('Dockerfile'),
pattern: /(Dockerfile)/,
type: 'dockerfiles',
dropdown: '.js-dockerfile-selector',

View File

@ -2,6 +2,7 @@ import $ from 'jquery';
import Flash from '../../flash';
import { handleLocationHash } from '../../lib/utils/common_utils';
import axios from '../../lib/utils/axios_utils';
import { __ } from '~/locale';
export default class BlobViewer {
constructor() {
@ -26,7 +27,7 @@ export default class BlobViewer {
promise
.then(module => module.default(viewer))
.catch(error => {
Flash('Error loading file viewer.');
Flash(__('Error loading file viewer.'));
throw error;
});
@ -106,16 +107,19 @@ export default class BlobViewer {
if (!this.copySourceBtn) return;
if (this.simpleViewer.getAttribute('data-loaded')) {
this.copySourceBtn.setAttribute('title', 'Copy source to clipboard');
this.copySourceBtn.setAttribute('title', __('Copy source to clipboard'));
this.copySourceBtn.classList.remove('disabled');
} else if (this.activeViewer === this.simpleViewer) {
this.copySourceBtn.setAttribute(
'title',
'Wait for the source to load to copy it to the clipboard',
__('Wait for the source to load to copy it to the clipboard'),
);
this.copySourceBtn.classList.add('disabled');
} else {
this.copySourceBtn.setAttribute('title', 'Switch to the source to copy it to the clipboard');
this.copySourceBtn.setAttribute(
'title',
__('Switch to the source to copy it to the clipboard'),
);
this.copySourceBtn.classList.add('disabled');
}
@ -158,7 +162,7 @@ export default class BlobViewer {
this.toggleCopyButtonState();
})
.catch(() => new Flash('Error loading viewer'));
.catch(() => new Flash(__('Error loading viewer')));
}
static loadViewer(viewerParam) {

View File

@ -83,7 +83,7 @@ export default {
}"
:index="index"
:data-issue-id="issue.id"
class="board-card position-relative p-3 rounded"
class="board-card p-3 rounded"
@mousedown="mouseDown"
@mousemove="mouseMove"
@mouseup="showIssue($event)"

View File

@ -279,14 +279,10 @@ export default class Clusters {
this.store.acknowledgeSuccessfulUpdate(appId);
}
toggleIngressDomainHelpText(ingressPreviousState, ingressNewState) {
const { externalIp, status } = ingressNewState;
const helpTextHidden = status !== APPLICATION_STATUS.INSTALLED || !externalIp;
const domainSnippetText = `${externalIp}${INGRESS_DOMAIN_SUFFIX}`;
if (ingressPreviousState.status !== status) {
this.ingressDomainHelpText.classList.toggle('hide', helpTextHidden);
this.ingressDomainSnippet.textContent = domainSnippetText;
toggleIngressDomainHelpText({ externalIp }, { externalIp: newExternalIp }) {
if (externalIp !== newExternalIp) {
this.ingressDomainHelpText.classList.toggle('hide', !newExternalIp);
this.ingressDomainSnippet.textContent = `${newExternalIp}${INGRESS_DOMAIN_SUFFIX}`;
}
}

View File

@ -12,6 +12,7 @@ import stageStagingComponent from './components/stage_staging_component.vue';
import stageTestComponent from './components/stage_test_component.vue';
import CycleAnalyticsService from './cycle_analytics_service';
import CycleAnalyticsStore from './cycle_analytics_store';
import { __ } from '~/locale';
Vue.use(Translate);
@ -61,7 +62,7 @@ export default () => {
methods: {
handleError() {
this.store.setErrorState(true);
return new Flash('There was an error while fetching cycle analytics data.');
return new Flash(__('There was an error while fetching cycle analytics data.'));
},
initDropdown() {
const $dropdown = $('.js-ca-dropdown');

View File

@ -3,6 +3,7 @@ import VueRouter from 'vue-router';
import { joinPaths } from '~/lib/utils/url_utility';
import flash from '~/flash';
import store from './stores';
import { __ } from '~/locale';
Vue.use(VueRouter);
@ -94,7 +95,7 @@ router.beforeEach((to, from, next) => {
})
.catch(e => {
flash(
'Error while loading the project data. Please try again.',
__('Error while loading the project data. Please try again.'),
'alert',
document,
null,

View File

@ -1,4 +1,5 @@
import { activityBarViews } from '../../../constants';
import { __ } from '~/locale';
export const templateTypes = () => [
{
@ -10,11 +11,11 @@ export const templateTypes = () => [
key: 'gitignores',
},
{
name: 'LICENSE',
name: __('LICENSE'),
key: 'licenses',
},
{
name: 'Dockerfile',
name: __('Dockerfile'),
key: 'dockerfiles',
},
];

View File

@ -290,7 +290,7 @@ export default class SSHMirror {
this.setSSHPublicKey(data.import_data_attributes.ssh_public_key);
})
.catch(() => {
Flash(_('Unable to regenerate public ssh key.'));
Flash(__('Unable to regenerate public ssh key.'));
});
}

View File

@ -1,7 +1,7 @@
import axios from '../../lib/utils/axios_utils';
import statusCodes from '../../lib/utils/http_status';
import { backOff } from '../../lib/utils/common_utils';
import { s__ } from '../../locale';
import { s__, __ } from '../../locale';
const MAX_REQUESTS = 3;
@ -15,7 +15,7 @@ function backOffRequest(makeRequestCallback) {
if (requestCounter < MAX_REQUESTS) {
next();
} else {
stop(new Error('Failed to connect to the prometheus server'));
stop(new Error(__('Failed to connect to the prometheus server')));
}
} else {
stop(resp);

View File

@ -1,10 +1,12 @@
import { __ } from '~/locale';
export const mrStates = {
merged: 'merged',
closed: 'closed',
};
export const humanMRStates = {
merged: 'Merged',
closed: 'Closed',
open: 'Open',
merged: __('Merged'),
closed: __('Closed'),
open: __('Open'),
};

View File

@ -1,6 +1,7 @@
import $ from 'jquery';
import { addSelectOnFocusBehaviour } from '../lib/utils/common_utils';
import { slugifyWithHyphens } from '../lib/utils/text_utility';
import { s__ } from '~/locale';
let hasUserDefinedProjectPath = false;
@ -114,71 +115,71 @@ const bindEvents = () => {
const value = $(this).val();
const templates = {
rails: {
text: 'Ruby on Rails',
text: s__('ProjectTemplates|Ruby on Rails'),
icon: '.template-option .icon-rails',
},
express: {
text: 'NodeJS Express',
text: s__('ProjectTemplates|NodeJS Express'),
icon: '.template-option .icon-express',
},
spring: {
text: 'Spring',
text: s__('ProjectTemplates|Spring'),
icon: '.template-option .icon-spring',
},
iosswift: {
text: 'iOS (Swift)',
text: s__('ProjectTemplates|iOS (Swift)'),
icon: '.template-option svg.icon-gitlab',
},
dotnetcore: {
text: '.NET Core',
text: s__('ProjectTemplates|.NET Core'),
icon: '.template-option .icon-dotnet',
},
android: {
text: 'Android',
text: s__('ProjectTemplates|Android'),
icon: '.template-option svg.icon-android',
},
gomicro: {
text: 'Go Micro',
text: s__('ProjectTemplates|Go Micro'),
icon: '.template-option .icon-gomicro',
},
hugo: {
text: 'Pages/Hugo',
text: s__('ProjectTemplates|Pages/Hugo'),
icon: '.template-option .icon-hugo',
},
jekyll: {
text: 'Pages/Jekyll',
text: s__('ProjectTemplates|Pages/Jekyll'),
icon: '.template-option .icon-jekyll',
},
plainhtml: {
text: 'Pages/Plain HTML',
text: s__('ProjectTemplates|Pages/Plain HTML'),
icon: '.template-option .icon-plainhtml',
},
gitbook: {
text: 'Pages/GitBook',
text: s__('ProjectTemplates|Pages/GitBook'),
icon: '.template-option .icon-gitbook',
},
hexo: {
text: 'Pages/Hexo',
text: s__('ProjectTemplates|Pages/Hexo'),
icon: '.template-option .icon-hexo',
},
nfhugo: {
text: 'Netlify/Hugo',
text: s__('ProjectTemplates|Netlify/Hugo'),
icon: '.template-option .icon-netlify',
},
nfjekyll: {
text: 'Netlify/Jekyll',
text: s__('ProjectTemplates|Netlify/Jekyll'),
icon: '.template-option .icon-netlify',
},
nfplainhtml: {
text: 'Netlify/Plain HTML',
text: s__('ProjectTemplates|Netlify/Plain HTML'),
icon: '.template-option .icon-netlify',
},
nfgitbook: {
text: 'Netlify/GitBook',
text: s__('ProjectTemplates|Netlify/GitBook'),
icon: '.template-option .icon-netlify',
},
nfhexo: {
text: 'Netlify/Hexo',
text: s__('ProjectTemplates|Netlify/Hexo'),
icon: '.template-option .icon-netlify',
},
};

View File

@ -4,8 +4,11 @@ const index = function index() {
RavenConfig.init({
sentryDsn: gon.sentry_dsn,
currentUserId: gon.current_user_id,
whitelistUrls: [gon.gitlab_url],
isProduction: process.env.NODE_ENV,
whitelistUrls:
process.env.NODE_ENV === 'production'
? [gon.gitlab_url]
: [gon.gitlab_url, 'webpack-internal://'],
environment: gon.sentry_environment,
release: gon.revision,
tags: {
revision: gon.revision,

View File

@ -61,7 +61,7 @@ const RavenConfig = {
release: this.options.release,
tags: this.options.tags,
whitelistUrls: this.options.whitelistUrls,
environment: this.options.isProduction ? 'production' : 'development',
environment: this.options.environment,
ignoreErrors: this.IGNORE_ERRORS,
ignoreUrls: this.IGNORE_URLS,
shouldSendCallback: this.shouldSendSample.bind(this),

View File

@ -1,3 +1,5 @@
import { __ } from '~/locale';
export default class U2FError {
constructor(errorCode, u2fFlowType) {
this.errorCode = errorCode;
@ -8,15 +10,17 @@ export default class U2FError {
message() {
if (this.errorCode === window.u2f.ErrorCodes.BAD_REQUEST && this.httpsDisabled) {
return 'U2F only works with HTTPS-enabled websites. Contact your administrator for more details.';
return __(
'U2F only works with HTTPS-enabled websites. Contact your administrator for more details.',
);
} else if (this.errorCode === window.u2f.ErrorCodes.DEVICE_INELIGIBLE) {
if (this.u2fFlowType === 'authenticate') {
return 'This device has not been registered with us.';
return __('This device has not been registered with us.');
}
if (this.u2fFlowType === 'register') {
return 'This device has already been registered with us.';
return __('This device has already been registered with us.');
}
}
return 'There was a problem communicating with your device.';
return __('There was a problem communicating with your device.');
}
}

View File

@ -1,10 +1,12 @@
import { __ } from '~/locale';
const viewers = {
image: {
id: 'image',
},
markdown: {
id: 'markdown',
previewTitle: 'Preview Markdown',
previewTitle: __('Preview Markdown'),
},
};

View File

@ -1,4 +1,5 @@
import $ from 'jquery';
import '~/commons/bootstrap';
export default {
bind(el) {

View File

@ -210,6 +210,7 @@
border: 1px solid $gray-200;
box-shadow: 0 1px 2px $issue-boards-card-shadow;
line-height: $gl-padding;
list-style: none;
&:not(:last-child) {
margin-bottom: $gl-padding-8;

View File

@ -4,6 +4,7 @@ class Clusters::ApplicationsController < Clusters::BaseController
before_action :cluster
before_action :authorize_create_cluster!, only: [:create]
before_action :authorize_update_cluster!, only: [:update]
before_action :authorize_admin_cluster!, only: [:destroy]
def create
request_handler do
@ -21,6 +22,14 @@ class Clusters::ApplicationsController < Clusters::BaseController
end
end
def destroy
request_handler do
Clusters::Applications::DestroyService
.new(@cluster, current_user, cluster_application_destroy_params)
.execute(request)
end
end
private
def request_handler
@ -40,4 +49,8 @@ class Clusters::ApplicationsController < Clusters::BaseController
def cluster_application_params
params.permit(:application, :hostname, :email)
end
def cluster_application_destroy_params
params.permit(:application)
end
end
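
The new destroy action permits only the application parameter and hands the work to a service object that schedules the uninstall asynchronously. A minimal sketch of the call chain this commit wires together, using the service and worker names that appear in the hunks further down (cluster, current_user and request stand in for the controller's own objects):

# Illustrative only: the uninstall request flow added by this commit.
params = { application: 'prometheus' }             # cluster_application_destroy_params

Clusters::Applications::DestroyService
  .new(cluster, current_user, params)
  .execute(request)
# => checks application.can_uninstall?, calls make_scheduled! and enqueues
#    Clusters::Applications::UninstallWorker, which runs UninstallService
#    (a Helm delete command) and then polls CheckUninstallProgressService
#    through WaitForUninstallAppWorker until the pod finishes or times out.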

View File

@ -132,18 +132,6 @@ class Projects::IssuesController < Projects::ApplicationController
render_conflict_response
end
def referenced_merge_requests
@merge_requests, @closed_by_merge_requests = ::Issues::ReferencedMergeRequestsService.new(project, current_user).execute(issue)
respond_to do |format|
format.json do
render json: {
html: view_to_html_string('projects/issues/_merge_requests')
}
end
end
end
def related_branches
@related_branches = Issues::RelatedBranchesService.new(project, current_user).execute(issue)

View File

@ -36,10 +36,10 @@ class ProjectsController < Projects::ApplicationController
# rubocop: disable CodeReuse/ActiveRecord
def new
namespace = Namespace.find_by(id: params[:namespace_id]) if params[:namespace_id]
return access_denied! if namespace && !can?(current_user, :create_projects, namespace)
@namespace = Namespace.find_by(id: params[:namespace_id]) if params[:namespace_id]
return access_denied! if @namespace && !can?(current_user, :create_projects, @namespace)
@project = Project.new(namespace_id: namespace&.id)
@project = Project.new(namespace_id: @namespace&.id)
end
# rubocop: enable CodeReuse/ActiveRecord

View File

@ -7,14 +7,14 @@ module Resolvers
prepended do
argument :full_path, GraphQL::ID_TYPE,
required: true,
description: 'The full path of the project or namespace, e.g., "gitlab-org/gitlab-ce"'
description: 'The full path of the project, group or namespace, e.g., "gitlab-org/gitlab-ce"'
end
def model_by_full_path(model, full_path)
BatchLoader.for(full_path).batch(key: model) do |full_paths, loader, args|
# `with_route` avoids an N+1 calculating full_path
args[:key].where_full_path_in(full_paths).with_route.each do |project|
loader.call(project.full_path, project)
args[:key].where_full_path_in(full_paths).with_route.each do |model_instance|
loader.call(model_instance.full_path, model_instance)
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Resolvers
class GroupResolver < BaseResolver
prepend FullPathResolver
type Types::GroupType, null: true
def resolve(full_path:)
model_by_full_path(Group, full_path)
end
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
module Types
class GroupType < NamespaceType
graphql_name 'Group'
authorize :read_group
expose_permissions Types::PermissionTypes::Group
field :web_url, GraphQL::STRING_TYPE, null: true
field :avatar_url, GraphQL::STRING_TYPE, null: true, resolve: -> (group, args, ctx) do
group.avatar_url(only_path: false)
end
if ::Group.supports_nested_objects?
field :parent, GroupType, null: true
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Types
class NamespaceType < BaseObject
graphql_name 'Namespace'
field :id, GraphQL::ID_TYPE, null: false
field :name, GraphQL::STRING_TYPE, null: false
field :path, GraphQL::STRING_TYPE, null: false
field :full_name, GraphQL::STRING_TYPE, null: false
field :full_path, GraphQL::ID_TYPE, null: false
field :description, GraphQL::STRING_TYPE, null: true
field :visibility, GraphQL::STRING_TYPE, null: true
field :lfs_enabled, GraphQL::BOOLEAN_TYPE, null: true, method: :lfs_enabled?
field :request_access_enabled, GraphQL::BOOLEAN_TYPE, null: true
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
module Types
module PermissionTypes
class Group < BasePermissionType
graphql_name 'GroupPermissions'
abilities :read_group
end
end
end

View File

@ -66,6 +66,9 @@ module Types
field :only_allow_merge_if_all_discussions_are_resolved, GraphQL::BOOLEAN_TYPE, null: true
field :printing_merge_request_link_enabled, GraphQL::BOOLEAN_TYPE, null: true
field :namespace, Types::NamespaceType, null: false
field :group, Types::GroupType, null: true
field :merge_requests,
Types::MergeRequestType.connection_type,
null: true,

View File

@ -9,6 +9,11 @@ module Types
resolver: Resolvers::ProjectResolver,
description: "Find a project"
field :group, Types::GroupType,
null: true,
resolver: Resolvers::GroupResolver,
description: "Find a group"
field :metadata, Types::MetadataType,
null: true,
resolver: Resolvers::MetadataResolver,
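
With GroupResolver, GroupType, NamespaceType and the GroupPermissions type above in place, the root query now exposes a group field. A minimal sketch of exercising it from a Rails console through GitlabSchema; the group path and current user are placeholders, and the field names assume GraphQL-Ruby's default camelCase conversion:

# Illustrative query against the new root-level group field.
query = <<~GRAPHQL
  {
    group(fullPath: "gitlab-org") {
      name
      fullPath
      webUrl
      userPermissions { readGroup }
    }
  }
GRAPHQL

result = GitlabSchema.execute(query, context: { current_user: User.first })
puts result.to_h['data']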

View File

@ -183,6 +183,22 @@ module ApplicationSettingImplementation
clientside_sentry_dsn.strip! if clientside_sentry_dsn.present?
end
def sentry_enabled
Gitlab.config.sentry.enabled || read_attribute(:sentry_enabled)
end
def sentry_dsn
Gitlab.config.sentry.dsn || read_attribute(:sentry_dsn)
end
def clientside_sentry_enabled
Gitlab.config.sentry.enabled || read_attribute(:clientside_sentry_enabled)
end
def clientside_sentry_dsn
Gitlab.config.sentry.dsn || read_attribute(:clientside_sentry_dsn)
end
def performance_bar_allowed_group
Group.find_by_id(performance_bar_allowed_group_id)
end

View File

@ -4,6 +4,7 @@ module Ci
class Bridge < CommitStatus
include Ci::Processable
include Ci::Contextable
include Ci::PipelineDelegator
include Importable
include AfterCommitQueue
include HasRef
@ -13,8 +14,6 @@ module Ci
belongs_to :trigger_request
validates :ref, presence: true
delegate :merge_request_event?, to: :pipeline
def self.retry(bridge, current_user)
raise NotImplementedError
end

View File

@ -6,6 +6,7 @@ module Ci
include Ci::Processable
include Ci::Metadatable
include Ci::Contextable
include Ci::PipelineDelegator
include TokenAuthenticatable
include AfterCommitQueue
include ObjectStorage::BackgroundMove
@ -49,8 +50,6 @@ module Ci
delegate :terminal_specification, to: :runner_session, allow_nil: true
delegate :gitlab_deploy_token, to: :project
delegate :trigger_short_token, to: :trigger_request, allow_nil: true
delegate :merge_request_event?, :merge_request_ref?,
:legacy_detached_merge_request_pipeline?, to: :pipeline
##
# Since Gitlab 11.5, deployments records started being created right after

View File

@ -759,6 +759,18 @@ module Ci
user == current_user
end
def source_ref
if triggered_by_merge_request?
merge_request.source_branch
else
ref
end
end
def source_ref_slug
Gitlab::Utils.slugify(source_ref.to_s)
end
private
def ci_yaml_from_repo
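
source_ref resolves to the merge request's source branch for merge request pipelines and falls back to ref otherwise, with source_ref_slug reusing Gitlab::Utils.slugify; the CI variables hunk below switches CI_COMMIT_REF_NAME / CI_COMMIT_REF_SLUG over to these methods. A small sketch of the intended behaviour (the branch name is a placeholder):

# Ordinary branch pipeline: source_ref is simply the pipeline ref.
pipeline = Ci::Pipeline.new(ref: 'feature/My-Branch')
pipeline.source_ref        # => "feature/My-Branch"
pipeline.source_ref_slug   # => "feature-my-branch"

# Merge request pipeline: source_ref returns merge_request.source_branch, so a
# detached ref such as refs/merge-requests/:iid/head no longer leaks into
# CI_COMMIT_REF_NAME or CI_COMMIT_REF_SLUG.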

View File

@ -24,6 +24,12 @@ module Clusters
'stable/cert-manager'
end
# We will implement this in future MRs.
# Need to reverse postinstall step
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: 'certmanager',

View File

@ -29,6 +29,13 @@ module Clusters
self.status = 'installable' if cluster&.platform_kubernetes_active?
end
# We will implement this in future MRs.
# Basically we need to check all other applications are not installed
# first.
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InitCommand.new(
name: name,

View File

@ -35,6 +35,13 @@ module Clusters
'stable/nginx-ingress'
end
# We will implement this in future MRs.
# Basically we need to check all dependent applications are not installed
# first.
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name,

View File

@ -38,6 +38,12 @@ module Clusters
content_values.to_yaml
end
# Will be addressed in future MRs
# We need to investigate and document what will be permanently deleted.
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name,

View File

@ -51,6 +51,12 @@ module Clusters
{ "domain" => hostname }.to_yaml
end
# Handled in a new issue:
# https://gitlab.com/gitlab-org/gitlab-ce/issues/59369
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name,

View File

@ -16,10 +16,12 @@ module Clusters
default_value_for :version, VERSION
after_destroy :disable_prometheus_integration
state_machine :status do
after_transition any => [:installed] do |application|
application.cluster.projects.each do |project|
project.find_or_initialize_service('prometheus').update(active: true)
project.find_or_initialize_service('prometheus').update!(active: true)
end
end
end
@ -47,6 +49,14 @@ module Clusters
)
end
def uninstall_command
Gitlab::Kubernetes::Helm::DeleteCommand.new(
name: name,
rbac: cluster.platform_kubernetes_rbac?,
files: files
)
end
def upgrade_command(values)
::Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name,
@ -82,6 +92,12 @@ module Clusters
private
def disable_prometheus_integration
cluster.projects.each do |project|
project.prometheus_service&.update!(active: false)
end
end
def kube_client
cluster&.kubeclient&.core_client
end

View File

@ -29,6 +29,13 @@ module Clusters
content_values.to_yaml
end
# Need to investigate whether pipelines run by this runner will stop when the
# executor pod stops, i.e. run a pipeline and uninstall the runner while the
# pipeline is running
def allowed_to_uninstall?
false
end
def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new(
name: name,

View File

@ -18,6 +18,16 @@ module Clusters
self.status = 'installable' if cluster&.application_helm_available?
end
def can_uninstall?
allowed_to_uninstall?
end
# All new applications should uninstall by default
# Override if there are dependencies that need to be uninstalled first
def allowed_to_uninstall?
true
end
def self.application_name
self.to_s.demodulize.underscore
end

View File

@ -25,9 +25,11 @@ module Clusters
state :updating, value: 4
state :updated, value: 5
state :update_errored, value: 6
state :uninstalling, value: 7
state :uninstall_errored, value: 8
event :make_scheduled do
transition [:installable, :errored, :installed, :updated, :update_errored] => :scheduled
transition [:installable, :errored, :installed, :updated, :update_errored, :uninstall_errored] => :scheduled
end
event :make_installing do
@ -40,8 +42,9 @@ module Clusters
end
event :make_errored do
transition any - [:updating] => :errored
transition any - [:updating, :uninstalling] => :errored
transition [:updating] => :update_errored
transition [:uninstalling] => :uninstall_errored
end
event :make_updating do
@ -52,6 +55,10 @@ module Clusters
transition any => :update_errored
end
event :make_uninstalling do
transition [:scheduled] => :uninstalling
end
before_transition any => [:scheduled] do |app_status, _|
app_status.status_reason = nil
end
@ -65,7 +72,7 @@ module Clusters
app_status.status_reason = nil
end
before_transition any => [:update_errored] do |app_status, transition|
before_transition any => [:update_errored, :uninstall_errored] do |app_status, transition|
status_reason = transition.args.first
app_status.status_reason = status_reason if status_reason
end
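
The status state machine gains two new values, uninstalling (7) and uninstall_errored (8), a make_uninstalling event, and an error path that records the failure reason. A compact sketch of the new lifecycle on an installed application record, assuming a Rails console for this revision (the reason string is a placeholder):

# Illustrative walk through the new uninstall states.
app = cluster.application_prometheus        # any Clusters::Applications::* record
app.status_name                             # => :installed

app.make_scheduled!                         # installed         -> scheduled
app.make_uninstalling!                      # scheduled         -> uninstalling (new event)
app.make_errored!('helm delete failed')     # uninstalling      -> uninstall_errored,
                                            #    the argument becomes status_reason
app.make_scheduled!                         # uninstall_errored -> scheduled (retry allowed)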

View File

@ -76,6 +76,10 @@ module Clusters
end
end
def namespace_for(project)
cluster.find_or_initialize_kubernetes_namespace_for_project(project).namespace
end
def predefined_variables(project:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
variables.append(key: 'KUBE_URL', value: api_url)

View File

@ -91,7 +91,8 @@ module Avatarable
private
def retrieve_upload_from_batch(identifier)
BatchLoader.for(identifier: identifier, model: self).batch(key: self.class) do |upload_params, loader, args|
BatchLoader.for(identifier: identifier, model: self)
.batch(key: self.class, cache: true, replace_methods: false) do |upload_params, loader, args|
model_class = args[:key]
paths = upload_params.flat_map do |params|
params[:model].upload_paths(params[:identifier])
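
batch-loader 1.4 makes caching and method replacement explicit options, so the Avatarable call site now passes cache: true, replace_methods: false to keep the previous caching behaviour while skipping the method-redefinition step. A stand-alone sketch of those options outside of GitLab (User here is a hypothetical ActiveRecord model, not part of this diff):

require 'batch_loader'   # gem 'batch-loader', '~> 1.4.0'

def lazy_user(id)
  BatchLoader.for(id).batch(key: :users, cache: true, replace_methods: false) do |ids, loader, args|
    # args[:key] == :users; one query resolves every id collected so far.
    User.where(id: ids).each { |user| loader.call(user.id, user) }
  end
end

users = [lazy_user(1), lazy_user(2)]  # nothing is loaded yet
users.first.name                      # first access triggers a single batched query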

View File

@ -70,8 +70,8 @@ module Ci
variables.append(key: 'CI_COMMIT_SHA', value: sha)
variables.append(key: 'CI_COMMIT_SHORT_SHA', value: short_sha)
variables.append(key: 'CI_COMMIT_BEFORE_SHA', value: before_sha)
variables.append(key: 'CI_COMMIT_REF_NAME', value: ref)
variables.append(key: 'CI_COMMIT_REF_SLUG', value: ref_slug)
variables.append(key: 'CI_COMMIT_REF_NAME', value: source_ref)
variables.append(key: 'CI_COMMIT_REF_SLUG', value: source_ref_slug)
variables.append(key: "CI_COMMIT_TAG", value: ref) if tag?
variables.append(key: "CI_PIPELINE_TRIGGERED", value: 'true') if trigger_request
variables.append(key: "CI_JOB_MANUAL", value: 'true') if action?
@ -85,8 +85,8 @@ module Ci
Gitlab::Ci::Variables::Collection.new.tap do |variables|
variables.append(key: 'CI_BUILD_REF', value: sha)
variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
variables.append(key: 'CI_BUILD_REF_NAME', value: ref)
variables.append(key: 'CI_BUILD_REF_SLUG', value: ref_slug)
variables.append(key: 'CI_BUILD_REF_NAME', value: source_ref)
variables.append(key: 'CI_BUILD_REF_SLUG', value: source_ref_slug)
variables.append(key: 'CI_BUILD_NAME', value: name)
variables.append(key: 'CI_BUILD_STAGE', value: stage)
variables.append(key: "CI_BUILD_TAG", value: ref) if tag?

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
##
# This module is mainly used by child associations of `Ci::Pipeline` that need to look up
# a single source of truth. For example, `Ci::Build` has a `git_ref` method, which behaves
# slightly differently from `Ci::Pipeline`'s `git_ref`. This is confusing because
# the system can behave differently depending on which method is used.
# We should keep a single interface in `Ci::Pipeline` and always access the method there.
module Ci
module PipelineDelegator
extend ActiveSupport::Concern
included do
delegate :merge_request_event?,
:merge_request_ref?,
:source_ref,
:source_ref_slug,
:legacy_detached_merge_request_pipeline?, to: :pipeline
end
end
end

View File

@ -1,5 +1,8 @@
# frozen_string_literal: true
##
# We will disable `ref` and `sha` attributes in `Ci::Build` in the future
# and remove this module in favor of Ci::PipelineDelegator.
module HasRef
extend ActiveSupport::Concern

View File

@ -1054,6 +1054,16 @@ class MergeRequest < ApplicationRecord
@environments[current_user]
end
##
# This method looks for active environments that were created via pipelines for merge requests.
# Since deployments run on a merge request ref (e.g. `refs/merge-requests/:iid/head`),
# we cannot look up environments by source branch name.
def environments
return Environment.none unless actual_head_pipeline&.triggered_by_merge_request?
actual_head_pipeline.environments
end
def state_human_name
if merged?
"Merged"

View File

@ -38,6 +38,8 @@ class PagesDomain < ApplicationRecord
where(verified_at.eq(nil).or(enabled_until.eq(nil).or(enabled_until.lt(threshold))))
end
scope :for_removal, -> { where("remove_at < ?", Time.now) }
def verified?
!!verified_at
end

View File

@ -94,6 +94,10 @@ class KubernetesService < DeploymentService
end
end
def namespace_for(project)
actual_namespace
end
# Check we can connect to the Kubernetes API
def test(*args)
kubeclient = build_kube_client!

View File

@ -63,19 +63,11 @@ module Ci
end
def link_to_merge_request_source_branch
return unless merge_request_presenter
link_to(merge_request_presenter.source_branch,
merge_request_presenter.source_branch_commits_path,
class: 'ref-name')
merge_request_presenter&.source_branch_link
end
def link_to_merge_request_target_branch
return unless merge_request_presenter
link_to(merge_request_presenter.target_branch,
merge_request_presenter.target_branch_commits_path,
class: 'ref-name')
merge_request_presenter&.target_branch_link
end
private

View File

@ -216,6 +216,22 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
help_page_path('ci/merge_request_pipelines/index.md')
end
def source_branch_link
if source_branch_exists?
link_to(source_branch, source_branch_commits_path, class: 'ref-name')
else
content_tag(:span, source_branch, class: 'ref-name')
end
end
def target_branch_link
if target_branch_exists?
link_to(target_branch, target_branch_commits_path, class: 'ref-name')
else
content_tag(:span, target_branch, class: 'ref-name')
end
end
private
def cached_can_be_reverted?

View File

@ -10,4 +10,5 @@ class ClusterApplicationEntity < Grape::Entity
expose :hostname, if: -> (e, _) { e.respond_to?(:hostname) }
expose :email, if: -> (e, _) { e.respond_to?(:email) }
expose :update_available?, as: :update_available, if: -> (e, _) { e.respond_to?(:update_available?) }
expose :can_uninstall?, as: :can_uninstall
end

View File

@ -9,12 +9,11 @@ module Ci
return unless @ref.present?
environments.each do |environment|
next unless environment.stop_action_available?
next unless can?(current_user, :stop_environment, environment)
environments.each { |environment| stop(environment) }
end
environment.stop_with_action!(current_user)
end
def execute_for_merge_request(merge_request)
merge_request.environments.each { |environment| stop(environment) }
end
private
@ -24,5 +23,12 @@ module Ci
.new(project, current_user, ref: @ref, recently_updated: true)
.execute
end
def stop(environment)
return unless environment.stop_action_available?
return unless can?(current_user, :stop_environment, environment)
environment.stop_with_action!(current_user)
end
end
end

View File

@ -37,7 +37,7 @@ module Clusters
end
def check_timeout
if timeouted?
if timed_out?
begin
app.make_errored!("Operation timed out. Check pod logs for #{pod_name} for more details.")
end
@ -51,8 +51,8 @@ module Clusters
install_command.pod_name
end
def timeouted?
Time.now.utc - app.updated_at.to_time.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
def timed_out?
Time.now.utc - app.updated_at.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
end
def remove_installation_pod

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true
module Clusters
module Applications
class CheckUninstallProgressService < BaseHelmService
def execute
return unless app.uninstalling?
case installation_phase
when Gitlab::Kubernetes::Pod::SUCCEEDED
on_success
when Gitlab::Kubernetes::Pod::FAILED
on_failed
else
check_timeout
end
rescue Kubeclient::HttpError => e
log_error(e)
app.make_errored!(_('Kubernetes error: %{error_code}') % { error_code: e.error_code })
end
private
def on_success
app.destroy!
rescue StandardError => e
app.make_errored!(_('Application uninstalled but failed to destroy: %{error_message}') % { error_message: e.message })
ensure
remove_installation_pod
end
def on_failed
app.make_errored!(_('Operation failed. Check pod logs for %{pod_name} for more details.') % { pod_name: pod_name })
end
def check_timeout
if timed_out?
app.make_errored!(_('Operation timed out. Check pod logs for %{pod_name} for more details.') % { pod_name: pod_name })
else
WaitForUninstallAppWorker.perform_in(WaitForUninstallAppWorker::INTERVAL, app.name, app.id)
end
end
def pod_name
app.uninstall_command.pod_name
end
def timed_out?
Time.now.utc - app.updated_at.utc > WaitForUninstallAppWorker::TIMEOUT
end
def remove_installation_pod
helm_api.delete_pod!(pod_name)
end
def installation_phase
helm_api.status(pod_name)
end
end
end
end

View File

@ -10,8 +10,8 @@ module Clusters
end
def builder
cluster.method("application_#{application_name}").call ||
cluster.method("build_application_#{application_name}").call
cluster.public_send(:"application_#{application_name}") || # rubocop:disable GitlabSecurity/PublicSend
cluster.public_send(:"build_application_#{application_name}") # rubocop:disable GitlabSecurity/PublicSend
end
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Clusters
module Applications
class DestroyService < ::Clusters::Applications::BaseService
def execute(_request)
instantiate_application.tap do |application|
break unless application.can_uninstall?
application.make_scheduled!
Clusters::Applications::UninstallWorker.perform_async(application.name, application.id)
end
end
private
def builder
cluster.public_send(:"application_#{application_name}") # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
module Clusters
module Applications
class UninstallService < BaseHelmService
def execute
return unless app.scheduled?
app.make_uninstalling!
uninstall
end
private
def uninstall
helm_api.uninstall(app.uninstall_command)
Clusters::Applications::WaitForUninstallAppWorker.perform_in(
Clusters::Applications::WaitForUninstallAppWorker::INTERVAL, app.name, app.id)
rescue Kubeclient::HttpError => e
log_error(e)
app.make_errored!("Kubernetes error: #{e.error_code}")
rescue StandardError => e
log_error(e)
app.make_errored!('Failed to uninstall.')
end
end
end
end

View File

@ -10,7 +10,7 @@ module Clusters
end
def builder
cluster.method("application_#{application_name}").call
cluster.public_send(:"application_#{application_name}") # rubocop:disable GitlabSecurity/PublicSend
end
end
end

View File

@ -73,13 +73,13 @@ module Git
def push_data
@push_data ||= Gitlab::DataBuilder::Push.build(
project,
current_user,
params[:oldrev],
params[:newrev],
params[:ref],
limited_commits,
event_message,
project: project,
user: current_user,
oldrev: params[:oldrev],
newrev: params[:newrev],
ref: params[:ref],
commits: limited_commits,
message: event_message,
commits_count: commits_count,
push_options: params[:push_options] || {}
)

View File

@ -24,6 +24,11 @@ module MergeRequests
end
end
def cleanup_environments(merge_request)
Ci::StopEnvironmentsService.new(merge_request.source_project, current_user)
.execute_for_merge_request(merge_request)
end
private
def handle_wip_event(merge_request)
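
cleanup_environments ties together two other pieces of this commit: MergeRequest#environments (which only returns environments of a pipeline triggered by the merge request) and the new Ci::StopEnvironmentsService#execute_for_merge_request. A condensed sketch of what happens when a merge request is closed or merged (merge_request and current_user come from the calling service):

# Illustrative flow on close / post-merge.
merge_request.environments
# => the head pipeline's environments, or Environment.none when the pipeline
#    was not triggered by the merge request

Ci::StopEnvironmentsService
  .new(merge_request.source_project, current_user)
  .execute_for_merge_request(merge_request)
# Each environment is skipped unless a stop action is available and the user
# may :stop_environment; otherwise environment.stop_with_action!(current_user).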

View File

@ -17,6 +17,7 @@ module MergeRequests
execute_hooks(merge_request, 'close')
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
cleanup_environments(merge_request)
end
merge_request

View File

@ -18,6 +18,7 @@ module MergeRequests
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
delete_non_latest_diffs(merge_request)
cleanup_environments(merge_request)
end
private

View File

@ -94,16 +94,13 @@ module Projects
return unless project.lfs_enabled?
lfs_objects_to_download = Projects::LfsPointers::LfsImportService.new(project).execute
result = Projects::LfsPointers::LfsImportService.new(project).execute
lfs_objects_to_download.each do |lfs_download_object|
Projects::LfsPointers::LfsDownloadService.new(project, lfs_download_object)
.execute
if result[:status] == :error
# To avoid aborting the importing process, we silently fail
# if any exception raises.
Gitlab::AppLogger.error("The Lfs import process failed. #{result[:message]}")
end
rescue => e
# Right now, to avoid aborting the importing process, we silently fail
# if any exception raises.
Rails.logger.error("The Lfs import process failed. #{e.message}")
end
def import_data

View File

@ -21,9 +21,9 @@ module Projects
# This method accepts two parameters:
# - oids: hash of oids to query. The structure is { lfs_file_oid => lfs_file_size }
#
# Returns a hash with the structure { lfs_file_oids => download_link }
# Returns an array of LfsDownloadObject
def execute(oids)
return {} unless project&.lfs_enabled? && remote_uri && oids.present?
return [] unless project&.lfs_enabled? && remote_uri && oids.present?
get_download_links(oids)
end

View File

@ -1,95 +1,23 @@
# frozen_string_literal: true
# This service manages the whole workflow of discovering the Lfs files in a
# repository, linking them to the project and downloading (and linking) the
# non-existent ones.
# This service is responsible for managing the retrieval of the lfs objects
# and calls LfsDownloadService, which performs the download for each of the
# retrieved lfs objects.
module Projects
module LfsPointers
class LfsImportService < BaseService
include Gitlab::Utils::StrongMemoize
HEAD_REV = 'HEAD'.freeze
LFS_ENDPOINT_PATTERN = /^\t?url\s*=\s*(.+)$/.freeze
LFS_BATCH_API_ENDPOINT = '/info/lfs/objects/batch'.freeze
LfsImportError = Class.new(StandardError)
def execute
return {} unless project&.lfs_enabled?
return success unless project&.lfs_enabled?
if external_lfs_endpoint?
# If the endpoint host is different from the import_url it means
# that the repo is using a third party service for storing the LFS files.
# In this case, we have to disable lfs in the project
disable_lfs!
lfs_objects_to_download = LfsObjectDownloadListService.new(project).execute
return {}
lfs_objects_to_download.each do |lfs_download_object|
LfsDownloadService.new(project, lfs_download_object).execute
end
get_download_links
rescue LfsDownloadLinkListService::DownloadLinksError => e
raise LfsImportError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
end
private
def external_lfs_endpoint?
lfsconfig_endpoint_uri && lfsconfig_endpoint_uri.host != import_uri.host
end
def disable_lfs!
project.update(lfs_enabled: false)
end
# rubocop: disable CodeReuse/ActiveRecord
def get_download_links
existent_lfs = LfsListService.new(project).execute
linked_oids = LfsLinkService.new(project).execute(existent_lfs.keys)
# Retrieving those oids not linked and which we need to download
not_linked_lfs = existent_lfs.except(*linked_oids)
LfsDownloadLinkListService.new(project, remote_uri: current_endpoint_uri).execute(not_linked_lfs)
end
# rubocop: enable CodeReuse/ActiveRecord
def lfsconfig_endpoint_uri
strong_memoize(:lfsconfig_endpoint_uri) do
# Retrieving the blob data from the .lfsconfig file
data = project.repository.lfsconfig_for(HEAD_REV)
# Parsing the data to retrieve the url
parsed_data = data&.match(LFS_ENDPOINT_PATTERN)
if parsed_data
URI.parse(parsed_data[1]).tap do |endpoint|
endpoint.user ||= import_uri.user
endpoint.password ||= import_uri.password
end
end
end
rescue URI::InvalidURIError
raise LfsImportError, 'Invalid URL in .lfsconfig file'
end
def import_uri
@import_uri ||= URI.parse(project.import_url)
rescue URI::InvalidURIError
raise LfsImportError, 'Invalid project import URL'
end
def current_endpoint_uri
(lfsconfig_endpoint_uri || default_endpoint_uri)
end
# The import url must end with '.git'; here we ensure it does
def default_endpoint_uri
@default_endpoint_uri ||= begin
import_uri.dup.tap do |uri|
path = uri.path.gsub(%r(/$), '')
path += '.git' unless path.ends_with?('.git')
uri.path = path + LFS_BATCH_API_ENDPOINT
end
end
success
rescue => e
error(e.message)
end
end
end
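
Once the removed lines are filtered out, the slimmed-down LfsImportService only asks the new LfsObjectDownloadListService (added later in this diff) for the objects to fetch, downloads each one, and reports success or error. A reconstruction of the new execute method from the added lines above, shown here only for readability:

def execute
  return success unless project&.lfs_enabled?

  lfs_objects_to_download = LfsObjectDownloadListService.new(project).execute

  lfs_objects_to_download.each do |lfs_download_object|
    LfsDownloadService.new(project, lfs_download_object).execute
  end

  success
rescue => e
  error(e.message)
end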

View File

@ -6,9 +6,9 @@ module Projects
class LfsLinkService < BaseService
# Accept an array of oids to link
#
# Returns a hash with the same structure with oids linked
# Returns an array with the oid of the existent lfs objects
def execute(oids)
return {} unless project&.lfs_enabled?
return [] unless project&.lfs_enabled?
# Search and link existing LFS Object
link_existing_lfs_objects(oids)

View File

@ -0,0 +1,96 @@
# frozen_string_literal: true
# This service manages the whole workflow of discovering the Lfs files in a
# repository, linking them to the project and downloading (and linking) the
# non-existent ones.
module Projects
module LfsPointers
class LfsObjectDownloadListService < BaseService
include Gitlab::Utils::StrongMemoize
HEAD_REV = 'HEAD'.freeze
LFS_ENDPOINT_PATTERN = /^\t?url\s*=\s*(.+)$/.freeze
LFS_BATCH_API_ENDPOINT = '/info/lfs/objects/batch'.freeze
LfsObjectDownloadListError = Class.new(StandardError)
def execute
return [] unless project&.lfs_enabled?
if external_lfs_endpoint?
# If the endpoint host is different from the import_url it means
# that the repo is using a third party service for storing the LFS files.
# In this case, we have to disable lfs in the project
disable_lfs!
return []
end
# Getting all Lfs pointers already in the database and linking them to the project
linked_oids = LfsLinkService.new(project).execute(lfs_pointers_in_repository.keys)
# Retrieving those oids not present in the database which we need to download
missing_oids = lfs_pointers_in_repository.except(*linked_oids) # rubocop: disable CodeReuse/ActiveRecord
# Downloading the required information and gathering it inside a LfsDownloadObject for each oid
LfsDownloadLinkListService.new(project, remote_uri: current_endpoint_uri).execute(missing_oids)
rescue LfsDownloadLinkListService::DownloadLinksError => e
raise LfsObjectDownloadListError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
end
private
def external_lfs_endpoint?
lfsconfig_endpoint_uri && lfsconfig_endpoint_uri.host != import_uri.host
end
def disable_lfs!
unless project.update(lfs_enabled: false)
raise LfsDownloadLinkListService::DownloadLinksError, "Invalid project state"
end
end
# Retrieves all lfs pointers in the repository
def lfs_pointers_in_repository
@lfs_pointers_in_repository ||= LfsListService.new(project).execute
end
def lfsconfig_endpoint_uri
strong_memoize(:lfsconfig_endpoint_uri) do
# Retrieving the blob data from the .lfsconfig file
data = project.repository.lfsconfig_for(HEAD_REV)
# Parsing the data to retrieve the url
parsed_data = data&.match(LFS_ENDPOINT_PATTERN)
if parsed_data
URI.parse(parsed_data[1]).tap do |endpoint|
endpoint.user ||= import_uri.user
endpoint.password ||= import_uri.password
end
end
end
rescue URI::InvalidURIError
raise LfsObjectDownloadListError, 'Invalid URL in .lfsconfig file'
end
def import_uri
@import_uri ||= URI.parse(project.import_url)
rescue URI::InvalidURIError
raise LfsObjectDownloadListError, 'Invalid project import URL'
end
def current_endpoint_uri
(lfsconfig_endpoint_uri || default_endpoint_uri)
end
# The import url must end with '.git'; here we ensure it does
def default_endpoint_uri
@default_endpoint_uri ||= begin
import_uri.dup.tap do |uri|
path = uri.path.gsub(%r(/$), '')
path += '.git' unless path.ends_with?('.git')
uri.path = path + LFS_BATCH_API_ENDPOINT
end
end
end
end
end
end

View File

@ -41,12 +41,11 @@ module Tags
def build_push_data(tag)
Gitlab::DataBuilder::Push.build(
project,
current_user,
tag.dereferenced_target.sha,
Gitlab::Git::BLANK_SHA,
"#{Gitlab::Git::TAG_REF_PREFIX}#{tag.name}",
[])
project: project,
user: current_user,
oldrev: tag.dereferenced_target.sha,
newrev: Gitlab::Git::BLANK_SHA,
ref: "#{Gitlab::Git::TAG_REF_PREFIX}#{tag.name}")
end
end
end

View File

@ -37,8 +37,8 @@ module Todos
private
def enqueue_private_features_worker
project_ids.each do |project_id|
TodosDestroyer::PrivateFeaturesWorker.perform_async(project_id, user.id)
projects.each do |project|
TodosDestroyer::PrivateFeaturesWorker.perform_async(project.id, user.id)
end
end
@ -62,9 +62,8 @@ module Todos
end
# rubocop: enable CodeReuse/ActiveRecord
override :project_ids
# rubocop: disable CodeReuse/ActiveRecord
def project_ids
def projects
condition = case entity
when Project
{ id: entity.id }
@ -72,13 +71,13 @@ module Todos
{ namespace_id: non_member_groups }
end
Project.where(condition).select(:id)
Project.where(condition)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def non_authorized_projects
project_ids.where('id NOT IN (?)', user.authorized_projects.select(:id))
projects.where('id NOT IN (?)', user.authorized_projects.select(:id))
end
# rubocop: enable CodeReuse/ActiveRecord
@ -110,7 +109,7 @@ module Todos
authorized_reporter_projects = user
.authorized_projects(Gitlab::Access::REPORTER).select(:id)
Issue.where(project_id: project_ids, confidential: true)
Issue.where(project_id: projects, confidential: true)
.where('project_id NOT IN(?)', authorized_reporter_projects)
.where('author_id != ?', user.id)
.where('id NOT IN (?)', assigned_ids)

View File

@ -1,6 +1,13 @@
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-logging-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%p
%strong
NOTE:
These settings will be removed from the UI in the GitLab 12.0 release and made available in gitlab.yml.
The specific client-side DSN setting is already handled as a component from a Sentry perspective and will be removed.
In addition, you will be able to define a Sentry environment to differentiate between multiple deployments, for example development, staging, and production.
%fieldset
.form-group
.form-check

View File

@ -4,7 +4,7 @@
- page_title _('Kubernetes Cluster')
- manage_prometheus_path = edit_project_service_path(@cluster.project, 'prometheus') if @project
- expanded = Rails.env.test?
- expanded = expanded_by_default?
- status_path = clusterable.cluster_status_cluster_path(@cluster.id, format: :json) if can?(current_user, :admin_cluster, @cluster)
.edit-cluster-form.js-edit-cluster-form{ data: { status_path: status_path,
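
This and the settings views below replace the hard-coded Rails.env.test? check with an expanded_by_default? helper. The helper's definition is not part of the hunks shown here; a minimal sketch of what it would look like if it simply centralises the old check (both the module name and the body are assumptions, not the committed code):

# Hypothetical helper; assumes it merely wraps the previous Rails.env.test? check.
module ApplicationSettingsHelper
  def expanded_by_default?
    Rails.env.test?
  end
end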

View File

@ -1,6 +1,6 @@
- breadcrumb_title "General Settings"
- @content_class = "limit-container-width" unless fluid_layout
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings.gs-general.no-animate#js-general-settings{ class: ('expanded') }

View File

@ -1,7 +1,7 @@
- breadcrumb_title "CI / CD Settings"
- page_title "CI / CD"
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings#ci-variables.no-animate{ class: ('expanded' if expanded) }
.settings-header

View File

@ -1,6 +1,7 @@
%div
- if Gitlab::CurrentSettings.help_page_text.present?
= markdown_field(Gitlab::CurrentSettings.current_application_settings, :help_page_text)
.prepend-top-default.md
= markdown_field(Gitlab::CurrentSettings.current_application_settings, :help_page_text)
%hr
%h1

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings.no-animate#cleanup{ class: ('expanded' if expanded) }
.settings-header

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings.no-animate#default-branch-settings{ class: ('expanded' if expanded) }
.settings-header

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.qa-deploy-keys-settings.settings.no-animate#js-deploy-keys-settings{ class: ('expanded' if expanded) }
.settings-header
%h4

View File

@ -1,7 +1,7 @@
- breadcrumb_title _("General Settings")
- page_title _("General")
- @content_class = "limit-container-width" unless fluid_layout
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings.general-settings.no-animate.expanded#js-general-settings
.settings-header

View File

@ -39,6 +39,8 @@
- presented_labels_sorted_by_title(issue.labels, issue.project).each do |label|
= link_to_label(label, css_class: 'label-link')
= render_if_exists "projects/issues/issue_weight", issue: issue
.issuable-meta
%ul.controls
- if issue.closed?

View File

@ -1,25 +0,0 @@
- time_format = '%b %e, %Y %l:%M%P %Z%z'
- if merge_request.merged?
- mr_status_date = merge_request.merged_at
- mr_status_title = _('Merged')
- mr_status_icon = 'merge'
- mr_status_class = 'merged'
- elsif merge_request.closed?
- mr_status_date = merge_request.closed_event&.created_at
- mr_status_title = _('Closed')
- mr_status_icon = 'issue-close'
- mr_status_class = 'closed'
- else
- mr_status_date = merge_request.created_at
- mr_status_title = mr_status_date ? _('Opened') : _('Open')
- mr_status_icon = 'issue-open-m'
- mr_status_class = 'open'
- if mr_status_date
- mr_status_tooltip = "<div><span class=\"bold\">#{mr_status_title}</span> #{time_ago_in_words(mr_status_date)} ago</div><span class=\"text-tertiary\">#{l(mr_status_date.to_time, format: time_format)}</span>"
- else
- mr_status_tooltip = "<div><span class=\"bold\">#{mr_status_title}</span></div>"
%span.mr-status-wrapper.suggestion-help-hover{ class: css_class, data: { toggle: 'tooltip', placement: 'bottom', html: 'true', title: mr_status_tooltip } }
= sprite_icon(mr_status_icon, size: 16, css_class: "merge-request-status #{mr_status_class}")

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
- protocols = Gitlab::UrlSanitizer::ALLOWED_SCHEMES.join('|')
%section.settings.project-mirror-settings.js-mirror-settings.no-animate.qa-mirroring-repositories-settings#js-push-remote-settings{ class: ('expanded' if expanded) }

View File

@ -16,6 +16,7 @@
= _('A project is where you house your files (repository), plan your work (issues), and publish your documentation (wiki), %{among_other_things_link}.').html_safe % { among_other_things_link: among_other_things_link }
%p
= _('All features are enabled for blank projects, from templates, or when importing, but you can disable them afterward in the project settings.')
= render_if_exists 'projects/new_ci_cd_banner_external_repo'
%p
- pages_getting_started_guide = link_to _('Pages getting started guide'), help_page_path("user/project/pages/getting_started_part_two", anchor: "fork-a-project-to-get-started-from"), target: '_blank'
= _('Information about additional Pages templates and how to install them can be found in our %{pages_getting_started_guide}.').html_safe % { pages_getting_started_guide: pages_getting_started_guide }
@ -42,6 +43,7 @@
%a.nav-link{ href: '#import-project-pane', id: 'import-project-tab', data: { toggle: 'tab', track_label: 'import_project', track_event: "click_tab" }, role: 'tab' }
%span.d-none.d-sm-block Import project
%span.d-block.d-sm-none Import
= render_if_exists 'projects/new_ci_cd_only_project_tab', active_tab: active_tab
.tab-content.gitlab-tab-content
.tab-pane{ id: 'blank-project-pane', class: active_when(active_tab == 'blank'), role: 'tabpanel' }
@ -68,6 +70,8 @@
%h4 No import options available
%p Contact an administrator to enable options for importing your project.
= render_if_exists 'projects/new_ci_cd_only_project_pane', active_tab: active_tab
.save-project-loader.d-none
.center
%h2

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.qa-protected-branches-settings.settings.no-animate#js-protected-branches-settings{ class: ('expanded' if expanded) }
.settings-header

View File

@ -1,4 +1,4 @@
- expanded = Rails.env.test?
- expanded = expanded_by_default?
%section.settings.no-animate#js-protected-tags-settings{ class: ('expanded' if expanded) }
.settings-header

View File

@ -2,7 +2,7 @@
- page_title _("CI / CD Settings")
- page_title _("CI / CD")
- expanded = Rails.env.test?
- expanded = expanded_by_default?
- general_expanded = @project.errors.empty? ? expanded : true
%section.settings#js-general-pipeline-settings.no-animate{ class: ('expanded' if general_expanded) }

View File

@ -6,6 +6,7 @@
- cronjob:gitlab_usage_ping
- cronjob:import_export_project_cleanup
- cronjob:pages_domain_verification_cron
- cronjob:pages_domain_removal_cron
- cronjob:pipeline_schedule
- cronjob:prune_old_events
- cronjob:remove_expired_group_links
@ -32,6 +33,8 @@
- gcp_cluster:cluster_wait_for_ingress_ip_address
- gcp_cluster:cluster_configure
- gcp_cluster:cluster_project_configure
- gcp_cluster:clusters_applications_wait_for_uninstall_app
- gcp_cluster:clusters_applications_uninstall
- github_import_advance_stage
- github_importer:github_import_import_diff_note

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Clusters
module Applications
class UninstallWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
def perform(app_name, app_id)
find_application(app_name, app_id) do |app|
Clusters::Applications::UninstallService.new(app).execute
end
end
end
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
module Clusters
module Applications
class WaitForUninstallAppWorker
include ApplicationWorker
include ClusterQueue
include ClusterApplications
INTERVAL = 10.seconds
TIMEOUT = 20.minutes
def perform(app_name, app_id)
find_application(app_name, app_id) do |app|
Clusters::Applications::CheckUninstallProgressService.new(app).execute
end
end
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class PagesDomainRemovalCronWorker
include ApplicationWorker
include CronjobQueue
def perform
return unless Feature.enabled?(:remove_disabled_domains)
PagesDomain.for_removal.find_each do |domain|
domain.destroy!
rescue => e
Raven.capture_exception(e)
end
end
end
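
Together with the for_removal scope added to PagesDomain earlier in this diff and the new cronjob:pages_domain_removal_cron queue entry, one run of the worker boils down to the following (illustrative; the guard depends on the state of the :remove_disabled_domains feature flag):

if Feature.enabled?(:remove_disabled_domains)
  PagesDomain.where("remove_at < ?", Time.now).find_each do |domain|
    begin
      domain.destroy!
    rescue => e
      Raven.capture_exception(e)   # a failing domain does not abort the whole pass
    end
  end
end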

View File

@ -3,20 +3,26 @@
class PipelineScheduleWorker
include ApplicationWorker
include CronjobQueue
include ::Gitlab::ExclusiveLeaseHelpers
EXCLUSIVE_LOCK_KEY = 'pipeline_schedules:run:lock'
LOCK_TIMEOUT = 50.minutes
# rubocop: disable CodeReuse/ActiveRecord
def perform
Ci::PipelineSchedule.active.where("next_run_at < ?", Time.now)
.preload(:owner, :project).find_each do |schedule|
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
Ci::PipelineSchedule.active.where("next_run_at < ?", Time.now)
.preload(:owner, :project).find_each do |schedule|
Ci::CreatePipelineService.new(schedule.project,
schedule.owner,
ref: schedule.ref)
.execute!(:schedule, ignore_skip_ci: true, save_on_errors: true, schedule: schedule)
rescue => e
error(schedule, e)
ensure
schedule.schedule_next_run!
schedule.schedule_next_run!
Ci::CreatePipelineService.new(schedule.project,
schedule.owner,
ref: schedule.ref)
.execute!(:schedule, ignore_skip_ci: true, save_on_errors: true, schedule: schedule)
rescue => e
error(schedule, e)
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
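
Wrapping the whole scheduling pass in in_lock means only one PipelineScheduleWorker run can create pipelines at a time; an overlapping cron tick retries once and then gives up instead of double-scheduling. A minimal sketch of the Gitlab::ExclusiveLeaseHelpers pattern used above (the worker name and body are placeholders):

class SomeCronWorker                       # hypothetical worker
  include ::Gitlab::ExclusiveLeaseHelpers

  LOCK_KEY = 'some_cron_worker:run:lock'
  LOCK_TTL = 50.minutes

  def perform
    in_lock(LOCK_KEY, ttl: LOCK_TTL, retries: 1) do
      # Only one process holds the Redis-backed lease here; another caller
      # retries once and then raises FailedToObtainLockError.
      do_the_work
    end
  end
end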

View File

@ -0,0 +1,5 @@
---
title: Format extra help page text like wiki
merge_request: 26782
author: Bastian Blank
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix base domain help text update
merge_request: 27746
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Add initial GraphQL query for Groups
merge_request: 27492
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Allow Sentry configuration to be passed in gitlab.yml
merge_request: 27091
author: Roger Meier
type: added

Some files were not shown because too many files changed in this diff.