Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-17 18:09:08 +00:00
parent c61d90dbfa
commit 6b922f9bb0
58 changed files with 605 additions and 440 deletions

View file

@@ -1 +1 @@
-8.35.0
+8.36.0

View file

@@ -497,7 +497,7 @@ export default class LabelsSelect {
    const scopedLabelTemplate = template(
      [
-        '<span class="gl-label gl-label-scoped" style="color: <%= escapeStr(label.color) %>;">',
+        '<span class="gl-label gl-label-scoped" style="color: <%= escapeStr(label.color) %>; --label-inset-border: inset 0 0 0 2px <%= escapeStr(label.color) %>;">',
        linkOpenTag,
        spanOpenTag,
        '<%- label.title.slice(0, label.title.lastIndexOf("::")) %>',
@@ -526,9 +526,7 @@ export default class LabelsSelect {
      [
        '<% labels.forEach(function(label){ %>',
        '<% if (isScopedLabel(label) && enableScopedLabels) { %>',
-        '<span class="d-inline-block position-relative scoped-label-wrapper">',
        '<%= scopedLabelTemplate({ label, issueUpdateURL, isScopedLabel, enableScopedLabels, rightLabelTextColor, tooltipTitleTemplate, escapeStr, linkAttrs: \'data-html="true"\' }) %>',
-        '</span>',
        '<% } else { %>',
        '<%= labelTemplate({ label, issueUpdateURL, isScopedLabel, enableScopedLabels, tooltipTitleTemplate, escapeStr, linkAttrs: "" }) %>',
        '<% } %>',

View file

@@ -160,6 +160,7 @@ export default {
      'variables',
      'links',
      'currentDashboard',
+      'hasDashboardValidationWarnings',
    ]),
    ...mapGetters('monitoringDashboard', ['selectedDashboard', 'getMetricStates']),
    shouldShowVariablesSection() {
@@ -197,6 +198,19 @@ export default {
    selectedDashboard(dashboard) {
      this.prependToDocumentTitle(dashboard?.display_name);
    },
+    hasDashboardValidationWarnings(hasWarnings) {
+      /**
+       * This watcher is set for future SPA behaviour of the dashboard
+       */
+      if (hasWarnings) {
+        createFlash(
+          s__(
+            'Metrics|Your dashboard schema is invalid. Edit the dashboard to correct the YAML schema.',
+          ),
+          'warning',
+        );
+      }
+    },
  },
  created() {
    window.addEventListener('keyup', this.onKeyup);

View file

@@ -0,0 +1,18 @@
query getDashboardValidationWarnings(
$projectPath: ID!
$environmentName: String
$dashboardPath: String!
) {
project(fullPath: $projectPath) {
id
environments(name: $environmentName) {
nodes {
name
metricsDashboard(path: $dashboardPath) {
path
schemaValidationWarnings
}
}
}
}
}
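The same document can be exercised outside the frontend by POSTing it to the GraphQL endpoint. A minimal sketch, assuming a reachable instance and a personal access token; the project, environment, and dashboard values are the placeholder ones used in the store specs further down:

```shell
curl --request POST "https://gitlab.example.com/api/graphql" \
  --header "Content-Type: application/json" \
  --header "Authorization: Bearer <your_access_token>" \
  --data '{
    "query": "query($projectPath: ID!, $environmentName: String, $dashboardPath: String!) { project(fullPath: $projectPath) { environments(name: $environmentName) { nodes { name metricsDashboard(path: $dashboardPath) { path schemaValidationWarnings } } } } }",
    "variables": {
      "projectPath": "gitlab-org/gitlab-test",
      "environmentName": "production",
      "dashboardPath": ".gitlab/dashboards/dashboard_with_warnings.yml"
    }
  }'
```

A non-empty `schemaValidationWarnings` array in the response is what the store action below turns into the warning flash.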

View file

@@ -12,6 +12,7 @@ import {
import trackDashboardLoad from '../monitoring_tracking_helper';
import getEnvironments from '../queries/getEnvironments.query.graphql';
import getAnnotations from '../queries/getAnnotations.query.graphql';
+import getDashboardValidationWarnings from '../queries/getDashboardValidationWarnings.query.graphql';
import statusCodes from '../../lib/utils/http_status';
import { backOff, convertObjectPropsToCamelCase } from '../../lib/utils/common_utils';
import { s__, sprintf } from '../../locale';
@@ -126,7 +127,17 @@ export const fetchDashboard = ({ state, commit, dispatch }) => {
  return backOffRequest(() => axios.get(state.dashboardEndpoint, { params }))
    .then(resp => resp.data)
-    .then(response => dispatch('receiveMetricsDashboardSuccess', { response }))
+    .then(response => {
+      dispatch('receiveMetricsDashboardSuccess', { response });
+      /**
+       * After the dashboard is fetched, there can be non-blocking invalid syntax
+       * in the dashboard file. This call will fetch such syntax warnings
+       * and surface a warning on the UI. If the invalid syntax is blocking,
+       * the `fetchDashboard` returns a 404 with error messages that are displayed
+       * on the UI.
+       */
+      dispatch('fetchDashboardValidationWarnings');
+    })
    .catch(error => {
      Sentry.captureException(error);
@@ -344,6 +355,46 @@ export const receiveAnnotationsSuccess = ({ commit }, data) =>
  commit(types.RECEIVE_ANNOTATIONS_SUCCESS, data);
export const receiveAnnotationsFailure = ({ commit }) => commit(types.RECEIVE_ANNOTATIONS_FAILURE);
export const fetchDashboardValidationWarnings = ({ state, dispatch }) => {
/**
* Normally, the default dashboard won't throw any validation warnings.
*
* However, if a bug sneaks into the default dashboard making it invalid,
* this might come handy for our clients
*/
const dashboardPath = state.currentDashboard || DEFAULT_DASHBOARD_PATH;
return gqClient
.mutate({
mutation: getDashboardValidationWarnings,
variables: {
projectPath: removeLeadingSlash(state.projectPath),
environmentName: state.currentEnvironmentName,
dashboardPath,
},
})
.then(resp => resp.data?.project?.environments?.nodes?.[0]?.metricsDashboard)
.then(({ schemaValidationWarnings }) => {
const hasWarnings = schemaValidationWarnings && schemaValidationWarnings.length !== 0;
/**
* The payload of the dispatch is a boolean, because at the moment a standard
* warning message is shown instead of the warnings the BE returns
*/
dispatch('receiveDashboardValidationWarningsSuccess', hasWarnings || false);
})
.catch(err => {
Sentry.captureException(err);
dispatch('receiveDashboardValidationWarningsFailure');
createFlash(
s__('Metrics|There was an error getting dashboard validation warnings information.'),
);
});
};
export const receiveDashboardValidationWarningsSuccess = ({ commit }, hasWarnings) =>
commit(types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS, hasWarnings);
export const receiveDashboardValidationWarningsFailure = ({ commit }) =>
commit(types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_FAILURE);
// Dashboard manipulation
export const toggleStarredValue = ({ commit, state, getters }) => {

View file

@@ -13,6 +13,12 @@ export const RECEIVE_DASHBOARD_STARRING_FAILURE = 'RECEIVE_DASHBOARD_STARRING_FA
export const RECEIVE_ANNOTATIONS_SUCCESS = 'RECEIVE_ANNOTATIONS_SUCCESS';
export const RECEIVE_ANNOTATIONS_FAILURE = 'RECEIVE_ANNOTATIONS_FAILURE';
+// Dashboard validation warnings
+export const RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS =
+  'RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS';
+export const RECEIVE_DASHBOARD_VALIDATION_WARNINGS_FAILURE =
+  'RECEIVE_DASHBOARD_VALIDATION_WARNINGS_FAILURE';
// Git project deployments
export const REQUEST_DEPLOYMENTS_DATA = 'REQUEST_DEPLOYMENTS_DATA';
export const RECEIVE_DEPLOYMENTS_DATA_SUCCESS = 'RECEIVE_DEPLOYMENTS_DATA_SUCCESS';

View file

@@ -125,6 +125,16 @@ export default {
    state.annotations = [];
  },
+  /**
+   * Dashboard Validation Warnings
+   */
+  [types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS](state, hasDashboardValidationWarnings) {
+    state.hasDashboardValidationWarnings = hasDashboardValidationWarnings;
+  },
+  [types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_FAILURE](state) {
+    state.hasDashboardValidationWarnings = false;
+  },
  /**
   * Individual panel/metric results
   */

View file

@@ -12,6 +12,7 @@ export default () => ({
  currentDashboard: null,
  // Dashboard data
+  hasDashboardValidationWarnings: false,
  emptyState: 'gettingStarted',
  showEmptyState: true,
  showErrorBanner: true,

View file

@@ -115,20 +115,6 @@
    font-size: 0;
    margin-bottom: -5px;
  }
-  .scoped-label-wrapper {
-    > a {
-      max-width: 100%;
-    }
-    .color-label {
-      padding-right: $gl-padding-24;
-    }
-    .scoped-label {
-      right: 12px;
-    }
-  }
}
.assignee {

View file

@@ -310,7 +310,6 @@
  width: 200px;
  flex-shrink: 0;
-  .scoped-label-wrapper,
  .gl-label {
    line-height: $gl-line-height;
  }
@@ -415,40 +414,6 @@
    color: $indigo-300;
  }
-  .scoped-label-wrapper {
-    max-width: 100%;
-    vertical-align: top;
-    .badge {
-      text-overflow: ellipsis;
-      overflow-x: hidden;
-    }
-    &.label-link .color-label a {
-      color: inherit;
-    }
-    .color-label {
-      padding-right: $gl-padding-24;
-      max-width: 100%;
-    }
-    .scoped-label {
-      position: absolute;
-      top: 4px;
-      right: 8px;
-      padding: 0;
-      margin: 0;
-      line-height: $gl-line-height;
-    }
-    &.board-label {
-      .scoped-label {
-        top: 1px;
-      }
-    }
-  }
  .gl-label-scoped {
    box-shadow: 0 0 0 2px currentColor inset;
@@ -456,29 +421,3 @@
    box-shadow: 0 0 0 1px inset;
  }
}
-// Label inside title of Delete Label Modal
-.modal-header .page-title {
-  .scoped-label-wrapper {
-    .scoped-label {
-      line-height: 20px;
-    }
-    span.color-label {
-      padding-right: $gl-padding-24;
-    }
-  }
-}
-// Don't hide the overflow in system messages
-.system-note-message,
-.issuable-details,
-.md-preview-holder,
-.referenced-commands,
-.note-body {
-  .scoped-label-wrapper {
-    .badge {
-      overflow: initial;
-    }
-  }
-}

View file

@@ -4,6 +4,9 @@ class Admin::ServicesController < Admin::ApplicationController
  include ServiceParams
  before_action :service, only: [:edit, :update]
+  before_action only: :edit do
+    push_frontend_feature_flag(:integration_form_refactor)
+  end
  def index
    @services = Service.find_or_create_templates.sort_by(&:title)

View file

@@ -8,6 +8,9 @@ module IntegrationsActions
    before_action :not_found, unless: :integrations_enabled?
    before_action :integration, only: [:edit, :update, :test]
+    before_action only: :edit do
+      push_frontend_feature_flag(:integration_form_refactor)
+    end
  end
  def edit

View file

@@ -30,7 +30,7 @@ class Projects::ServicesController < Projects::ApplicationController
    respond_to do |format|
      format.html do
        if saved
-          target_url = safe_redirect_path(params[:redirect_to]).presence || project_settings_integrations_path(@project)
+          target_url = safe_redirect_path(params[:redirect_to]).presence || edit_project_service_path(@project, @service)
          redirect_to target_url, notice: success_message
        else
          render 'edit'

View file

@@ -0,0 +1,5 @@
---
title: Remove Rails Optimistic Locking monkeypatch
merge_request: 25566
author:
type: fixed

View file

@@ -0,0 +1,5 @@
---
title: Add dashboard validation warning to metrics dashboard
merge_request: 33769
author:
type: added

View file

@@ -0,0 +1,5 @@
---
title: Update Workhorse to v8.36.0
merge_request: 34759
author:
type: other

View file

@@ -0,0 +1,5 @@
---
title: Change redirect path after integration save
merge_request: 34697
author:
type: changed

View file

@@ -0,0 +1,5 @@
---
title: Remove partial clone feature flag
merge_request: 34703
author:
type: added

View file

@@ -1,46 +0,0 @@
# frozen_string_literal: true
# ensure ActiveRecord's version has been required already
require 'active_record/locking/optimistic'
# rubocop:disable Lint/RescueException
module ActiveRecord
module Locking
module Optimistic
private
def _update_row(attribute_names, attempted_action = "update")
return super unless locking_enabled?
begin
locking_column = self.class.locking_column
previous_lock_value = read_attribute_before_type_cast(locking_column)
attribute_names << locking_column
self[locking_column] += 1
# Patched because when `lock_version` is read as `0`, it may actually be `NULL` in the DB.
possible_previous_lock_value = previous_lock_value.to_i == 0 ? [nil, 0] : previous_lock_value
affected_rows = self.class.unscoped.where(
locking_column => possible_previous_lock_value,
self.class.primary_key => id_in_database
).update_all(
attributes_with_values(attribute_names)
)
if affected_rows != 1
raise ActiveRecord::StaleObjectError.new(self, attempted_action)
end
affected_rows
# If something went wrong, revert the locking_column value.
rescue Exception
self[locking_column] = previous_lock_value.to_i
raise
end
end
end
end
end

View file

@@ -10,9 +10,7 @@ def get_vue_files_with_ce_and_ee_versions(files)
        "ee/#{file}"
      end
-      escaped_path = CGI.escape(counterpart_path)
-      api_endpoint = "https://gitlab.com/api/v4/projects/gitlab-org%2Fgitlab-ee/repository/files/#{escaped_path}?ref=master"
-      response = HTTParty.get(api_endpoint) # rubocop:disable Gitlab/HTTParty
+      response = gitlab.api.get_file(gitlab.mr_json['project_id'], counterpart_path, 'master')
      response.code != 404
    else
      false

View file

@@ -0,0 +1,23 @@
# frozen_string_literal: true
class SetLockVersionNotNullConstraint < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
TABLES = %i(epics merge_requests issues ci_stages ci_builds ci_pipelines).freeze
def up
TABLES.each do |table|
add_not_null_constraint table, :lock_version, validate: false
end
end
def down
TABLES.each do |table|
remove_not_null_constraint table, :lock_version
end
end
end

View file

@@ -0,0 +1,19 @@
# frozen_string_literal: true
class SetLockVersionToNotNull < ActiveRecord::Migration[6.0]
DOWNTIME = false
MODELS = [Epic, MergeRequest, Issue, Ci::Stage, Ci::Build, Ci::Pipeline].freeze
disable_ddl_transaction!
def up
MODELS.each do |model|
model.where(lock_version: nil).update_all(lock_version: 0)
end
end
def down
# Nothing to do...
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForEpics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :epics, :lock_version
remove_concurrent_index :epics, :lock_version, where: "lock_version IS NULL"
end
def down
add_concurrent_index :epics, :lock_version, where: "lock_version IS NULL"
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForMergeRequests < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :merge_requests, :lock_version
remove_concurrent_index :merge_requests, :lock_version, where: "lock_version IS NULL"
end
def down
add_concurrent_index :merge_requests, :lock_version, where: "lock_version IS NULL"
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForIssues < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :issues, :lock_version
remove_concurrent_index :issues, :lock_version, where: "lock_version IS NULL"
end
def down
add_concurrent_index :issues, :lock_version, where: "lock_version IS NULL"
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForCiStages < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :ci_stages, :lock_version
remove_concurrent_index :ci_stages, :id, where: "lock_version IS NULL", name: "tmp_index_ci_stages_lock_version"
end
def down
add_concurrent_index :ci_stages, :id, where: "lock_version IS NULL", name: "tmp_index_ci_stages_lock_version"
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForCiBuilds < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :ci_builds, :lock_version
remove_concurrent_index :ci_builds, :id, where: "lock_version IS NULL", name: "tmp_index_ci_builds_lock_version"
end
def down
add_concurrent_index :ci_builds, :id, where: "lock_version IS NULL", name: "tmp_index_ci_builds_lock_version"
end
end

View file

@@ -0,0 +1,18 @@
# frozen_string_literal: true
class LockVersionCleanupForCiPipelines < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
validate_not_null_constraint :ci_pipelines, :lock_version
remove_concurrent_index :ci_pipelines, :id, where: "lock_version IS NULL", name: "tmp_index_ci_pipelines_lock_version"
end
def down
add_concurrent_index :ci_pipelines, :id, where: "lock_version IS NULL", name: "tmp_index_ci_pipelines_lock_version"
end
end

View file

@@ -1052,7 +1052,8 @@ CREATE TABLE public.ci_builds (
    resource_group_id bigint,
    waiting_for_resource_at timestamp with time zone,
    processed boolean,
-    scheduling_type smallint
+    scheduling_type smallint,
+    CONSTRAINT check_1e2fbd1b39 CHECK ((lock_version IS NOT NULL))
);
CREATE SEQUENCE public.ci_builds_id_seq
@@ -1362,7 +1363,8 @@ CREATE TABLE public.ci_pipelines (
    source_sha bytea,
    target_sha bytea,
    external_pull_request_id bigint,
-    ci_ref_id bigint
+    ci_ref_id bigint,
+    CONSTRAINT check_d7e99a025e CHECK ((lock_version IS NOT NULL))
);
CREATE TABLE public.ci_pipelines_config (
@@ -1547,7 +1549,8 @@ CREATE TABLE public.ci_stages (
    name character varying,
    status integer,
    lock_version integer DEFAULT 0,
-    "position" integer
+    "position" integer,
+    CONSTRAINT check_81b431e49b CHECK ((lock_version IS NOT NULL))
);
CREATE SEQUENCE public.ci_stages_id_seq
@@ -2506,7 +2509,8 @@ CREATE TABLE public.epics (
    start_date_sourcing_epic_id integer,
    due_date_sourcing_epic_id integer,
    confidential boolean DEFAULT false NOT NULL,
-    external_key character varying(255)
+    external_key character varying(255),
+    CONSTRAINT check_fcfb4a93ff CHECK ((lock_version IS NOT NULL))
);
CREATE SEQUENCE public.epics_id_seq
@@ -3532,7 +3536,8 @@ CREATE TABLE public.issues (
    promoted_to_epic_id integer,
    health_status smallint,
    external_key character varying(255),
-    sprint_id bigint
+    sprint_id bigint,
+    CONSTRAINT check_fba63f706d CHECK ((lock_version IS NOT NULL))
);
CREATE SEQUENCE public.issues_id_seq
@@ -4111,7 +4116,8 @@ CREATE TABLE public.merge_requests (
    state_id smallint DEFAULT 1 NOT NULL,
    rebase_jid character varying,
    squash_commit_sha bytea,
-    sprint_id bigint
+    sprint_id bigint,
+    CONSTRAINT check_970d272570 CHECK ((lock_version IS NOT NULL))
);
CREATE TABLE public.merge_requests_closing_issues (
@@ -9835,8 +9841,6 @@ CREATE INDEX index_epics_on_iid ON public.epics USING btree (iid);
CREATE INDEX index_epics_on_last_edited_by_id ON public.epics USING btree (last_edited_by_id);
-CREATE INDEX index_epics_on_lock_version ON public.epics USING btree (lock_version) WHERE (lock_version IS NULL);
CREATE INDEX index_epics_on_parent_id ON public.epics USING btree (parent_id);
CREATE INDEX index_epics_on_start_date ON public.epics USING btree (start_date);
@@ -10079,8 +10083,6 @@ CREATE INDEX index_issues_on_duplicated_to_id ON public.issues USING btree (dupl
CREATE INDEX index_issues_on_last_edited_by_id ON public.issues USING btree (last_edited_by_id);
-CREATE INDEX index_issues_on_lock_version ON public.issues USING btree (lock_version) WHERE (lock_version IS NULL);
CREATE INDEX index_issues_on_milestone_id ON public.issues USING btree (milestone_id);
CREATE INDEX index_issues_on_moved_to_id ON public.issues USING btree (moved_to_id) WHERE (moved_to_id IS NOT NULL);
@@ -10247,8 +10249,6 @@ CREATE INDEX index_merge_requests_on_head_pipeline_id ON public.merge_requests U
CREATE INDEX index_merge_requests_on_latest_merge_request_diff_id ON public.merge_requests USING btree (latest_merge_request_diff_id);
-CREATE INDEX index_merge_requests_on_lock_version ON public.merge_requests USING btree (lock_version) WHERE (lock_version IS NULL);
CREATE INDEX index_merge_requests_on_merge_user_id ON public.merge_requests USING btree (merge_user_id) WHERE (merge_user_id IS NOT NULL);
CREATE INDEX index_merge_requests_on_milestone_id ON public.merge_requests USING btree (milestone_id);
@@ -11249,12 +11249,6 @@ CREATE INDEX tmp_build_stage_position_index ON public.ci_builds USING btree (sta
CREATE INDEX tmp_idx_on_user_id_where_bio_is_filled ON public.users USING btree (id) WHERE ((COALESCE(bio, ''::character varying))::text IS DISTINCT FROM ''::text);
-CREATE INDEX tmp_index_ci_builds_lock_version ON public.ci_builds USING btree (id) WHERE (lock_version IS NULL);
-CREATE INDEX tmp_index_ci_pipelines_lock_version ON public.ci_pipelines USING btree (id) WHERE (lock_version IS NULL);
-CREATE INDEX tmp_index_ci_stages_lock_version ON public.ci_stages USING btree (id) WHERE (lock_version IS NULL);
CREATE UNIQUE INDEX users_security_dashboard_projects_unique_index ON public.users_security_dashboard_projects USING btree (project_id, user_id);
CREATE UNIQUE INDEX vulnerability_feedback_unique_idx ON public.vulnerability_feedback USING btree (project_id, category, feedback_type, project_fingerprint);
@@ -13989,6 +13983,14 @@ COPY "schema_migrations" (version) FROM STDIN;
20200605093113
20200608072931
20200608075553
+20200608195222
+20200608205813
+20200608212030
+20200608212435
+20200608212549
+20200608212652
+20200608212807
+20200608212824
20200608214008
20200609002841
20200609142506

View file

@@ -766,12 +766,8 @@ sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.t
### Checking repository checksums
-To check a project's repository checksums across on all Gitaly nodes, the
-replicas Rake task can be run on the main GitLab node:
-```shell
-sudo gitlab-rake "gitlab:praefect:replicas[project_id]"
-```
+To check a project's repository checksums across on all Gitaly nodes, run the
+[replicas Rake task](../raketasks/praefect.md#replica-checksums) on the main GitLab node.
## Backend Node Recovery

View file

@@ -1,13 +1,23 @@
-# Praefect Rake Tasks **(CORE ONLY)**
+---
+stage: Create
+group: Gitaly
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
+type: reference
+---
+# Praefect Rake tasks **(CORE ONLY)**
> [Introduced]( https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28369) in GitLab 12.10.
+Rake tasks are available for projects that have been created on Praefect storage. See the
+[Praefect documentation](../gitaly/praefect.md) for information on configuring Praefect.
## Replica checksums
-Prints out checksums of the repository of a given project_id on the primary as well as secondary internal Gitaly nodes.
-NOTE: **Note:**
-This only is relevant and works for projects that have been created on a Praefect storage. See the [Praefect Documentation](../gitaly/praefect.md) for configuring Praefect.
+`gitlab:praefect:replicas` prints out checksums of the repository of a given `project_id` on:
+- The primary Gitaly node.
+- Secondary internal Gitaly nodes.
**Omnibus Installation**
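For example, on an Omnibus installation the task takes the project ID as its argument; this is the same command that was previously inlined in the Gitaly document above:

```shell
sudo gitlab-rake "gitlab:praefect:replicas[project_id]"
```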

View file

@@ -7,6 +7,16 @@ GitLab provides Rake tasks relating to project import and export. For more infor
- [Project import/export documentation](../../user/project/settings/import_export.md).
- [Project import/export API](../../api/project_import_export.md).
+- [Developer documentation: project import/export](../../development/import_export.md)
+## Project import status
+You can query an import through the [Project import/export API](../../api/project_import_export.md#import-status).
+As described in the API documentation, the query may return an import error or exceptions.
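As a sketch of that status query (endpoint per the linked API page; host, token, and project ID are placeholders):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/<project_id>/import"
```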
+## Import large projects
+If you have a larger project, consider using a Rake task, as described in our [developer documentation](../../development/import_project.md#importing-via-a-rake-task).
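A rough sketch of the Rake invocation covered in that developer documentation (argument order and values below are illustrative placeholders; follow the linked page for the authoritative form):

```shell
# username, namespace, project name, and path to the exported archive
sudo gitlab-rake "gitlab:import_export:import[root, group/subgroup, testingprojectimport, /path/to/file_to_import.tar.gz]"
```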
## Import/export tasks

View file

@@ -31,6 +31,7 @@ The following are available Rake tasks:
| [LDAP maintenance](../administration/raketasks/ldap.md) | [LDAP](../administration/auth/ldap/index.md)-related tasks. |
| [List repositories](list_repos.md) | List of all GitLab-managed Git repositories on disk. |
| [Migrate Snippets to Git](migrate_snippets.md) | Migrate GitLab Snippets to Git repositories and show migration status |
+| [Praefect Rake tasks](../administration/raketasks/praefect.md) | [Praefect](../administration/gitaly/praefect.md)-related tasks. |
| [Project import/export](../administration/raketasks/project_import_export.md) | Prepare for [project exports and imports](../user/project/settings/import_export.md). |
| [Sample Prometheus data](generate_sample_prometheus_data.md) | Generate sample Prometheus data. |
| [Repository storage](../administration/raketasks/storage.md) | List and migrate existing projects and attachments from legacy storage to hashed storage. |

View file

@@ -113,9 +113,6 @@ file to specify which files should be included when cloning and fetching.
For more details, see the Git documentation for
[`rev-list-options`](https://gitlab.com/gitlab-org/git/-/blob/9fadedd637b312089337d73c3ed8447e9f0aa775/Documentation/rev-list-options.txt#L735-780).
-With the `uploadpack.allowFilter` and `uploadpack.allowAnySHA1InWant` options
-enabled on the Git server:
1. **Create a filter spec.** For example, consider a monolithic repository with
   many applications, each in a different subdirectory in the root. Create a file
   `shiny-app/.filterspec` using the GitLab web interface:

View file

@@ -30,7 +30,7 @@ several different ways:
You can switch the status of a vulnerability using the **Status** dropdown to one of
the following values:
-| State | Description |
+| Status | Description |
|-----------|-------------------------------------------------------------------|
| Detected | The default state for a newly discovered vulnerability |
| Confirmed | A user has seen this vulnerability and confirmed it to be real |

View file

@@ -64,6 +64,18 @@ Each alert contains the following metrics:
- **Event count** - The number of times that an alert has fired.
- **Status** - The [current status](#alert-management-statuses) of the alert.
+### Alert Management list sorting
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/217745) in GitLab 13.1.
+The Alert Management list displays alerts sorted by start time, but you can
+change the sort order by clicking the headers in the Alert Management list.
+To see if a column is sortable, point your mouse at the header. Sortable columns
+display an arrow next to the column name, as shown in this example:
+![Alert Management List Sorting](img/alert_list_sort_v13_1.png)
### Alert Management statuses
Each alert contains a status dropdown to indicate which alerts need investigation.

View file

@@ -146,6 +146,14 @@ CAUTION: **Caution:**
The Unleash client **must** be given a user ID for the feature to be enabled for
target users. See the [Ruby example](#ruby-application-example) below.
+#### List
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35930) in GitLab 13.1.
+A feature flag can be enabled for lists of users created with the [Feature Flag User List API](../../../api/feature_flag_user_lists.md).
+Similar to [User IDs](#user-ids), it uses the Unleash [`userWithId`](https://unleash.github.io/docs/activation_strategy#userwithid)
+activation strategy.
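A rough sketch of creating such a user list through the API mentioned above (attribute names follow the linked Feature Flag User List API page as of GitLab 13.1; IDs, token, and host are placeholders):

```shell
curl --request POST "https://gitlab.example.com/api/v4/projects/<project_id>/feature_flags_user_lists" \
  --header "PRIVATE-TOKEN: <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{ "name": "My user list", "user_xids": "user1,user2,user3" }'
```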
## Integrate feature flags with your application
To use feature flags with your application, get access credentials from GitLab.

Binary file not shown (new image, 14 KiB).

View file

@@ -46,10 +46,10 @@ To create a GitLab Pages website:
| Document | Description |
| -------- | ----------- |
-| [Fork a sample project](getting_started/fork_sample_project.md) | Create a new project with Pages already configured by forking a sample project. |
-| [Use a new project template](getting_started/pages_bundled_template.md) | Create a new project with Pages already configured by using a new project template. |
| [Use a `.gitlab-ci.yml` template](getting_started/new_or_existing_website.md) | Add a Pages site to an existing project. Use a pre-populated CI template file. |
| [Create a `gitlab-ci.yml` file from scratch](getting_started_part_four.md) | Add a Pages site to an existing project. Learn how to create and configure your own CI file. |
+| [Use a new project template](getting_started/pages_bundled_template.md) | Create a new project with Pages already configured by using a new project template. |
+| [Fork a sample project](getting_started/fork_sample_project.md) | Create a new project with Pages already configured by forking a sample project. |
To update a GitLab Pages website:

View file

@@ -162,6 +162,22 @@ NOTE: **Note:**
The maximum import file size can be set by the Administrator, default is 50MB.
As an administrator, you can modify the maximum import file size. To do so, use the `max_import_size` option in the [Application settings API](../../../api/settings.md#change-application-settings) or the [Admin UI](../../admin_area/settings/account_and_limit_settings.md).
+### Project import status
+The status of an import [can be queried via the API](../../../api/project_import_export.md#import-status).
+If the import fails, the API returns the error message, and if it completes, the API will return any
+issues which were encountered.
+### Project import with a Rake task. **(CORE ONLY)**
+Projects can be imported to a self-managed GitLab instance using a Rake task.
+The Rake task is often more effective at importing large project exports that don't complete
+when imported using the web interface.
+Further details in the [developer documentation](../../../development/import_project.md#importing-via-a-rake-task).
## Rate limits
To help avoid abuse, users are rate limited to:

View file

@@ -63,15 +63,13 @@ module API
          gl_project_path: gl_repository_path,
          gl_id: Gitlab::GlId.gl_id(actor.user),
          gl_username: actor.username,
-          git_config_options: [],
+          git_config_options: ["uploadpack.allowFilter=true",
+                               "uploadpack.allowAnySHA1InWant=true"],
          gitaly: gitaly_payload(params[:action]),
          gl_console_messages: check_result.console_messages
        }
        # Custom option for git-receive-pack command
-        if Feature.enabled?(:gitaly_upload_pack_filter, project, default_enabled: true)
-          payload[:git_config_options] << "uploadpack.allowFilter=true" << "uploadpack.allowAnySHA1InWant=true"
-        end
        receive_max_input_size = Gitlab::CurrentSettings.receive_max_input_size.to_i

View file

@@ -5,7 +5,7 @@ require_relative 'teammate'
module Gitlab
  module Danger
    module Roulette
-      ROULETTE_DATA_URL = 'https://about.gitlab.com/roulette.json'
+      ROULETTE_DATA_URL = 'https://gitlab-org.gitlab.io/gitlab-roulette/roulette.json'
      OPTIONAL_CATEGORIES = [:qa, :test].freeze
      Spin = Struct.new(:category, :reviewer, :maintainer, :optional_role)
@@ -90,7 +90,7 @@ module Gitlab
      # @param [Teammate] person
      # @return [Boolean]
      def valid_person?(person)
-        !mr_author?(person) && person.available?
+        !mr_author?(person) && person.available && person.has_capacity
      end
      # @param [Teammate] person

View file

@@ -1,28 +1,19 @@
# frozen_string_literal: true
-require 'cgi'
-require 'set'
module Gitlab
  module Danger
    class Teammate
-      attr_reader :name, :username, :role, :projects
+      attr_reader :username, :name, :markdown_name, :role, :projects, :available, :has_capacity
-      AT_CAPACITY_EMOJI = Set.new(%w[red_circle]).freeze
-      OOO_EMOJI = Set.new(%w[
-        palm_tree
-        beach beach_umbrella beach_with_umbrella
-      ]).freeze
+      # The options data are produced by https://gitlab.com/gitlab-org/gitlab-roulette/-/blob/master/lib/team_member.rb
      def initialize(options = {})
        @username = options['username']
-        @name = options['name'] || @username
+        @name = options['name']
+        @markdown_name = options['markdown_name']
        @role = options['role']
        @projects = options['projects']
-      end
-      def markdown_name
-        "[#{name}](https://gitlab.com/#{username}) (`@#{username}`)"
+        @available = options['available']
+        @has_capacity = options['has_capacity']
      end
      def in_project?(name)
@@ -43,42 +34,8 @@ module Gitlab
        has_capability?(project, category, :maintainer, labels)
      end
-      def status
-        return @status if defined?(@status)
-        @status ||=
-          begin
-            Gitlab::Danger::RequestHelper.http_get_json(status_api_endpoint)
-          rescue Gitlab::Danger::RequestHelper::HTTPError, JSON::ParserError
-            nil # better no status than a crashing Danger
-          end
-      end
-      # @return [Boolean]
-      def available?
-        !out_of_office? && has_capacity?
-      end
      private
-      def status_api_endpoint
-        "https://gitlab.com/api/v4/users/#{CGI.escape(username)}/status"
-      end
-      def status_emoji
-        status&.dig("emoji")
-      end
-      # @return [Boolean]
-      def out_of_office?
-        status&.dig("message")&.match?(/OOO/i) || OOO_EMOJI.include?(status_emoji)
-      end
-      # @return [Boolean]
-      def has_capacity?
-        !AT_CAPACITY_EMOJI.include?(status_emoji)
-      end
      def has_capability?(project, category, kind, labels)
        case category
        when :test

View file

@@ -3,7 +3,7 @@
module Gitlab
  module MarkdownCache
    # Increment this number every time the renderer changes its output
-    CACHE_COMMONMARK_VERSION = 21
+    CACHE_COMMONMARK_VERSION = 22
    CACHE_COMMONMARK_VERSION_START = 10
    BaseError = Class.new(StandardError)

View file

@@ -6783,6 +6783,9 @@ msgstr ""
msgid "CurrentUser|Buy Pipeline minutes"
msgstr ""
+msgid "CurrentUser|One of your groups is running out"
+msgstr ""
msgid "CurrentUser|Profile"
msgstr ""
@@ -14218,6 +14221,9 @@ msgstr ""
msgid "Metrics|There was an error getting annotations information."
msgstr ""
+msgid "Metrics|There was an error getting dashboard validation warnings information."
+msgstr ""
msgid "Metrics|There was an error getting deployment information."
msgstr ""
@@ -14266,6 +14272,9 @@ msgstr ""
msgid "Metrics|You're about to permanently delete this metric. This cannot be undone."
msgstr ""
+msgid "Metrics|Your dashboard schema is invalid. Edit the dashboard to correct the YAML schema."
+msgstr ""
msgid "Metrics|e.g. HTTP requests"
msgstr ""

View file

@@ -137,7 +137,7 @@ RSpec.describe Projects::ServicesController do
      let(:params) { project_params(service: service_params) }
      let(:message) { 'Jira activated.' }
-      let(:redirect_url) { project_settings_integrations_path(project) }
+      let(:redirect_url) { edit_project_service_path(project, service) }
      before do
        put :update, params: params

View file

@@ -30,7 +30,7 @@ RSpec.describe 'User activates issue tracker', :js do
    it 'activates the service' do
      expect(page).to have_content("#{tracker} activated.")
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, tracker.parameterize(separator: '_')))
    end
    it 'shows the link in the menu' do
@@ -50,7 +50,7 @@ RSpec.describe 'User activates issue tracker', :js do
      click_test_then_save_integration
      expect(page).to have_content("#{tracker} activated.")
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, tracker.parameterize(separator: '_')))
    end
  end
end
@@ -65,7 +65,7 @@ RSpec.describe 'User activates issue tracker', :js do
    it 'saves but does not activate the service' do
      expect(page).to have_content("#{tracker} settings saved, but not activated.")
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, tracker.parameterize(separator: '_')))
    end
    it 'does not show the external tracker link in the menu' do

View file

@@ -30,7 +30,7 @@ RSpec.describe 'User activates Jira', :js do
    it 'activates the Jira service' do
      expect(page).to have_content('Jira activated.')
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :jira))
    end
    it 'shows the Jira link in the menu' do
@@ -61,7 +61,7 @@ RSpec.describe 'User activates Jira', :js do
      click_test_then_save_integration
      expect(page).to have_content('Jira activated.')
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :jira))
    end
  end
end
@@ -75,7 +75,7 @@ RSpec.describe 'User activates Jira', :js do
    it 'saves but does not activate the Jira service' do
      expect(page).to have_content('Jira settings saved, but not activated.')
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :jira))
    end
    it 'does not show the Jira link in the menu' do

View file

@@ -31,7 +31,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js do
      click_active_toggle
      click_on 'Save changes'
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :mattermost_slash_commands))
      expect(page).to have_content('Mattermost slash commands settings saved, but not activated.')
    end
@@ -41,7 +41,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js do
      fill_in 'service_token', with: token
      click_on 'Save changes'
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :mattermost_slash_commands))
      expect(page).to have_content('Mattermost slash commands activated.')
    end

View file

@@ -24,7 +24,7 @@ RSpec.describe 'Slack slash commands', :js do
      click_active_toggle
      click_on 'Save'
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :slack_slash_commands))
      expect(page).to have_content('Slack slash commands settings saved, but not activated.')
    end
@@ -32,7 +32,7 @@ RSpec.describe 'Slack slash commands', :js do
      fill_in 'Token', with: 'token'
      click_on 'Save'
-      expect(current_path).to eq(project_settings_integrations_path(project))
+      expect(current_path).to eq(edit_project_service_path(project, :slack_slash_commands))
      expect(page).to have_content('Slack slash commands activated.')
    end

View file

@@ -157,6 +157,34 @@ describe('Dashboard', () => {
    });
  });
describe('dashboard validation warning', () => {
it('displays a warning if there are validation warnings', () => {
createMountedWrapper({ hasMetrics: true });
store.commit(
`monitoringDashboard/${types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS}`,
true,
);
return wrapper.vm.$nextTick().then(() => {
expect(createFlash).toHaveBeenCalled();
});
});
it('does not display a warning if there are no validation warnings', () => {
createMountedWrapper({ hasMetrics: true });
store.commit(
`monitoringDashboard/${types.RECEIVE_DASHBOARD_VALIDATION_WARNINGS_SUCCESS}`,
false,
);
return wrapper.vm.$nextTick().then(() => {
expect(createFlash).not.toHaveBeenCalled();
});
});
});
  describe('when the URL contains a reference to a panel', () => {
    let location;

View file

@@ -18,6 +18,7 @@ import {
  fetchEnvironmentsData,
  fetchDashboardData,
  fetchAnnotations,
+  fetchDashboardValidationWarnings,
  toggleStarredValue,
  fetchPrometheusMetric,
  setInitialState,
@@ -35,6 +36,7 @@ import {
} from '~/monitoring/stores/utils';
import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
import getAnnotations from '~/monitoring/queries/getAnnotations.query.graphql';
+import getDashboardValidationWarnings from '~/monitoring/queries/getDashboardValidationWarnings.query.graphql';
import storeState from '~/monitoring/stores/state';
import {
  deploymentData,
@@ -335,6 +337,106 @@ describe('Monitoring store actions', () => {
    });
  });
describe('fetchDashboardValidationWarnings', () => {
let mockMutate;
let mutationVariables;
beforeEach(() => {
state.projectPath = 'gitlab-org/gitlab-test';
state.currentEnvironmentName = 'production';
state.currentDashboard = '.gitlab/dashboards/dashboard_with_warnings.yml';
mockMutate = jest.spyOn(gqClient, 'mutate');
mutationVariables = {
mutation: getDashboardValidationWarnings,
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
dashboardPath: state.currentDashboard,
},
};
});
it('dispatches receiveDashboardValidationWarningsSuccess with true payload when there are warnings', () => {
mockMutate.mockResolvedValue({
data: {
project: {
id: 'gid://gitlab/Project/29',
environments: {
nodes: [
{
name: 'production',
metricsDashboard: {
path: '.gitlab/dashboards/dashboard_errors_test.yml',
schemaValidationWarnings: ["unit: can't be blank"],
},
},
],
},
},
},
});
return testAction(
fetchDashboardValidationWarnings,
null,
state,
[],
[{ type: 'receiveDashboardValidationWarningsSuccess', payload: true }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
);
});
it('dispatches receiveDashboardValidationWarningsSuccess with false payload when there are no warnings', () => {
mockMutate.mockResolvedValue({
data: {
project: {
id: 'gid://gitlab/Project/29',
environments: {
nodes: [
{
name: 'production',
metricsDashboard: {
path: '.gitlab/dashboards/dashboard_errors_test.yml',
schemaValidationWarnings: [],
},
},
],
},
},
},
});
return testAction(
fetchDashboardValidationWarnings,
null,
state,
[],
[{ type: 'receiveDashboardValidationWarningsSuccess', payload: false }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
);
});
it('dispatches receiveDashboardValidationWarningsFailure if the warnings API call fails', () => {
mockMutate.mockRejectedValue({});
return testAction(
fetchDashboardValidationWarnings,
null,
state,
[],
[{ type: 'receiveDashboardValidationWarningsFailure' }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
);
});
});
  describe('Toggles starred value of current dashboard', () => {
    let unstarredDashboard;
    let starredDashboard;
@@ -455,7 +557,7 @@ describe('Monitoring store actions', () => {
      state.dashboardEndpoint = '/dashboard';
    });
-    it('on success, dispatches receive and success actions', () => {
+    it('on success, dispatches receive and success actions, then fetches dashboard warnings', () => {
      document.body.dataset.page = 'projects:environments:metrics';
      mock.onGet(state.dashboardEndpoint).reply(200, response);
@@ -470,6 +572,7 @@ describe('Monitoring store actions', () => {
            type: 'receiveMetricsDashboardSuccess',
            payload: { response },
          },
+          { type: 'fetchDashboardValidationWarnings' },
        ],
      );
    });

View file

@@ -11,7 +11,9 @@ describe Gitlab::Danger::Roulette do
      username: 'backend-maintainer',
      name: 'Backend maintainer',
      role: 'Backend engineer',
-      projects: { 'gitlab' => 'maintainer backend' }
+      projects: { 'gitlab' => 'maintainer backend' },
+      available: true,
+      has_capacity: true
    }
  end
  let(:frontend_reviewer) do
@@ -19,7 +21,9 @@ describe Gitlab::Danger::Roulette do
      username: 'frontend-reviewer',
      name: 'Frontend reviewer',
      role: 'Frontend engineer',
-      projects: { 'gitlab' => 'reviewer frontend' }
+      projects: { 'gitlab' => 'reviewer frontend' },
+      available: true,
+      has_capacity: true
    }
  end
  let(:frontend_maintainer) do
@@ -27,7 +31,9 @@ describe Gitlab::Danger::Roulette do
      username: 'frontend-maintainer',
      name: 'Frontend maintainer',
      role: 'Frontend engineer',
-      projects: { 'gitlab' => "maintainer frontend" }
+      projects: { 'gitlab' => "maintainer frontend" },
+      available: true,
+      has_capacity: true
    }
  end
  let(:software_engineer_in_test) do
@@ -38,7 +44,9 @@ describe Gitlab::Danger::Roulette do
      projects: {
        'gitlab' => 'reviewer qa',
        'gitlab-qa' => 'maintainer'
-      }
+      },
+      available: true,
+      has_capacity: true
    }
  end
  let(:engineering_productivity_reviewer) do
@@ -46,7 +54,9 @@ describe Gitlab::Danger::Roulette do
      username: 'eng-prod-reviewer',
      name: 'EP engineer',
      role: 'Engineering Productivity',
-      projects: { 'gitlab' => 'reviewer backend' }
+      projects: { 'gitlab' => 'reviewer backend' },
+      available: true,
+      has_capacity: true
    }
  end
@@ -73,10 +83,17 @@ describe Gitlab::Danger::Roulette do
  def matching_spin(category, reviewer: { username: nil }, maintainer: { username: nil }, optional: nil)
    satisfy do |spin|
-      spin.category == category &&
-        spin.reviewer&.username == reviewer[:username] &&
-        spin.maintainer&.username == maintainer[:username] &&
-        spin.optional_role == optional
+      bool = spin.category == category
+      bool &&= spin.reviewer&.username == reviewer[:username]
+      bool &&=
+        if maintainer
+          spin.maintainer&.username == maintainer[:username]
+        else
+          spin.maintainer.nil?
+        end
+      bool && spin.optional_role == optional
    end
  end
@@ -85,66 +102,76 @@ describe Gitlab::Danger::Roulette do
  let!(:branch_name) { 'a-branch' }
  let!(:mr_labels) { ['backend', 'devops::create'] }
  let!(:author) { Gitlab::Danger::Teammate.new('username' => 'filipa') }
-  before do
-    [
-      backend_maintainer,
-      frontend_reviewer,
-      frontend_maintainer,
-      software_engineer_in_test,
-      engineering_productivity_reviewer
-    ].each do |person|
-      stub_person_status(instance_double(Gitlab::Danger::Teammate, username: person[:username]), message: 'making GitLab magic')
-    end
+  let(:spins) do
+    # Stub the request at the latest time so that we can modify the raw data, e.g. available and has_capacity fields.
    WebMock
      .stub_request(:get, described_class::ROULETTE_DATA_URL)
      .to_return(body: teammate_json)
+    subject.spin(project, categories, branch_name)
+  end
+  before do
    allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username)
    allow(subject).to receive_message_chain(:gitlab, :mr_labels).and_return(mr_labels)
  end
  context 'when change contains backend category' do
-    it 'assigns backend reviewer and maintainer' do
-      categories = [:backend]
-      spins = subject.spin(project, categories, branch_name)
+    let(:categories) { [:backend] }
+    it 'assigns backend reviewer and maintainer' do
      expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
    end
context 'when teammate is not available' do
before do
backend_maintainer[:available] = false
end
it 'assigns backend reviewer and no maintainer' do
expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: nil))
end
end
context 'when teammate has no capacity' do
before do
backend_maintainer[:has_capacity] = false
end
it 'assigns backend reviewer and no maintainer' do
expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: nil))
end
end
end end
context 'when change contains frontend category' do context 'when change contains frontend category' do
it 'assigns frontend reviewer and maintainer' do let(:categories) { [:frontend] }
categories = [:frontend]
spins = subject.spin(project, categories, branch_name)
it 'assigns frontend reviewer and maintainer' do
expect(spins).to contain_exactly(matching_spin(:frontend, reviewer: frontend_reviewer, maintainer: frontend_maintainer)) expect(spins).to contain_exactly(matching_spin(:frontend, reviewer: frontend_reviewer, maintainer: frontend_maintainer))
end end
end end
context 'when change contains QA category' do context 'when change contains QA category' do
it 'assigns QA reviewer and sets optional QA maintainer' do let(:categories) { [:qa] }
categories = [:qa]
spins = subject.spin(project, categories, branch_name)
it 'assigns QA reviewer and sets optional QA maintainer' do
expect(spins).to contain_exactly(matching_spin(:qa, reviewer: software_engineer_in_test, optional: :maintainer)) expect(spins).to contain_exactly(matching_spin(:qa, reviewer: software_engineer_in_test, optional: :maintainer))
end end
end end
context 'when change contains Engineering Productivity category' do context 'when change contains Engineering Productivity category' do
it 'assigns Engineering Productivity reviewer and fallback to backend maintainer' do let(:categories) { [:engineering_productivity] }
categories = [:engineering_productivity]
spins = subject.spin(project, categories, branch_name)
it 'assigns Engineering Productivity reviewer and fallback to backend maintainer' do
expect(spins).to contain_exactly(matching_spin(:engineering_productivity, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer)) expect(spins).to contain_exactly(matching_spin(:engineering_productivity, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
end end
end end
context 'when change contains test category' do context 'when change contains test category' do
it 'assigns corresponding SET and sets optional test maintainer' do let(:categories) { [:test] }
categories = [:test]
spins = subject.spin(project, categories, branch_name)
it 'assigns corresponding SET and sets optional test maintainer' do
expect(spins).to contain_exactly(matching_spin(:test, reviewer: software_engineer_in_test, optional: :maintainer)) expect(spins).to contain_exactly(matching_spin(:test, reviewer: software_engineer_in_test, optional: :maintainer))
end end
end end
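The restructured spec builds the WebMock body inside let(:spins) so that nested contexts can flip backend_maintainer[:available] or backend_maintainer[:has_capacity] before the JSON is serialized, as the "Stub the request at the latest time" comment above notes. A minimal standalone sketch of that RSpec laziness, with illustrative names rather than GitLab code:

# Standalone sketch: let blocks are memoized lazily, so a nested context's
# before hook can mutate the hash before the JSON body is first built.
require 'json'

RSpec.describe 'lazy let evaluation' do
  let(:teammate) { { available: true } }
  let(:payload)  { teammate.to_json } # built only when first referenced

  context 'when a before hook flips the flag' do
    before { teammate[:available] = false }

    it 'serializes the mutated value' do
      expect(payload).to eq('{"available":false}')
    end
  end
end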
@@ -217,20 +244,13 @@ describe Gitlab::Danger::Roulette do
end end
describe '#spin_for_person' do describe '#spin_for_person' do
let(:person1) { Gitlab::Danger::Teammate.new('username' => 'rymai') } let(:person1) { Gitlab::Danger::Teammate.new('username' => 'rymai', 'available' => true, 'has_capacity' => true) }
let(:person2) { Gitlab::Danger::Teammate.new('username' => 'godfat') } let(:person2) { Gitlab::Danger::Teammate.new('username' => 'godfat', 'available' => true, 'has_capacity' => true) }
let(:author) { Gitlab::Danger::Teammate.new('username' => 'filipa') } let(:author) { Gitlab::Danger::Teammate.new('username' => 'filipa', 'available' => true, 'has_capacity' => true) }
let(:ooo) { Gitlab::Danger::Teammate.new('username' => 'jacopo-beschi') } let(:ooo) { Gitlab::Danger::Teammate.new('username' => 'jacopo-beschi', 'available' => false, 'has_capacity' => true) }
let(:no_capacity) { Gitlab::Danger::Teammate.new('username' => 'uncharged') } let(:no_capacity) { Gitlab::Danger::Teammate.new('username' => 'uncharged', 'available' => true, 'has_capacity' => false) }
before do before do
stub_person_status(person1, message: 'making GitLab magic')
stub_person_status(person2, message: 'making GitLab magic')
stub_person_status(ooo, message: 'OOO till 15th')
stub_person_status(no_capacity, message: 'At capacity for the next few days', emoji: 'red_circle')
# we don't stub Filipa, as she is the author and
# we should not fire request checking for her
allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username) allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username)
end end
@@ -254,14 +274,4 @@ describe Gitlab::Danger::Roulette do
expect(subject.spin_for_person([no_capacity], random: Random.new)).to be_nil expect(subject.spin_for_person([no_capacity], random: Random.new)).to be_nil
end end
end end
private
def stub_person_status(person, message: 'dummy message', emoji: 'unicorn')
body = { message: message, emoji: emoji }.to_json
WebMock
.stub_request(:get, "https://gitlab.com/api/v4/users/#{person.username}/status")
.to_return(body: body)
end
end end
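With the per-user status stubs gone, availability and capacity come straight from the roulette data, and an unavailable or at-capacity maintainer leaves the maintainer slot nil. A hedged sketch of that selection logic, using Candidate, available? and has_capacity? as stand-ins for the real teammate objects rather than the actual Gitlab::Danger::Roulette implementation:

# Hedged sketch only - not the actual Gitlab::Danger::Roulette code.
# Assumes reviewers expose #available? and #has_capacity?, mirroring the
# 'available' and 'has_capacity' fields the spec now puts in the roulette data.
Candidate = Struct.new(:username, :available, :has_capacity) do
  def available?
    available
  end

  def has_capacity?
    has_capacity
  end
end

def spin_for_person(people, random: Random.new)
  people
    .select { |person| person.available? && person.has_capacity? }
    .sample(random: random)
end

pool = [
  Candidate.new('rymai', true, true),
  Candidate.new('jacopo-beschi', false, true), # unavailable
  Candidate.new('uncharged', true, false)      # no capacity
]

puts spin_for_person(pool, random: Random.new)&.username # => "rymai"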
View file
@@ -114,79 +114,4 @@ describe Gitlab::Danger::Teammate do
expect(subject.maintainer?(project, :frontend, labels)).to be_falsey expect(subject.maintainer?(project, :frontend, labels)).to be_falsey
end end
end end
describe '#status' do
let(:capabilities) { ['dish washing'] }
context 'with empty cache' do
context 'for successful request' do
it 'returns the response' do
mock_status = double(does_not: 'matter')
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(mock_status)
expect(subject.status).to be mock_status
end
end
context 'for failing request' do
it 'returns nil' do
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_raise(Gitlab::Danger::RequestHelper::HTTPError.new)
expect(subject.status).to be nil
end
end
end
context 'with filled cache' do
it 'returns the cached response' do
mock_status = double(does_not: 'matter')
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(mock_status)
subject.status
expect(Gitlab::Danger::RequestHelper).not_to receive(:http_get_json)
expect(subject.status).to be mock_status
end
end
end
describe '#available?' do
using RSpec::Parameterized::TableSyntax
let(:capabilities) { ['dry head'] }
where(:status, :result) do
{} | true
{ message: 'dear reader' } | true
{ message: 'OOO: massage' } | false
{ message: 'love it SOOO much' } | false
{ emoji: 'red_circle' } | false
{ emoji: 'palm_tree' } | false
{ emoji: 'beach' } | false
{ emoji: 'beach_umbrella' } | false
{ emoji: 'beach_with_umbrella' } | false
{ emoji: nil } | true
{ emoji: '' } | true
{ emoji: 'dancer' } | true
end
with_them do
before do
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(status&.stringify_keys)
end
it { expect(subject.available?).to be result }
end
it 'returns true if request fails' do
expect(Gitlab::Danger::RequestHelper)
.to receive(:http_get_json)
.and_raise(Gitlab::Danger::RequestHelper::HTTPError.new)
expect(subject.available?).to be true
end
end
end end
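The deleted #status and #available? examples exercised a per-user HTTP status lookup (OOO messages, beach or red_circle emoji). Judging by the constructor arguments used in the roulette spec above, those flags now arrive with the teammate data itself. A hedged sketch of that shape, not the real Gitlab::Danger::Teammate class:

# Illustrative sketch; the real Gitlab::Danger::Teammate may differ in detail.
# Availability used to be derived from an HTTP status lookup; here it is read
# directly from the 'available' and 'has_capacity' fields of the entry.
class TeammateSketch
  attr_reader :username

  def initialize(options = {})
    @username     = options['username']
    @available    = options['available']
    @has_capacity = options['has_capacity']
  end

  def available?
    !!@available
  end

  def has_capacity?
    !!@has_capacity
  end
end

t = TeammateSketch.new('username' => 'rymai', 'available' => true, 'has_capacity' => true)
puts t.available?    # => true
puts t.has_capacity? # => true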
View file
@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200427064130_cleanup_optimistic_locking_nulls_pt2_fixed.rb') require Rails.root.join('db', 'post_migrate', '20200427064130_cleanup_optimistic_locking_nulls_pt2_fixed.rb')
describe CleanupOptimisticLockingNullsPt2Fixed, :migration do describe CleanupOptimisticLockingNullsPt2Fixed, :migration, schema: 20200219193117 do
test_tables = %w(ci_stages ci_builds ci_pipelines).freeze test_tables = %w(ci_stages ci_builds ci_pipelines).freeze
test_tables.each do |table| test_tables.each do |table|
let(table.to_sym) { table(table.to_sym) } let(table.to_sym) { table(table.to_sym) }
View file
@@ -95,29 +95,6 @@ describe Issue do
end end
end end
describe 'locking' do
using RSpec::Parameterized::TableSyntax
where(:lock_version) do
[
[0],
["0"]
]
end
with_them do
it 'works when an issue has a NULL lock_version' do
issue = create(:issue)
described_class.where(id: issue.id).update_all('lock_version = NULL')
issue.update!(lock_version: lock_version, title: 'locking test')
expect(issue.reload.title).to eq('locking test')
end
end
end
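The removed examples (a matching block is removed from the MergeRequest spec below) updated rows whose lock_version was NULL, the kind of rows the CleanupOptimisticLockingNullsPt2Fixed migration above backfills. For context, a self-contained sketch of ActiveRecord optimistic locking with lock_version, assuming the activerecord and sqlite3 gems and an illustrative Note model rather than GitLab code:

# Illustrative only: shows the StaleObjectError behaviour lock_version provides.
require 'active_record'

ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')

ActiveRecord::Schema.define do
  create_table :notes, force: true do |t|
    t.string  :title
    t.integer :lock_version, default: 0, null: false
  end
end

class Note < ActiveRecord::Base; end

note  = Note.create!(title: 'first')
stale = Note.find(note.id)

note.update!(title: 'second') # bumps lock_version from 0 to 1

begin
  stale.update!(title: 'conflict') # still carries lock_version 0
rescue ActiveRecord::StaleObjectError
  puts 'stale update rejected'
end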
describe '.simple_sorts' do describe '.simple_sorts' do
it 'includes all keys' do it 'includes all keys' do
expect(described_class.simple_sorts.keys).to include( expect(described_class.simple_sorts.keys).to include(
View file
@@ -55,29 +55,6 @@ describe MergeRequest do
end end
end end
describe 'locking' do
using RSpec::Parameterized::TableSyntax
where(:lock_version) do
[
[0],
["0"]
]
end
with_them do
it 'works when a merge request has a NULL lock_version' do
merge_request = create(:merge_request)
described_class.where(id: merge_request.id).update_all('lock_version = NULL')
merge_request.update!(lock_version: lock_version, title: 'locking test')
expect(merge_request.reload.title).to eq('locking test')
end
end
end
describe '#squash_in_progress?' do describe '#squash_in_progress?' do
let(:repo_path) do let(:repo_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do Gitlab::GitalyClient::StorageSettings.allow_disk_access do
View file
@@ -467,21 +467,6 @@ describe API::Internal::Base do
expect(json_response["git_config_options"]).to include("uploadpack.allowFilter=true") expect(json_response["git_config_options"]).to include("uploadpack.allowFilter=true")
expect(json_response["git_config_options"]).to include("uploadpack.allowAnySHA1InWant=true") expect(json_response["git_config_options"]).to include("uploadpack.allowAnySHA1InWant=true")
end end
context 'when gitaly_upload_pack_filter feature flag is disabled' do
before do
stub_feature_flags(gitaly_upload_pack_filter: false)
end
it 'returns only maxInputSize and not partial clone git config' do
push(key, project)
expect(json_response["git_config_options"]).to be_present
expect(json_response["git_config_options"]).to include("receive.maxInputSize=1048576")
expect(json_response["git_config_options"]).not_to include("uploadpack.allowFilter=true")
expect(json_response["git_config_options"]).not_to include("uploadpack.allowAnySHA1InWant=true")
end
end
end end
context 'when receive_max_input_size is empty' do context 'when receive_max_input_size is empty' do
@@ -496,18 +481,6 @@ describe API::Internal::Base do
expect(json_response["git_config_options"]).to include("uploadpack.allowFilter=true") expect(json_response["git_config_options"]).to include("uploadpack.allowFilter=true")
expect(json_response["git_config_options"]).to include("uploadpack.allowAnySHA1InWant=true") expect(json_response["git_config_options"]).to include("uploadpack.allowAnySHA1InWant=true")
end end
context 'when gitaly_upload_pack_filter feature flag is disabled' do
before do
stub_feature_flags(gitaly_upload_pack_filter: false)
end
it 'returns an empty git config' do
push(key, project)
expect(json_response["git_config_options"]).to be_empty
end
end
end end
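Both removed contexts covered the gitaly_upload_pack_filter feature flag being disabled, so the remaining examples treat the partial-clone options as always advertised. A hypothetical spec snippet along those lines, relying on the surrounding suite's push helper, key, project and json_response rather than standing alone:

# Hypothetical spec snippet; assumes the surrounding suite's `push` helper,
# `key`, `project` and `json_response`, as used in the examples above.
it 'always advertises partial clone git config' do
  push(key, project)

  expect(json_response['git_config_options']).to include(
    'uploadpack.allowFilter=true',
    'uploadpack.allowAnySHA1InWant=true'
  )
end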
end end