Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-08 06:13:27 +00:00
parent 849d5912a8
commit 99bcbec56c
36 changed files with 375 additions and 64 deletions

View file

@ -50,21 +50,21 @@ export default {
if (authorUsername) {
filteredSearchValue.push({
type: 'author_username',
type: 'author',
value: { data: authorUsername, operator: '=' },
});
}
if (assigneeUsername) {
filteredSearchValue.push({
type: 'assignee_username',
type: 'assignee',
value: { data: assigneeUsername, operator: '=' },
});
}
if (types) {
filteredSearchValue.push({
type: 'types',
type: 'type',
value: { data: types, operator: '=' },
});
}
@ -72,7 +72,7 @@ export default {
if (labelName?.length) {
filteredSearchValue.push(
...labelName.map((label) => ({
type: 'label_name',
type: 'label',
value: { data: label, operator: '=' },
})),
);
@ -101,7 +101,7 @@ export default {
if (myReactionEmoji) {
filteredSearchValue.push({
type: 'my_reaction_emoji',
type: 'my-reaction',
value: { data: myReactionEmoji, operator: '=' },
});
}
@ -115,14 +115,14 @@ export default {
if (epicId) {
filteredSearchValue.push({
type: 'epic_id',
type: 'epic',
value: { data: epicId, operator: '=' },
});
}
if (this.filterParams['not[authorUsername]']) {
filteredSearchValue.push({
type: 'author_username',
type: 'author',
value: { data: this.filterParams['not[authorUsername]'], operator: '!=' },
});
}
@ -150,7 +150,7 @@ export default {
if (this.filterParams['not[assigneeUsername]']) {
filteredSearchValue.push({
type: 'assignee_username',
type: 'assignee',
value: { data: this.filterParams['not[assigneeUsername]'], operator: '!=' },
});
}
@ -158,7 +158,7 @@ export default {
if (this.filterParams['not[labelName]']) {
filteredSearchValue.push(
...this.filterParams['not[labelName]'].map((label) => ({
type: 'label_name',
type: 'label',
value: { data: label, operator: '!=' },
})),
);
@ -166,21 +166,21 @@ export default {
if (this.filterParams['not[types]']) {
filteredSearchValue.push({
type: 'types',
type: 'type',
value: { data: this.filterParams['not[types]'], operator: '!=' },
});
}
if (this.filterParams['not[epicId]']) {
filteredSearchValue.push({
type: 'epic_id',
type: 'epic',
value: { data: this.filterParams['not[epicId]'], operator: '!=' },
});
}
if (this.filterParams['not[myReactionEmoji]']) {
filteredSearchValue.push({
type: 'my_reaction_emoji',
type: 'my-reaction',
value: { data: this.filterParams['not[myReactionEmoji]'], operator: '!=' },
});
}
@ -281,16 +281,16 @@ export default {
filters.forEach((filter) => {
switch (filter.type) {
case 'author_username':
case 'author':
filterParams.authorUsername = filter.value.data;
break;
case 'assignee_username':
case 'assignee':
filterParams.assigneeUsername = filter.value.data;
break;
case 'types':
case 'type':
filterParams.types = filter.value.data;
break;
case 'label_name':
case 'label':
labels.push(filter.value.data);
break;
case 'milestone':
@ -302,10 +302,10 @@ export default {
case 'weight':
filterParams.weight = filter.value.data;
break;
case 'epic_id':
case 'epic':
filterParams.epicId = filter.value.data;
break;
case 'my_reaction_emoji':
case 'my-reaction':
filterParams.myReactionEmoji = filter.value.data;
break;
case 'release':

View file

@ -80,7 +80,7 @@ export default {
{
icon: 'user',
title: assignee,
type: 'assignee_username',
type: 'assignee',
operators: OPERATOR_IS_AND_IS_NOT,
token: AuthorToken,
unique: true,
@ -90,7 +90,7 @@ export default {
{
icon: 'pencil',
title: author,
type: 'author_username',
type: 'author',
operators: OPERATOR_IS_AND_IS_NOT,
symbol: '@',
token: AuthorToken,
@ -101,7 +101,7 @@ export default {
{
icon: 'labels',
title: label,
type: 'label_name',
type: 'label',
operators: OPERATOR_IS_AND_IS_NOT,
token: LabelToken,
unique: false,
@ -111,7 +111,7 @@ export default {
...(this.isSignedIn
? [
{
type: 'my_reaction_emoji',
type: 'my-reaction',
title: TOKEN_TITLE_MY_REACTION,
icon: 'thumb-up',
token: EmojiToken,
@ -146,7 +146,7 @@ export default {
{
icon: 'issues',
title: type,
type: 'types',
type: 'type',
token: GlFilteredSearchToken,
unique: true,
options: [

View file

@ -452,7 +452,7 @@ module Ci
end
def retryable?
return false if retried? || archived?
return false if retried? || archived? || deployment_rejected?
success? || failed? || canceled?
end
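For illustration, a minimal console sketch of the effect of the new guard, mirroring the build spec added later in this commit; the job lookup is a placeholder and the session itself is not part of the change:

# Sketch only: a job dropped with the new deployment_rejected failure reason
# (enum value 22, added in the Enums hunk below) can no longer be retried.
build = Ci::Build.find(job_id)      # job_id is a placeholder
build.drop!(:deployment_rejected)   # fails the job and records the failure reason
build.retryable?                    # => false, because deployment_rejected? is now true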

View file

@ -146,7 +146,7 @@ class CommitStatus < Ci::ApplicationRecord
end
event :drop do
transition [:created, :waiting_for_resource, :preparing, :pending, :running, :scheduled] => :failed
transition [:created, :waiting_for_resource, :preparing, :pending, :running, :manual, :scheduled] => :failed
end
event :success do

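As a hedged illustration of the widened drop transition above (this mirrors the commit status spec added later in this commit; the record lookup is a placeholder):

# Sketch only: with :manual added to the transition list,
# a manual job can now be dropped straight to failed.
commit_status = CommitStatus.find(status_id)   # status_id is a placeholder
commit_status.status                           # => "manual"
commit_status.drop!                            # this transition was not permitted before this change
commit_status.status                           # => "failed"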
View file

@ -28,6 +28,7 @@ module Enums
trace_size_exceeded: 19,
builds_disabled: 20,
environment_creation_failure: 21,
deployment_rejected: 22,
insufficient_bridge_permissions: 1_001,
downstream_bridge_project_not_found: 1_002,
invalid_bridge_trigger: 1_003,

View file

@ -15,7 +15,7 @@ module ImportState
def refresh_jid_expiration
return unless jid
Gitlab::SidekiqStatus.set(jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
Gitlab::SidekiqStatus.set(jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2)
end
def self.jid_by(project_id:, status:)

View file

@ -46,9 +46,10 @@ class Deployment < ApplicationRecord
scope :for_project, -> (project_id) { where(project_id: project_id) }
scope :for_projects, -> (projects) { where(project: projects) }
scope :visible, -> { where(status: %i[running success failed canceled]) }
scope :visible, -> { where(status: %i[running success failed canceled blocked]) }
scope :stoppable, -> { where.not(on_stop: nil).where.not(deployable_id: nil).success }
scope :active, -> { where(status: %i[created running]) }
scope :upcoming, -> { where(status: %i[blocked running]) }
scope :older_than, -> (deployment) { where('deployments.id < ?', deployment.id) }
scope :with_api_entity_associations, -> { preload({ deployable: { runner: [], tags: [], user: [], job_artifacts_archive: [] } }) }
@ -64,6 +65,10 @@ class Deployment < ApplicationRecord
transition created: :running
end
event :block do
transition created: :blocked
end
event :succeed do
transition any - [:success] => :success
end
@ -140,7 +145,8 @@ class Deployment < ApplicationRecord
success: 2,
failed: 3,
canceled: 4,
skipped: 5
skipped: 5,
blocked: 6
}
def self.archivables_in(project, limit:)
@ -391,6 +397,8 @@ class Deployment < ApplicationRecord
cancel!
when 'skipped'
skip!
when 'blocked'
block!
else
raise ArgumentError, "The status #{status.inspect} is invalid"
end
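A short sketch of how the new blocked status flows through the model, based on the scopes, the block event, and the update_status mapping added above; the deployment lookup is a placeholder and the snippet is illustrative only:

# Sketch only: 'blocked' maps to block! (valid from :created) and to the new enum value 6.
deployment = Deployment.find(deployment_id)    # deployment_id is a placeholder
deployment.update_status('blocked')            # dispatches to block!
deployment.blocked?                            # => true
Deployment.visible.exists?(deployment.id)      # => true, blocked deployments are now visible
Deployment.upcoming.exists?(deployment.id)     # => true, blocked deployments count as upcoming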

View file

@ -31,7 +31,7 @@ class Environment < ApplicationRecord
has_one :last_visible_deployable, through: :last_visible_deployment, source: 'deployable', source_type: 'CommitStatus', disable_joins: true
has_one :last_visible_pipeline, through: :last_visible_deployable, source: 'pipeline', disable_joins: true
has_one :upcoming_deployment, -> { running.distinct_on_environment }, class_name: 'Deployment', inverse_of: :environment
has_one :upcoming_deployment, -> { upcoming.distinct_on_environment }, class_name: 'Deployment', inverse_of: :environment
has_one :latest_opened_most_severe_alert, -> { order_severity_with_open_prometheus_alert }, class_name: 'AlertManagement::Alert', inverse_of: :environment
before_validation :generate_slug, if: ->(env) { env.slug.blank? }

View file

@ -29,7 +29,8 @@ class CommitStatusPresenter < Gitlab::View::Presenter::Delegated
no_matching_runner: 'No matching runner available',
trace_size_exceeded: 'The job log size limit was reached',
builds_disabled: 'The CI/CD is disabled for this project',
environment_creation_failure: 'This job could not be executed because it would create an environment with an invalid parameter.'
environment_creation_failure: 'This job could not be executed because it would create an environment with an invalid parameter.',
deployment_rejected: 'This deployment job was rejected.'
}.freeze
TROUBLESHOOTING_DOC = {

View file

@ -0,0 +1,8 @@
---
name: deployment_approvals
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74932
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347342
milestone: '14.6'
type: development
group: group::release
default_enabled: false

View file

@ -0,0 +1,7 @@
# frozen_string_literal: true
class AddRequiredApprovalCountToProtectedEnvironments < Gitlab::Database::Migration[1.0]
def change
add_column :protected_environments, :required_approval_count, :integer, default: 0, null: false
end
end

View file

@ -0,0 +1,13 @@
# frozen_string_literal: true
class CreateDeploymentApprovals < Gitlab::Database::Migration[1.0]
def change
create_table :deployment_approvals do |t|
t.bigint :deployment_id, null: false
t.bigint :user_id, null: false, index: true
t.timestamps_with_timezone null: false
t.integer :status, limit: 2, null: false
t.index [:deployment_id, :user_id], unique: true
end
end
end

View file

@ -0,0 +1,15 @@
# frozen_string_literal: true
class AddUserForeignKeyToDeploymentApprovals < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
add_concurrent_foreign_key :deployment_approvals, :users, column: :user_id
end
def down
with_lock_retries do
remove_foreign_key :deployment_approvals, :users
end
end
end

View file

@ -0,0 +1,15 @@
# frozen_string_literal: true
class AddDeploymentForeignKeyToDeploymentApprovals < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
add_concurrent_foreign_key :deployment_approvals, :deployments, column: :deployment_id
end
def down
with_lock_retries do
remove_foreign_key :deployment_approvals, :deployments
end
end
end

View file

@ -0,0 +1,15 @@
# frozen_string_literal: true
class AddProtectedEnvironmentsRequiredApprovalCountCheckConstraint < Gitlab::Database::Migration[1.0]
CONSTRAINT_NAME = 'protected_environments_required_approval_count_positive'
disable_ddl_transaction!
def up
add_check_constraint :protected_environments, 'required_approval_count >= 0', CONSTRAINT_NAME
end
def down
remove_check_constraint :protected_environments, CONSTRAINT_NAME
end
end

View file

@ -0,0 +1 @@
ac2e376ad32f0e2fd45d8695f13a0b46c2d5964b881f79e3a30a51ac85d4359b

View file

@ -0,0 +1 @@
caaf92f12bf0ed144d99f629c9e5d64fd45832a90bbd743e40febcdc4802cd59

View file

@ -0,0 +1 @@
ac21109099642d5934c16b3f0130736a587c4f20143552545c2b524062ff71e0

View file

@ -0,0 +1 @@
61c949b42338b248a0950cfafc82d58816c3fec44a2bf41c4ecb4cf09340a424

View file

@ -0,0 +1 @@
d1ed3ddf51c0bcebbac2a8dee05aa168daa35129110a463ac296ff2e640b0dbd

View file

@ -13410,6 +13410,24 @@ CREATE SEQUENCE deploy_tokens_id_seq
ALTER SEQUENCE deploy_tokens_id_seq OWNED BY deploy_tokens.id;
CREATE TABLE deployment_approvals (
id bigint NOT NULL,
deployment_id bigint NOT NULL,
user_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
status smallint NOT NULL
);
CREATE SEQUENCE deployment_approvals_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE deployment_approvals_id_seq OWNED BY deployment_approvals.id;
CREATE TABLE deployment_clusters (
deployment_id integer NOT NULL,
cluster_id integer NOT NULL,
@ -18696,7 +18714,9 @@ CREATE TABLE protected_environments (
updated_at timestamp with time zone NOT NULL,
name character varying NOT NULL,
group_id bigint,
CONSTRAINT protected_environments_project_or_group_existence CHECK (((project_id IS NULL) <> (group_id IS NULL)))
required_approval_count integer DEFAULT 0 NOT NULL,
CONSTRAINT protected_environments_project_or_group_existence CHECK (((project_id IS NULL) <> (group_id IS NULL))),
CONSTRAINT protected_environments_required_approval_count_positive CHECK ((required_approval_count >= 0))
);
CREATE SEQUENCE protected_environments_id_seq
@ -21517,6 +21537,8 @@ ALTER TABLE ONLY deploy_keys_projects ALTER COLUMN id SET DEFAULT nextval('deplo
ALTER TABLE ONLY deploy_tokens ALTER COLUMN id SET DEFAULT nextval('deploy_tokens_id_seq'::regclass);
ALTER TABLE ONLY deployment_approvals ALTER COLUMN id SET DEFAULT nextval('deployment_approvals_id_seq'::regclass);
ALTER TABLE ONLY deployments ALTER COLUMN id SET DEFAULT nextval('deployments_id_seq'::regclass);
ALTER TABLE ONLY description_versions ALTER COLUMN id SET DEFAULT nextval('description_versions_id_seq'::regclass);
@ -23086,6 +23108,9 @@ ALTER TABLE ONLY deploy_keys_projects
ALTER TABLE ONLY deploy_tokens
ADD CONSTRAINT deploy_tokens_pkey PRIMARY KEY (id);
ALTER TABLE ONLY deployment_approvals
ADD CONSTRAINT deployment_approvals_pkey PRIMARY KEY (id);
ALTER TABLE ONLY deployment_clusters
ADD CONSTRAINT deployment_clusters_pkey PRIMARY KEY (deployment_id);
@ -25831,6 +25856,10 @@ CREATE INDEX index_deploy_tokens_on_token_and_expires_at_and_id ON deploy_tokens
CREATE UNIQUE INDEX index_deploy_tokens_on_token_encrypted ON deploy_tokens USING btree (token_encrypted);
CREATE UNIQUE INDEX index_deployment_approvals_on_deployment_id_and_user_id ON deployment_approvals USING btree (deployment_id, user_id);
CREATE INDEX index_deployment_approvals_on_user_id ON deployment_approvals USING btree (user_id);
CREATE UNIQUE INDEX index_deployment_clusters_on_cluster_id_and_deployment_id ON deployment_clusters USING btree (cluster_id, deployment_id);
CREATE INDEX index_deployment_merge_requests_on_merge_request_id ON deployment_merge_requests USING btree (merge_request_id);
@ -28974,6 +29003,9 @@ ALTER TABLE ONLY lists
ALTER TABLE ONLY ci_unit_test_failures
ADD CONSTRAINT fk_0f09856e1f FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY deployment_approvals
ADD CONSTRAINT fk_0f58311058 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_pages_metadata
ADD CONSTRAINT fk_0fd5b22688 FOREIGN KEY (pages_deployment_id) REFERENCES pages_deployments(id) ON DELETE SET NULL;
@ -29082,6 +29114,9 @@ ALTER TABLE ONLY coverage_fuzzing_corpuses
ALTER TABLE ONLY agent_group_authorizations
ADD CONSTRAINT fk_2c9f941965 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY deployment_approvals
ADD CONSTRAINT fk_2d060dfc73 FOREIGN KEY (deployment_id) REFERENCES deployments(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_freeze_periods
ADD CONSTRAINT fk_2e02bbd1a6 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View file

@ -960,9 +960,16 @@ An on-demand scan can be run in active or passive mode:
### View on-demand DAST scans
To view running and completed on-demand DAST scans for a project, go to
To view running, completed, and scheduled on-demand DAST scans for a project, go to
**Security & Compliance > On-demand Scans** in the left sidebar.
- To view both running and completed scans, select **All**.
- To view running scans only, select **Running**.
- To view finished scans, select **Finished**. A finished scan is a scan that either succeeded,
failed, or was canceled.
- To view scheduled scans, select **Scheduled**. It shows on-demand scans that have a schedule
set up. Those are _not_ included in the **All** tab.
### Run an on-demand DAST scan
Prerequisites:

View file

@ -313,7 +313,7 @@ To support the following package managers, the GitLab analyzers proceed in two s
| sbt | [1.3.8](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/v2.23.0/config/.tool-versions#L4) | [1.0.4](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/master/.gitlab-ci.yml#L263), [1.1.4](https://gitlab.com/gitlab-org/security-products/tests/scala-sbt-multiproject/-/blob/main/project/build.properties#L1), [1.1.6](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/master/.gitlab-ci.yml#L272), [1.2.8](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/master/.gitlab-ci.yml#L281), [1.3.12](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/master/.gitlab-ci.yml#L290), [1.4.6](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/master/.gitlab-ci.yml#L299) |
| Maven | [3.6.3](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/v2.23.0/config/.tool-versions#L3) | [3.6.3](https://gitlab.com/gitlab-org/security-products/tests/java-maven/-/blob/master/pom.xml#L3) |
| Gradle | [6.7.1](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven/-/blob/v2.23.0/config/.tool-versions#L5) | [5.6.4](https://gitlab.com/gitlab-org/security-products/tests/java-gradle/-/blob/master/gradle/wrapper/gradle-wrapper.properties#L3), [6.5](https://gitlab.com/gitlab-org/security-products/tests/java-gradle/-/blob/java-14/gradle/wrapper/gradle-wrapper.properties#L3), [6.7-rc-1](https://gitlab.com/gitlab-org/security-products/tests/java-gradle/-/blob/java-15/gradle/wrapper/gradle-wrapper.properties#L3), [6.9](https://gitlab.com/gitlab-org/security-products/tests/java-gradle/-/blob/java-14-gradle-6-9/gradle/wrapper/gradle-wrapper.properties#L3), [7.0-rc-2](https://gitlab.com/gitlab-org/security-products/tests/java-gradle/-/blob/java-16/gradle/wrapper/gradle-wrapper.properties#L3) |
| setuptools | [50.3.2](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/v2.29.9/Dockerfile#L27) | |
| setuptools | [50.3.2](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/v2.29.9/Dockerfile#L27) | [57.5.0](https://gitlab.com/gitlab-org/security-products/tests/python-setuptools/-/blob/main/setup.py) |
| pip | [20.2.4](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/v2.29.9/Dockerfile#L26) | [20.x](https://gitlab.com/gitlab-org/security-products/tests/python-pip/-/blob/master/requirements.txt) |
| Pipenv | [2018.11.26](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.18.4/requirements.txt#L13) | [2018.11.26](https://gitlab.com/gitlab-org/security-products/tests/python-pipenv/-/blob/pipfile-lock-FREEZE/Pipfile.lock#L6)<sup><b><a href="#exported-dependency-information-notes-2">2</a></b></sup>, [2018.11.26](https://gitlab.com/gitlab-org/security-products/tests/python-pipenv/-/blob/master/Pipfile) |

View file

@ -131,7 +131,7 @@ domain to the same website, for instance, `example.com` and `www.example.com`.
They require:
- A DNS A record for the domain.
- A DNS `A` record for the domain.
- A DNS `ALIAS`/`CNAME` record for the subdomain.
- A DNS `TXT` record for each.
@ -148,7 +148,7 @@ If you're using Cloudflare, check
> **Notes**:
>
> - **Do not** use a CNAME record if you want to point your
> - **Do not** use a `CNAME` record if you want to point your
`domain.com` to your GitLab Pages site. Use an `A` record instead.
> - **Do not** add any special chars after the default Pages
domain. For example, don't point `subdomain.domain.com` to
@ -232,7 +232,7 @@ If you use Cloudflare, you can redirect `www` to `domain.com`
without adding both `www.domain.com` and `domain.com` to GitLab.
To do so, you can use Cloudflare's page rules associated to a
CNAME record to redirect `www.domain.com` to `domain.com`. You
`CNAME` record to redirect `www.domain.com` to `domain.com`. You
can use the following setup:
1. In Cloudflare, create a DNS `A` record pointing `domain.com` to `35.185.44.232`.

View file

@ -34,7 +34,8 @@ module Gitlab
no_matching_runner: 'no matching runner available',
trace_size_exceeded: 'log size limit exceeded',
builds_disabled: 'project builds are disabled',
environment_creation_failure: 'environment creation failure'
environment_creation_failure: 'environment creation failure',
deployment_rejected: 'deployment rejected'
}.freeze
private_constant :REASONS

View file

@ -164,6 +164,7 @@ dependency_proxy_group_settings: :gitlab_main
dependency_proxy_image_ttl_group_policies: :gitlab_main
dependency_proxy_manifests: :gitlab_main
deploy_keys_projects: :gitlab_main
deployment_approvals: :gitlab_main
deployment_clusters: :gitlab_main
deployment_merge_requests: :gitlab_main
deployments: :gitlab_main

View file

@ -13,7 +13,7 @@ module Gitlab
def self.set_jid(import_state)
jid = generate_jid(import_state)
Gitlab::SidekiqStatus.set(jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
Gitlab::SidekiqStatus.set(jid, Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2)
import_state.update_column(:jid, jid)
end

View file

@ -6,14 +6,16 @@ RSpec.describe 'Issue board filters', :js do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:board) { create(:board, project: project) }
let_it_be(:project_label) { create(:label, project: project, title: 'Label') }
let_it_be(:milestone_1) { create(:milestone, project: project) }
let_it_be(:milestone_2) { create(:milestone, project: project) }
let_it_be(:release) { create(:release, tag: 'v1.0', project: project, milestones: [milestone_1]) }
let_it_be(:release_2) { create(:release, tag: 'v2.0', project: project, milestones: [milestone_2]) }
let_it_be(:issue) { create(:issue, project: project, milestone: milestone_1) }
let_it_be(:issue_2) { create(:issue, project: project, milestone: milestone_2) }
let_it_be(:issue_1) { create(:issue, project: project, milestone: milestone_1, author: user) }
let_it_be(:issue_2) { create(:labeled_issue, project: project, milestone: milestone_2, assignees: [user], labels: [project_label]) }
let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue_1) }
let(:filtered_search) { find('[data-testid="issue-board-filtered-search"]') }
let(:filter_input) { find('.gl-filtered-search-term-input')}
let(:filter_dropdown) { find('.gl-filtered-search-suggestion-list') }
let(:filter_first_suggestion) { find('.gl-filtered-search-suggestion-list').first('.gl-filtered-search-suggestion') }
@ -28,6 +30,58 @@ RSpec.describe 'Issue board filters', :js do
visit_project_board
end
shared_examples 'loads all the users when opened' do
it 'and submit one as filter', :aggregate_failures do
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
expect_filtered_search_dropdown_results(filter_dropdown, 3)
click_on user.username
filter_submit.click
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
expect(find('.board-card')).to have_content(issue.title)
end
end
describe 'filters by assignee' do
before do
set_filter('assignee')
end
it_behaves_like 'loads all the users when opened' do
let(:issue) { issue_2 }
end
end
describe 'filters by author' do
before do
set_filter('author')
end
it_behaves_like 'loads all the users when opened' do
let(:issue) { issue_1 }
end
end
describe 'filters by label' do
before do
set_filter('label')
end
it 'loads all the labels when opened and submit one as filter', :aggregate_failures do
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
expect_filtered_search_dropdown_results(filter_dropdown, 3)
filter_dropdown.click_on project_label.title
filter_submit.click
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
expect(find('.board-card')).to have_content(issue_2.title)
end
end
describe 'filters by releases' do
before do
set_filter('release')
@ -42,6 +96,7 @@ RSpec.describe 'Issue board filters', :js do
filter_submit.click
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
expect(find('.board-card')).to have_content(issue_1.title)
end
end
@ -68,6 +123,44 @@ RSpec.describe 'Issue board filters', :js do
end
end
describe 'filters by reaction emoji' do
before do
set_filter('my-reaction')
end
it 'loads all the emojis when opened and submit one as filter', :aggregate_failures do
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2)
expect_filtered_search_dropdown_results(filter_dropdown, 3)
click_on 'thumbsup'
filter_submit.click
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
expect(find('.board-card')).to have_content(issue_1.title)
end
end
describe 'filters by type' do
let_it_be(:incident) { create(:incident, project: project)}
before do
set_filter('type')
end
it 'loads all the types when opened and submit one as filter', :aggregate_failures do
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 3)
expect_filtered_search_dropdown_results(filter_dropdown, 2)
click_on 'Incident'
filter_submit.click
expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1)
expect(find('.board-card')).to have_content(incident.title)
end
end
def set_filter(filter)
filter_input.click
filter_input.set("#{filter}:")

View file

@ -18,7 +18,7 @@ describe('BoardFilteredSearch', () => {
{
icon: 'labels',
title: __('Label'),
type: 'label_name',
type: 'label',
operators: [
{ value: '=', description: 'is' },
{ value: '!=', description: 'is not' },
@ -31,7 +31,7 @@ describe('BoardFilteredSearch', () => {
{
icon: 'pencil',
title: __('Author'),
type: 'author_username',
type: 'author',
operators: [
{ value: '=', description: 'is' },
{ value: '!=', description: 'is not' },
@ -97,7 +97,7 @@ describe('BoardFilteredSearch', () => {
createComponent({ props: { eeFilters: { labelName: ['label'] } } });
expect(findFilteredSearch().props('initialFilterValue')).toEqual([
{ type: 'label_name', value: { data: 'label', operator: '=' } },
{ type: 'label', value: { data: 'label', operator: '=' } },
]);
});
});
@ -117,11 +117,11 @@ describe('BoardFilteredSearch', () => {
it('sets the url params to the correct results', async () => {
const mockFilters = [
{ type: 'author_username', value: { data: 'root', operator: '=' } },
{ type: 'label_name', value: { data: 'label', operator: '=' } },
{ type: 'label_name', value: { data: 'label2', operator: '=' } },
{ type: 'author', value: { data: 'root', operator: '=' } },
{ type: 'label', value: { data: 'label', operator: '=' } },
{ type: 'label', value: { data: 'label2', operator: '=' } },
{ type: 'milestone', value: { data: 'New Milestone', operator: '=' } },
{ type: 'types', value: { data: 'INCIDENT', operator: '=' } },
{ type: 'type', value: { data: 'INCIDENT', operator: '=' } },
{ type: 'weight', value: { data: '2', operator: '=' } },
{ type: 'iteration', value: { data: '3341', operator: '=' } },
{ type: 'release', value: { data: 'v1.0.0', operator: '=' } },
@ -147,8 +147,8 @@ describe('BoardFilteredSearch', () => {
it('passes the correct props to FilterSearchBar', () => {
expect(findFilteredSearch().props('initialFilterValue')).toEqual([
{ type: 'author_username', value: { data: 'root', operator: '=' } },
{ type: 'label_name', value: { data: 'label', operator: '=' } },
{ type: 'author', value: { data: 'root', operator: '=' } },
{ type: 'label', value: { data: 'label', operator: '=' } },
]);
});
});

View file

@ -544,7 +544,7 @@ export const mockMoveData = {
};
export const mockEmojiToken = {
type: 'my_reaction_emoji',
type: 'my-reaction',
icon: 'thumb-up',
title: 'My-Reaction',
unique: true,
@ -556,7 +556,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji)
{
icon: 'user',
title: __('Assignee'),
type: 'assignee_username',
type: 'assignee',
operators: [
{ value: '=', description: 'is' },
{ value: '!=', description: 'is not' },
@ -569,7 +569,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji)
{
icon: 'pencil',
title: __('Author'),
type: 'author_username',
type: 'author',
operators: [
{ value: '=', description: 'is' },
{ value: '!=', description: 'is not' },
@ -583,7 +583,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji)
{
icon: 'labels',
title: __('Label'),
type: 'label_name',
type: 'label',
operators: [
{ value: '=', description: 'is' },
{ value: '!=', description: 'is not' },
@ -606,7 +606,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji)
{
icon: 'issues',
title: __('Type'),
type: 'types',
type: 'type',
token: GlFilteredSearchToken,
unique: true,
options: [

View file

@ -8,7 +8,7 @@ RSpec.describe Gitlab::Import::SetAsyncJid do
it 'sets the JID in Redis' do
expect(Gitlab::SidekiqStatus)
.to receive(:set)
.with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
.with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2)
.and_call_original
described_class.set_jid(project.import_state)

View file

@ -1994,6 +1994,14 @@ RSpec.describe Ci::Build do
it { is_expected.not_to be_retryable }
end
context 'when deployment is rejected' do
before do
build.drop!(:deployment_rejected)
end
it { is_expected.not_to be_retryable }
end
end
end

View file

@ -765,6 +765,14 @@ RSpec.describe CommitStatus do
it_behaves_like 'incrementing failure reason counter'
end
context 'when status is manual' do
let(:commit_status) { create(:commit_status, :manual) }
it 'is able to be dropped' do
expect { commit_status.drop! }.to change { commit_status.status }.from('manual').to('failed')
end
end
end
describe 'ensure stage assignment' do

View file

@ -268,6 +268,29 @@ RSpec.describe Deployment do
end
end
context 'when deployment is blocked' do
let(:deployment) { create(:deployment, :created) }
it 'has correct status' do
deployment.block!
expect(deployment).to be_blocked
expect(deployment.finished_at).to be_nil
end
it 'does not execute Deployments::LinkMergeRequestWorker asynchronously' do
expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
deployment.block!
end
it 'does not execute Deployments::HooksWorker' do
expect(Deployments::HooksWorker).not_to receive(:perform_async)
deployment.block!
end
end
describe 'synching status to Jira' do
let_it_be(:project) { create(:project, :repository) }
@ -463,11 +486,12 @@ RSpec.describe Deployment do
subject { described_class.active }
it 'retrieves the active deployments' do
deployment1 = create(:deployment, status: :created )
deployment2 = create(:deployment, status: :running )
create(:deployment, status: :failed )
create(:deployment, status: :canceled )
deployment1 = create(:deployment, status: :created)
deployment2 = create(:deployment, status: :running)
create(:deployment, status: :failed)
create(:deployment, status: :canceled)
create(:deployment, status: :skipped)
create(:deployment, status: :blocked)
is_expected.to contain_exactly(deployment1, deployment2)
end
@ -527,9 +551,25 @@ RSpec.describe Deployment do
deployment2 = create(:deployment, status: :success)
deployment3 = create(:deployment, status: :failed)
deployment4 = create(:deployment, status: :canceled)
deployment5 = create(:deployment, status: :blocked)
create(:deployment, status: :skipped)
is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4)
is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4, deployment5)
end
end
describe 'upcoming' do
subject { described_class.upcoming }
it 'retrieves the upcoming deployments' do
deployment1 = create(:deployment, status: :running)
deployment2 = create(:deployment, status: :blocked)
create(:deployment, status: :success)
create(:deployment, status: :failed)
create(:deployment, status: :canceled)
create(:deployment, status: :skipped)
is_expected.to contain_exactly(deployment1, deployment2)
end
end
end
@ -855,6 +895,27 @@ RSpec.describe Deployment do
expect(deploy.update_status('created')).to eq(false)
end
context 'mapping status to event' do
using RSpec::Parameterized::TableSyntax
where(:status, :method) do
'running' | :run!
'success' | :succeed!
'failed' | :drop!
'canceled' | :cancel!
'skipped' | :skip!
'blocked' | :block!
end
with_them do
it 'calls the correct method for the given status' do
expect(deploy).to receive(method)
deploy.update_status(status)
end
end
end
end
describe '#sync_status_with' do

View file

@ -947,6 +947,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to eq(deployment) }
end
context 'when environment has a blocked deployment' do
let!(:deployment) { create(:deployment, :blocked, environment: environment, project: project) }
it { is_expected.to eq(deployment) }
end
end
describe '#has_terminals?' do

View file

@ -70,10 +70,13 @@ RSpec.describe Deployments::OlderDeploymentsDropService do
let(:older_deployment) { create(:deployment, :created, environment: environment, deployable: build) }
let(:build) { create(:ci_build, :manual) }
it 'does not drop any builds nor track the exception' do
expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
it 'drops the older deployment' do
deployable = older_deployment.deployable
expect(deployable.failed?).to be_falsey
expect { subject }.not_to change { Ci::Build.failed.count }
subject
expect(deployable.reload.failed?).to be_truthy
end
end