Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-11-16 09:13:21 +00:00
parent ccca6cec34
commit 2c90b9b579
62 changed files with 704 additions and 637 deletions

View File

@ -30,6 +30,17 @@ Are there any other stages or teams involved that need to be kept in the loop?
<!-- Describe the expected outcome when rolling out this feature -->
### When is the feature viable?
<!-- What are the settings we need to configure in order to make this feature viable? -->
<!--
Example below:
1. Enable service ping collection
`ApplicationSetting.first.update(usage_ping_enabled: true)`
-->
### What might happen if this goes wrong?
<!-- Should the feature flag be turned off? Any MRs that need to be rolled back? Communication that needs to happen? What are some things you can think of that could go wrong - data loss or broken pages? -->

View File

@ -467,6 +467,7 @@ RSpec/AnyInstanceOf:
- 'ee/spec/models/project_import_state_spec.rb'
- 'ee/spec/models/push_rule_spec.rb'
- 'ee/spec/presenters/ci/pipeline_presenter_spec.rb'
- 'ee/spec/presenters/projects/security/configuration_presenter_spec.rb'
- 'ee/spec/requests/api/geo_nodes_spec.rb'
- 'ee/spec/requests/api/graphql/mutations/dast_on_demand_scans/create_spec.rb'
- 'ee/spec/requests/api/graphql/mutations/dast_site_profiles/delete_spec.rb'
@ -2399,14 +2400,9 @@ Database/MultipleDatabases:
- 'lib/gitlab/database/load_balancing/sticking.rb'
- 'lib/gitlab/database/migrations/observers/migration_observer.rb'
- 'lib/gitlab/database/migrations/observers/query_log.rb'
- 'lib/gitlab/database/multi_threaded_migration.rb'
- 'lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb'
- 'lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin.rb'
- 'lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin.rb'
- 'lib/gitlab/database.rb'
- 'lib/gitlab/database/schema_cache_with_renamed_table.rb'
- 'lib/gitlab/database/schema_migrations/context.rb'
- 'lib/gitlab/database/schema_version_files.rb'
- 'lib/gitlab/database/with_lock_retries.rb'
- 'lib/gitlab/gitlab_import/importer.rb'
- 'lib/gitlab/health_checks/db_check.rb'

View File

@ -42,6 +42,7 @@ export default {
types,
weight,
epicId,
myReactionEmoji,
} = this.filterParams;
const filteredSearchValue = [];
@ -89,6 +90,13 @@ export default {
});
}
if (myReactionEmoji) {
filteredSearchValue.push({
type: 'my_reaction_emoji',
value: { data: myReactionEmoji, operator: '=' },
});
}
if (epicId) {
filteredSearchValue.push({
type: 'epic_id',
@ -147,6 +155,13 @@ export default {
});
}
if (this.filterParams['not[myReactionEmoji]']) {
filteredSearchValue.push({
type: 'my_reaction_emoji',
value: { data: this.filterParams['not[myReactionEmoji]'], operator: '!=' },
});
}
if (search) {
filteredSearchValue.push(search);
}
@ -163,6 +178,7 @@ export default {
types,
weight,
epicId,
myReactionEmoji,
} = this.filterParams;
let notParams = {};
@ -177,6 +193,7 @@ export default {
'not[milestone_title]': this.filterParams.not.milestoneTitle,
'not[weight]': this.filterParams.not.weight,
'not[epic_id]': this.filterParams.not.epicId,
'not[my_reaction_emoji]': this.filterParams.not.myReactionEmoji,
},
undefined,
);
@ -192,6 +209,7 @@ export default {
types,
weight,
epic_id: getIdFromGraphQLId(epicId),
my_reaction_emoji: myReactionEmoji,
};
},
},
@ -249,6 +267,9 @@ export default {
case 'epic_id':
filterParams.epicId = filter.value.data;
break;
case 'my_reaction_emoji':
filterParams.myReactionEmoji = filter.value.data;
break;
case 'filtered-search-term':
if (filter.value.data) plainText.push(filter.value.data);
break;
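
Taken together, the hunks above wire the new `my_reaction_emoji` token through both directions of the filter bar: URL query params become filtered-search tokens, and tokens become API params again. A minimal sketch of that round trip, with invented values (only the object shapes come from the diff above):

const filterParams = { myReactionEmoji: 'thumbsup', not: { myReactionEmoji: 'star' } };

// URL params -> filtered-search tokens, mirroring the branches above.
const filteredSearchValue = [];
if (filterParams.myReactionEmoji) {
  filteredSearchValue.push({
    type: 'my_reaction_emoji',
    value: { data: filterParams.myReactionEmoji, operator: '=' },
  });
}
if (filterParams.not.myReactionEmoji) {
  filteredSearchValue.push({
    type: 'my_reaction_emoji',
    value: { data: filterParams.not.myReactionEmoji, operator: '!=' },
  });
}

// Tokens -> API params, mirroring the last two hunks above:
// { my_reaction_emoji: 'thumbsup', 'not[my_reaction_emoji]': 'star' }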

View File

@ -1,14 +1,20 @@
<script>
import { GlFilteredSearchToken } from '@gitlab/ui';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { mapActions } from 'vuex';
import BoardFilteredSearch from 'ee_else_ce/boards/components/board_filtered_search.vue';
import { BoardType } from '~/boards/constants';
import axios from '~/lib/utils/axios_utils';
import issueBoardFilters from '~/boards/issue_board_filters';
import { TYPE_USER } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
import { DEFAULT_MILESTONES_GRAPHQL } from '~/vue_shared/components/filtered_search_bar/constants';
import {
DEFAULT_MILESTONES_GRAPHQL,
TOKEN_TITLE_MY_REACTION,
} from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
@ -33,6 +39,7 @@ export default {
isNot: __('is not'),
},
components: { BoardFilteredSearch },
inject: ['isSignedIn'],
props: {
fullPath: {
type: String,
@ -113,6 +120,32 @@ export default {
symbol: '~',
fetchLabels,
},
...(this.isSignedIn
? [
{
type: 'my_reaction_emoji',
title: TOKEN_TITLE_MY_REACTION,
icon: 'thumb-up',
token: EmojiToken,
unique: true,
fetchEmojis: (search = '') => {
// TODO: Switch to GraphQL query when backend is ready: https://gitlab.com/gitlab-org/gitlab/-/issues/339694
return axios
.get(`${gon.relative_url_root || ''}/-/autocomplete/award_emojis`)
.then(({ data }) => {
if (search) {
return {
data: fuzzaldrinPlus.filter(data, search, {
key: ['name'],
}),
};
}
return { data };
});
},
},
]
: []),
{
type: 'milestone_title',
title: milestone,
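
Because the award-emoji autocomplete endpoint returns the full list, the `fetchEmojis` callback above filters client-side with fuzzaldrin-plus. A standalone sketch of that call (the sample emoji names are invented; the options mirror the hunk above):

import fuzzaldrinPlus from 'fuzzaldrin-plus';

// Shaped like the /-/autocomplete/award_emojis response.
const data = [{ name: 'thumbsup' }, { name: 'thumbsdown' }, { name: 'tada' }];

// Fuzzy-match the search term against each emoji's `name`.
const matches = fuzzaldrinPlus.filter(data, 'thumb', { key: ['name'] });
// matches => [{ name: 'thumbsup' }, { name: 'thumbsdown' }]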

View File

@ -13,7 +13,7 @@ import FilteredSearchBoards from '~/boards/filtered_search_boards';
import initBoardsFilteredSearch from '~/boards/mount_filtered_search_issue_boards';
import store from '~/boards/stores';
import toggleFocusMode from '~/boards/toggle_focus';
import { NavigationType, parseBoolean } from '~/lib/utils/common_utils';
import { NavigationType, isLoggedIn, parseBoolean } from '~/lib/utils/common_utils';
import { fullBoardId } from './boards_util';
import boardConfigToggle from './config_toggle';
import initNewBoard from './new_board';
@ -110,7 +110,7 @@ export default () => {
});
if (gon?.features?.issueBoardsFilteredSearch) {
initBoardsFilteredSearch(apolloProvider, parseBoolean($boardApp.dataset.epicFeatureAvailable));
initBoardsFilteredSearch(apolloProvider, isLoggedIn());
}
mountBoardApp($boardApp);

View File

@ -4,7 +4,7 @@ import store from '~/boards/stores';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { queryToObject } from '~/lib/utils/url_utility';
export default (apolloProvider) => {
export default (apolloProvider, isSignedIn) => {
const el = document.getElementById('js-issue-board-filtered-search');
const rawFilterParams = queryToObject(window.location.search, { gatherArrays: true });
@ -20,6 +20,7 @@ export default (apolloProvider) => {
el,
provide: {
initialFilterParams,
isSignedIn,
},
store, // TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/324094
apolloProvider,

View File

@ -98,15 +98,17 @@ export default {
return this.noteableData.noteableType;
},
allDiscussions() {
let skeletonNotes = [];
if (this.renderSkeleton || this.isLoading) {
const prerenderedNotesCount = parseInt(this.notesData.prerenderedNotesCount, 10) || 0;
return new Array(prerenderedNotesCount).fill({
skeletonNotes = new Array(prerenderedNotesCount).fill({
isSkeletonNote: true,
});
}
return this.discussions;
return this.discussions.concat(skeletonNotes);
},
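
This change keeps already-fetched discussions on screen and appends skeleton placeholders while more are loading, instead of replacing the whole list with skeletons. A small sketch of the computed result (counts invented):

const discussions = [{ id: 'discussion-1' }];
const skeletonNotes = new Array(2).fill({ isSkeletonNote: true });

discussions.concat(skeletonNotes);
// => [{ id: 'discussion-1' }, { isSkeletonNote: true }, { isSkeletonNote: true }]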
canReply() {
return this.userCanReply && !this.commentsDisabled && !this.timelineEnabled;

View File

@ -70,7 +70,7 @@ export const setUserData = ({ commit }, data) => commit(types.SET_USER_DATA, dat
export const setLastFetchedAt = ({ commit }, data) => commit(types.SET_LAST_FETCHED_AT, data);
export const setInitialNotes = ({ commit }, discussions) =>
commit(types.SET_INITIAL_DISCUSSIONS, discussions);
commit(types.ADD_OR_UPDATE_DISCUSSIONS, discussions);
export const setTargetNoteHash = ({ commit }, data) => commit(types.SET_TARGET_NOTE_HASH, data);
@ -89,14 +89,51 @@ export const fetchDiscussions = ({ commit, dispatch }, { path, filter, persistFi
? { params: { notes_filter: filter, persist_filter: persistFilter } }
: null;
if (window.gon?.features?.paginatedIssueDiscussions) {
return dispatch('fetchDiscussionsBatch', { path, config, perPage: 20 });
}
return axios.get(path, config).then(({ data }) => {
commit(types.SET_INITIAL_DISCUSSIONS, data);
commit(types.ADD_OR_UPDATE_DISCUSSIONS, data);
commit(types.SET_FETCHING_DISCUSSIONS, false);
dispatch('updateResolvableDiscussionsCounts');
});
};
export const fetchDiscussionsBatch = ({ commit, dispatch }, { path, config, cursor, perPage }) => {
const params = { ...config?.params, per_page: perPage };
if (cursor) {
params.cursor = cursor;
}
return axios.get(path, { params }).then(({ data, headers }) => {
commit(types.ADD_OR_UPDATE_DISCUSSIONS, data);
if (headers['x-next-page-cursor']) {
const nextConfig = { ...config };
if (config?.params?.persist_filter) {
delete nextConfig.params.notes_filter;
delete nextConfig.params.persist_filter;
}
return dispatch('fetchDiscussionsBatch', {
path,
config: nextConfig,
cursor: headers['x-next-page-cursor'],
perPage: Math.min(Math.round(perPage * 1.5), 100),
});
}
commit(types.SET_FETCHING_DISCUSSIONS, false);
dispatch('updateResolvableDiscussionsCounts');
return undefined;
});
};
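
Each recursive batch request grows the page size by half, capped at 100, so later pages need fewer round trips. The sequence produced by the `perPage` expression above, starting from the initial 20:

const nextPerPage = (perPage) => Math.min(Math.round(perPage * 1.5), 100);

let perPage = 20;
const sizes = [perPage];
while (perPage < 100) {
  perPage = nextPerPage(perPage);
  sizes.push(perPage);
}
// sizes => [20, 30, 45, 68, 100]; every batch after that stays at 100.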
export const updateDiscussion = ({ commit, state }, discussion) => {
commit(types.UPDATE_DISCUSSION, discussion);

View File

@ -1,11 +1,11 @@
export const ADD_NEW_NOTE = 'ADD_NEW_NOTE';
export const ADD_NEW_REPLY_TO_DISCUSSION = 'ADD_NEW_REPLY_TO_DISCUSSION';
export const ADD_OR_UPDATE_DISCUSSIONS = 'ADD_OR_UPDATE_DISCUSSIONS';
export const DELETE_NOTE = 'DELETE_NOTE';
export const REMOVE_PLACEHOLDER_NOTES = 'REMOVE_PLACEHOLDER_NOTES';
export const SET_NOTES_DATA = 'SET_NOTES_DATA';
export const SET_NOTEABLE_DATA = 'SET_NOTEABLE_DATA';
export const SET_USER_DATA = 'SET_USER_DATA';
export const SET_INITIAL_DISCUSSIONS = 'SET_INITIAL_DISCUSSIONS';
export const SET_LAST_FETCHED_AT = 'SET_LAST_FETCHED_AT';
export const SET_TARGET_NOTE_HASH = 'SET_TARGET_NOTE_HASH';
export const SHOW_PLACEHOLDER_NOTE = 'SHOW_PLACEHOLDER_NOTE';

View File

@ -129,8 +129,8 @@ export default {
Object.assign(state, { userData: data });
},
[types.SET_INITIAL_DISCUSSIONS](state, discussionsData) {
const discussions = discussionsData.reduce((acc, d) => {
[types.ADD_OR_UPDATE_DISCUSSIONS](state, discussionsData) {
discussionsData.forEach((d) => {
const discussion = { ...d };
const diffData = {};
@ -145,27 +145,38 @@ export default {
// To support legacy notes, should be a very rare case.
if (discussion.individual_note && discussion.notes.length > 1) {
discussion.notes.forEach((n) => {
acc.push({
const newDiscussion = {
...discussion,
...diffData,
notes: [n], // override notes array to only have one item to mimic individual_note
});
};
const oldDiscussion = state.discussions.find(
(existingDiscussion) =>
existingDiscussion.id === discussion.id && existingDiscussion.notes[0].id === n.id,
);
if (oldDiscussion) {
state.discussions.splice(state.discussions.indexOf(oldDiscussion), 1, newDiscussion);
} else {
state.discussions.push(newDiscussion);
}
});
} else {
const oldNote = utils.findNoteObjectById(state.discussions, discussion.id);
const oldDiscussion = utils.findNoteObjectById(state.discussions, discussion.id);
acc.push({
...discussion,
...diffData,
expanded: oldNote ? oldNote.expanded : discussion.expanded,
});
if (oldDiscussion) {
state.discussions.splice(state.discussions.indexOf(oldDiscussion), 1, {
...discussion,
...diffData,
expanded: oldDiscussion.expanded,
});
} else {
state.discussions.push({ ...discussion, ...diffData });
}
}
return acc;
}, []);
Object.assign(state, { discussions });
});
},
[types.SET_LAST_FETCHED_AT](state, fetchedAt) {
Object.assign(state, { lastFetchedAt: fetchedAt });
},

View File

@ -149,8 +149,20 @@ module IssuableActions
.includes(:noteable)
.fresh
if paginated_discussions
paginated_discussions_by_type = paginated_discussions.records.group_by(&:table_name)
notes = if paginated_discussions_by_type['notes'].present?
notes.with_discussion_ids(paginated_discussions_by_type['notes'].map(&:discussion_id))
else
notes.none
end
response.headers['X-Next-Page-Cursor'] = paginated_discussions.cursor_for_next_page if paginated_discussions.has_next_page?
end
if notes_filter != UserPreference::NOTES_FILTERS[:only_comments]
notes = ResourceEvents::MergeIntoNotesService.new(issuable, current_user).execute(notes)
notes = ResourceEvents::MergeIntoNotesService.new(issuable, current_user, paginated_notes: paginated_discussions_by_type).execute(notes)
end
notes = prepare_notes_for_rendering(notes)
@ -170,6 +182,17 @@ module IssuableActions
private
def paginated_discussions
return if params[:per_page].blank?
return unless issuable.instance_of?(Issue) && Feature.enabled?(:paginated_issue_discussions, project, default_enabled: :yaml)
strong_memoize(:paginated_discussions) do
issuable
.discussion_root_note_ids(notes_filter: notes_filter)
.keyset_paginate(cursor: params[:cursor], per_page: params[:per_page].to_i)
end
end
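
On the wire, the contract between this controller code and the frontend batch fetcher is two request params (`per_page`, `cursor`) and one response header (`X-Next-Page-Cursor`). A hedged consumer sketch using only names that appear in this commit (the fetch-based transport is illustrative, not the store action itself):

// Cursor values are opaque strings minted by keyset_paginate; the header is
// absent on the last page, which terminates the loop.
async function fetchAllDiscussions(path, perPage = 20) {
  const all = [];
  let cursor;
  do {
    const params = new URLSearchParams({ per_page: perPage });
    if (cursor) params.set('cursor', cursor);
    const response = await fetch(`${path}?${params}`);
    all.push(...(await response.json()));
    cursor = response.headers.get('X-Next-Page-Cursor');
  } while (cursor);
  return all;
}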
def notes_filter
strong_memoize(:notes_filter) do
notes_filter_param = params[:notes_filter]&.to_i

View File

@ -66,7 +66,7 @@ class Profiles::TwoFactorAuthsController < Profiles::ApplicationController
render 'create'
else
@error = _('Invalid pin code')
@error = { message: _('Invalid pin code.') }
@qr_code = build_qr_code
if Feature.enabled?(:webauthn)

View File

@ -52,6 +52,7 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:confidential_notes, @project, default_enabled: :yaml)
push_frontend_feature_flag(:issue_assignees_widget, @project, default_enabled: :yaml)
push_frontend_feature_flag(:labels_widget, @project, default_enabled: :yaml)
push_frontend_feature_flag(:paginated_issue_discussions, @project, default_enabled: :yaml)
experiment(:invite_members_in_comment, namespace: @project.root_ancestor) do |experiment_instance|
experiment_instance.exclude! unless helpers.can_admin_project_member?(@project)

View File

@ -167,11 +167,11 @@ module NotesHelper
}
end
def discussions_path(issuable)
def discussions_path(issuable, **params)
if issuable.is_a?(Issue)
discussions_project_issue_path(@project, issuable, format: :json)
discussions_project_issue_path(@project, issuable, params.merge(format: :json))
else
discussions_project_merge_request_path(@project, issuable, format: :json)
discussions_project_merge_request_path(@project, issuable, params.merge(format: :json))
end
end

View File

@ -98,6 +98,27 @@ module Noteable
.order('MIN(created_at), MIN(id)')
end
# This does not consider OutOfContextDiscussions in MRs
# where notes from commits are overridden so that they have
# the same discussion_id
def discussion_root_note_ids(notes_filter:)
relations = []
relations << discussion_notes.select(
"'notes' AS table_name",
'discussion_id',
'MIN(id) AS id',
'MIN(created_at) AS created_at'
).with_notes_filter(notes_filter)
.group(:discussion_id)
if notes_filter != UserPreference::NOTES_FILTERS[:only_comments]
relations += synthetic_note_ids_relations
end
Note.from_union(relations, remove_duplicates: false).fresh
end
def capped_notes_count(max)
notes.limit(max).count
end
@ -179,6 +200,18 @@ module Noteable
project_email.sub('@', "-#{iid}@")
end
private
# Synthetic system notes don't have discussion IDs because these are generated dynamically
# in Ruby. These are always root notes anyway so we don't need to group by discussion ID.
def synthetic_note_ids_relations
[
resource_label_events.select("'resource_label_events'", "'NULL'", :id, :created_at),
resource_milestone_events.select("'resource_milestone_events'", "'NULL'", :id, :created_at),
resource_state_events.select("'resource_state_events'", "'NULL'", :id, :created_at)
]
end
end
Noteable.extend(Noteable::ClassMethods)

View File

@ -114,6 +114,7 @@ class Note < ApplicationRecord
scope :fresh, -> { order_created_asc.with_order_id_asc }
scope :updated_after, ->(time) { where('updated_at > ?', time) }
scope :with_updated_at, ->(time) { where(updated_at: time) }
scope :with_discussion_ids, ->(discussion_ids) { where(discussion_id: discussion_ids) }
scope :with_suggestions, -> { joins(:suggestions) }
scope :inc_author, -> { includes(:author) }
scope :with_api_entity_associations, -> { preload(:note_diff_file, :author) }

View File

@ -1,100 +0,0 @@
# frozen_string_literal: true
module Projects
module Security
class ConfigurationPresenter < Gitlab::View::Presenter::Delegated
include AutoDevopsHelper
include ::Security::LatestPipelineInformation
delegator_override_with Gitlab::Utils::StrongMemoize
presents ::Project, as: :project
def to_h
{
auto_devops_enabled: auto_devops_source?,
auto_devops_help_page_path: help_page_path('topics/autodevops/index'),
auto_devops_path: auto_devops_settings_path(project),
can_enable_auto_devops: can_enable_auto_devops?,
features: features,
help_page_path: help_page_path('user/application_security/index'),
latest_pipeline_path: latest_pipeline_path,
# TODO: gitlab_ci_present will incorrectly report `false` if the CI/CD configuration file name
# has been customized and a file with the given custom name exists in the repo. This edge case
# will be addressed in https://gitlab.com/gitlab-org/gitlab/-/issues/342465
gitlab_ci_present: project.repository.gitlab_ci_yml.present?,
gitlab_ci_history_path: gitlab_ci_history_path,
auto_fix_enabled: autofix_enabled,
can_toggle_auto_fix_settings: can_toggle_autofix,
auto_fix_user_path: auto_fix_user_path
}
end
def to_html_data_attribute
data = to_h
data[:features] = data[:features].to_json
data[:auto_fix_enabled] = data[:auto_fix_enabled].to_json
data
end
private
def autofix_enabled; end
def auto_fix_user_path; end
def can_enable_auto_devops?
feature_available?(:builds, current_user) &&
can?(current_user, :admin_project, self) &&
!archived?
end
def can_toggle_autofix; end
def gitlab_ci_history_path
return '' if project.empty_repo?
gitlab_ci = ::Gitlab::FileDetector::PATTERNS[:gitlab_ci]
::Gitlab::Routing.url_helpers.project_blame_path(project, File.join(project.default_branch_or_main, gitlab_ci))
end
def features
scans = scan_types.map do |scan_type|
scan(scan_type, configured: scanner_enabled?(scan_type))
end
# These scans are "fake" (non-job) entries. Add them manually.
scans << scan(:corpus_management, configured: true)
scans << scan(:dast_profiles, configured: true)
end
def latest_pipeline_path
return help_page_path('ci/pipelines') unless latest_default_branch_pipeline
project_pipeline_path(self, latest_default_branch_pipeline)
end
def scan(type, configured: false)
scan = ::Gitlab::Security::ScanConfiguration.new(project: project, type: type, configured: configured)
{
type: scan.type,
configured: scan.configured?,
configuration_path: scan.configuration_path,
available: scan.available?
}
end
def scan_types
::Security::SecurityJobsFinder.allowed_job_types + ::Security::LicenseComplianceJobsFinder.allowed_job_types
end
def project_settings
project.security_setting
end
end
end
end
Projects::Security::ConfigurationPresenter.prepend_mod_with('Projects::Security::ConfigurationPresenter')

View File

@ -24,10 +24,18 @@ module ResourceEvents
private
def apply_common_filters(events)
events = apply_pagination(events)
events = apply_last_fetched_at(events)
apply_fetch_until(events)
end
def apply_pagination(events)
return events if params[:paginated_notes].nil?
return events.none if params[:paginated_notes][table_name].blank?
events.id_in(params[:paginated_notes][table_name].map(&:id))
end
def apply_last_fetched_at(events)
return events unless params[:last_fetched_at].present?
@ -47,5 +55,9 @@ module ResourceEvents
resource.project || resource.group
end
end
def table_name
raise NotImplementedError
end
end
end

View File

@ -23,5 +23,9 @@ module ResourceEvents
events.group_by { |event| event.discussion_id }
end
def table_name
'resource_label_events'
end
end
end

View File

@ -21,5 +21,9 @@ module ResourceEvents
events = resource.resource_milestone_events.includes(user: :status) # rubocop: disable CodeReuse/ActiveRecord
apply_common_filters(events)
end
def table_name
'resource_milestone_events'
end
end
end

View File

@ -16,5 +16,9 @@ module ResourceEvents
events = resource.resource_state_events.includes(user: :status) # rubocop: disable CodeReuse/ActiveRecord
apply_common_filters(events)
end
def table_name
'resource_state_events'
end
end
end

View File

@ -43,7 +43,9 @@
.gl-alert.gl-alert-danger.gl-mb-5
.gl-alert-container
.gl-alert-content
= @error
%p.gl-alert-body.gl-mb-0
= @error[:message]
= link_to _('Try the troubleshooting steps here.'), help_page_path('user/profile/account/two_factor_authentication.md', anchor: 'troubleshooting'), target: '_blank', rel: 'noopener noreferrer'
.form-group
= label_tag :pin_code, _('Pin code'), class: "label-bold"
= text_field_tag :pin_code, nil, class: "form-control gl-form-input", required: true, data: { qa_selector: 'pin_code_field' }

View File

@ -1,4 +1,4 @@
- add_page_startup_api_call discussions_path(@issue)
- add_page_startup_api_call Feature.enabled?(:paginated_issue_discussions, @project, default_enabled: :yaml) ? discussions_path(@issue, per_page: 20) : discussions_path(@issue)
- @gfm_form = true

View File

@ -0,0 +1,8 @@
---
name: paginated_issue_discussions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69933
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345351
milestone: '14.5'
type: development
group: group::project management
default_enabled: false

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class PopulateDefaultValueForPersonalAccessTokensPrefix < Gitlab::Database::Migration[1.0]
def up
execute(
<<-SQL
UPDATE
application_settings
SET
personal_access_token_prefix = default
WHERE
personal_access_token_prefix IS NULL
SQL
)
end
def down
# no-op
end
end

View File

@ -0,0 +1 @@
0c5627518093f6261679940402cbd756a91bf4617a37eecdbbc82dc57856dcec

View File

@ -12855,7 +12855,6 @@ Represents vulnerability finding of a security report on the pipeline.
| <a id="projectrequestaccessenabled"></a>`requestAccessEnabled` | [`Boolean`](#boolean) | Indicates if users can request member access to the project. |
| <a id="projectrequirementstatescount"></a>`requirementStatesCount` | [`RequirementStatesCount`](#requirementstatescount) | Number of requirements for the project by their state. |
| <a id="projectsastciconfiguration"></a>`sastCiConfiguration` | [`SastCiConfiguration`](#sastciconfiguration) | SAST CI configuration for the project. |
| <a id="projectscanexecutionpolicies"></a>`scanExecutionPolicies` | [`ScanExecutionPolicyConnection`](#scanexecutionpolicyconnection) | Scan Execution Policies of the project. (see [Connections](#connections)) |
| <a id="projectsecuritydashboardpath"></a>`securityDashboardPath` | [`String`](#string) | Path to project's security dashboard. |
| <a id="projectsecurityscanners"></a>`securityScanners` | [`SecurityScanners`](#securityscanners) | Information about security analyzers used in the project. |
| <a id="projectsentryerrors"></a>`sentryErrors` | [`SentryErrorCollection`](#sentryerrorcollection) | Paginated collection of Sentry errors on the project. |
@ -13589,6 +13588,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="projectrequirementssort"></a>`sort` | [`Sort`](#sort) | List requirements by sort order. |
| <a id="projectrequirementsstate"></a>`state` | [`RequirementState`](#requirementstate) | Filter requirements by state. |
##### `Project.scanExecutionPolicies`
Scan Execution Policies of the project.
Returns [`ScanExecutionPolicyConnection`](#scanexecutionpolicyconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectscanexecutionpoliciesactionscantypes"></a>`actionScanTypes` | [`[SecurityReportTypeEnum!]`](#securityreporttypeenum) | Filters policies by the action scan type. Only these scan types are supported: `dast`, `secret_detection`, `cluster_image_scanning`, `container_scanning`. |
##### `Project.sentryDetailedError`
Detailed version of a Sentry error on the project.

View File

@ -6,6 +6,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# DingTalk OAuth 2.0 OmniAuth provider **(FREE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/341898) in GitLab 14.5.
You can sign in to GitLab using your DingTalk account.
Sign in to DingTalk Open Platform and create an application on it. DingTalk generates a client ID and secret key for you to use.

View File

@ -4,7 +4,7 @@ group: Composition Analysis
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# SPDX license list import **(PREMIUM SELF)**
# SPDX license list import **(ULTIMATE SELF)**
GitLab provides a Rake task for uploading a fresh copy of the [SPDX license list](https://spdx.org/licenses/)
to a GitLab instance. This list is needed for matching the names of [License Compliance policies](../user/compliance/license_compliance/index.md).

View File

@ -6,6 +6,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# ZenTao product integration **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/338178) in GitLab 14.5.
[ZenTao](https://www.zentao.net/) is a web-based project management platform.
## Configure ZenTao

View File

@ -239,7 +239,7 @@ module API
# rubocop: disable CodeReuse/ActiveRecord
def readable_discussion_notes(noteable, discussion_ids)
notes = noteable.notes
.where(discussion_id: discussion_ids)
.with_discussion_ids(discussion_ids)
.inc_relations_for_view
.includes(:noteable)
.fresh

View File

@ -681,20 +681,27 @@ module API
def send_git_blob(repository, blob)
env['api.format'] = :txt
content_type 'text/plain'
header['Content-Disposition'] = ActionDispatch::Http::ContentDisposition.format(disposition: 'inline', filename: blob.name)
# Let Workhorse examine the content and determine the better content disposition
header[Gitlab::Workhorse::DETECT_HEADER] = "true"
header(*Gitlab::Workhorse.send_git_blob(repository, blob))
body ''
end
def send_git_archive(repository, **kwargs)
header(*Gitlab::Workhorse.send_git_archive(repository, **kwargs))
body ''
end
def send_artifacts_entry(file, entry)
header(*Gitlab::Workhorse.send_artifacts_entry(file, entry))
body ''
end
# The Grape Error Middleware only has access to `env` but not `params` nor

View File

@ -11,6 +11,8 @@ module API
def send_git_snapshot(repository)
header(*Gitlab::Workhorse.send_git_snapshot(repository))
body ''
end
def snapshot_project

View File

@ -36,38 +36,42 @@ module Gitlab
end
def steal(steal_class, retry_dead_jobs: false)
queues = [
Sidekiq::ScheduledSet.new,
Sidekiq::Queue.new(self.queue)
]
with_shared_connection do
queues = [
Sidekiq::ScheduledSet.new,
Sidekiq::Queue.new(self.queue)
]
if retry_dead_jobs
queues << Sidekiq::RetrySet.new
queues << Sidekiq::DeadSet.new
end
if retry_dead_jobs
queues << Sidekiq::RetrySet.new
queues << Sidekiq::DeadSet.new
end
queues.each do |queue|
queue.each do |job|
migration_class, migration_args = job.args
queues.each do |queue|
queue.each do |job|
migration_class, migration_args = job.args
next unless job.klass == worker_class.name
next unless migration_class == steal_class
next if block_given? && !(yield job)
next unless job.klass == worker_class.name
next unless migration_class == steal_class
next if block_given? && !(yield job)
begin
perform(migration_class, migration_args) if job.delete
rescue Exception # rubocop:disable Lint/RescueException
worker_class # enqueue this migration again
.perform_async(migration_class, migration_args)
begin
perform(migration_class, migration_args) if job.delete
rescue Exception # rubocop:disable Lint/RescueException
worker_class # enqueue this migration again
.perform_async(migration_class, migration_args)
raise
raise
end
end
end
end
end
def perform(class_name, arguments)
migration_class_for(class_name).new.perform(*arguments)
with_shared_connection do
migration_class_for(class_name).new.perform(*arguments)
end
end
def remaining

View File

@ -1,47 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Security
class ScanConfiguration
include ::Gitlab::Utils::StrongMemoize
include Gitlab::Routing.url_helpers
attr_reader :type
def initialize(project:, type:, configured: false)
@project = project
@type = type
@configured = configured
end
def available?
# SAST and Secret Detection are always available, but this isn't
# reflected by our license model yet.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/333113
%i[sast secret_detection].include?(type)
end
def configured?
configured
end
def configuration_path
configurable_scans[type]
end
private
attr_reader :project, :configured
def configurable_scans
strong_memoize(:configurable_scans) do
{
sast: project_security_configuration_sast_path(project)
}
end
end
end
end
end
Gitlab::Security::ScanConfiguration.prepend_mod_with('Gitlab::Security::ScanConfiguration')

View File

@ -18859,7 +18859,7 @@ msgstr ""
msgid "Invalid period"
msgstr ""
msgid "Invalid pin code"
msgid "Invalid pin code."
msgstr ""
msgid "Invalid pod_name"
@ -36651,6 +36651,9 @@ msgstr ""
msgid "Try out GitLab Pipelines"
msgstr ""
msgid "Try the troubleshooting steps here."
msgstr ""
msgid "Try to fork again"
msgstr ""

View File

@ -4,7 +4,6 @@ module QA
RSpec.describe 'Manage', :github, :requires_admin do
describe 'Project import' do
let(:github_repo) { 'gitlab-qa-github/test-project' }
let(:imported_project_name) { 'imported-project' }
let(:api_client) { Runtime::API::Client.as_admin }
let(:group) { Resource::Group.fabricate_via_api! { |resource| resource.api_client = api_client } }
let(:user) do
@ -17,11 +16,10 @@ module QA
let(:imported_project) do
Resource::ProjectImportedFromGithub.init do |project|
project.import = true
project.add_name_uuid = false
project.name = imported_project_name
project.group = group
project.github_personal_access_token = Runtime::Env.github_access_token
project.github_repository_path = github_repo
project.api_client = api_client
end
end
@ -43,7 +41,7 @@ module QA
it 'imports a GitHub repo', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1607' do
Page::Project::Import::Github.perform do |import_page|
import_page.add_personal_access_token(Runtime::Env.github_access_token)
import_page.import!(github_repo, group.full_path, imported_project_name)
import_page.import!(github_repo, group.full_path, imported_project.name)
aggregate_failures do
expect(import_page).to have_imported_project(github_repo)
@ -56,7 +54,7 @@ module QA
imported_project.reload!.visit!
Page::Project::Show.perform do |project|
aggregate_failures do
expect(project).to have_content(imported_project_name)
expect(project).to have_content(imported_project.name)
expect(project).to have_content('This test project is used for automated GitHub import by GitLab QA.')
end
end

View File

@ -175,7 +175,7 @@ RSpec.describe Profiles::TwoFactorAuthsController do
it 'assigns error' do
go
expect(assigns[:error]).to eq _('Invalid pin code')
expect(assigns[:error]).to eq({ message: 'Invalid pin code.' })
end
it 'assigns qr_code' do

View File

@ -45,6 +45,19 @@ RSpec.describe 'Two factor auths' do
expect(page).to have_content('Status: Enabled')
end
end
context 'when invalid pin is provided' do
let_it_be(:user) { create(:omniauth_user) }
it 'renders an error alert with a link to the troubleshooting section' do
visit profile_two_factor_auth_path
fill_in 'pin_code', with: '123'
click_button 'Register with two-factor app'
expect(page).to have_link('Try the troubleshooting steps here.', href: help_page_path('user/profile/account/two_factor_authentication.md', anchor: 'troubleshooting'))
end
end
end
context 'when user has two-factor authentication enabled' do

View File

@ -11,11 +11,11 @@ describe('IssueBoardFilter', () => {
const findBoardsFilteredSearch = () => wrapper.findComponent(BoardFilteredSearch);
const createComponent = ({ epicFeatureAvailable = false } = {}) => {
const createComponent = ({ isSignedIn = false } = {}) => {
wrapper = shallowMount(IssueBoardFilteredSpec, {
propsData: { fullPath: 'gitlab-org', boardType: 'group' },
provide: {
epicFeatureAvailable,
isSignedIn,
},
});
};
@ -45,10 +45,24 @@ describe('IssueBoardFilter', () => {
expect(findBoardsFilteredSearch().exists()).toBe(true);
});
it('passes the correct tokens to BoardFilteredSearch', () => {
const tokens = mockTokens(fetchLabelsSpy, fetchAuthorsSpy, wrapper.vm.fetchMilestones);
it.each`
isSignedIn
${true}
${false}
`(
'passes the correct tokens to BoardFilteredSearch when isSignedIn is $isSignedIn',
({ isSignedIn }) => {
createComponent({ isSignedIn });
expect(findBoardsFilteredSearch().props('tokens')).toEqual(tokens);
});
const tokens = mockTokens(
fetchLabelsSpy,
fetchAuthorsSpy,
wrapper.vm.fetchMilestones,
isSignedIn,
);
expect(findBoardsFilteredSearch().props('tokens')).toEqual(tokens);
},
);
});
});

View File

@ -4,6 +4,7 @@ import { ListType } from '~/boards/constants';
import { __ } from '~/locale';
import { DEFAULT_MILESTONES_GRAPHQL } from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
@ -549,7 +550,16 @@ export const mockMoveData = {
...mockMoveIssueParams,
};
export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones) => [
export const mockEmojiToken = {
type: 'my_reaction_emoji',
icon: 'thumb-up',
title: 'My-Reaction',
unique: true,
token: EmojiToken,
fetchEmojis: expect.any(Function),
};
export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) => [
{
icon: 'user',
title: __('Assignee'),
@ -590,6 +600,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones) => [
symbol: '~',
fetchLabels,
},
...(hasEmoji ? [mockEmojiToken] : []),
{
icon: 'clock',
title: __('Milestone'),

View File

@ -53,7 +53,7 @@ describe('DiscussionCounter component', () => {
describe('has no resolvable discussions', () => {
it('does not render', () => {
store.commit(types.SET_INITIAL_DISCUSSIONS, [{ ...discussionMock, resolvable: false }]);
store.commit(types.ADD_OR_UPDATE_DISCUSSIONS, [{ ...discussionMock, resolvable: false }]);
store.dispatch('updateResolvableDiscussionsCounts');
wrapper = shallowMount(DiscussionCounter, { store, localVue });
@ -64,7 +64,7 @@ describe('DiscussionCounter component', () => {
describe('has resolvable discussions', () => {
const updateStore = (note = {}) => {
discussionMock.notes[0] = { ...discussionMock.notes[0], ...note };
store.commit(types.SET_INITIAL_DISCUSSIONS, [discussionMock]);
store.commit(types.ADD_OR_UPDATE_DISCUSSIONS, [discussionMock]);
store.dispatch('updateResolvableDiscussionsCounts');
};
@ -97,7 +97,7 @@ describe('DiscussionCounter component', () => {
let toggleAllButton;
const updateStoreWithExpanded = (expanded) => {
const discussion = { ...discussionMock, expanded };
store.commit(types.SET_INITIAL_DISCUSSIONS, [discussion]);
store.commit(types.ADD_OR_UPDATE_DISCUSSIONS, [discussion]);
store.dispatch('updateResolvableDiscussionsCounts');
wrapper = shallowMount(DiscussionCounter, { store, localVue });
toggleAllButton = wrapper.find('.toggle-all-discussions-btn');

View File

@ -119,7 +119,7 @@ describe('Actions Notes Store', () => {
actions.setInitialNotes,
[individualNote],
{ notes: [] },
[{ type: 'SET_INITIAL_DISCUSSIONS', payload: [individualNote] }],
[{ type: 'ADD_OR_UPDATE_DISCUSSIONS', payload: [individualNote] }],
[],
done,
);
@ -1395,4 +1395,93 @@ describe('Actions Notes Store', () => {
);
});
});
describe('fetchDiscussions', () => {
const discussion = { notes: [] };
afterEach(() => {
window.gon = {};
});
it('updates the discussions and dispatches `updateResolvableDiscussionsCounts`', (done) => {
axiosMock.onAny().reply(200, { discussion });
testAction(
actions.fetchDiscussions,
{},
null,
[
{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } },
{ type: mutationTypes.SET_FETCHING_DISCUSSIONS, payload: false },
],
[{ type: 'updateResolvableDiscussionsCounts' }],
done,
);
});
it('dispatches `fetchDiscussionsBatch` action if `paginatedIssueDiscussions` feature flag is enabled', (done) => {
window.gon = { features: { paginatedIssueDiscussions: true } };
testAction(
actions.fetchDiscussions,
{ path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
null,
[],
[
{
type: 'fetchDiscussionsBatch',
payload: {
config: {
params: { notes_filter: 'test-filter', persist_filter: 'test-persist-filter' },
},
path: 'test-path',
perPage: 20,
},
},
],
done,
);
});
});
describe('fetchDiscussionsBatch', () => {
const discussion = { notes: [] };
const config = {
params: { notes_filter: 'test-filter', persist_filter: 'test-persist-filter' },
};
const actionPayload = { config, path: 'test-path', perPage: 20 };
it('updates the discussions and dispatches `updateResolvableDiscussionsCounts` if there are no headers', (done) => {
axiosMock.onAny().reply(200, { discussion }, {});
testAction(
actions.fetchDiscussionsBatch,
actionPayload,
null,
[
{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } },
{ type: mutationTypes.SET_FETCHING_DISCUSSIONS, payload: false },
],
[{ type: 'updateResolvableDiscussionsCounts' }],
done,
);
});
it('dispatches itself if there is `x-next-page-cursor` header', (done) => {
axiosMock.onAny().reply(200, { discussion }, { 'x-next-page-cursor': 1 });
testAction(
actions.fetchDiscussionsBatch,
actionPayload,
null,
[{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } }],
[
{
type: 'fetchDiscussionsBatch',
payload: { ...actionPayload, perPage: 30, cursor: 1 },
},
],
done,
);
});
});
});

View File

@ -159,7 +159,7 @@ describe('Notes Store mutations', () => {
});
});
describe('SET_INITIAL_DISCUSSIONS', () => {
describe('ADD_OR_UPDATE_DISCUSSIONS', () => {
it('should set the initial notes received', () => {
const state = {
discussions: [],
@ -169,15 +169,17 @@ describe('Notes Store mutations', () => {
individual_note: true,
notes: [
{
id: 100,
note: '1',
},
{
id: 101,
note: '2',
},
],
};
mutations.SET_INITIAL_DISCUSSIONS(state, [note, legacyNote]);
mutations.ADD_OR_UPDATE_DISCUSSIONS(state, [note, legacyNote]);
expect(state.discussions[0].id).toEqual(note.id);
expect(state.discussions[1].notes[0].note).toBe(legacyNote.notes[0].note);
@ -190,7 +192,7 @@ describe('Notes Store mutations', () => {
discussions: [],
};
mutations.SET_INITIAL_DISCUSSIONS(state, [
mutations.ADD_OR_UPDATE_DISCUSSIONS(state, [
{
...note,
diff_file: {
@ -208,7 +210,7 @@ describe('Notes Store mutations', () => {
discussions: [],
};
mutations.SET_INITIAL_DISCUSSIONS(state, [
mutations.ADD_OR_UPDATE_DISCUSSIONS(state, [
{
...note,
diff_file: {

View File

@ -351,12 +351,14 @@ RSpec.describe API::Helpers do
let(:send_git_blob) do
subject.send(:send_git_blob, repository, blob)
subject.header
end
before do
allow(subject).to receive(:env).and_return({})
allow(subject).to receive(:content_type)
allow(subject).to receive(:header).and_return({})
allow(subject).to receive(:body).and_return('')
allow(Gitlab::Workhorse).to receive(:send_git_blob)
end

View File

@ -73,6 +73,25 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
coordinator.steal('Foo')
end
it 'sets up the shared connection while stealing jobs' do
connection = double('connection')
allow(coordinator).to receive(:connection).and_return(connection)
expect(coordinator).to receive(:with_shared_connection).and_call_original
expect(queue[0]).to receive(:delete).and_return(true)
expect(coordinator).to receive(:perform).with('Foo', [10, 20]) do
expect(Gitlab::Database::SharedModel.connection).to be(connection)
end
coordinator.steal('Foo') do
expect(Gitlab::Database::SharedModel.connection).to be(connection)
true # the job is only performed if the block returns true
end
end
it 'does not steal job that has already been taken' do
expect(queue[0]).to receive(:delete).and_return(false)
@ -194,13 +213,20 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
describe '#perform' do
let(:migration) { spy(:migration) }
let(:connection) { double('connection') }
before do
stub_const('Gitlab::BackgroundMigration::Foo', migration)
allow(coordinator).to receive(:connection).and_return(connection)
end
it 'performs a background migration' do
expect(migration).to receive(:perform).with(10, 20).once
it 'performs a background migration with the configured shared connection' do
expect(coordinator).to receive(:with_shared_connection).and_call_original
expect(migration).to receive(:perform).with(10, 20).once do
expect(Gitlab::Database::SharedModel.connection).to be(connection)
end
coordinator.perform('Foo', [10, 20])
end

View File

@ -1,64 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::Security::ScanConfiguration do
let_it_be(:project) { create(:project, :repository) }
let(:scan) { described_class.new(project: project, type: type, configured: configured) }
describe '#available?' do
subject { scan.available? }
let(:configured) { true }
context 'with a core scanner' do
let(:type) { :sast }
it { is_expected.to be_truthy }
end
context 'with custom scanner' do
let(:type) { :my_scanner }
it { is_expected.to be_falsey }
end
end
describe '#configured?' do
subject { scan.configured? }
let(:type) { :sast }
let(:configured) { false }
it { is_expected.to be_falsey }
end
describe '#configuration_path' do
subject { scan.configuration_path }
let(:configured) { true }
context 'with a non-configurable scanner' do
let(:type) { :secret_detection }
it { is_expected.to be_nil }
end
context 'with licensed scanner for FOSS environment' do
let(:type) { :dast }
before do
stub_env('FOSS_ONLY', '1')
end
it { is_expected.to be_nil }
end
context 'with custom scanner' do
let(:type) { :my_scanner }
it { is_expected.to be_nil }
end
end
end

View File

@ -77,6 +77,70 @@ RSpec.describe Noteable do
end
end
describe '#discussion_root_note_ids' do
let!(:label_event) { create(:resource_label_event, merge_request: subject) }
let!(:system_note) { create(:system_note, project: project, noteable: subject) }
let!(:milestone_event) { create(:resource_milestone_event, merge_request: subject) }
let!(:state_event) { create(:resource_state_event, merge_request: subject) }
it 'returns ordered discussion_ids and synthetic note ids' do
discussions = subject.discussion_root_note_ids(notes_filter: UserPreference::NOTES_FILTERS[:all_notes]).map do |n|
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
expect(discussions).to match([
a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id),
a_hash_including(table_name: 'resource_label_events', id: label_event.id),
a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
a_hash_including(table_name: 'resource_state_events', id: state_event.id)
])
end
it 'filters by comments only' do
discussions = subject.discussion_root_note_ids(notes_filter: UserPreference::NOTES_FILTERS[:only_comments]).map do |n|
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
expect(discussions).to match([
a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id)
])
end
it 'filters by system notes only' do
discussions = subject.discussion_root_note_ids(notes_filter: UserPreference::NOTES_FILTERS[:only_activity]).map do |n|
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
expect(discussions).to match([
a_hash_including(table_name: 'resource_label_events', id: label_event.id),
a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
a_hash_including(table_name: 'resource_state_events', id: state_event.id)
])
end
end
describe '#grouped_diff_discussions' do
let(:grouped_diff_discussions) { subject.grouped_diff_discussions }

View File

@ -1,301 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Security::ConfigurationPresenter do
include Gitlab::Routing.url_helpers
using RSpec::Parameterized::TableSyntax
let(:project_with_repo) { create(:project, :repository) }
let(:project_with_no_repo) { create(:project) }
let(:current_user) { create(:user) }
let(:presenter) { described_class.new(project, current_user: current_user) }
before do
stub_licensed_features(licensed_scan_types.to_h { |type| [type, true] })
stub_feature_flags(corpus_management: false)
end
describe '#to_html_data_attribute' do
subject(:html_data) { presenter.to_html_data_attribute }
context 'when latest default branch pipeline`s source is not auto devops' do
let(:project) { project_with_repo }
let(:pipeline) do
create(
:ci_pipeline,
project: project,
ref: project.default_branch,
sha: project.commit.sha
)
end
let!(:build_sast) { create(:ci_build, :sast, pipeline: pipeline) }
let!(:build_dast) { create(:ci_build, :dast, pipeline: pipeline) }
let!(:build_license_scanning) { create(:ci_build, :license_scanning, pipeline: pipeline) }
it 'includes links to auto devops and secure product docs' do
expect(html_data[:auto_devops_help_page_path]).to eq(help_page_path('topics/autodevops/index'))
expect(html_data[:help_page_path]).to eq(help_page_path('user/application_security/index'))
end
it 'returns info that Auto DevOps is not enabled' do
expect(html_data[:auto_devops_enabled]).to eq(false)
expect(html_data[:auto_devops_path]).to eq(project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
end
it 'includes a link to the latest pipeline' do
expect(html_data[:latest_pipeline_path]).to eq(project_pipeline_path(project, pipeline))
end
it 'has stubs for autofix' do
expect(html_data.keys).to include(:can_toggle_auto_fix_settings, :auto_fix_enabled, :auto_fix_user_path)
end
context "while retrieving information about user's ability to enable auto_devops" do
where(:is_admin, :archived, :feature_available, :result) do
true | true | true | false
false | true | true | false
true | false | true | true
false | false | true | false
true | true | false | false
false | true | false | false
true | false | false | false
false | false | false | false
end
with_them do
before do
allow_next_instance_of(described_class) do |presenter|
allow(presenter).to receive(:can?).and_return(is_admin)
allow(presenter).to receive(:archived?).and_return(archived)
allow(presenter).to receive(:feature_available?).and_return(feature_available)
end
end
it 'includes can_enable_auto_devops' do
expect(html_data[:can_enable_auto_devops]).to eq(result)
end
end
end
it 'includes feature information' do
feature = Gitlab::Json.parse(html_data[:features]).find { |scan| scan['type'] == 'sast' }
expect(feature['type']).to eq('sast')
expect(feature['configured']).to eq(true)
expect(feature['configuration_path']).to eq(project_security_configuration_sast_path(project))
expect(feature['available']).to eq(true)
end
context 'when checking features configured status' do
let(:features) { Gitlab::Json.parse(html_data[:features]) }
where(:type, :configured) do
:dast | true
:dast_profiles | true
:sast | true
:sast_iac | false
:container_scanning | false
:cluster_image_scanning | false
:dependency_scanning | false
:license_scanning | true
:secret_detection | false
:coverage_fuzzing | false
:api_fuzzing | false
:corpus_management | true
end
with_them do
it 'returns proper configuration status' do
feature = features.find { |scan| scan['type'] == type.to_s }
expect(feature['configured']).to eq(configured)
end
end
end
context 'when the job has more than one report' do
let(:features) { Gitlab::Json.parse(html_data[:features]) }
let!(:artifacts) do
{ artifacts: { reports: { other_job: ['gl-other-report.json'], sast: ['gl-sast-report.json'] } } }
end
let!(:complicated_job) { build_stubbed(:ci_build, options: artifacts) }
before do
allow_next_instance_of(::Security::SecurityJobsFinder) do |finder|
allow(finder).to receive(:execute).and_return([complicated_job])
end
end
where(:type, :configured) do
:dast | false
:dast_profiles | true
:sast | true
:sast_iac | false
:container_scanning | false
:cluster_image_scanning | false
:dependency_scanning | false
:license_scanning | true
:secret_detection | false
:coverage_fuzzing | false
:api_fuzzing | false
:corpus_management | true
end
with_them do
it 'properly detects security jobs' do
feature = features.find { |scan| scan['type'] == type.to_s }
expect(feature['configured']).to eq(configured)
end
end
end
it 'includes a link to the latest pipeline' do
expect(subject[:latest_pipeline_path]).to eq(project_pipeline_path(project, pipeline))
end
context "while retrieving information about gitlab ci file" do
context 'when a .gitlab-ci.yml file exists' do
let!(:ci_config) do
project.repository.create_file(
project.creator,
Gitlab::FileDetector::PATTERNS[:gitlab_ci],
'contents go here',
message: 'test',
branch_name: 'master')
end
it 'expects gitlab_ci_present to be true' do
expect(html_data[:gitlab_ci_present]).to eq(true)
end
end
context 'when a .gitlab-ci.yml file does not exist' do
it 'expects gitlab_ci_present to be false if the file is not present' do
expect(html_data[:gitlab_ci_present]).to eq(false)
end
end
end
it 'includes the path to gitlab_ci history' do
expect(subject[:gitlab_ci_history_path]).to eq(project_blame_path(project, 'master/.gitlab-ci.yml'))
end
end
context 'when the project is empty' do
let(:project) { project_with_no_repo }
it 'includes a blank gitlab_ci history path' do
expect(html_data[:gitlab_ci_history_path]).to eq('')
end
end
context 'when the project has no default branch set' do
let(:project) { project_with_repo }
it 'includes the path to gitlab_ci history' do
allow(project).to receive(:default_branch).and_return(nil)
expect(html_data[:gitlab_ci_history_path]).to eq(project_blame_path(project, 'master/.gitlab-ci.yml'))
end
end
context "when the latest default branch pipeline's source is auto devops" do
let(:project) { project_with_repo }
let(:pipeline) do
create(
:ci_pipeline,
:auto_devops_source,
project: project,
ref: project.default_branch,
sha: project.commit.sha
)
end
let!(:build_sast) { create(:ci_build, :sast, pipeline: pipeline, status: 'success') }
let!(:build_dast) { create(:ci_build, :dast, pipeline: pipeline, status: 'success') }
let!(:ci_build) { create(:ci_build, :secret_detection, pipeline: pipeline, status: 'pending') }
it 'reports that auto devops is enabled' do
expect(html_data[:auto_devops_enabled]).to be_truthy
end
context 'when gathering feature data' do
let(:features) { Gitlab::Json.parse(html_data[:features]) }
where(:type, :configured) do
:dast | true
:dast_profiles | true
:sast | true
:sast_iac | false
:container_scanning | false
:cluster_image_scanning | false
:dependency_scanning | false
:license_scanning | false
:secret_detection | true
:coverage_fuzzing | false
:api_fuzzing | false
:corpus_management | true
end
with_them do
it 'reports that all scanners are configured for which latest pipeline has builds' do
feature = features.find { |scan| scan['type'] == type.to_s }
expect(feature['configured']).to eq(configured)
end
end
end
end
context 'when the project has no default branch pipeline' do
let(:project) { project_with_repo }
it 'reports that auto devops is disabled' do
expect(html_data[:auto_devops_enabled]).to be_falsy
end
it 'includes a link to CI pipeline docs' do
expect(html_data[:latest_pipeline_path]).to eq(help_page_path('ci/pipelines'))
end
context 'when gathering feature data' do
let(:features) { Gitlab::Json.parse(html_data[:features]) }
where(:type, :configured) do
:dast | false
:dast_profiles | true
:sast | false
:sast_iac | false
:container_scanning | false
:cluster_image_scanning | false
:dependency_scanning | false
:license_scanning | false
:secret_detection | false
:coverage_fuzzing | false
:api_fuzzing | false
:corpus_management | true
end
with_them do
it 'reports all security jobs as unconfigured with the exception of "fake" jobs' do
feature = features.find { |scan| scan['type'] == type.to_s }
expect(feature['configured']).to eq(configured)
end
end
end
end
def licensed_scan_types
::Security::SecurityJobsFinder.allowed_job_types + ::Security::LicenseComplianceJobsFinder.allowed_job_types - [:cluster_image_scanning]
end
end
end

View File

@ -578,6 +578,7 @@ RSpec.describe API::Ci::Jobs do
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
expect(response.parsed_body).to be_empty
end
context 'when artifacts are locked' do
@ -948,6 +949,7 @@ RSpec.describe API::Ci::Jobs do
expect(response.headers.to_h)
.to include('Content-Type' => 'application/json',
'Gitlab-Workhorse-Send-Data' => /artifacts-entry/)
expect(response.parsed_body).to be_empty
end
end

View File

@ -47,6 +47,15 @@ RSpec.describe API::Files do
"/projects/#{project.id}/repository/files/#{file_path}"
end
def expect_to_send_git_blob(url, params)
expect(Gitlab::Workhorse).to receive(:send_git_blob)
get url, params: params
expect(response).to have_gitlab_http_status(:ok)
expect(response.parsed_body).to be_empty
end
context 'http headers' do
it 'converts value into string' do
helper.set_http_headers(test: 1)
@ -257,11 +266,7 @@ RSpec.describe API::Files do
it 'returns raw file info' do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
get api(url, api_user, **options), params: params
expect(response).to have_gitlab_http_status(:ok)
expect_to_send_git_blob(api(url, api_user, **options), params)
expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
@ -523,11 +528,8 @@ RSpec.describe API::Files do
it 'returns raw file info' do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
get api(url, current_user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect_to_send_git_blob(api(url, current_user), params)
end
context 'when ref is not provided' do
@ -537,39 +539,29 @@ RSpec.describe API::Files do
it 'returns response :ok', :aggregate_failures do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
get api(url, current_user), params: {}
expect(response).to have_gitlab_http_status(:ok)
expect_to_send_git_blob(api(url, current_user), {})
end
end
it 'returns raw file info for files with dots' do
url = route('.gitignore') + "/raw"
-expect(Gitlab::Workhorse).to receive(:send_git_blob)
-get api(url, current_user), params: params
-expect(response).to have_gitlab_http_status(:ok)
+expect_to_send_git_blob(api(url, current_user), params)
end
it 'returns file by commit sha' do
# This file is deleted on HEAD
file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee"
params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
-expect(Gitlab::Workhorse).to receive(:send_git_blob)
-get api(route(file_path) + "/raw", current_user), params: params
-expect(response).to have_gitlab_http_status(:ok)
+expect_to_send_git_blob(api(route(file_path) + "/raw", current_user), params)
end
it 'sets no-cache headers' do
url = route('.gitignore') + "/raw"
-expect(Gitlab::Workhorse).to receive(:send_git_blob)
-get api(url, current_user), params: params
+expect_to_send_git_blob(api(url, current_user), params)
expect(response.headers["Cache-Control"]).to eq("max-age=0, private, must-revalidate, no-store, no-cache")
expect(response.headers["Pragma"]).to eq("no-cache")
@ -633,11 +625,9 @@ RSpec.describe API::Files do
# This file is deleted on HEAD
file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee"
params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
-expect(Gitlab::Workhorse).to receive(:send_git_blob)
+url = api(route(file_path) + "/raw", personal_access_token: token)
-get api(route(file_path) + "/raw", personal_access_token: token), params: params
-expect(response).to have_gitlab_http_status(:ok)
+expect_to_send_git_blob(url, params)
end
end
end

View File

@ -29,6 +29,7 @@ RSpec.describe API::ProjectSnapshots do
repository: repository.gitaly_repository
).to_json
)
expect(response.parsed_body).to be_empty
end
it 'returns authentication error as project owner' do

View File

@ -400,6 +400,7 @@ RSpec.describe API::ProjectSnippets do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq 'text/plain'
expect(response.parsed_body).to be_empty
end
it 'returns 404 for invalid snippet id' do

View File

@ -197,6 +197,7 @@ RSpec.describe API::Repositories do
expect(response).to have_gitlab_http_status(:ok)
expect(headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
expect(response.parsed_body).to be_empty
end
it 'sets inline content disposition by default' do
@ -274,6 +275,7 @@ RSpec.describe API::Repositories do
expect(type).to eq('git-archive')
expect(params['ArchivePath']).to match(/#{project.path}\-[^\.]+\.tar\.gz/)
expect(response.parsed_body).to be_empty
end
it 'returns the repository archive archive.zip' do

View File

@ -113,6 +113,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq 'text/plain'
expect(headers['Content-Disposition']).to match(/^inline/)
expect(response.parsed_body).to be_empty
end
it 'returns 404 for invalid snippet id' do

View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
require 'spec_helper'
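# Exercises the discussions endpoint through the full stack (note the use
# of `login_as` and path helpers rather than controller-spec shortcuts).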
RSpec.describe Projects::IssuesController do
let_it_be(:issue) { create(:issue) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { issue.project }
let_it_be(:user) { issue.author }
before do
login_as(user)
end
describe 'GET #discussions' do
let_it_be(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
let_it_be(:discussion_reply) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, in_reply_to: discussion) }
let_it_be(:state_event) { create(:resource_state_event, issue: issue) }
let_it_be(:discussion_2) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
let_it_be(:discussion_3) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
context 'pagination' do
def get_discussions(**params)
get discussions_project_issue_path(project, issue, params: params.merge(format: :json))
end
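# The first request fetches a page of two discussions and exposes a cursor
# via the X-Next-Page-Cursor header; sending that cursor back resumes the
# listing at the next discussion.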
it 'returns paginated notes and cursor based on per_page param' do
get_discussions(per_page: 2)
discussions = Gitlab::Json.parse(response.body)
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(2)
expect(notes).to match([
a_hash_including('id' => discussion.id.to_s),
a_hash_including('id' => discussion_reply.id.to_s),
a_hash_including('type' => 'StateNote')
])
cursor = response.header['X-Next-Page-Cursor']
expect(cursor).to be_present
get_discussions(per_page: 1, cursor: cursor)
discussions = Gitlab::Json.parse(response.body)
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(1)
expect(notes).to match([
a_hash_including('id' => discussion_2.id.to_s)
])
end
context 'when paginated_issue_discussions is disabled' do
before do
stub_feature_flags(paginated_issue_discussions: false)
end
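# With the flag disabled, all four discussions (the first discussion with
# its reply, the synthetic state note, discussion_2 and discussion_3) come
# back in a single unpaginated response, five notes in total.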
it 'returns all discussions and ignores per_page param' do
get_discussions(per_page: 2)
discussions = Gitlab::Json.parse(response.body)
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(4)
expect(notes.count).to eq(5)
end
end
end
end
end

View File

@ -4,18 +4,20 @@ require 'spec_helper'
RSpec.describe ResourceEvents::SyntheticLabelNotesBuilderService do
describe '#execute' do
-let!(:user) { create(:user) }
+let_it_be(:user) { create(:user) }
-let!(:issue) { create(:issue, author: user) }
+let_it_be(:issue) { create(:issue, author: user) }
-let!(:event1) { create(:resource_label_event, issue: issue) }
-let!(:event2) { create(:resource_label_event, issue: issue) }
-let!(:event3) { create(:resource_label_event, issue: issue) }
+let_it_be(:event1) { create(:resource_label_event, issue: issue) }
+let_it_be(:event2) { create(:resource_label_event, issue: issue) }
+let_it_be(:event3) { create(:resource_label_event, issue: issue) }
it 'returns the expected synthetic notes' do
notes = ResourceEvents::SyntheticLabelNotesBuilderService.new(issue, user).execute
expect(notes.size).to eq(3)
end
it_behaves_like 'filters by paginated notes', :resource_label_event
end
end

View File

@ -24,5 +24,7 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService do
'removed milestone'
])
end
it_behaves_like 'filters by paginated notes', :resource_milestone_event
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ResourceEvents::SyntheticStateNotesBuilderService do
describe '#execute' do
let_it_be(:user) { create(:user) }
it_behaves_like 'filters by paginated notes', :resource_state_event
end
end

View File

@ -8,14 +8,6 @@
- "./ee/spec/models/group_member_spec.rb"
- "./ee/spec/replicators/geo/pipeline_artifact_replicator_spec.rb"
- "./ee/spec/replicators/geo/terraform_state_version_replicator_spec.rb"
- "./ee/spec/requests/api/graphql/mutations/dast/profiles/create_spec.rb"
- "./ee/spec/requests/api/graphql/mutations/dast/profiles/run_spec.rb"
- "./ee/spec/requests/api/graphql/mutations/dast/profiles/update_spec.rb"
- "./ee/spec/requests/api/graphql/mutations/dast_on_demand_scans/create_spec.rb"
- "./ee/spec/services/app_sec/dast/profiles/create_service_spec.rb"
- "./ee/spec/services/app_sec/dast/profiles/update_service_spec.rb"
- "./ee/spec/services/app_sec/dast/scans/create_service_spec.rb"
- "./ee/spec/services/app_sec/dast/scans/run_service_spec.rb"
- "./ee/spec/services/ci/destroy_pipeline_service_spec.rb"
- "./ee/spec/services/ci/retry_build_service_spec.rb"
- "./ee/spec/services/ci/subscribe_bridge_service_spec.rb"
@ -25,7 +17,6 @@
- "./ee/spec/services/ee/users/destroy_service_spec.rb"
- "./ee/spec/services/projects/transfer_service_spec.rb"
- "./ee/spec/services/security/security_orchestration_policies/rule_schedule_service_spec.rb"
- "./ee/spec/services/vulnerability_feedback/create_service_spec.rb"
- "./spec/controllers/abuse_reports_controller_spec.rb"
- "./spec/controllers/admin/spam_logs_controller_spec.rb"
- "./spec/controllers/admin/users_controller_spec.rb"

View File

@ -86,6 +86,7 @@ RSpec.shared_examples 'snippet blob content' do
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
expect(response.parsed_body).to be_empty
end
context 'when snippet repository is empty' do

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
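# Shared behaviour for the synthetic note builder services: only events
# present in the supplied paginated_notes hash become synthetic notes, and
# an empty hash yields none.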
RSpec.shared_examples 'filters by paginated notes' do |event_type|
let(:event) { create(event_type) } # rubocop:disable Rails/SaveBang
before do
create(event_type, issue: event.issue)
end
it 'only returns given notes' do
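# Stub of a paginated collection: keys are pluralized event type names and
# values need only respond to #id.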
paginated_notes = { event_type.to_s.pluralize => [double(id: event.id)] }
notes = described_class.new(event.issue, user, paginated_notes: paginated_notes).execute
expect(notes.size).to eq(1)
expect(notes.first.event).to eq(event)
end
context 'when paginated notes is empty' do
it 'does not return any notes' do
notes = described_class.new(event.issue, user, paginated_notes: {}).execute
expect(notes.size).to eq(0)
end
end
end