Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-13 21:10:36 +00:00
parent b1928c08f1
commit 9f4c898b9d
40 changed files with 453 additions and 62 deletions

View File

@ -49,6 +49,7 @@ workflow:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $SCHEDULE_TYPE == "maintenance"'
variables:
CRYSTALBALL: "true"
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
# Run pipelines for ruby3 branch
- if: '$CI_COMMIT_BRANCH == "ruby3"'
variables:
@ -63,6 +64,8 @@ workflow:
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
# For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
variables:
NOTIFY_PIPELINE_FAILURE_CHANNEL: "master-broken"
# For tags, create a pipeline.
- if: '$CI_COMMIT_TAG'
# If `$GITLAB_INTERNAL` isn't set, don't create a pipeline.

View File

@ -1,12 +1,12 @@
.notify-slack:
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine/curl
stage: notify
dependencies: []
cache: {}
variables:
MERGE_REQUEST_URL: ${CI_MERGE_REQUEST_PROJECT_URL}/-/merge_requests/${CI_MERGE_REQUEST_IID}
before_script:
- apk update && apk add git curl bash
- apk update && apk add git bash
- echo "NOTIFY_CHANNEL is ${NOTIFY_CHANNEL}"
- echo "CI_PIPELINE_URL is ${CI_PIPELINE_URL}"
@ -34,13 +34,28 @@ notify-security-pipeline:
- scripts/slack ${NOTIFY_CHANNEL} "<!subteam^S0127FU8PDE> ☠️ Pipeline for merged result failed! ☠️ See ${CI_PIPELINE_URL} (triggered from ${MERGE_REQUEST_URL})" ci_failing "GitLab Release Tools Bot"
notify-pipeline-failure:
extends:
- .notify-slack
extends: .notify-slack
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
rules:
- if: '$NOTIFY_PIPELINE_FAILURE_CHANNEL'
# Don't report child pipeline failures
- if: '$CI_PIPELINE_SOURCE == "parent_pipeline"'
when: never
- if: '$CI_SLACK_WEBHOOK_URL && $NOTIFY_PIPELINE_FAILURE_CHANNEL'
when: on_failure
allow_failure: true
variables:
NOTIFY_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
FAILED_PIPELINE_REPORT_FILE: "failed_pipeline_report.json"
before_script:
- source scripts/utils.sh
- apt-get update && apt-get install -y jq
- install_gitlab_gem
script:
- scripts/slack ${NOTIFY_CHANNEL} "❌ \`${CI_COMMIT_REF_NAME}\` pipeline failed! See ${CI_PIPELINE_URL}" ci_failing "notify-pipeline-failure"
- scripts/generate-failed-pipeline-slack-message.rb
- |
curl -X POST -H 'Content-Type: application/json' --data @${FAILED_PIPELINE_REPORT_FILE} "$CI_SLACK_WEBHOOK_URL"
artifacts:
paths:
- ${FAILED_PIPELINE_REPORT_FILE}
when: always
expire_in: 2 days

View File

@ -67,6 +67,10 @@ class Groups::ApplicationController < ApplicationController
end
end
# Before-action guard for group billing pages.
# Renders a 404 — rather than a 403 — when the user lacks the :read_billing
# ability on the group, so the page's existence is not leaked to
# unauthorized users (contrast with authorize_read_group_member! below,
# which renders 403).
def authorize_billings_page!
  render_404 unless can?(current_user, :read_billing, group)
end
def authorize_read_group_member!
unless can?(current_user, :read_group_member, group)
render_403

View File

@ -41,7 +41,7 @@ class Projects::AutocompleteSourcesController < Projects::ApplicationController
end
def contacts
render json: autocomplete_service.contacts
render json: autocomplete_service.contacts(target)
end
private

View File

@ -42,6 +42,9 @@ module Types
value 'POLICIES_DENIED',
value: :policies_denied,
description: 'There are denied policies for the merge request.'
value 'EXTERNAL_STATUS_CHECKS',
value: :status_checks_must_pass,
description: 'Status checks must pass.'
end
end
end

View File

@ -427,7 +427,7 @@ module ApplicationHelper
milestones: milestones_project_autocomplete_sources_path(object),
commands: commands_project_autocomplete_sources_path(object, type: noteable_type, type_id: params[:id]),
snippets: snippets_project_autocomplete_sources_path(object),
contacts: contacts_project_autocomplete_sources_path(object)
contacts: contacts_project_autocomplete_sources_path(object, type: noteable_type, type_id: params[:id])
}
end
end

View File

@ -174,6 +174,13 @@ module AtomicInternalId
#
# bulk_insert(attributes)
# end
#
# - track_#{scope}_#{column}!
# This method can be used to set a new greatest IID value during import operations.
#
# Example:
#
# MyClass.track_project_iid!(project, value)
def define_singleton_internal_id_methods(scope, column, init)
define_singleton_method("with_#{scope}_#{column}_supply") do |scope_value, &block|
subject = find_by(scope => scope_value) || self
@ -183,6 +190,16 @@ module AtomicInternalId
supply = Supply.new(-> { InternalId.generate_next(subject, scope_attrs, usage, init) })
block.call(supply)
end
define_singleton_method("track_#{scope}_#{column}!") do |scope_value, value|
InternalId.track_greatest(
self,
::AtomicInternalId.scope_attrs(scope_value),
::AtomicInternalId.scope_usage(self),
value,
init
)
end
end
end

View File

@ -213,6 +213,9 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :destroy_deploy_token
enable :update_runners_registration_token
enable :owner_access
enable :read_billing
enable :edit_billing
end
rule { can?(:read_nested_project_resources) }.policy do

View File

@ -15,6 +15,8 @@ module Namespaces
enable :read_statistics
enable :create_jira_connect_subscription
enable :admin_package
enable :read_billing
enable :edit_billing
end
rule { ~can_create_personal_project }.prevent :create_projects

View File

@ -33,9 +33,21 @@ module Projects
SnippetsFinder.new(current_user, project: project).execute.select([:id, :title])
end
def contacts
Crm::ContactsFinder.new(current_user, group: project.group).execute
.select([:id, :email, :first_name, :last_name])
def contacts(target)
available_contacts = Crm::ContactsFinder.new(current_user, group: project.group).execute
.select([:id, :email, :first_name, :last_name, :state])
contact_hashes = available_contacts.as_json
return contact_hashes unless target.is_a?(Issue)
ids = target.customer_relations_contacts.ids # rubocop:disable CodeReuse/ActiveRecord
contact_hashes.each do |hash|
hash[:set] = ids.include?(hash['id'])
end
contact_hashes
end
def labels_as_hash(target)

View File

@ -26,6 +26,11 @@ module Gitlab
RefreshImportJidWorker.perform_in_the_future(project.id, jid)
info(project.id, message: "starting importer", importer: 'Importer::RepositoryImporter')
# If a user creates an issue while the import is in progress, this can lead to an import failure.
# The workaround is to allocate IIDs before starting the importer.
allocate_issues_internal_id!(project, client)
importer = Importer::RepositoryImporter.new(project, client)
importer.execute
@ -56,6 +61,19 @@ module Gitlab
def abort_on_failure
true
end
private
def allocate_issues_internal_id!(project, client)
return if InternalId.exists?(project: project, usage: :issues) # rubocop: disable CodeReuse/ActiveRecord
options = { state: 'all', sort: 'number', direction: 'desc', per_page: '1' }
last_github_issue = client.each_object(:issues, project.import_source, options).first
return unless last_github_issue
Issue.track_project_iid!(project, last_github_issue[:number])
end
end
end
end

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/360808
milestone: '15.3'
type: development
group: group::source code
default_enabled: false
default_enabled: true

View File

@ -0,0 +1,8 @@
---
name: only_allow_merge_if_all_status_checks_passed
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96765"
rollout_issue_url: "https://gitlab.com/gitlab-org/gitlab/-/issues/372340"
milestone: '15.5'
type: development
group: group::compliance
default_enabled: false

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
# Adds a boolean flag to project_settings that, when enabled, blocks merging
# of merge requests unless all external status checks have passed.
# Default is false (feature off) and NOT NULL, so readers never see nil.
class OnlyAllowMergeIfAllStatusChecksPassed < Gitlab::Database::Migration[2.0]
  # Reversible migration: `change` lets the framework derive the rollback
  # (remove_column) automatically.
  def change
    add_column :project_settings, :only_allow_merge_if_all_status_checks_passed, :boolean, default: false, null: false
  end
end

View File

@ -0,0 +1 @@
2c18be04f3b5800c84a50763e7650229a6ae02619a2913966af2c936d3d9aec1

View File

@ -20112,6 +20112,7 @@ CREATE TABLE project_settings (
show_diff_preview_in_email boolean DEFAULT true NOT NULL,
jitsu_key text,
suggested_reviewers_enabled boolean DEFAULT false NOT NULL,
only_allow_merge_if_all_status_checks_passed boolean DEFAULT false NOT NULL,
CONSTRAINT check_2981f15877 CHECK ((char_length(jitsu_key) <= 100)),
CONSTRAINT check_3a03e7557a CHECK ((char_length(previous_default_branch) <= 4096)),
CONSTRAINT check_b09644994b CHECK ((char_length(squash_commit_template) <= 500)),

View File

@ -16108,6 +16108,7 @@ Represents vulnerability finding of a security report on the pipeline.
| <a id="projectnamewithnamespace"></a>`nameWithNamespace` | [`String!`](#string) | Full name of the project with its namespace. |
| <a id="projectnamespace"></a>`namespace` | [`Namespace`](#namespace) | Namespace of the project. |
| <a id="projectonlyallowmergeifalldiscussionsareresolved"></a>`onlyAllowMergeIfAllDiscussionsAreResolved` | [`Boolean`](#boolean) | Indicates if merge requests of the project can only be merged when all the discussions are resolved. |
| <a id="projectonlyallowmergeifallstatuscheckspassed"></a>`onlyAllowMergeIfAllStatusChecksPassed` | [`Boolean`](#boolean) | Indicates that merges of merge requests should be blocked unless all status checks have passed. |
| <a id="projectonlyallowmergeifpipelinesucceeds"></a>`onlyAllowMergeIfPipelineSucceeds` | [`Boolean`](#boolean) | Indicates if merge requests of the project can only be merged with successful jobs. |
| <a id="projectopenissuescount"></a>`openIssuesCount` | [`Int`](#int) | Number of open issues for the project. |
| <a id="projectpackagescleanuppolicy"></a>`packagesCleanupPolicy` | [`PackagesCleanupPolicy`](#packagescleanuppolicy) | Packages cleanup policy for the project. |
@ -20374,6 +20375,7 @@ Detailed representation of whether a GitLab merge request can be merged.
| <a id="detailedmergestatusci_still_running"></a>`CI_STILL_RUNNING` | Pipeline is still running. |
| <a id="detailedmergestatusdiscussions_not_resolved"></a>`DISCUSSIONS_NOT_RESOLVED` | Discussions must be resolved before merging. |
| <a id="detailedmergestatusdraft_status"></a>`DRAFT_STATUS` | Merge request must not be draft before merging. |
| <a id="detailedmergestatusexternal_status_checks"></a>`EXTERNAL_STATUS_CHECKS` | Status checks must pass. |
| <a id="detailedmergestatusmergeable"></a>`MERGEABLE` | Branch can be merged. |
| <a id="detailedmergestatusnot_approved"></a>`NOT_APPROVED` | Merge request must be approved before merging. |
| <a id="detailedmergestatusnot_open"></a>`NOT_OPEN` | Merge request must be open before merging. |

View File

@ -1014,6 +1014,19 @@ can also see the `approvals_before_merge` parameter:
}
```
Users of [GitLab Ultimate](https://about.gitlab.com/pricing/)
can also see the `only_allow_merge_if_all_status_checks_passed`
parameter in GitLab 15.5 and later:
```json
{
"id": 1,
"project_id": 3,
"only_allow_merge_if_all_status_checks_passed": false,
...
}
```
If the project is a fork, the `forked_from_project` field appears in the response.
For this field, if the upstream project is private, a valid token for authentication must be provided.
The field `mr_default_target_self` appears as well. If this value is `false`, then all merge requests
@ -1188,6 +1201,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your-token>" \
| `name` | string | **{check-circle}** Yes (if path isn't provided) | The name of the new project. Equals path if not provided. |
| `path` | string | **{check-circle}** Yes (if name isn't provided) | Repository name for new project. Generated based on name if not provided (generated as lowercase with dashes). Starting with GitLab 14.9, path must not start or end with a special character and must not contain consecutive special characters. |
| `allow_merge_on_skipped_pipeline` | boolean | **{dotted-circle}** No | Set whether or not merge requests can be merged with skipped jobs. |
| `only_allow_merge_if_all_status_checks_passed` **(ULTIMATE)** | boolean | **{dotted-circle}** No | Indicates that merges of merge requests should be blocked unless all status checks have passed. Defaults to false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 with feature flag `only_allow_merge_if_all_status_checks_passed` disabled by default. |
| `analytics_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private` or `enabled` |
| `approvals_before_merge` **(PREMIUM)** | integer | **{dotted-circle}** No | How many approvers should approve merge requests by default. To configure approval rules, see [Merge request approvals API](merge_request_approvals.md). |
| `auto_cancel_pending_pipelines` | string | **{dotted-circle}** No | Auto-cancel pending pipelines. This isn't a boolean, but enabled/disabled. |
@ -1267,6 +1281,7 @@ POST /projects/user/:user_id
| `user_id` | integer | **{check-circle}** Yes | The user ID of the project owner. |
| `name` | string | **{check-circle}** Yes | The name of the new project. |
| `allow_merge_on_skipped_pipeline` | boolean | **{dotted-circle}** No | Set whether or not merge requests can be merged with skipped jobs. |
| `only_allow_merge_if_all_status_checks_passed` **(ULTIMATE)** | boolean | **{dotted-circle}** No | Indicates that merges of merge requests should be blocked unless all status checks have passed. Defaults to false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 with feature flag `only_allow_merge_if_all_status_checks_passed` disabled by default. |
| `analytics_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private` or `enabled` |
| `approvals_before_merge` **(PREMIUM)** | integer | **{dotted-circle}** No | How many approvers should approve merge requests by default. To configure approval rules, see [Merge request approvals API](merge_request_approvals.md). |
| `auto_cancel_pending_pipelines` | string | **{dotted-circle}** No | Auto-cancel pending pipelines. This isn't a boolean, but enabled/disabled. |
@ -1357,6 +1372,7 @@ Supported attributes:
|-------------------------------------------------------------|----------------|------------------------|-------------|
| `id` | integer or string | **{check-circle}** Yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `allow_merge_on_skipped_pipeline` | boolean | **{dotted-circle}** No | Set whether or not merge requests can be merged with skipped jobs. |
| `only_allow_merge_if_all_status_checks_passed` **(ULTIMATE)** | boolean | **{dotted-circle}** No | Indicates that merges of merge requests should be blocked unless all status checks have passed. Defaults to false. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 with feature flag `only_allow_merge_if_all_status_checks_passed` disabled by default. |
| `analytics_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private` or `enabled` |
| `approvals_before_merge` **(PREMIUM)** | integer | **{dotted-circle}** No | How many approvers should approve merge request by default. To configure approval rules, see [Merge request approvals API](merge_request_approvals.md). |
| `auto_cancel_pending_pipelines` | string | **{dotted-circle}** No | Auto-cancel pending pipelines. This isn't a boolean, but enabled/disabled. |

View File

@ -1,4 +1,7 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
status: proposed
creation-date: yyyy-mm-dd
authors: [ "@username" ]

View File

@ -120,6 +120,10 @@ sudo apt-get install -y build-essential zlib1g-dev libyaml-dev libssl-dev libgdb
libcurl4-openssl-dev libicu-dev logrotate rsync python3-docutils pkg-config cmake runit-systemd
```
NOTE:
GitLab requires OpenSSL version 1.1. If your Linux distribution includes a different version of OpenSSL,
you might have to install 1.1 manually.
If you want to use Kerberos for user authentication, install `libkrb5-dev`
(if you don't know what Kerberos is, you can assume you don't need it):

View File

@ -63,7 +63,7 @@ Use the `/zoom` [quick action](../../user/project/quick_actions.md) to add multi
You can also submit a short optional description with the link. The description shows instead of the URL in the **Linked resources** section of the incident issue:
```plaintext
/zoom https://example.zoom.us/j/123456789, Low on memory incident
/zoom https://example.zoom.us/j/123456789 Low on memory incident
```
## Remove a linked resource

View File

@ -121,13 +121,23 @@ It can also help to compare the XML response from your provider with our [exampl
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/211962) in GitLab 13.8 with allowing group owners to not go through SSO.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/9152) in GitLab 13.11 with enforcing open SSO session to use Git if this setting is switched on.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/339888) in GitLab 14.7 to not enforce SSO checks for Git activity originating from CI/CD jobs.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/215155) in GitLab 15.5 [with a flag](../../../administration/feature_flags.md) named `transparent_sso_enforcement` to include transparent enforcement even when SSO enforcement is not enabled. Enabled on GitLab.com.
With this option enabled, users must access GitLab using your group GitLab single sign-on URL to access group resources.
Users can't be added as new members manually.
SSO is enforced when users access groups and projects in the organization's group hierarchy. Users can view other groups and projects without SSO sign in.
When SAML SSO is enabled, SSO is enforced for each user with an existing SAML identity.
A user has a SAML identity if one or both of the following are true:
- They have signed in to GitLab by using their GitLab group's single sign-on URL.
- They were provisioned by SCIM.
Users without SAML identities are not required to use SSO unless explicit enforcement is enabled.
When the **Enforce SSO-only authentication for web activity for this group** option is enabled, all users must access GitLab by using their GitLab group's single sign-on URL to access group resources,
regardless of whether they have an existing SAML identity.
Users also cannot be added as new members manually.
Users with the Owner role can use the standard sign in process to make necessary changes to top-level group settings.
SSO enforcement does not affect sign in or access to any resources outside of the group. Users can view which groups and projects they are a member of without SSO sign in.
However, users are not prompted to sign in through SSO on each visit. GitLab checks whether a user
has authenticated through SSO. If it's been more than 1 day since the last sign-in, GitLab
prompts the user to sign in again through SSO.

View File

@ -29,6 +29,18 @@ You can configure merge request status checks for each individual project. These
To learn more about use cases, feature discovery, and development timelines,
see the [external status checks epic](https://gitlab.com/groups/gitlab-org/-/epics/3869).
## Block merges of merge requests unless all status checks have passed
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/369859) in GitLab 15.5 [with a flag](../../../administration/feature_flags.md) named `only_allow_merge_if_all_status_checks_passed`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available per project or for your entire instance, ask an administrator to
[enable the feature flag](../../../administration/feature_flags.md) named `only_allow_merge_if_all_status_checks_passed`. On GitLab.com, this feature is not available.
By default, merge requests in projects can be merged even if external status checks fail. To block the merging of merge requests when external checks fail, enable this feature
using the [project API](../../../api/projects.md#edit-project). You must also [enable the feature flag](../../../administration/feature_flags.md) named
`only_allow_merge_if_all_status_checks_passed`.
## Lifecycle
External status checks have an **asynchronous** workflow. Merge requests emit a merge request webhook payload to an external service whenever:

View File

@ -72,11 +72,13 @@ namespace :gitlab do
# Events for templates included in a .gitlab-ci.yml using include:template
def explicit_template_includes
Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_templates("lib/gitlab/ci/templates/").map do |template|
Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_templates("lib/gitlab/ci/templates/").each_with_object([]) do |template, result|
expanded_template_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.expand_template_name(template)
next unless expanded_template_name # guard against templates unavailable on FOSS
event_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_template_event_name(expanded_template_name, :repository_source)
ci_template_event(event_name)
result << ci_template_event(event_name)
end
end

View File

@ -43104,6 +43104,12 @@ msgstr ""
msgid "Usage statistics"
msgstr ""
msgid "UsageQuotas|The project-level storage statistics for the Container Registry are directional only and do not include savings for instance-wide deduplication."
msgstr ""
msgid "UsageQuotas|This project-level storage statistic does not include savings for site-wide deduplication and is not used to calculate total namespace storage."
msgstr ""
msgid "UsageQuota|%{help_link_start}Shared runners%{help_link_end} are disabled, so there are no limits set on pipeline usage"
msgstr ""

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'gitlab'
require 'optparse'
require_relative 'default_options'
# Fetches the failed jobs of a CI pipeline through the GitLab API.
#
# Keys consumed (and removed) from the `options` hash:
#   :project                      - project path or ID owning the pipeline
#   :pipeline_id                  - ID of the pipeline to inspect
#   :exclude_allowed_to_fail_jobs - when truthy, skip jobs with allow_failure
#   :api_token                    - private token; may be nil (see below)
#   :endpoint                     - API endpoint; falls back to
#                                   API::DEFAULT_OPTIONS[:endpoint]
class PipelineFailedJobs
  def initialize(options)
    @project = options.delete(:project)
    @pipeline_id = options.delete(:pipeline_id)
    @exclude_allowed_to_fail_jobs = options.delete(:exclude_allowed_to_fail_jobs)

    # Force the token to be a string so that if api_token is nil, it's set to '',
    # allowing unauthenticated requests (for forks).
    api_token = options.delete(:api_token).to_s

    warn "No API token given." if api_token.empty?

    @client = Gitlab.client(
      endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
      private_token: api_token
    )
  end

  # Returns an Array of job objects (as yielded by the gitlab gem) whose
  # status is 'failed', paging through all results 100 at a time.
  def execute
    failed_jobs = []

    client.pipeline_jobs(project, pipeline_id, scope: 'failed', per_page: 100).auto_paginate do |job|
      # Optionally drop allow_failure jobs so the report only surfaces
      # failures that actually broke the pipeline.
      next if exclude_allowed_to_fail_jobs && job.allow_failure

      failed_jobs << job
    end

    failed_jobs
  end

  private

  attr_reader :project, :pipeline_id, :exclude_allowed_to_fail_jobs, :client
end

View File

@ -0,0 +1,105 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require_relative 'api/pipeline_failed_jobs'
finder_options = API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)
failed_jobs = PipelineFailedJobs.new(finder_options).execute
# Builds a Slack Block Kit payload describing a failed pipeline and writes it
# as pretty-printed JSON to the file named by ENV['FAILED_PIPELINE_REPORT_FILE'].
# It does NOT post to Slack itself — the CI job curls the file afterwards.
# All pipeline metadata is read from standard GitLab CI/CD variables
# (CI_PIPELINE_URL, CI_COMMIT_REF_NAME, CI_PIPELINE_CREATED_AT, ...).
class SlackReporter
  # failed_jobs - enumerable of job objects responding to #web_url and #name
  #               (e.g. the result of PipelineFailedJobs#execute)
  def initialize(failed_jobs)
    @failed_jobs = failed_jobs
  end

  # Assembles the message: a title section, a commit/trigger field pair,
  # a source/duration field pair, and the list of failed jobs.
  def report
    payload = {
      channel: ENV['SLACK_CHANNEL'],
      username: "Failed pipeline reporter",
      icon_emoji: ":boom:",
      # Plain-text fallback for clients that don't render blocks.
      text: "*#{title}*",
      blocks: [
        {
          type: "section",
          text: {
            type: "mrkdwn",
            text: "*#{title}*"
          }
        },
        {
          type: "section",
          fields: [
            {
              type: "mrkdwn",
              text: "*Commit*\n#{commit_link}"
            },
            {
              type: "mrkdwn",
              text: "*Triggered by*\n#{triggered_by_link}"
            }
          ]
        },
        {
          type: "section",
          fields: [
            {
              type: "mrkdwn",
              text: "*Source*\n#{source}"
            },
            {
              type: "mrkdwn",
              text: "*Duration*\n#{pipeline_duration} minutes"
            }
          ]
        },
        {
          type: "section",
          text: {
            type: "mrkdwn",
            text: "*Failed jobs (#{failed_jobs.size}):* #{failed_jobs_list}"
          }
        }
      ]
    }

    # NOTE(review): relies on JSON and Time.parse being loaded transitively
    # (presumably via the gitlab gem required by the caller) — confirm, or
    # add `require 'json'` / `require 'time'` to the script's header.
    File.write(ENV['FAILED_PIPELINE_REPORT_FILE'], JSON.pretty_generate(payload))
  end

  private

  attr_reader :failed_jobs

  # e.g. "Pipeline <url|#123> for <url|`master`> failed"
  def title
    "Pipeline #{pipeline_link} for #{branch_link} failed"
  end

  # Slack mrkdwn link to the pipeline, labelled with its ID.
  def pipeline_link
    "<#{ENV['CI_PIPELINE_URL']}|##{ENV['CI_PIPELINE_ID']}>"
  end

  # Slack mrkdwn link to the branch's commit list, labelled with the ref name.
  def branch_link
    "<#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']}|`#{ENV['CI_COMMIT_REF_NAME']}`>"
  end

  # Minutes elapsed since the pipeline was created, rounded to 2 decimals.
  # Uses wall-clock "now", so this is the duration up to report time, not the
  # pipeline's own recorded duration.
  def pipeline_duration
    ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
  end

  # Slack mrkdwn link to the failing commit, labelled with its title.
  def commit_link
    "<#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']}|#{ENV['CI_COMMIT_TITLE']}>"
  end

  # Pipeline source (push, schedule, merge_request_event, ...) as inline code.
  def source
    "`#{ENV['CI_PIPELINE_SOURCE']}`"
  end

  # Link to the triggering user's profile, labelled with their display name.
  def triggered_by_link
    "<#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']}|#{ENV['GITLAB_USER_NAME']}>"
  end

  # Comma-separated mrkdwn links, one per failed job.
  def failed_jobs_list
    failed_jobs.map { |job| "<#{job.web_url}|#{job.name}>" }.join(', ')
  end
end
SlackReporter.new(failed_jobs).report

View File

@ -184,7 +184,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
it 'lists contacts' do
group.add_developer(user)
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
emails = json_response.map { |contact_data| contact_data["email"] }
expect(emails).to match_array([contact_1.email, contact_2.email])
@ -193,7 +193,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
context 'when a user can not read contacts' do
it 'renders 404' do
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
expect(response).to have_gitlab_http_status(:not_found)
end
@ -204,7 +204,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
it 'renders 404' do
group.add_developer(user)
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
expect(response).to have_gitlab_http_status(:not_found)
end

View File

@ -11,5 +11,9 @@ FactoryBot.define do
trait :with_organization do
organization
end
trait :inactive do
state { :inactive }
end
end
end

View File

@ -112,13 +112,12 @@ RSpec.describe 'Dashboard Projects' do
end
context 'when on Starred projects tab', :js do
it 'shows the empty state when there are no starred projects', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/222357' do
it 'shows the empty state when there are no starred projects' do
visit(starred_dashboard_projects_path)
element = page.find('.row.empty-state')
expect(element).to have_content("You don't have starred projects yet.")
expect(element.find('.svg-content img')['src']).to have_content('illustrations/starred_empty')
end
it 'shows only starred projects' do

View File

@ -702,6 +702,7 @@ ProjectCiCdSetting:
- runner_token_expiration_interval
ProjectSetting:
- allow_merge_on_skipped_pipeline
- only_allow_merge_if_all_status_checks_passed
- has_confluence
- has_shimo
- has_vulnerabilities

View File

@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe AtomicInternalId do
let(:milestone) { build(:milestone) }
let_it_be(:project) { create(:project) }
let(:milestone) { build(:milestone, project: project) }
let(:iid) { double('iid', to_i: 42) }
let(:external_iid) { 100 }
let(:scope_attrs) { { project: milestone.project } }
let(:scope_attrs) { { project: project } }
let(:usage) { :milestones }
describe '#save!' do
@ -248,4 +249,12 @@ RSpec.describe AtomicInternalId do
end.to change { InternalId.find_by(project: milestone.project, usage: :milestones)&.last_value.to_i }.by(4)
end
end
describe '.track_project_iid!' do
it 'tracks the present value' do
expect do
::Issue.track_project_iid!(milestone.project, external_iid)
end.to change { InternalId.find_by(project: milestone.project, usage: :issues)&.last_value.to_i }.to(external_iid)
end
end
end

View File

@ -8,7 +8,7 @@ RSpec.describe Namespaces::UserNamespacePolicy do
let_it_be(:admin) { create(:admin) }
let_it_be(:namespace) { create(:user_namespace, owner: owner) }
let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package] }
let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package, :read_billing, :edit_billing] }
subject { described_class.new(current_user, namespace) }

View File

@ -147,6 +147,7 @@ project_setting:
- has_vulnerabilities
- legacy_open_source_license_available
- prevent_merge_without_jira_issue
- only_allow_merge_if_all_status_checks_passed
- warn_about_potentially_unwanted_characters
- previous_default_branch
- project_id

View File

@ -34,19 +34,19 @@ RSpec.describe MergeRequests::CreateFromIssueService do
expect(result[:message]).to eq('Invalid issue iid')
end
it 'creates a branch based on issue title', :sidekiq_might_not_need_inline do
it 'creates a branch based on issue title' do
service.execute
expect(target_project.repository.branch_exists?(issue.to_branch_name)).to be_truthy
end
it 'creates a branch using passed name', :sidekiq_might_not_need_inline do
it 'creates a branch using passed name' do
service_with_custom_source_branch.execute
expect(target_project.repository.branch_exists?(custom_source_branch)).to be_truthy
end
it 'creates the new_merge_request system note', :sidekiq_might_not_need_inline do
it 'creates the new_merge_request system note' do
expect(SystemNoteService).to receive(:new_merge_request).with(issue, project, user, instance_of(MergeRequest))
service.execute
@ -60,7 +60,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
service.execute
end
it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created' do
expect_next_instance_of(MergeRequest) do |instance|
expect(instance).to receive(:valid?).at_least(:once).and_return(false)
end
@ -81,36 +81,36 @@ RSpec.describe MergeRequests::CreateFromIssueService do
service.execute
end
it 'creates a merge request', :sidekiq_might_not_need_inline do
it 'creates a merge request' do
expect { service.execute }.to change(target_project.merge_requests, :count).by(1)
end
it 'sets the merge request author to current user and assigns them', :sidekiq_might_not_need_inline do
it 'sets the merge request author to current user and assigns them' do
result = service.execute
expect(result[:merge_request].author).to eq(user)
expect(result[:merge_request].assignees).to eq([user])
end
it 'sets the merge request source branch to the new issue branch', :sidekiq_might_not_need_inline do
it 'sets the merge request source branch to the new issue branch' do
result = service.execute
expect(result[:merge_request].source_branch).to eq(issue.to_branch_name)
end
it 'sets the merge request source branch to the passed branch name', :sidekiq_might_not_need_inline do
it 'sets the merge request source branch to the passed branch name' do
result = service_with_custom_source_branch.execute
expect(result[:merge_request].source_branch).to eq(custom_source_branch)
end
it 'sets the merge request target branch to the project default branch', :sidekiq_might_not_need_inline do
it 'sets the merge request target branch to the project default branch' do
result = service.execute
expect(result[:merge_request].target_branch).to eq(target_project.default_branch)
end
it 'executes quick actions if the build service sets them in the description', :sidekiq_might_not_need_inline do
it 'executes quick actions if the build service sets them in the description' do
allow(service).to receive(:merge_request).and_wrap_original do |m, *args|
m.call(*args).tap do |merge_request|
merge_request.description = "/assign #{user.to_reference}"
@ -122,7 +122,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
expect(result[:merge_request].assignees).to eq([user])
end
context 'when ref branch is set', :sidekiq_might_not_need_inline do
context 'when ref branch is set' do
subject { described_class.new(project: project, current_user: user, mr_params: { ref: 'feature', **service_params }).execute }
it 'sets the merge request source branch to the new issue branch' do
@ -213,7 +213,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
it_behaves_like 'a service that creates a merge request from an issue'
it 'sets the merge request title to: "Draft: $issue-branch-name', :sidekiq_might_not_need_inline do
it 'sets the merge request title to: "Draft: $issue-branch-name' do
result = service.execute
expect(result[:merge_request].title).to eq("Draft: #{issue.to_branch_name.titleize.humanize}")

View File

@ -189,7 +189,7 @@ RSpec.describe MergeRequests::RefreshService do
subject { service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master') }
it 'updates the head_pipeline_id for @merge_request', :sidekiq_might_not_need_inline do
it 'updates the head_pipeline_id for @merge_request', :sidekiq_inline do
expect { subject }.to change { @merge_request.reload.head_pipeline_id }.from(nil).to(pipeline.id)
end
@ -306,7 +306,7 @@ RSpec.describe MergeRequests::RefreshService do
subject
end
it 'sets the latest detached merge request pipeline as a head pipeline', :sidekiq_might_not_need_inline do
it 'sets the latest detached merge request pipeline as a head pipeline' do
@merge_request.reload
expect(@merge_request.actual_head_pipeline).to be_merge_request_event
end
@ -424,7 +424,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
context 'push to origin repo target branch' do
context 'when all MRs to the target branch had diffs' do
before do
service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@ -474,7 +474,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
context 'manual merge of source branch', :sidekiq_might_not_need_inline do
context 'manual merge of source branch' do
before do
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
@ -496,7 +496,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
context 'push to fork repo source branch', :sidekiq_might_not_need_inline do
context 'push to fork repo source branch' do
let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
def refresh
@ -561,7 +561,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
context 'push to fork repo target branch', :sidekiq_might_not_need_inline do
context 'push to fork repo target branch' do
describe 'changes to merge requests' do
before do
service.new(project: @fork_project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@ -587,7 +587,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
context 'forked projects with the same source branch name as target branch', :sidekiq_might_not_need_inline do
context 'forked projects with the same source branch name as target branch' do
let!(:first_commit) do
@fork_project.repository.create_file(@user, 'test1.txt', 'Test data',
message: 'Test commit',
@ -671,7 +671,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'push new branch that exists in a merge request' do
let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
it 'refreshes the merge request', :sidekiq_might_not_need_inline do
it 'refreshes the merge request' do
expect(refresh_service).to receive(:execute_hooks)
.with(@fork_merge_request, 'update', old_rev: Gitlab::Git::BLANK_SHA)
allow_any_instance_of(Repository).to receive(:merge_base).and_return(@oldrev)

View File

@ -154,23 +154,49 @@ RSpec.describe Projects::AutocompleteService do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:contact_1) { create(:contact, group: group) }
let_it_be(:contact_2) { create(:contact, group: group) }
let_it_be(:contact_3) { create(:contact, :inactive, group: group) }
subject { described_class.new(project, user).contacts.as_json }
let(:issue) { nil }
subject { described_class.new(project, user).contacts(issue).as_json }
before do
group.add_developer(user)
end
it 'returns contact data correctly' do
it 'returns CRM contacts from group' do
expected_contacts = [
{ 'id' => contact_1.id, 'email' => contact_1.email,
'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name },
'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state },
{ 'id' => contact_2.id, 'email' => contact_2.email,
'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name }
'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state },
{ 'id' => contact_3.id, 'email' => contact_3.email,
'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state }
]
expect(subject).to match_array(expected_contacts)
end
context 'some contacts are already assigned to the issue' do
let(:issue) { create(:issue, project: project) }
before do
issue.customer_relations_contacts << [contact_2, contact_3]
end
it 'marks already assigned contacts as set' do
expected_contacts = [
{ 'id' => contact_1.id, 'email' => contact_1.email,
'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state, 'set' => false },
{ 'id' => contact_2.id, 'email' => contact_2.email,
'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state, 'set' => true },
{ 'id' => contact_3.id, 'email' => contact_3.email,
'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state, 'set' => true }
]
expect(subject).to match_array(expected_contacts)
end
end
end
describe '#labels_as_hash' do

View File

@ -74,6 +74,8 @@ RSpec.shared_context 'GroupPolicy context' do
read_group_runners
admin_group_runners
register_group_runners
read_billing
edit_billing
]
end

View File

@ -69,9 +69,9 @@ RSpec.describe 'gitlab:usage data take tasks', :silence_stdout do
expect { run_rake_task('gitlab:usage_data:generate_and_send') }.to output(/.*201.*/).to_stdout
end
describe 'generate_ci_template_events', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/377698' do
describe 'generate_ci_template_events' do
it "generates #{Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH}" do
FileUtils.rm(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH)
FileUtils.rm_rf(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH)
run_rake_task('gitlab:usage_data:generate_ci_template_events')
expect(File.exist?(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH)).to be true

View File

@ -19,18 +19,69 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
end
context 'when the import succeeds' do
it 'schedules the importing of the base data' do
client = double(:client)
context 'with issues' do
it 'schedules the importing of the base data' do
client = double(:client)
options = { state: 'all', sort: 'number', direction: 'desc', per_page: '1' }
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute).and_return(true)
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute).and_return(true)
end
expect(InternalId).to receive(:exists?).and_return(false)
expect(client).to receive(:each_object).with(
:issues, project.import_source, options
).and_return([{ number: 5 }].each)
expect(Issue).to receive(:track_project_iid!).with(project, 5)
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async)
.with(project.id)
worker.import(client, project)
end
end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async)
.with(project.id)
context 'without issues' do
it 'schedules the importing of the base data' do
client = double(:client)
options = { state: 'all', sort: 'number', direction: 'desc', per_page: '1' }
worker.import(client, project)
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute).and_return(true)
end
expect(InternalId).to receive(:exists?).and_return(false)
expect(client).to receive(:each_object).with(:issues, project.import_source, options).and_return([nil].each)
expect(Issue).not_to receive(:track_project_iid!)
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async)
.with(project.id)
worker.import(client, project)
end
end
context 'when retrying' do
it 'does not allocate internal ids' do
client = double(:client)
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute).and_return(true)
end
expect(InternalId).to receive(:exists?).and_return(true)
expect(client).not_to receive(:each_object)
expect(Issue).not_to receive(:track_project_iid!)
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async)
.with(project.id)
worker.import(client, project)
end
end
end
@ -43,6 +94,10 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
expect(instance).to receive(:execute).and_raise(exception_class)
end
expect(InternalId).to receive(:exists?).and_return(false)
expect(client).to receive(:each_object).and_return([nil].each)
expect(Issue).not_to receive(:track_project_iid!)
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: project.id,