Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-03-23 00:09:09 +00:00
parent b489f0f0a1
commit 5df6990dac
64 changed files with 844 additions and 218 deletions

View file

@ -0,0 +1,40 @@
<script>
import { GlAlert, GlButton } from '@gitlab/ui';
export default {
components: {
GlAlert,
GlButton,
},
props: {
message: {
type: String,
required: true,
},
action: {
type: Object,
required: false,
default: null,
},
},
computed: {
hasAction() {
return Boolean(this.action?.href);
},
actionButtonMethod() {
return this.action?.isForm ? 'post' : null;
},
},
};
</script>
<template>
<gl-alert :dismissible="false">
{{ message }}
<template v-if="hasAction" #actions>
<gl-button variant="confirm" :href="action.href" :data-method="actionButtonMethod">
{{ action.text }}
</gl-button>
</template>
</gl-alert>
</template>

View file

@ -1,5 +1,5 @@
<script>
import { GlAlert, GlButton, GlLoadingIcon } from '@gitlab/ui';
import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import { mapActions, mapGetters, mapState } from 'vuex';
import { __ } from '~/locale';
import {
@ -14,6 +14,7 @@ import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { modalTypes } from '../constants';
import eventHub from '../eventhub';
import { measurePerformance } from '../utils';
import CannotPushCodeAlert from './cannot_push_code_alert.vue';
import IdeSidebar from './ide_side_bar.vue';
import RepoEditor from './repo_editor.vue';
@ -29,7 +30,6 @@ export default {
components: {
IdeSidebar,
RepoEditor,
GlAlert,
GlButton,
GlLoadingIcon,
ErrorMessage: () => import(/* webpackChunkName: 'ide_runtime' */ './error_message.vue'),
@ -41,6 +41,7 @@ export default {
import(/* webpackChunkName: 'ide_runtime' */ '~/vue_shared/components/file_finder/index.vue'),
RightPane: () => import(/* webpackChunkName: 'ide_runtime' */ './panes/right.vue'),
NewModal: () => import(/* webpackChunkName: 'ide_runtime' */ './new_dropdown/modal.vue'),
CannotPushCodeAlert,
},
mixins: [glFeatureFlagsMixin()],
data() {
@ -120,9 +121,11 @@ export default {
class="ide position-relative d-flex flex-column align-items-stretch"
:class="{ [`theme-${themeName}`]: themeName }"
>
<gl-alert v-if="!canPushCodeStatus.isAllowed" :dismissible="false">{{
canPushCodeStatus.message
}}</gl-alert>
<cannot-push-code-alert
v-if="!canPushCodeStatus.isAllowed"
:message="canPushCodeStatus.message"
:action="canPushCodeStatus.action"
/>
<error-message v-if="errorMessage" :message="errorMessage" />
<div class="ide-view flex-grow d-flex">
<template v-if="loadDeferred">

View file

@ -54,6 +54,7 @@ export function initIde(el, options = {}) {
});
this.setLinks({
webIDEHelpPagePath: el.dataset.webIdeHelpPagePath,
forkInfo: el.dataset.forkInfo ? JSON.parse(el.dataset.forkInfo) : null,
});
this.setInitialData({
clientsidePreviewEnabled: parseBoolean(el.dataset.clientsidePreviewEnabled),

View file

@ -1,10 +1,14 @@
import { s__ } from '~/locale';
export const MSG_CANNOT_PUSH_CODE = s__(
export const MSG_CANNOT_PUSH_CODE_SHOULD_FORK = s__(
'WebIDE|You need permission to edit files directly in this project. Fork this project to make your changes and submit a merge request.',
);
export const MSG_CANNOT_PUSH_CODE_SHORT = s__(
export const MSG_CANNOT_PUSH_CODE_GO_TO_FORK = s__(
'WebIDE|You need permission to edit files directly in this project. Go to your fork to make changes and submit a merge request.',
);
export const MSG_CANNOT_PUSH_CODE = s__(
'WebIDE|You need permission to edit files directly in this project.',
);
@ -15,3 +19,7 @@ export const MSG_CANNOT_PUSH_UNSIGNED = s__(
export const MSG_CANNOT_PUSH_UNSIGNED_SHORT = s__(
'WebIDE|This project does not accept unsigned commits.',
);
export const MSG_FORK = s__('WebIDE|Fork project');
export const MSG_GO_TO_FORK = s__('WebIDE|Go to fork');

View file

@ -11,12 +11,42 @@ import {
} from '../constants';
import {
MSG_CANNOT_PUSH_CODE,
MSG_CANNOT_PUSH_CODE_SHORT,
MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
MSG_CANNOT_PUSH_UNSIGNED,
MSG_CANNOT_PUSH_UNSIGNED_SHORT,
MSG_FORK,
MSG_GO_TO_FORK,
} from '../messages';
import { getChangesCountForFiles, filePathMatches } from './utils';
const getCannotPushCodeViewModel = (state) => {
const { ide_path: idePath, fork_path: forkPath } = state.links.forkInfo || {};
if (idePath) {
return {
message: MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
action: {
href: idePath,
text: MSG_GO_TO_FORK,
},
};
} else if (forkPath) {
return {
message: MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
action: {
href: forkPath,
isForm: true,
text: MSG_FORK,
},
};
}
return {
message: MSG_CANNOT_PUSH_CODE,
};
};
export const activeFile = (state) => state.openFiles.find((file) => file.active) || null;
export const addedFiles = (state) => state.changedFiles.filter((f) => f.tempFile);
@ -188,8 +218,8 @@ export const canPushCodeStatus = (state, getters) => {
if (!canPushCode) {
return {
isAllowed: false,
message: MSG_CANNOT_PUSH_CODE,
messageShort: MSG_CANNOT_PUSH_CODE_SHORT,
messageShort: MSG_CANNOT_PUSH_CODE,
...getCannotPushCodeViewModel(state),
};
}

View file

@ -27,9 +27,20 @@ class IdeController < ApplicationController
@branch = params[:branch]
@path = params[:path]
@merge_request = params[:merge_request_id]
@fork_info = fork_info(project, @branch)
end
unless can?(current_user, :push_code, project)
@forked_project = ForkProjectsFinder.new(project, current_user: current_user).execute.first
def fork_info(project, branch)
return if can?(current_user, :push_code, project)
existing_fork = current_user.fork_of(project)
if existing_fork
path = helpers.ide_edit_path(existing_fork, branch, '')
{ ide_path: path }
elsif can?(current_user, :fork_project, project)
path = helpers.ide_fork_and_edit_path(project, branch, '', with_notice: false)
{ fork_path: path }
end
end
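The new `fork_info` helper returns one of two small hashes: `{ ide_path: ... }` when the user already has a fork (rendered as a "Go to fork" button), or `{ fork_path: ... }` when they are allowed to create one (rendered as a "Fork project" form button). A minimal plain-Ruby sketch of that decision tree, with keyword arguments and literal paths standing in for `can?`, `current_user.fork_of`, and the real route helpers:

```ruby
# Sketch of the fork_info branching above, outside Rails. The arguments and
# paths are illustrative stand-ins, not the actual helpers.
def fork_info(can_push:, existing_fork_ide_path: nil, can_fork: false)
  return if can_push # users who can push never need the fork prompt

  if existing_fork_ide_path
    { ide_path: existing_fork_ide_path }  # "Go to fork"
  elsif can_fork
    { fork_path: '/group/project/-/forks' } # "Fork project" (form POST)
  end
end

fork_info(can_push: true)
# => nil
fork_info(can_push: false, existing_fork_ide_path: '/-/ide/project/me/project/edit/master')
# => { ide_path: "/-/ide/project/me/project/edit/master" }
fork_info(can_push: false, can_fork: true)
# => { fork_path: "/group/project/-/forks" }
```

The resulting hash is serialized by `IdeHelper` as the `fork-info` data attribute and parsed into `state.links.forkInfo` by `ide/index.js`, both of which are changed in this commit.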

View file

@ -41,20 +41,20 @@ module BlobHelper
result
end
def ide_fork_and_edit_path(project = @project, ref = @ref, path = @path, options = {})
fork_path_for_current_user(project, ide_edit_path(project, ref, path))
def ide_fork_and_edit_path(project = @project, ref = @ref, path = @path, with_notice: true)
fork_path_for_current_user(project, ide_edit_path(project, ref, path), with_notice: with_notice)
end
def fork_and_edit_path(project = @project, ref = @ref, path = @path, options = {})
fork_path_for_current_user(project, edit_blob_path(project, ref, path, options))
end
def fork_path_for_current_user(project, path)
def fork_path_for_current_user(project, path, with_notice: true)
return unless current_user
project_forks_path(project,
namespace_key: current_user.namespace&.id,
continue: edit_blob_fork_params(path))
continue: edit_blob_fork_params(path, with_notice: with_notice))
end
def encode_ide_path(path)
@ -330,12 +330,12 @@ module BlobHelper
blob if blob&.readable_text?
end
def edit_blob_fork_params(path)
def edit_blob_fork_params(path, with_notice: true)
{
to: path,
notice: edit_in_new_fork_notice,
notice_now: edit_in_new_fork_notice_now
}
notice: (edit_in_new_fork_notice if with_notice),
notice_now: (edit_in_new_fork_notice_now if with_notice)
}.compact
end
def edit_modify_file_fork_params(action)
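With `with_notice: false`, the `notice` and `notice_now` keys become `nil` and `.compact` drops them, so the Web IDE's fork URL carries only the `continue[to]` target. A hedged sketch of the two output shapes, using placeholder strings instead of the real `edit_in_new_fork_notice` / `edit_in_new_fork_notice_now` helpers:

```ruby
# Sketch of the edit_blob_fork_params output with and without the notice keys.
# NOTICE and NOTICE_NOW are placeholders, not the actual notice wording.
NOTICE     = 'You are about to fork this project to edit the file.'
NOTICE_NOW = 'You are now editing a fork of the original project.'

def edit_blob_fork_params(path, with_notice: true)
  {
    to: path,
    notice: (NOTICE if with_notice),
    notice_now: (NOTICE_NOW if with_notice)
  }.compact # nil values are removed when with_notice: false
end

edit_blob_fork_params('/-/ide/project/group/repo/edit/master')
# => { to: "/-/ide/...", notice: "You are about to ...", notice_now: "You are now ..." }

edit_blob_fork_params('/-/ide/project/group/repo/edit/master', with_notice: false)
# => { to: "/-/ide/project/group/repo/edit/master" }
```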

View file

@ -16,7 +16,7 @@ module IdeHelper
'branch-name' => @branch,
'file-path' => @path,
'merge-request' => @merge_request,
'forked-project' => convert_to_project_entity_json(@forked_project),
'fork-info' => @fork_info&.to_json,
'project' => convert_to_project_entity_json(@project)
}
end

View file

@ -19,6 +19,7 @@ module Ci
has_many :bridges, foreign_key: :stage_id
scope :ordered, -> { order(position: :asc) }
scope :in_pipelines, ->(pipelines) { where(pipeline: pipelines) }
with_options unless: :importing? do
validates :project, presence: true

View file

@ -2,11 +2,17 @@
module Clusters
class AgentToken < ApplicationRecord
include RedisCacheable
include TokenAuthenticatable
add_authentication_token_field :token, encrypted: :required, token_generator: -> { Devise.friendly_token(50) }
cached_attr_reader :last_contacted_at
self.table_name = 'cluster_agent_tokens'
# The `UPDATE_USED_COLUMN_EVERY` defines how often the token DB entry can be updated
UPDATE_USED_COLUMN_EVERY = (40.minutes..55.minutes).freeze
belongs_to :agent, class_name: 'Clusters::Agent', optional: false
belongs_to :created_by_user, class_name: 'User', optional: true
@ -14,5 +20,27 @@ module Clusters
validates :description, length: { maximum: 1024 }
validates :name, presence: true, length: { maximum: 255 }, on: :create
def track_usage
track_values = { last_used_at: Time.current.utc }
cache_attributes(track_values)
# Use update_column so updated_at is skipped
update_columns(track_values) if can_update_track_values?
end
private
def can_update_track_values?
# Use a random threshold to prevent beating DB updates.
last_used_at_max_age = Random.rand(UPDATE_USED_COLUMN_EVERY)
real_last_used_at = read_attribute(:last_used_at)
# Handle too many updates from high token traffic
real_last_used_at.nil? ||
(Time.current - real_last_used_at) >= last_used_at_max_age
end
end
end
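`track_usage` always refreshes the Redis-cached `last_used_at` (via `cache_attributes` from `RedisCacheable`), but it only writes the database column when the stored value is older than a random threshold between 40 and 55 minutes. A standalone sketch of that throttle check, without ActiveRecord or Redis:

```ruby
# Standalone sketch of can_update_track_values? above.
UPDATE_USED_COLUMN_EVERY = ((40 * 60)..(55 * 60)) # seconds; mirrors 40.minutes..55.minutes

def db_update_due?(last_used_at)
  max_age = Random.rand(UPDATE_USED_COLUMN_EVERY) # random threshold spreads writes out
  last_used_at.nil? || (Time.now - last_used_at) >= max_age
end

db_update_due?(nil)                # => true  (column never written yet)
db_update_due?(Time.now - 10 * 60) # => false (written 10 minutes ago, inside the window)
db_update_due?(Time.now - 60 * 60) # => true  (older than the 55-minute upper bound)
```

The randomized window matters because the kas internal API (changed later in this commit) calls `agent_token.track_usage` on every authenticated agent request; without it, a busy agent would try to update the same row on each call.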

View file

@ -352,7 +352,12 @@ class User < ApplicationRecord
# this state transition object in order to do a rollback.
# For this reason the tradeoff is to disable this cop.
after_transition any => :blocked do |user|
Ci::CancelUserPipelinesService.new.execute(user)
if Feature.enabled?(:abort_user_pipelines_on_block, user)
Ci::AbortPipelinesService.new.execute(user.pipelines)
else
Ci::CancelUserPipelinesService.new.execute(user)
end
Ci::DisableUserPipelineSchedulesService.new.execute(user)
end
# rubocop: enable CodeReuse/ServiceClass
@ -1598,32 +1603,40 @@ class User < ApplicationRecord
@global_notification_setting
end
def count_cache_validity_period
if Feature.enabled?(:longer_count_cache_validity, self, default_enabled: :yaml)
24.hours
else
20.minutes
end
end
def assigned_open_merge_requests_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_merge_requests_count'], force: force, expires_in: 20.minutes) do
Rails.cache.fetch(['users', id, 'assigned_open_merge_requests_count'], force: force, expires_in: count_cache_validity_period) do
MergeRequestsFinder.new(self, assignee_id: self.id, state: 'opened', non_archived: true).execute.count
end
end
def review_requested_open_merge_requests_count(force: false)
Rails.cache.fetch(['users', id, 'review_requested_open_merge_requests_count'], force: force, expires_in: 20.minutes) do
Rails.cache.fetch(['users', id, 'review_requested_open_merge_requests_count'], force: force, expires_in: count_cache_validity_period) do
MergeRequestsFinder.new(self, reviewer_id: id, state: 'opened', non_archived: true).execute.count
end
end
def assigned_open_issues_count(force: false)
Rails.cache.fetch(['users', id, 'assigned_open_issues_count'], force: force, expires_in: 20.minutes) do
Rails.cache.fetch(['users', id, 'assigned_open_issues_count'], force: force, expires_in: count_cache_validity_period) do
IssuesFinder.new(self, assignee_id: self.id, state: 'opened', non_archived: true).execute.count
end
end
def todos_done_count(force: false)
Rails.cache.fetch(['users', id, 'todos_done_count'], force: force, expires_in: 20.minutes) do
Rails.cache.fetch(['users', id, 'todos_done_count'], force: force, expires_in: count_cache_validity_period) do
TodosFinder.new(self, state: :done).execute.count
end
end
def todos_pending_count(force: false)
Rails.cache.fetch(['users', id, 'todos_pending_count'], force: force, expires_in: 20.minutes) do
Rails.cache.fetch(['users', id, 'todos_pending_count'], force: force, expires_in: count_cache_validity_period) do
TodosFinder.new(self, state: :pending).execute.count
end
end
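All of these count caches now share one expiry, which stretches from 20 minutes to 24 hours when the `longer_count_cache_validity` flag is enabled for the user. A trivial plain-Ruby sketch of the switch, with a boolean argument standing in for the `Feature.enabled?` check:

```ruby
# Sketch of count_cache_validity_period with the feature check replaced by a
# plain boolean; the real method asks Feature.enabled?(:longer_count_cache_validity, self).
def count_cache_validity_period(longer_validity_enabled:)
  longer_validity_enabled ? 24 * 60 * 60 : 20 * 60 # seconds: 24 hours vs. 20 minutes
end

count_cache_validity_period(longer_validity_enabled: false) # => 1200
count_cache_validity_period(longer_validity_enabled: true)  # => 86400
```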

View file

@ -0,0 +1,30 @@
# frozen_string_literal: true
module Ci
class AbortPipelinesService
# Danger: Cancels in bulk without callbacks
# Only for pipeline abandonment scenarios (examples: project delete, user block)
def execute(pipelines)
bulk_abort!(pipelines.cancelable, status: :canceled)
ServiceResponse.success(message: 'Pipelines canceled')
end
private
def bulk_abort!(pipelines, status:)
pipelines.each_batch(of: 100) do |pipeline_batch|
update_status_for(Ci::Stage, pipeline_batch, status)
update_status_for(CommitStatus, pipeline_batch, status)
pipeline_batch.update_all(status: status, finished_at: Time.current)
end
end
def update_status_for(klass, pipelines, status)
klass.in_pipelines(pipelines)
.cancelable
.in_batches(of: 150) # rubocop:disable Cop/InBatches
.update_all(status: status)
end
end
end
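This service replaces both `Ci::AbortProjectPipelinesService` (project deletion, removed below) and `Ci::CancelUserPipelinesService` (user block, behind the `abort_user_pipelines_on_block` flag) with one bulk path: batches of 100 pipelines, with cancelable stages and commit statuses updated in sub-batches of 150, deliberately skipping callbacks. A sketch of the two call sites introduced elsewhere in this commit, written as they could be exercised from a Rails console (the username and project path are placeholders):

```ruby
# Bulk-abort a blocked user's pipelines (User#block transition,
# behind the abort_user_pipelines_on_block flag):
user = User.find_by!(username: 'blocked-user')
Ci::AbortPipelinesService.new.execute(user.pipelines)

# Bulk-abort a project's pipelines during deletion (Projects::DestroyService,
# behind the abort_deleted_project_pipelines flag):
project = Project.find_by_full_path('group/doomed-project')
Ci::AbortPipelinesService.new.execute(project.all_pipelines)
```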

View file

@ -1,25 +0,0 @@
# frozen_string_literal: true
module Ci
class AbortProjectPipelinesService
# Danger: Cancels in bulk without callbacks
# Only for pipeline abandonment scenarios (current example: project delete)
def execute(project)
return unless Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
pipelines = project.all_pipelines.cancelable
bulk_abort!(pipelines, status: :canceled)
ServiceResponse.success(message: 'Pipelines canceled')
end
private
def bulk_abort!(pipelines, status:)
pipelines.each_batch do |pipeline_batch|
CommitStatus.in_pipelines(pipeline_batch).in_batches.update_all(status: status) # rubocop: disable Cop/InBatches
pipeline_batch.update_all(status: status)
end
end
end
end

View file

@ -27,7 +27,9 @@ module Projects
# Git data (e.g. a list of branch names).
flush_caches(project)
::Ci::AbortProjectPipelinesService.new.execute(project)
if Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
::Ci::AbortPipelinesService.new.execute(project.all_pipelines)
end
Projects::UnlinkForkService.new(project, current_user).execute

View file

@ -9,7 +9,7 @@
%ul#resolvable-comment-menu.dropdown-menu.dropdown-open-top{ data: { dropdown: true } }
%li#comment.droplab-item-selected{ data: { value: '', 'submit-text' => _('Comment'), 'close-text' => _("Comment & close %{noteable_name}") % { noteable_name: noteable_name }, 'reopen-text' => _("Comment & reopen %{noteable_name}") % { noteable_name: noteable_name } } }
%button.btn.gl-button.btn-transparent
%button{ type: 'button' }
= sprite_icon('check', css_class: 'icon')
.description
%strong= _("Comment")
@ -19,7 +19,7 @@
%li.divider.droplab-item-ignore
%li#discussion{ data: { value: 'DiscussionNote', 'submit-text' => _('Start thread'), 'close-text' => _("Start thread & close %{noteable_name}") % { noteable_name: noteable_name }, 'reopen-text' => _("Start thread & reopen %{noteable_name}") % { noteable_name: noteable_name } } }
%button.btn.gl-button.btn-transparent
%button{ type: 'button' }
= sprite_icon('check', css_class: 'icon')
.description
%strong= _("Start thread")

View file

@ -0,0 +1,5 @@
---
title: WebIDE show fork button when cannot push code
merge_request: 56608
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Track agent token last_used
merge_request: 56143
author:
type: added

View file

@ -0,0 +1,5 @@
---
title: Update learn gitlab template for new registrations
merge_request: 57098
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Add index on ci_stages to speed up batch pipeline cancellation
merge_request: 56126
author:
type: performance

View file

@ -0,0 +1,5 @@
---
title: Fixed styling of commit comment buttons
merge_request: 56982
author:
type: fixed

View file

@ -0,0 +1,8 @@
---
name: abort_user_pipelines_on_block
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56126
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/324045
milestone: '13.10'
type: development
group: group::memory
default_enabled: false

View file

@ -0,0 +1,8 @@
---
name: longer_count_cache_validity
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57122
rollout_issue_url:
milestone: '13.11'
type: development
group: group::source code
default_enabled: false

View file

@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddIndexCiStagesOnPipelineIdAndId < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_ci_stages_on_pipeline_id_and_id'
disable_ddl_transaction!
def up
add_concurrent_index :ci_stages, %i[pipeline_id id], where: 'status IN (0, 1, 2, 8, 9, 10)', name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :ci_stages, INDEX_NAME
end
end
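The partial index mirrors the lookup the new abort service performs: it filters `ci_stages` by `pipeline_id` and keeps only rows whose status is in the `WHERE status IN (0, 1, 2, 8, 9, 10)` set, presumably the same set the `cancelable` scope targets, so the batched update can find candidate rows without touching finished stages. Roughly, the accelerated query from `Ci::AbortPipelinesService` looks like this sketch (`pipeline_batch` stands in for one batch of about 100 pipelines):

```ruby
# Query shape served by the new partial index.
pipeline_batch = Ci::Pipeline.cancelable.limit(100)

Ci::Stage.in_pipelines(pipeline_batch) # WHERE pipeline_id IN (...)
  .cancelable                          # WHERE status IN (...), matching the index's WHERE clause
  .in_batches(of: 150)                 # iterates by id, the index's second column
  .update_all(status: :canceled)
```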

View file

@ -0,0 +1 @@
c2e3f8f6f283d919d99b0acf970f663fef8ca30ef277116401549014fc99ae91

View file

@ -22220,6 +22220,8 @@ CREATE UNIQUE INDEX index_ci_sources_projects_on_source_project_id_and_pipeline_
CREATE INDEX index_ci_stages_on_pipeline_id ON ci_stages USING btree (pipeline_id);
CREATE INDEX index_ci_stages_on_pipeline_id_and_id ON ci_stages USING btree (pipeline_id, id) WHERE (status = ANY (ARRAY[0, 1, 2, 8, 9, 10]));
CREATE UNIQUE INDEX index_ci_stages_on_pipeline_id_and_name ON ci_stages USING btree (pipeline_id, name);
CREATE INDEX index_ci_stages_on_pipeline_id_and_position ON ci_stages USING btree (pipeline_id, "position");

View file

@ -123,8 +123,8 @@ attribute. As a prerequisite, you must use an LDAP server that:
NOTE: **Note**
Assign a value to at least one of the following variables:
gitlab_rails['smartcard_client_certificate_required_host'] or
gitlab_rails['smartcard_client_certificate_required_port'].
`gitlab_rails['smartcard_client_certificate_required_host']` or
`gitlab_rails['smartcard_client_certificate_required_port']`.
1. Save the file and [reconfigure](../restart_gitlab.md#omnibus-gitlab-reconfigure)
GitLab for the changes to take effect.

View file

@ -25,10 +25,9 @@ repository access for GitLab. Other types of GitLab data aren't accessed using G
<!-- vale gitlab.FutureTense = NO -->
WARNING:
From GitLab 13.0, Gitaly support for NFS is deprecated. As of GitLab 14.0, NFS-related issues
with Gitaly will no longer be addressed. Upgrade to [Gitaly Cluster](praefect.md) as soon as
possible. Tools to [enable bulk moves](https://gitlab.com/groups/gitlab-org/-/epics/4916)
of projects to Gitaly Cluster are planned.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](#nfs-deprecation-notice).
<!-- vale gitlab.FutureTense = YES -->
@ -156,6 +155,28 @@ There are two facets to our efforts to remove direct Git access in GitLab:
The second facet presents the only real solution. For this, we developed
[Gitaly Cluster](praefect.md).
## NFS deprecation notice
<!-- vale gitlab.FutureTense = NO -->
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
Additional information:
- [Recommended NFS mount options and known issues with Gitaly and NFS](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
- [GitLab statement of support](https://about.gitlab.com/support/statement-of-support.html#gitaly-and-nfs).
<!-- vale gitlab.FutureTense = YES -->
GitLab recommends:
- Creating a [Gitaly Cluster](praefect.md) as soon as possible.
- [Moving your projects](praefect.md#migrate-existing-repositories-to-gitaly-cluster) from NFS-based
storage to the Gitaly Cluster.
We welcome your feedback on this process: raise a support ticket, or [comment on the epic](https://gitlab.com/groups/gitlab-org/-/epics/4916).
## Troubleshooting Gitaly
Check [Gitaly timeouts](../../user/admin_area/settings/gitaly_timeouts.md) when troubleshooting

View file

@ -14,17 +14,19 @@ Pages](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/196).
For data objects such as LFS, Uploads, Artifacts, etc., an [Object Storage service](object_storage.md)
is recommended over NFS where possible, due to better performance.
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](gitaly/praefect.md)
as soon as possible.
File system performance can impact overall GitLab performance, especially for
actions that read or write to Git repositories. For steps you can use to test
file system performance, see
[File system Performance Benchmarking](operations/filesystem_benchmarking.md).
## Gitaly and NFS deprecation
WARNING:
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
## Known kernel version incompatibilities
RedHat Enterprise Linux (RHEL) and CentOS v7.7 and v7.8 ship with kernel
@ -346,12 +348,18 @@ sudo ufw allow from <client_ip_address> to any port nfs
### Upgrade to Gitaly Cluster or disable caching if experiencing data loss
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated. In GitLab 14.0,
support for NFS for Git repositories is scheduled to be removed. Upgrade to
[Gitaly Cluster](gitaly/praefect.md) as soon as possible.
From GitLab 13.0, using NFS for Git repositories is deprecated.
As of GitLab 14.0, NFS-related issues with Gitaly will no longer be addressed. Read
more about [Gitaly and NFS deprecation](gitaly/index.md#nfs-deprecation-notice).
Customers and users have reported data loss on high-traffic repositories when using NFS for Git repositories.
For example, we have seen [inconsistent updates after a push](https://gitlab.com/gitlab-org/gitaly/-/issues/2589). The problem may be partially mitigated by adjusting caching using the following NFS client mount options:
For example, we have seen:
- [Inconsistent updates after a push](https://gitlab.com/gitlab-org/gitaly/-/issues/2589).
- `git ls-remote` [returning the wrong (or no branches)](https://gitlab.com/gitlab-org/gitaly/-/issues/3083)
causing Jenkins to intermittently re-run all pipelines for a repository.
The problem may be partially mitigated by adjusting caching using the following NFS client mount options:
| Setting | Description |
| ------- | ----------- |
@ -362,7 +370,7 @@ For example, we have seen [inconsistent updates after a push](https://gitlab.com
WARNING:
The `actimeo=0` and `noac` options both result in a significant reduction in performance, possibly leading to timeouts.
You may be able to avoid timeouts and data loss using `actimeo=0` and `lookupcache=positive` _without_ `noac`, however
we expect the performance reduction will still be significant. As noted above, we strongly recommend upgrading to
we expect the performance reduction will still be significant. Upgrade to
[Gitaly Cluster](gitaly/praefect.md) as soon as possible.
### Avoid using cloud-based file systems

View file

@ -2338,10 +2338,10 @@ to use GitLab Pages, this currently [requires NFS](troubleshooting.md#gitlab-pag
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -2342,10 +2342,10 @@ to use GitLab Pages, this currently [requires NFS](troubleshooting.md#gitlab-pag
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -956,10 +956,10 @@ possible. However, if you intend to use GitLab Pages,
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -2028,10 +2028,10 @@ to use GitLab Pages, this currently [requires NFS](troubleshooting.md#gitlab-pag
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -2356,10 +2356,10 @@ to use GitLab Pages, this currently [requires NFS](troubleshooting.md#gitlab-pag
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -2017,10 +2017,10 @@ to use GitLab Pages, this currently [requires NFS](troubleshooting.md#gitlab-pag
See how to [configure NFS](../nfs.md).
WARNING:
From GitLab 13.0, using NFS for Git repositories is deprecated.
From GitLab 14.0, technical support for NFS for Git repositories
will no longer be provided. Upgrade to [Gitaly Cluster](../gitaly/praefect.md)
as soon as possible.
From GitLab 14.0, enhancements and bug fixes for NFS for Git repositories will no longer be
considered and customer technical support will be considered out of scope.
[Read more about Gitaly and NFS](../gitaly/index.md#nfs-deprecation-notice) and
[the correct mount options to use](../nfs.md#upgrade-to-gitaly-cluster-or-disable-caching-if-experiencing-data-loss).
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">

View file

@ -1328,6 +1328,7 @@ An edge in a connection.
| `createdByUser` | [`User`](#user) | The user who created the token. |
| `description` | [`String`](#string) | Description of the token. |
| `id` | [`ClustersAgentTokenID!`](#clustersagenttokenid) | Global ID of the token. |
| `lastUsedAt` | [`Time`](#time) | Timestamp the token was last used. |
| `name` | [`String`](#string) | Name given to the token. |
### `ClusterAgentTokenConnection`

View file

@ -14,7 +14,7 @@ in the GitLab repository.
## Override Dockerfile API templates **(PREMIUM SELF)**
In [GitLab Premium and higher](https://about.gitlab.com/pricing) tiers, GitLab instance
In [GitLab Premium and higher](https://about.gitlab.com/pricing/) tiers, GitLab instance
administrators can override templates in the
[Admin Area](../../user/admin_area/settings/instance_template_repository.md).

View file

@ -187,7 +187,7 @@ During the discussion of the [initial database schema](https://gitlab.com/gitlab
PostgreSQL introduced significant improvements for partitioning in [version 12](https://www.postgresql.org/docs/12/release-12.html#id-1.11.6.9.5), among which we highlight:
- It's now possible for foreign keys to reference partitioned tables. This is a hard requirement for this project not only to guarantee consistency and integrity but also to enable cascading deletes at the database level;
- Major performance improvements for inserts, selects, and updates with less locking and consistent performance for a large number of partitions ([benchmarks](https://www.2ndquadrant.com/en/blog/postgresql-12-partitioning));
- Major performance improvements for inserts, selects, and updates with less locking and consistent performance for a large number of partitions ([benchmarks](https://www.2ndquadrant.com/en/blog/postgresql-12-partitioning/));
- Major improvements to the planning algorithm for tables with a large number of partitions, with some tests finding speedups of up to 10,000 times ([source](https://aws.amazon.com/blogs/database/postgresql-12-a-deep-dive-into-some-new-functionality/));
- Attaching new partitions to an existing table no longer requires locking the entire table;
- Bulk load (`COPY`) now uses bulk inserts instead of inserting one row at a time;

View file

@ -83,7 +83,7 @@ multiple tests, such as making sure you are logged in.
The function `it` defines an individual test.
[The `browser` object](http://v4.webdriver.io/guide/testrunner/browserobject.html) is WebdriverIO's
special sauce. It provides most of [the WebdriverIO API methods](http://v4.webdriver.io/docs/api/) that are the key to
special sauce. It provides most of [the WebdriverIO API methods](http://v4.webdriver.io/api.html) that are the key to
steering the browser. In this case, we can use
[`browser.url`](http://v4.webdriver.io/api/protocol/url.html) to visit `/page-that-does-not-exist` to
hit our 404 page. We can then use [`browser.getUrl`](http://v4.webdriver.io/api/property/getUrl.html)

View file

@ -2759,7 +2759,7 @@ patterns and:
- In [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620) and later,
[`doublestar.Glob`](https://pkg.go.dev/github.com/bmatcuk/doublestar@v1.2.2?tab=doc#Match).
- In GitLab Runner 12.10 and earlier,
[`filepath.Match`](https://pkg.go.dev/path/filepath/#Match).
[`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
Cache all files in `binaries` that end in `.apk` and the `.config` file:
@ -3086,7 +3086,7 @@ patterns and:
- In [GitLab Runner 13.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620) and later,
[`doublestar.Glob`](https://pkg.go.dev/github.com/bmatcuk/doublestar@v1.2.2?tab=doc#Match).
- In GitLab Runner 12.10 and earlier,
[`filepath.Match`](https://pkg.go.dev/path/filepath/#Match).
[`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
To restrict which jobs a specific job fetches artifacts from, see [dependencies](#dependencies).

View file

@ -586,7 +586,7 @@ Conditional means that it returns `true` in some situations, but not all situati
When a feature flag is disabled (meaning the state is `off`), the experiment is
considered _inactive_. You can visualize this in the [decision tree diagram](#how-it-works)
as reaching the first [Running?] node, and traversing the negative path.
as reaching the first `Running?` node, and traversing the negative path.
When a feature flag is rolled out to a `percentage_of_actors` or similar (meaning the
state is `conditional`) the experiment is considered to be _running_

View file

@ -104,7 +104,7 @@ Unleash activation strategy.
Enables the feature for a percentage of page views, with configurable consistency
of behavior. This consistency is also known as stickiness. It uses the
[`flexibleRollout`](https://unleash.github.io/docs/activation_strategy#flexiblerollout)
[`flexibleRollout`](https://docs.getunleash.io/docs/activation_strategy#flexiblerollout)
Unleash activation strategy.
You can configure the consistency to be based on:
@ -133,7 +133,7 @@ Selecting **Random** provides inconsistent application behavior for individual u
### Percent of Users
Enables the feature for a percentage of authenticated users. It uses the Unleash activation strategy
[`gradualRolloutUserId`](https://unleash.github.io/docs/activation_strategy#gradualrolloutuserid).
[`gradualRolloutUserId`](https://docs.getunleash.io/docs/activation_strategy#gradualrolloutuserid).
For example, set a value of 15% to enable the feature for 15% of authenticated users.
@ -155,7 +155,7 @@ ID for the feature to be enabled. See the [Ruby example](#ruby-application-examp
> - [Updated](https://gitlab.com/gitlab-org/gitlab/-/issues/34363) to be defined per environment in GitLab 12.6.
Enables the feature for a list of target users. It is implemented
using the Unleash [`userWithId`](https://unleash.github.io/docs/activation_strategy#userwithid)
using the Unleash [`userWithId`](https://docs.getunleash.io/docs/activation_strategy#userwithid)
activation strategy.
Enter user IDs as a comma-separated list of values (for example,
@ -171,7 +171,7 @@ target users. See the [Ruby example](#ruby-application-example) below.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35930) in GitLab 13.1.
Enables the feature for lists of users created [in the Feature Flags UI](#create-a-user-list), or with the [Feature Flag User List API](../api/feature_flag_user_lists.md).
Similar to [User IDs](#user-ids), it uses the Unleash [`userWithId`](https://unleash.github.io/docs/activation_strategy#userwithid)
Similar to [User IDs](#user-ids), it uses the Unleash [`userWithId`](https://docs.getunleash.io/docs/activation_strategy#userwithid)
activation strategy.
It's not possible to *disable* a feature for members of a user list, but you can achieve the same

View file

@ -19,7 +19,7 @@ You can embed live [Grafana](https://docs.gitlab.com/omnibus/settings/grafana.ht
Your Grafana instance must:
- Be available to the target user, either as a public dashboard or on the same network.
- Have [Grafana Image Renderer](https://grafana.com/grafana/plugins/grafana-image-renderer) installed.
- Have [Grafana Image Renderer](https://grafana.com/grafana/plugins/grafana-image-renderer/) installed.
To use Grafana-rendered images:

View file

@ -1239,7 +1239,7 @@ of a WAF are:
By default, GitLab provides you with a WAF known as [`ModSecurity`](https://www.modsecurity.org/),
which is a toolkit for real-time web application monitoring, logging, and access
control. GitLab applies the [OWASP's Core Rule Set](https://www.modsecurity.org/CRS/Documentation/),
control. GitLab applies the [OWASP's Core Rule Set](https://coreruleset.org/),
which provides generic attack detection capabilities.
This feature:
@ -1314,7 +1314,7 @@ tracked over time:
- The total amount of traffic to your application.
- The proportion of traffic that's considered anomalous by the Web Application
Firewall's default [OWASP ruleset](https://www.modsecurity.org/CRS/Documentation/).
Firewall's default [OWASP ruleset](https://coreruleset.org/).
If a significant percentage of traffic is anomalous, investigate it for potential threats
by [examining the Web Application Firewall logs](#web-application-firewall-modsecurity).

View file

@ -428,7 +428,7 @@ To prevent a project from being shared with other groups:
1. Go to the group's **Settings > General** page.
1. Expand the **Permissions, LFS, 2FA** section.
1. Select **Prevent sharing a project within <group_name> with other groups**.
1. Select **Prevent sharing a project within `<group_name>` with other groups**.
1. Select **Save changes**.
## Prevent members from being added to a group **(PREMIUM)**

View file

@ -59,7 +59,7 @@ To discover all snippets visible to you in GitLab, you can:
- **View all snippets visible to you**: In the top navigation bar of your GitLab
instance, go to **More > Snippets** to view your snippets dashboard.
- **Visit [GitLab snippets](http://snippets.gitlab.com/)** for your snippets on GitLab.com.
- **Visit [GitLab snippets](https://gitlab.com/dashboard/snippets)** for your snippets on GitLab.com.
- **Explore all public snippets**: In the top navigation bar of your GitLab
instance, go to **More > Snippets** and select **Explore snippets** to view
[all public snippets](https://gitlab.com/explore/snippets).
@ -125,11 +125,16 @@ A single snippet can support up to 10 files, which helps keep related files toge
- A `gulpfile.js` file and a `package.json` file, which together can be
used to bootstrap a project and manage its dependencies.
If you need more than 10 files for your snippet, we recommend you create a
[wiki](project/wiki/index.md) instead. Wikis are available for projects at all
subscription levels, and [groups](group/index.md#group-wikis) for
[GitLab Premium](https://about.gitlab.com/pricing).
Snippets with multiple files display a file count in the [snippet list](http://snippets.gitlab.com/):
![Example of snippet](img/snippet_tooltip_v13_10.png)
You can manage these by using Git (because they're [versioned](#versioned-snippets)
You can manage snippets with Git (because they're [versioned](#versioned-snippets)
by a Git repository), through the [Snippets API](../api/snippets.md), and in the GitLab UI.
To add a new file to your snippet through the GitLab UI:

View file

@ -3,7 +3,11 @@
module API
module Entities
class ProjectImportFailedRelation < Grape::Entity
expose :id, :created_at, :exception_class, :exception_message, :source
expose :id, :created_at, :exception_class, :source
expose :exception_message do |_|
nil
end
expose :relation_key, as: :relation_name
end

View file

@ -54,6 +54,8 @@ module API
forbidden! unless agent_token
forbidden! unless Gitlab::Kas.included_in_gitlab_com_rollout?(agent.project)
agent_token.track_usage
end
end

View file

@ -9,6 +9,19 @@ variables:
# Setting this variable will affect all Security templates
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
#
DAST_API_PROFILE: Full
DAST_API_VERSION: latest
DAST_API_CONFIG: .gitlab-dast-api.yml
DAST_API_TIMEOUT: 30
DAST_API_REPORT: gl-dast-api-report.json
DAST_API_REPORT_ASSET_PATH: assets
#
# Wait up to 5 minutes for API Security and target url to become
# available (non 500 response to HTTP(s))
DAST_API_SERVICE_START_TIMEOUT: "300"
#
DAST_API_IMAGE: registry.gitlab.com/gitlab-org/security-products/analyzers/api-fuzzing:${DAST_API_VERSION}-engine
dast:
stage: dast
@ -25,6 +38,11 @@ dast:
reports:
dast: gl-dast-report.json
rules:
- if: $DAST_API_BETA && ( $DAST_API_SPECIFICATION ||
$DAST_API_OPENAPI ||
$DAST_API_POSTMAN_COLLECTION ||
$DAST_API_HAR )
when: never
- if: $DAST_DISABLED
when: never
- if: $DAST_DISABLED_FOR_DEFAULT_BRANCH &&
@ -40,4 +58,72 @@ dast:
- if: $CI_COMMIT_BRANCH &&
$DAST_WEBSITE
- if: $CI_COMMIT_BRANCH &&
$DAST_API_BETA == null &&
$DAST_API_SPECIFICATION
dast_api:
stage: dast
image:
name: $DAST_API_IMAGE
entrypoint: ["/bin/bash", "-l", "-c"]
variables:
API_SECURITY_MODE: DAST
DAST_API_NEW_REPORT: 1
DAST_API_PROJECT: $CI_PROJECT_PATH
DAST_API_API: http://127.0.0.1:5000
DAST_API_LOG_SCANNER: gl-dast-api-scanner.log
TZ: America/Los_Angeles
allow_failure: true
rules:
- if: $DAST_API_BETA == null
when: never
- if: $DAST_DISABLED
when: never
- if: $DAST_DISABLED_FOR_DEFAULT_BRANCH &&
$CI_DEFAULT_BRANCH == $CI_COMMIT_REF_NAME
when: never
- if: $CI_DEFAULT_BRANCH != $CI_COMMIT_REF_NAME &&
$REVIEW_DISABLED &&
$DAST_API_SPECIFICATION == null &&
$DAST_API_OPENAPI == null &&
$DAST_API_POSTMAN_COLLECTION == null &&
$DAST_API_HAR == null
when: never
- if: $DAST_API_SPECIFICATION == null &&
$DAST_API_OPENAPI == null &&
$DAST_API_POSTMAN_COLLECTION == null &&
$DAST_API_HAR == null
when: never
- if: $CI_COMMIT_BRANCH &&
$GITLAB_FEATURES =~ /\bdast\b/
script:
#
# Run user provided pre-script
- sh -c "$DAST_API_PRE_SCRIPT"
#
# Make sure asset path exists
- mkdir -p $DAST_API_REPORT_ASSET_PATH
#
# Start API Security background process
- dotnet /peach/Peach.Web.dll &> $DAST_API_LOG_SCANNER &
- APISEC_PID=$!
#
# Start scanning
- worker-entry
#
# Run user provided post-script
- sh -c "$DAST_API_POST_SCRIPT"
#
# Shutdown API Security
- kill $APISEC_PID
- wait $APISEC_PID
#
artifacts:
when: always
paths:
- $DAST_API_REPORT_ASSET_PATH
- $DAST_API_REPORT
- $DAST_API_LOG_SCANNER
- gl-*.log
reports:
dast: $DAST_API_REPORT

View file

@ -33912,6 +33912,12 @@ msgstr ""
msgid "WebAuthn only works with HTTPS-enabled websites. Contact your administrator for more details."
msgstr ""
msgid "WebIDE|Fork project"
msgstr ""
msgid "WebIDE|Go to fork"
msgstr ""
msgid "WebIDE|Merge request"
msgstr ""
@ -33927,6 +33933,9 @@ msgstr ""
msgid "WebIDE|You need permission to edit files directly in this project. Fork this project to make your changes and submit a merge request."
msgstr ""
msgid "WebIDE|You need permission to edit files directly in this project. Go to your fork to make changes and submit a merge request."
msgstr ""
msgid "Webhook"
msgstr ""

View file

@ -0,0 +1,72 @@
import { GlButton, GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import CannotPushCodeAlert from '~/ide/components/cannot_push_code_alert.vue';
const TEST_MESSAGE = 'Hello test message!';
const TEST_HREF = '/test/path/to/fork';
const TEST_BUTTON_TEXT = 'Fork text';
describe('ide/components/cannot_push_code_alert', () => {
let wrapper;
afterEach(() => {
wrapper.destroy();
});
const createComponent = (props = {}) => {
wrapper = shallowMount(CannotPushCodeAlert, {
propsData: {
message: TEST_MESSAGE,
...props,
},
stubs: {
GlAlert: {
...stubComponent(GlAlert),
template: `<div><slot></slot><slot name="actions"></slot></div>`,
},
},
});
};
const findAlert = () => wrapper.findComponent(GlAlert);
const findButtonData = () => {
const button = findAlert().findComponent(GlButton);
if (!button.exists()) {
return null;
}
return {
href: button.attributes('href'),
method: button.attributes('data-method'),
text: button.text(),
};
};
describe('without actions', () => {
beforeEach(() => {
createComponent();
});
it('shows alert with message', () => {
expect(findAlert().props()).toMatchObject({ dismissible: false });
expect(findAlert().text()).toBe(TEST_MESSAGE);
});
});
describe.each`
action | buttonData
${{}} | ${null}
${{ href: TEST_HREF, text: TEST_BUTTON_TEXT }} | ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT }}
${{ href: TEST_HREF, text: TEST_BUTTON_TEXT, isForm: true }} | ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT, method: 'post' }}
`('with action=$action', ({ action, buttonData }) => {
beforeEach(() => {
createComponent({ action });
});
it(`show button=${JSON.stringify(buttonData)}`, () => {
expect(findButtonData()).toEqual(buttonData);
});
});
});

View file

@ -14,7 +14,7 @@ import {
createBranchChangedCommitError,
branchAlreadyExistsCommitError,
} from '~/ide/lib/errors';
import { MSG_CANNOT_PUSH_CODE_SHORT } from '~/ide/messages';
import { MSG_CANNOT_PUSH_CODE } from '~/ide/messages';
import { createStore } from '~/ide/stores';
import { COMMIT_TO_NEW_BRANCH } from '~/ide/stores/modules/commit/constants';
@ -85,8 +85,8 @@ describe('IDE commit form', () => {
${'when there are no changes'} | ${[]} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${''}
${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${false} | ${''}
${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToCommitView} | ${findCommitButtonData} | ${false} | ${''}
${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE_SHORT}
${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToCommitView} | ${findCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE_SHORT}
${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE}
${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToCommitView} | ${findCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE}
`('$desc', ({ stagedFiles, userPermissions, viewFn, buttonFn, disabled, tooltip }) => {
beforeEach(async () => {
store.state.stagedFiles = stagedFiles;

View file

@ -1,10 +1,10 @@
import { GlAlert } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import CannotPushCodeAlert from '~/ide/components/cannot_push_code_alert.vue';
import ErrorMessage from '~/ide/components/error_message.vue';
import Ide from '~/ide/components/ide.vue';
import { MSG_CANNOT_PUSH_CODE } from '~/ide/messages';
import { MSG_CANNOT_PUSH_CODE_GO_TO_FORK, MSG_GO_TO_FORK } from '~/ide/messages';
import { createStore } from '~/ide/stores';
import { file } from '../helpers';
import { projectData } from '../mock_data';
@ -12,14 +12,15 @@ import { projectData } from '../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
const TEST_FORK_IDE_PATH = '/test/ide/path';
describe('WebIDE', () => {
const emptyProjData = { ...projectData, empty_repo: true, branches: {} };
let store;
let wrapper;
const createComponent = ({ projData = emptyProjData, state = {} } = {}) => {
const store = createStore();
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
store.state.projects.abcproject = projData && { ...projData };
@ -37,7 +38,11 @@ describe('WebIDE', () => {
});
};
const findAlert = () => wrapper.find(GlAlert);
const findAlert = () => wrapper.findComponent(CannotPushCodeAlert);
beforeEach(() => {
store = createStore();
});
afterEach(() => {
wrapper.destroy();
@ -148,6 +153,12 @@ describe('WebIDE', () => {
});
it('when user cannot push code, shows alert', () => {
store.state.links = {
forkInfo: {
ide_path: TEST_FORK_IDE_PATH,
},
};
createComponent({
projData: {
userPermissions: {
@ -157,9 +168,12 @@ describe('WebIDE', () => {
});
expect(findAlert().props()).toMatchObject({
dismissible: false,
message: MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
action: {
href: TEST_FORK_IDE_PATH,
text: MSG_GO_TO_FORK,
},
});
expect(findAlert().text()).toBe(MSG_CANNOT_PUSH_CODE);
});
it.each`

View file

@ -6,15 +6,20 @@ import {
} from '~/ide/constants';
import {
MSG_CANNOT_PUSH_CODE,
MSG_CANNOT_PUSH_CODE_SHORT,
MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
MSG_CANNOT_PUSH_UNSIGNED,
MSG_CANNOT_PUSH_UNSIGNED_SHORT,
MSG_FORK,
MSG_GO_TO_FORK,
} from '~/ide/messages';
import { createStore } from '~/ide/stores';
import * as getters from '~/ide/stores/getters';
import { file } from '../helpers';
const TEST_PROJECT_ID = 'test_project';
const TEST_IDE_PATH = '/test/ide/path';
const TEST_FORK_PATH = '/test/fork/path';
describe('IDE store getters', () => {
let localState;
@ -433,27 +438,84 @@ describe('IDE store getters', () => {
});
describe('canPushCodeStatus', () => {
it.each`
pushCode | rejectUnsignedCommits | expected
${true} | ${false} | ${{ isAllowed: true, message: '', messageShort: '' }}
${false} | ${false} | ${{ isAllowed: false, message: MSG_CANNOT_PUSH_CODE, messageShort: MSG_CANNOT_PUSH_CODE_SHORT }}
${false} | ${true} | ${{ isAllowed: false, message: MSG_CANNOT_PUSH_UNSIGNED, messageShort: MSG_CANNOT_PUSH_UNSIGNED_SHORT }}
`(
'with pushCode="$pushCode" and rejectUnsignedCommits="$rejectUnsignedCommits"',
({ pushCode, rejectUnsignedCommits, expected }) => {
localState.projects[TEST_PROJECT_ID] = {
pushRules: {
[PUSH_RULE_REJECT_UNSIGNED_COMMITS]: rejectUnsignedCommits,
it.each([
[
'when can push code, and can push unsigned commits',
{
input: { pushCode: true, rejectUnsignedCommits: false },
output: { isAllowed: true, message: '', messageShort: '' },
},
],
[
'when cannot push code, and can push unsigned commits',
{
input: { pushCode: false, rejectUnsignedCommits: false },
output: {
isAllowed: false,
message: MSG_CANNOT_PUSH_CODE,
messageShort: MSG_CANNOT_PUSH_CODE,
},
userPermissions: {
[PERMISSION_PUSH_CODE]: pushCode,
},
],
[
'when cannot push code, and has ide_path in forkInfo',
{
input: {
pushCode: false,
rejectUnsignedCommits: false,
forkInfo: { ide_path: TEST_IDE_PATH },
},
};
localState.currentProjectId = TEST_PROJECT_ID;
output: {
isAllowed: false,
message: MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
messageShort: MSG_CANNOT_PUSH_CODE,
action: { href: TEST_IDE_PATH, text: MSG_GO_TO_FORK },
},
},
],
[
'when cannot push code, and has fork_path in forkInfo',
{
input: {
pushCode: false,
rejectUnsignedCommits: false,
forkInfo: { fork_path: TEST_FORK_PATH },
},
output: {
isAllowed: false,
message: MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
messageShort: MSG_CANNOT_PUSH_CODE,
action: { href: TEST_FORK_PATH, text: MSG_FORK, isForm: true },
},
},
],
[
'when can push code, but cannot push unsigned commits',
{
input: { pushCode: true, rejectUnsignedCommits: true },
output: {
isAllowed: false,
message: MSG_CANNOT_PUSH_UNSIGNED,
messageShort: MSG_CANNOT_PUSH_UNSIGNED_SHORT,
},
},
],
])('%s', (testName, { input, output }) => {
const { forkInfo, rejectUnsignedCommits, pushCode } = input;
expect(localStore.getters.canPushCodeStatus).toEqual(expected);
},
);
localState.links = { forkInfo };
localState.projects[TEST_PROJECT_ID] = {
pushRules: {
[PUSH_RULE_REJECT_UNSIGNED_COMMITS]: rejectUnsignedCommits,
},
userPermissions: {
[PERMISSION_PUSH_CODE]: pushCode,
},
};
localState.currentProjectId = TEST_PROJECT_ID;
expect(localStore.getters.canPushCodeStatus).toEqual(output);
});
});
describe('canPushCode', () => {

View file

@ -489,9 +489,18 @@ RSpec.describe BlobHelper do
expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
expect(params).to include("continue[to]=/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
expect(params).to include("continue[notice]=#{edit_in_new_fork_notice}")
expect(params).to include("continue[notice_now]=#{edit_in_new_fork_notice_now}")
expect(params).to include("namespace_key=#{current_user.namespace.id}")
end
it 'does not include notice params with_notice: false' do
uri = URI(helper.ide_fork_and_edit_path(project, "master", "", with_notice: false))
expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
expect(CGI.unescape(uri.query)).to eq("continue[to]=/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master&namespace_key=#{current_user.namespace.id}")
end
context 'when user is not logged in' do
let(:current_user) { nil }

View file

@ -17,7 +17,7 @@ RSpec.describe IdeHelper do
'branch-name' => nil,
'file-path' => nil,
'merge-request' => nil,
'forked-project' => nil,
'fork-info' => nil,
'project' => nil
)
end
@ -25,10 +25,12 @@ RSpec.describe IdeHelper do
context 'when instance vars are set' do
it 'returns instance data in the hash' do
fork_info = { ide_path: '/test/ide/path' }
self.instance_variable_set(:@branch, 'master')
self.instance_variable_set(:@path, 'foo/bar')
self.instance_variable_set(:@merge_request, '1')
self.instance_variable_set(:@forked_project, project)
self.instance_variable_set(:@fork_info, fork_info)
self.instance_variable_set(:@project, project)
serialized_project = API::Entities::Project.represent(project).to_json
@ -38,7 +40,7 @@ RSpec.describe IdeHelper do
'branch-name' => 'master',
'file-path' => 'foo/bar',
'merge-request' => '1',
'forked-project' => serialized_project,
'fork-info' => fork_info.to_json,
'project' => serialized_project
)
end

View file

@ -14,7 +14,7 @@ RSpec.describe API::Entities::ProjectImportFailedRelation do
id: import_failure.id,
created_at: import_failure.created_at,
exception_class: import_failure.exception_class,
exception_message: import_failure.exception_message,
exception_message: nil,
relation_name: import_failure.relation_key,
source: import_failure.source
)

View file

@ -24,4 +24,53 @@ RSpec.describe Clusters::AgentToken do
expect(agent_token.token.length).to be >= 50
end
end
describe '#track_usage', :clean_gitlab_redis_cache do
let(:agent_token) { create(:cluster_agent_token) }
subject { agent_token.track_usage }
context 'when last_used_at was updated recently' do
before do
agent_token.update!(last_used_at: 10.minutes.ago)
end
it 'updates cache but not database' do
expect { subject }.not_to change { agent_token.reload.read_attribute(:last_used_at) }
expect_redis_update
end
end
context 'when last_used_at was not updated recently' do
it 'updates cache and database' do
does_db_update
expect_redis_update
end
context 'with invalid token' do
before do
agent_token.description = SecureRandom.hex(2000)
end
it 'still updates caches and database' do
expect(agent_token).to be_invalid
does_db_update
expect_redis_update
end
end
end
def expect_redis_update
Gitlab::Redis::Cache.with do |redis|
redis_key = "cache:#{described_class.name}:#{agent_token.id}:attributes"
expect(redis.get(redis_key)).to be_present
end
end
def does_db_update
expect { subject }.to change { agent_token.reload.read_attribute(:last_used_at) }
end
end
end

View file

@ -1778,16 +1778,27 @@ RSpec.describe User do
context 'when user has running CI pipelines' do
let(:service) { double }
before do
pipeline = create(:ci_pipeline, :running, user: user)
create(:ci_build, :running, pipeline: pipeline)
context 'with abort_user_pipelines_on_block feature enabled' do
let(:pipelines) { build_list(:ci_pipeline, 3, :running) }
it 'aborts all running pipelines and related jobs' do
stub_feature_flags(abort_user_pipelines_on_block: true)
expect(user).to receive(:pipelines).and_return(pipelines)
expect(Ci::AbortPipelinesService).to receive(:new).and_return(service)
expect(service).to receive(:execute).with(pipelines)
user.block
end
end
it 'cancels all running pipelines and related jobs' do
expect(Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
expect(service).to receive(:execute).with(user)
context 'with abort_user_pipelines_on_block feature disabled' do
it 'cancels all running pipelines and related jobs' do
stub_feature_flags(abort_user_pipelines_on_block: false)
expect(Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
expect(service).to receive(:execute).with(user)
user.block
user.block
end
end
end

View file

@ -51,6 +51,12 @@ RSpec.describe API::Internal::Kubernetes do
end
end
shared_examples 'agent token tracking' do
it 'tracks token usage' do
expect { response }.to change { agent_token.reload.read_attribute(:last_used_at) }
end
end
describe 'POST /internal/kubernetes/usage_metrics' do
def send_request(headers: {}, params: {})
post api('/internal/kubernetes/usage_metrics'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
@ -101,6 +107,8 @@ RSpec.describe API::Internal::Kubernetes do
let(:agent) { agent_token.agent }
let(:project) { agent.project }
shared_examples 'agent token tracking'
it 'returns expected data', :aggregate_failures do
send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
@ -169,6 +177,8 @@ RSpec.describe API::Internal::Kubernetes do
context 'an agent is found' do
let_it_be(:agent_token) { create(:cluster_agent_token) }
shared_examples 'agent token tracking'
context 'project is public' do
let(:project) { create(:project, :public) }

View file

@ -8,6 +8,7 @@ RSpec.describe IdeController do
let_it_be(:other_user) { create(:user) }
let(:user) { creator }
let(:branch) { '' }
before do
sign_in(user)
@ -28,24 +29,33 @@ RSpec.describe IdeController do
let(:user) { other_user }
context 'when user does not have fork' do
it 'does not instantiate forked_project instance var and return 200' do
it 'instantiates fork_info instance var with fork_path and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to eq({ fork_path: controller.helpers.ide_fork_and_edit_path(project, branch, '', with_notice: false) })
end
it 'has nil fork_info if user cannot fork' do
project.project_feature.update!(forking_access_level: ProjectFeature::DISABLED)
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:fork_info)).to be_nil
end
end
context 'when user has have fork' do
let!(:fork) { fork_project(project, user, repository: true) }
context 'when user has fork' do
let!(:fork) { fork_project(project, user, repository: true, namespace: user.namespace) }
it 'instantiates forked_project instance var and return 200' do
it 'instantiates fork_info instance var with ide_path and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
expect(assigns(:forked_project)).to eq fork
expect(assigns(:fork_info)).to eq({ ide_path: controller.helpers.ide_edit_path(fork, branch, '') })
end
end
end
@@ -61,7 +71,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
end
@@ -76,7 +86,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
end
@@ -91,7 +101,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
@@ -108,55 +118,58 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch" do
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/master" }
let(:branch) { 'master' }
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}" }
it 'instantiates project and branch instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
expect(assigns(:branch)).to eq 'master'
expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch/-" do
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/branch/slash/-" }
let(:branch) { 'branch/slash' }
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}/-" }
it 'instantiates project and branch instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
expect(assigns(:branch)).to eq 'branch/slash'
expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch/-/:path" do
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/master/-/foo/.bar" }
let(:branch) { 'master' }
let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}/-/foo/.bar" }
it 'instantiates project, branch, and path instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
expect(assigns(:branch)).to eq 'master'
expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to eq 'foo/.bar'
expect(assigns(:merge_request)).to be_nil
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
@@ -179,7 +192,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to eq merge_request.id.to_s
expect(assigns(:forked_project)).to be_nil
expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
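The assertions above fix the shape of @fork_info: ide_path for an existing fork, fork_path when the user could create one, and nil when forking is disabled. A plausible controller-side helper matching them (the method name, can? checks, and fork_of lookup are assumptions; the path helpers are the ones referenced in the spec):

# Hypothetical helper producing the @fork_info hash checked above.
def fork_info(project, branch)
  return if can?(current_user, :push_code, project)

  existing_fork = current_user.fork_of(project)

  if existing_fork
    { ide_path: helpers.ide_edit_path(existing_fork, branch, '') }
  elsif can?(current_user, :fork_project, project)
    { fork_path: helpers.ide_fork_and_edit_path(project, branch, '', with_notice: false) }
  end
end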

View file

@@ -0,0 +1,70 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::AbortPipelinesService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:cancelable_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: user) }
let_it_be(:manual_pipeline, reload: true) { create(:ci_pipeline, status: :manual, project: project, user: user) } # not cancelable
let_it_be(:other_users_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: create(:user)) } # not this user's pipeline
let_it_be(:cancelable_build, reload: true) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
let_it_be(:non_cancelable_build, reload: true) { create(:ci_build, :success, pipeline: cancelable_pipeline) }
let_it_be(:cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :running, pipeline: cancelable_pipeline, project: project) }
let_it_be(:non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: cancelable_pipeline, project: project) }
describe '#execute' do
def expect_correct_cancellations
expect(cancelable_pipeline.finished_at).not_to be_nil
expect(cancelable_pipeline).to be_canceled
expect(cancelable_pipeline.stages - [non_cancelable_stage]).to all(be_canceled)
expect(cancelable_build).to be_canceled
expect(manual_pipeline).not_to be_canceled
expect(non_cancelable_stage).not_to be_canceled
expect(non_cancelable_build).not_to be_canceled
end
context 'with project pipelines' do
it 'cancels all running pipelines and related jobs' do
expect(described_class.new.execute(project.all_pipelines)).to be_success
expect_correct_cancellations
expect(other_users_pipeline).to be_canceled
expect(other_users_pipeline.stages).to all(be_canceled)
end
it 'avoids N+1 queries' do
project_pipelines = project.all_pipelines
control_count = ActiveRecord::QueryRecorder.new { described_class.new.execute(project_pipelines) }.count
pipelines = create_list(:ci_pipeline, 5, :running, project: project)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
expect { described_class.new.execute(project_pipelines) }.not_to exceed_query_limit(control_count)
end
end
context 'with user pipelines' do
it 'cancels all running pipelines and related jobs' do
expect(described_class.new.execute(user.pipelines)).to be_success
expect_correct_cancellations
expect(other_users_pipeline).not_to be_canceled
end
it 'avoids N+1 queries' do
user_pipelines = user.pipelines
control_count = ActiveRecord::QueryRecorder.new { described_class.new.execute(user_pipelines) }.count
pipelines = create_list(:ci_pipeline, 5, :running, project: project, user: user)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
expect { described_class.new.execute(user_pipelines) }.not_to exceed_query_limit(control_count)
end
end
end
end
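A service satisfying these expectations has to cancel in bulk rather than per record, which is what keeps the query counts flat in the N+1 examples. A minimal sketch under that assumption (the cancelable scope and the where(pipeline: ...) lookups are illustrative, not taken from the commit):

# Sketch of a bulk-cancel service consistent with the spec above.
module Ci
  class AbortPipelinesService
    CANCELABLE = %w[created waiting_for_resource preparing pending running scheduled].freeze

    def execute(pipelines)
      pipelines.cancelable.in_batches do |batch|
        # One UPDATE per table and batch, however many records are touched.
        Ci::Build.where(pipeline: batch, status: CANCELABLE).update_all(status: 'canceled')
        Ci::Stage.where(pipeline: batch, status: CANCELABLE).update_all(status: 'canceled')
        batch.update_all(status: 'canceled', finished_at: Time.current)
      end

      ServiceResponse.success(message: 'Pipelines canceled')
    end
  end
end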

View file

@@ -1,42 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::AbortProjectPipelinesService do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, :running, project: project) }
let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline) }
describe '#execute' do
it 'cancels all running pipelines and related jobs' do
result = described_class.new.execute(project)
expect(result).to be_success
expect(pipeline.reload).to be_canceled
expect(build.reload).to be_canceled
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new { described_class.new.execute(project) }.count
pipelines = create_list(:ci_pipeline, 5, :running, project: project)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
expect { described_class.new.execute(project) }.not_to exceed_query_limit(control_count)
end
end
context 'when feature disabled' do
before do
stub_feature_flags(abort_deleted_project_pipelines: false)
end
it 'does not abort the pipeline' do
result = described_class.new.execute(project)
expect(result).to be(nil)
expect(pipeline.reload).to be_running
expect(build.reload).to be_running
end
end
end

View file

@@ -93,10 +93,26 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
destroy_project(project, user, {})
end
it 'performs cancel for project ci pipelines' do
expect(::Ci::AbortProjectPipelinesService).to receive_message_chain(:new, :execute).with(project)
context 'with abort_deleted_project_pipelines feature disabled' do
it 'does not cancel project ci pipelines' do
stub_feature_flags(abort_deleted_project_pipelines: false)
destroy_project(project, user, {})
expect(::Ci::AbortPipelinesService).not_to receive(:new)
destroy_project(project, user, {})
end
end
context 'with abort_deleted_project_pipelines feature enabled' do
it 'performs cancel for project ci pipelines' do
stub_feature_flags(abort_deleted_project_pipelines: true)
pipelines = build_list(:ci_pipeline, 3, :running)
allow(project).to receive(:all_pipelines).and_return(pipelines)
expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines)
destroy_project(project, user, {})
end
end
context 'when project has remote mirrors' do
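Correspondingly, Projects::DestroyService is expected to route pipeline abortion through the same bulk service only when the flag is on; roughly (the method name is assumed):

# Hypothetical guard inside Projects::DestroyService.
def abort_ci_pipelines!
  return unless Feature.enabled?(:abort_deleted_project_pipelines)

  ::Ci::AbortPipelinesService.new.execute(project.all_pipelines)
end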

Binary file not shown.