Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-01-12 00:16:35 +00:00
parent 830c9943a9
commit d237ada361
90 changed files with 1513 additions and 642 deletions

View File

@ -598,8 +598,10 @@ rspec:undercoverage:
else
echo "Using \$CI_COMMIT_SHA ($CI_COMMIT_SHA) for this non-merge result pipeline.";
fi;
- UNDERCOVERAGE_COMPARE="${CI_MERGE_REQUEST_DIFF_BASE_SHA:-$(git merge-base origin/master HEAD)}"
- echo "Undercoverage comparing with ${UNDERCOVERAGE_COMPARE}"
- if [ -f scripts/undercoverage ]; then
run_timed_command "scripts/undercoverage";
run_timed_command "scripts/undercoverage ${UNDERCOVERAGE_COMPARE}";
fi;
rspec:feature-flags:

View File

@ -2,6 +2,10 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 14.6.2 (2022-01-10)
No changes.
## 14.6.1 (2022-01-04)
### Fixed (2 changes)
@ -387,6 +391,10 @@ entry.
- [Fix OpenStruct use](gitlab-org/gitlab@f8466f5943a1afeabaf9cf781f7804a8df515a0e) by @mehulsharma ([merge request](gitlab-org/gitlab!74702))
- [Update Sidekiq to 6.3.1](gitlab-org/gitlab@22e8bc0af656717e56428a7227c467fe08021c66) ([merge request](gitlab-org/gitlab!73973))
## 14.5.3 (2022-01-11)
No changes.
## 14.5.2 (2021-12-03)
No changes.
@ -929,6 +937,10 @@ No changes.
- [Add pipeline artifacts and uploads sizes to project REST API](gitlab-org/gitlab@58d66f28faf42ae98ca11ff1ba0bdd9180e988ad) by @guillaume.chauvel ([merge request](gitlab-org/gitlab!72075))
- [Remove not used parameter from epics finder](gitlab-org/gitlab@49fce172b57b2f376a114726b1dd1900fe36a238) ([merge request](gitlab-org/gitlab!72285)) **GitLab Enterprise Edition**
## 14.4.5 (2022-01-11)
No changes.
## 14.4.4 (2021-12-03)
No changes.

View File

@ -1 +1 @@
3627c14a64ce48446e8a67299c3161ff7290d1ad
e02b0d67e48ed5a4493b073c9836d376a780f34d

View File

@ -64,10 +64,12 @@ class GlEmoji extends HTMLElement {
this.classList.add('emoji-icon');
this.classList.add(fallbackSpriteClass);
} else if (hasImageFallback) {
this.innerHTML = emojiImageTag(name, fallbackSrc);
this.innerHTML = '';
this.appendChild(emojiImageTag(name, fallbackSrc));
} else {
const src = emojiFallbackImageSrc(name);
this.innerHTML = emojiImageTag(name, src);
this.innerHTML = '';
this.appendChild(emojiImageTag(name, src));
}
}
});

View File

@ -1,6 +1,7 @@
import { escape, minBy } from 'lodash';
import emojiRegexFactory from 'emoji-regex';
import emojiAliases from 'emojis/aliases.json';
import { setAttributes } from '~/lib/utils/dom_utils';
import AccessorUtilities from '../lib/utils/accessor';
import axios from '../lib/utils/axios_utils';
import { CACHE_KEY, CACHE_VERSION_KEY, CATEGORY_ICON_MAP, FREQUENTLY_USED_KEY } from './constants';
@ -220,7 +221,19 @@ export function emojiFallbackImageSrc(inputName) {
}
export function emojiImageTag(name, src) {
return `<img class="emoji" title=":${name}:" alt=":${name}:" src="${src}" width="20" height="20" align="absmiddle" />`;
const img = document.createElement('img');
img.className = 'emoji';
setAttributes(img, {
title: `:${name}:`,
alt: `:${name}:`,
src,
width: '20',
height: '20',
align: 'absmiddle',
});
return img;
}
export function glEmojiTag(inputName, options) {

View File

@ -111,7 +111,10 @@ export default {
<span v-else class="gl-display-inline-flex gl-white-space-nowrap gl-max-w-full">
<tooltip-on-truncate :title="message" class="gl-text-truncate">
<gl-icon :name="icon" /> <span data-testid="validationMsg">{{ message }}</span>
<gl-icon :name="icon" />
<span data-qa-selector="validation_message_content" data-testid="validationMsg">
{{ message }}
</span>
</tooltip-on-truncate>
<span v-if="!isEmpty" class="gl-flex-shrink-0 gl-pl-2">
<gl-link data-testid="learnMoreLink" :href="helpPath">

View File

@ -146,6 +146,7 @@ export default {
<template>
<gl-tabs
class="file-editor gl-mb-3"
data-qa-selector="file_editor_container"
:query-param-name="$options.query.TAB_QUERY_PARAM"
sync-active-tab-with-query-params
>

View File

@ -132,6 +132,7 @@ export default {
:ref="$options.CONTAINER_REF"
class="gl-bg-gray-10 gl-overflow-auto"
data-testid="graph-container"
data-qa-selector="pipeline_graph_container"
>
<links-layer
:pipeline-data="pipelineStages"
@ -147,7 +148,10 @@ export default {
:key="`${stage.name}-${index}`"
class="gl-flex-direction-column"
>
<div class="gl-display-flex gl-align-items-center gl-w-full gl-px-9 gl-py-4 gl-mb-5">
<div
class="gl-display-flex gl-align-items-center gl-w-full gl-px-9 gl-py-4 gl-mb-5"
data-qa-selector="stage_container"
>
<stage-name :stage-name="stage.name" />
</div>
<div :class="$options.jobWrapperClasses">
@ -158,6 +162,7 @@ export default {
:pipeline-id="$options.PIPELINE_ID"
:is-hovered="highlightedJob === group.name"
:is-faded-out="isFadedOut(group.name)"
data-qa-selector="job_container"
@on-mouse-enter="setHoveredJob"
@on-mouse-leave="removeHoveredJob"
/>

View File

@ -20,7 +20,7 @@ module SessionlessAuthentication
end
def sessionless_sign_in(user)
if user && can?(user, :log_in)
if can?(user, :log_in) && !user.password_expired_if_applicable?
# Notice we are passing store false, so the user is not
# actually stored in the session and a token is needed
# for every request. If you want the token to work as a

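In plain terms, this change refuses token-based (sessionless) authentication once a user's password has expired. A minimal sketch of the resulting guard, assuming `can?` is the usual policy check, `sign_in` comes from Devise, and `password_expired_if_applicable?` is false for accounts that do not sign in with a password (for example, LDAP users); this is illustrative, not the exact controller code:

```ruby
# Minimal sketch of the new guard, under the assumptions stated above.
def sessionless_sign_in(user)
  return unless can?(user, :log_in) && !user.password_expired_if_applicable?

  # store: false keeps the user out of the session, so the token has to be
  # presented on every request, as the comment in the hunk explains.
  sign_in(user, store: false)
end
```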
View File

@ -28,8 +28,14 @@ class Import::GithubController < Import::BaseController
end
def callback
session[access_token_key] = get_token(params[:code])
redirect_to status_import_url
auth_state = session[auth_state_key]
session[auth_state_key] = nil
if auth_state.blank? || !ActiveSupport::SecurityUtils.secure_compare(auth_state, params[:state])
provider_unauthorized
else
session[access_token_key] = get_token(params[:code])
redirect_to status_import_url
end
end
def personal_access_token
@ -154,13 +160,16 @@ class Import::GithubController < Import::BaseController
end
def authorize_url
state = SecureRandom.base64(64)
session[auth_state_key] = state
if Feature.enabled?(:remove_legacy_github_client)
oauth_client.auth_code.authorize_url(
redirect_uri: callback_import_url,
scope: 'repo, user, user:email'
scope: 'repo, user, user:email',
state: state
)
else
client.authorize_url(callback_import_url)
client.authorize_url(callback_import_url, state)
end
end
@ -219,6 +228,10 @@ class Import::GithubController < Import::BaseController
alert: _('Missing OAuth configuration for GitHub.')
end
def auth_state_key
:"#{provider_name}_auth_state_key"
end
def access_token_key
:"#{provider_name}_access_token"
end
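Taken together, these hunks add OAuth CSRF protection to the GitHub importer: `authorize_url` mints a random `state`, stashes it in the session under `auth_state_key`, and `callback` only exchanges the `code` for an access token when GitHub echoes that exact value back. The comparison uses `ActiveSupport::SecurityUtils.secure_compare`, which runs in constant time. A small standalone illustration (values are made up):

```ruby
require 'securerandom'
require 'active_support'
require 'active_support/security_utils'

expected = SecureRandom.base64(64)  # minted in authorize_url, cached in the session
returned = expected.dup             # what GitHub sends back as params[:state]

# Constant-time comparison: response timing cannot be used to guess the
# expected value byte by byte, and any forged or missing state is rejected.
ActiveSupport::SecurityUtils.secure_compare(expected, returned)        # => true
ActiveSupport::SecurityUtils.secure_compare(expected, 'forged-state')  # => false
```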

View File

@ -36,7 +36,7 @@ module Resolvers
def unconditional_includes
[
{
project: [:project_feature]
project: [:project_feature, :group]
},
:author
]

View File

@ -6,6 +6,13 @@ module Integrations
def notify(message, opts)
# See https://gitlab.com/gitlab-org/slack-notifier/#custom-http-client
#
# TODO: By default both Markdown and HTML links are converted into Slack "mrkdwn" syntax,
# but it seems we only need to support Markdown and could disable HTML.
#
# See:
# - https://gitlab.com/gitlab-org/slack-notifier#middleware
# - https://gitlab.com/gitlab-org/gitlab/-/issues/347048
notifier = ::Slack::Messenger.new(webhook, opts.merge(http_client: HTTPClient))
notifier.ping(
message.pretext,

View File

@ -23,7 +23,7 @@ module Integrations
def attachments
[{
title: title,
title: strip_markup(title),
title_link: alert_url,
color: attachment_color,
fields: attachment_fields
@ -31,7 +31,7 @@ module Integrations
end
def message
"Alert firing in #{project_name}"
"Alert firing in #{strip_markup(project_name)}"
end
private

View File

@ -5,6 +5,10 @@ module Integrations
class BaseMessage
RELATIVE_LINK_REGEX = %r{!\[[^\]]*\]\((/uploads/[^\)]*)\)}.freeze
# Markup characters which are used for links in HTML, Markdown,
# and Slack "mrkdwn" syntax (`<http://example.com|Label>`).
UNSAFE_MARKUP_CHARACTERS = '<>[]|'
attr_reader :markdown
attr_reader :user_full_name
attr_reader :user_name
@ -65,12 +69,26 @@ module Integrations
string.gsub(RELATIVE_LINK_REGEX, "#{project_url}\\1")
end
# Remove unsafe markup from user input, which can be used to hijack links in our own markup,
# or insert new ones.
#
# This currently removes Markdown and Slack "mrkdwn" links (keeping the link label),
# and all HTML markup (keeping the text nodes).
# We can't just escape the markup characters, because each chat app handles this differently.
#
# See:
# - https://api.slack.com/reference/surfaces/formatting#escaping
# - https://gitlab.com/gitlab-org/slack-notifier#escaping
def strip_markup(string)
string&.delete(UNSAFE_MARKUP_CHARACTERS)
end
def attachment_color
'#345'
end
def link(text, url)
"[#{text}](#{url})"
"[#{strip_markup(text)}](#{url})"
end
def pretty_duration(seconds)

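The idea behind `strip_markup` is that each chat app escapes markup differently, so instead of escaping, the characters that can form links in Markdown, Slack "mrkdwn", or HTML (`<>[]|`) are simply deleted from user-controlled text before it is interpolated into the integration's own `[label](url)` markup. A standalone illustration of the behaviour, mirroring the constant and method above:

```ruby
UNSAFE_MARKUP_CHARACTERS = '<>[]|'

def strip_markup(string)
  string&.delete(UNSAFE_MARKUP_CHARACTERS)
end

strip_markup('Deploy [prod](https://evil.example) now')
# => "Deploy prod(https://evil.example) now"  (no longer parses as a Markdown link)

strip_markup('<https://evil.example|Click me>')
# => "https://evil.exampleClick me"           (Slack mrkdwn link defused, label kept)

strip_markup(nil)
# => nil (the safe navigation keeps missing titles harmless)
```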
View File

@ -27,7 +27,7 @@ module Integrations
def attachments
[{
text: "#{project_link} with job #{deployment_link} by #{user_link}\n#{commit_link}: #{commit_title}",
text: "#{project_link} with job #{deployment_link} by #{user_link}\n#{commit_link}: #{strip_markup(commit_title)}",
color: color
}]
end
@ -40,9 +40,9 @@ module Integrations
def message
if running?
"Starting deploy to #{environment}"
"Starting deploy to #{strip_markup(environment)}"
else
"Deploy to #{environment} #{humanized_status}"
"Deploy to #{strip_markup(environment)} #{humanized_status}"
end
end

View File

@ -32,7 +32,7 @@ module Integrations
def activity
{
title: "Issue #{state} by #{user_combined_name}",
title: "Issue #{state} by #{strip_markup(user_combined_name)}",
subtitle: "in #{project_link}",
text: issue_link,
image: user_avatar
@ -42,7 +42,7 @@ module Integrations
private
def message
"[#{project_link}] Issue #{issue_link} #{state} by #{user_combined_name}"
"[#{project_link}] Issue #{issue_link} #{state} by #{strip_markup(user_combined_name)}"
end
def opened_issue?
@ -67,7 +67,7 @@ module Integrations
end
def issue_title
"#{Issue.reference_prefix}#{issue_iid} #{title}"
"#{Issue.reference_prefix}#{issue_iid} #{strip_markup(title)}"
end
end
end

View File

@ -29,7 +29,7 @@ module Integrations
def activity
{
title: "Merge request #{state_or_action_text} by #{user_combined_name}",
title: "Merge request #{state_or_action_text} by #{strip_markup(user_combined_name)}",
subtitle: "in #{project_link}",
text: merge_request_link,
image: user_avatar
@ -39,7 +39,7 @@ module Integrations
private
def format_title(title)
'*' + title.lines.first.chomp + '*'
'*' + strip_markup(title.lines.first.chomp) + '*'
end
def message
@ -51,7 +51,7 @@ module Integrations
end
def merge_request_message
"#{user_combined_name} #{state_or_action_text} merge request #{merge_request_link} in #{project_link}"
"#{strip_markup(user_combined_name)} #{state_or_action_text} merge request #{merge_request_link} in #{project_link}"
end
def merge_request_link
@ -59,7 +59,7 @@ module Integrations
end
def merge_request_title
"#{MergeRequest.reference_prefix}#{merge_request_iid} #{title}"
"#{MergeRequest.reference_prefix}#{merge_request_iid} #{strip_markup(title)}"
end
def merge_request_url

View File

@ -35,9 +35,9 @@ module Integrations
def activity
{
title: "#{user_combined_name} #{link('commented on ' + target, note_url)}",
title: "#{strip_markup(user_combined_name)} #{link('commented on ' + target, note_url)}",
subtitle: "in #{project_link}",
text: formatted_title,
text: strip_markup(formatted_title),
image: user_avatar
}
end
@ -45,7 +45,7 @@ module Integrations
private
def message
"#{user_combined_name} #{link('commented on ' + target, note_url)} in #{project_link}: *#{formatted_title}*"
"#{strip_markup(user_combined_name)} #{link('commented on ' + target, note_url)} in #{project_link}: *#{strip_markup(formatted_title)}*"
end
def format_title(title)

View File

@ -56,7 +56,7 @@ module Integrations
[{
fallback: format(message),
color: attachment_color,
author_name: user_combined_name,
author_name: strip_markup(user_combined_name),
author_icon: user_avatar,
author_link: author_url,
title: s_("ChatMessage|Pipeline #%{pipeline_id} %{humanized_status} in %{duration}") %
@ -80,7 +80,7 @@ module Integrations
pipeline_link: pipeline_link,
ref_type: ref_type,
ref_link: ref_link,
user_combined_name: user_combined_name,
user_combined_name: strip_markup(user_combined_name),
humanized_status: humanized_status
},
subtitle: s_("ChatMessage|in %{project_link}") % { project_link: project_link },
@ -154,7 +154,7 @@ module Integrations
pipeline_link: pipeline_link,
ref_type: ref_type,
ref_link: ref_link,
user_combined_name: user_combined_name,
user_combined_name: strip_markup(user_combined_name),
humanized_status: humanized_status,
duration: pretty_duration(duration)
}
@ -189,7 +189,7 @@ module Integrations
end
def ref_link
"[#{ref}](#{ref_url})"
link(ref, ref_url)
end
def project_url
@ -197,7 +197,7 @@ module Integrations
end
def project_link
"[#{project.name}](#{project_url})"
link(project.name, project_url)
end
def pipeline_failed_jobs_url
@ -213,7 +213,7 @@ module Integrations
end
def pipeline_link
"[##{pipeline_id}](#{pipeline_url})"
link("##{pipeline_id}", pipeline_url)
end
def job_url(job)
@ -221,7 +221,7 @@ module Integrations
end
def job_link(job)
"[#{job[:name]}](#{job_url(job)})"
link(job[:name], job_url(job))
end
def failed_jobs_links
@ -242,7 +242,7 @@ module Integrations
def stage_link(stage)
# All stages link to the pipeline page
"[#{stage}](#{pipeline_url})"
link(stage, pipeline_url)
end
def failed_stages_links
@ -254,7 +254,7 @@ module Integrations
end
def commit_link
"[#{commit.title}](#{commit_url})"
link(commit.title, commit_url)
end
def author_url

View File

@ -39,7 +39,7 @@ module Integrations
def humanized_action(short: false)
action, ref_link, target_link = compose_action_details
text = [user_combined_name, action, ref_type, ref_link]
text = [strip_markup(user_combined_name), action, ref_type, ref_link]
text << target_link unless short
text.join(' ')
end
@ -67,7 +67,7 @@ module Integrations
url = commit[:url]
"[#{id}](#{url}): #{title} - #{author}"
"#{link(id, url)}: #{strip_markup(title)} - #{strip_markup(author)}"
end
def new_branch?
@ -91,15 +91,15 @@ module Integrations
end
def ref_link
"[#{ref}](#{ref_url})"
link(ref, ref_url)
end
def project_link
"[#{project_name}](#{project_url})"
link(project_name, project_url)
end
def compare_link
"[Compare changes](#{compare_url})"
link('Compare changes', compare_url)
end
def compose_action_details

View File

@ -36,9 +36,9 @@ module Integrations
def activity
{
title: "#{user_combined_name} #{action} #{wiki_page_link}",
title: "#{strip_markup(user_combined_name)} #{action} #{wiki_page_link}",
subtitle: "in #{project_link}",
text: title,
text: strip_markup(title),
image: user_avatar
}
end
@ -46,7 +46,7 @@ module Integrations
private
def message
"#{user_combined_name} #{action} #{wiki_page_link} (#{diff_link}) in #{project_link}: *#{title}*"
"#{strip_markup(user_combined_name)} #{action} #{wiki_page_link} (#{diff_link}) in #{project_link}: *#{strip_markup(title)}*"
end
def description_message

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true
# Archive Extraction Service allows extraction of contents
# from `tar` archives with an additional handling (removal)
# of file symlinks.
#
# @param tmpdir [String] A path where archive is located
# and where its contents are extracted.
# Tmpdir directory must be located under `Dir.tmpdir`.
# `BulkImports::Error` is raised if any other directory path is used.
#
# @param filename [String] Name of the file to extract contents from.
#
# @example
# dir = Dir.mktmpdir
# filename = 'things.tar'
# BulkImports::ArchiveExtractionService.new(tmpdir: dir, filename: filename).execute
# Dir.glob(File.join(dir, '**', '*'))
# => ['/path/to/tmp/dir/extracted_file_1', '/path/to/tmp/dir/extracted_file_2', '/path/to/tmp/dir/extracted_file_3']
module BulkImports
class ArchiveExtractionService
include Gitlab::ImportExport::CommandLineUtil
def initialize(tmpdir:, filename:)
@tmpdir = tmpdir
@filename = filename
@filepath = File.join(@tmpdir, @filename)
end
def execute
validate_filepath
validate_tmpdir
validate_symlink
extract_archive
remove_symlinks
tmpdir
end
private
attr_reader :tmpdir, :filename, :filepath
def validate_filepath
Gitlab::Utils.check_path_traversal!(filepath)
end
def validate_tmpdir
raise(BulkImports::Error, 'Invalid target directory') unless File.expand_path(tmpdir).start_with?(Dir.tmpdir)
end
def validate_symlink
raise(BulkImports::Error, 'Invalid file') if symlink?(filepath)
end
def symlink?(filepath)
File.lstat(filepath).symlink?
end
def extract_archive
untar_xf(archive: filepath, dir: tmpdir)
end
def extracted_files
Dir.glob(File.join(tmpdir, '**', '*'))
end
def remove_symlinks
extracted_files.each do |path|
FileUtils.rm(path) if symlink?(path)
end
end
end
end
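Beyond the happy path in the `@example` above, the service is deliberately defensive: the archive path may not traverse outside the target directory, the target directory must live under `Dir.tmpdir`, and symlinks are rejected (for the archive itself) or deleted (for extracted entries). A short sketch of the failure modes, with illustrative paths:

```ruby
# The target directory is not under Dir.tmpdir:
BulkImports::ArchiveExtractionService.new(tmpdir: '/etc', filename: 'things.tar').execute
# raises BulkImports::Error, "Invalid target directory"

# The archive itself is a symlink:
dir = Dir.mktmpdir
File.symlink('/etc/passwd', File.join(dir, 'things.tar'))
BulkImports::ArchiveExtractionService.new(tmpdir: dir, filename: 'things.tar').execute
# raises BulkImports::Error, "Invalid file"
```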

View File

@ -70,10 +70,24 @@ module Packages
end
end
# TODO (technical debt): Extract the package size calculation to its own component and unit test it separately.
def calculated_package_file_size
strong_memoize(:calculated_package_file_size) do
# This calculation is based on:
# 1. 4 chars in a Base64 encoded string are 3 bytes in the original string. Meaning 1 char is 0.75 bytes.
# 2. The encoded string may have 1 or 2 extra '=' chars used for padding. Each padding char means 1 byte less in the original string.
# Reference:
# - https://blog.aaronlenoir.com/2017/11/10/get-original-length-from-base-64-string/
# - https://en.wikipedia.org/wiki/Base64#Decoding_Base64_with_padding
encoded_data = attachment['data']
((encoded_data.length * 0.75 ) - encoded_data[-2..].count('=')).to_i
end
end
def file_params
{
file: CarrierWaveStringFile.new(Base64.decode64(attachment['data'])),
size: attachment['length'],
size: calculated_package_file_size,
file_sha1: version_data[:dist][:shasum],
file_name: package_file_name,
build: params[:build]
@ -86,7 +100,7 @@ module Packages
end
def file_size_exceeded?
project.actual_limits.exceeded?(:npm_max_file_size, attachment['length'].to_i)
project.actual_limits.exceeded?(:npm_max_file_size, calculated_package_file_size)
end
end
end
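The point of the calculation is that the size used for the `npm_max_file_size` limit check is now derived from the Base64 payload itself rather than taken from the client-supplied `length` field: every 4 encoded characters carry 3 decoded bytes (0.75 bytes per character), minus one byte per `=` padding character. A worked example in plain Ruby:

```ruby
require 'base64'

encoded = Base64.strict_encode64('foob')  # => "Zm9vYg=="
encoded.length                            # => 8 characters, 2 of them '=' padding

(encoded.length * 0.75) - encoded[-2..].count('=')  # => 4.0
Base64.decode64(encoded).bytesize                   # => 4, the same answer
```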

View File

@ -15,7 +15,7 @@ Customize and configure your self-managed GitLab installation. Here are some qui
- [Geo](geo/index.md)
- [Packages](packages/index.md)
The following tables are intended to guide you to choose the right combination of capabilties based on your requirements. It is common to want the most
The following tables are intended to guide you to choose the right combination of capabilities based on your requirements. It is common to want the most
available, quickly recoverable, highly performant and fully data resilient solution. However, there are tradeoffs.
The tables list features on the left and provide their capabilities to the right, along with known trade-offs.

View File

@ -286,7 +286,7 @@ configuration option in `gitlab.yml`. These metrics are served from the
| `geo_uploads_verified` | Gauge | 14.6 | Number of uploads verified on secondary | `url` |
| `geo_uploads_verification_failed` | Gauge | 14.6 | Number of uploads verifications failed on secondary | `url` |
| `gitlab_sli:rails_request_apdex:total` | Counter | 14.4 | The number of request-apdex measurements, [more information the development documentation](../../../development/application_slis/rails_request_apdex.md) | `endpoint_id`, `feature_category`, `request_urgency` |
| `gitlab_sli:rails_request_apdex:success_total` | Counter | 14.4 | The number of succesful requests that met the target duration for their urgency. Devide by `gitlab_sli:rails_requests_apdex:total` to get a success ratio | `endpoint_id`, `feature_category`, `request_urgency` |
| `gitlab_sli:rails_request_apdex:success_total` | Counter | 14.4 | The number of successful requests that met the target duration for their urgency. Divide by `gitlab_sli:rails_requests_apdex:total` to get a success ratio | `endpoint_id`, `feature_category`, `request_urgency` |
## Database load balancing metrics **(PREMIUM SELF)**

View File

@ -38,7 +38,7 @@ We can differentiate two different types of configuration:
- Sensitive: Configuration that can cause major service outage (like data integrity,
installation integrity, or preventing users from reaching the installation)
- Regular: Configuration that can make a feature unavailable but still makes the
installation useable (like a change in default project/group settings, or
installation usable (like a change in default project/group settings, or
miscommunication with other components)
We also need to differentiate deprecation and removal procedure.

View File

@ -76,7 +76,7 @@ characters on each line.
## Init system detection
Omnibus GitLab attempts to query the underlaying system in order to
Omnibus GitLab attempts to query the underlying system in order to
check which init system it uses.
This manifests itself as a `WARNING` during the `sudo gitlab-ctl reconfigure`
run.

View File

@ -1833,7 +1833,7 @@ On each node perform the following:
external_url 'https://gitlab.example.com'
# git_data_dirs get configured for the Praefect virtual storage
# Address is Interal Load Balancer for Praefect
# Address is Internal Load Balancer for Praefect
# Token is praefect_external_token
git_data_dirs({
"default" => {

View File

@ -1794,7 +1794,7 @@ On each node perform the following:
external_url 'https://gitlab.example.com'
# git_data_dirs get configured for the Praefect virtual storage
# Address is Interal Load Balancer for Praefect
# Address is Internal Load Balancer for Praefect
# Token is praefect_external_token
git_data_dirs({
"default" => {

View File

@ -1855,7 +1855,7 @@ On each node perform the following:
external_url 'https://gitlab.example.com'
# git_data_dirs get configured for the Praefect virtual storage
# Address is Interal Load Balancer for Praefect
# Address is Internal Load Balancer for Praefect
# Token is praefect_external_token
git_data_dirs({
"default" => {

View File

@ -1791,7 +1791,7 @@ On each node perform the following:
external_url 'https://gitlab.example.com'
# git_data_dirs get configured for the Praefect virtual storage
# Address is Interal Load Balancer for Praefect
# Address is Internal Load Balancer for Praefect
# Token is praefect_external_token
git_data_dirs({
"default" => {

View File

@ -387,7 +387,7 @@ blocking all jobs on that worker from proceeding. If Rugged calls performed by S
background task processing.
By default, Rugged is used when Git repository data is stored on local storage or on an NFS mount.
[Using Rugged is recommened when using NFS](../nfs.md#improving-nfs-performance-with-gitlab), but if
[Using Rugged is recommended when using NFS](../nfs.md#improving-nfs-performance-with-gitlab), but if
you are using local storage, disabling Rugged can improve Sidekiq performance:
```shell

View File

@ -19,7 +19,7 @@ GitLab Runner to support `docker` commands.
To enable Docker commands for your CI/CD jobs, you can use:
- [The shell executor](#use-the-shell-executor)
- [The Docker executor with the Docker image (Docker-in-Docker)](#use-the-docker-executor-with-the-docker-image-docker-in-docker)
- [Docker-in-Docker](#use-docker-in-docker)
- [Docker socket binding](#use-docker-socket-binding)
If you don't want to execute a runner in privileged mode,
@ -78,54 +78,29 @@ You can now use `docker` commands (and install `docker-compose` if needed).
When you add `gitlab-runner` to the `docker` group, you are effectively granting `gitlab-runner` full root permissions.
Learn more about the [security of the `docker` group](https://blog.zopyx.com/on-docker-security-docker-group-considered-harmful/).
### Use the Docker executor with the Docker image (Docker-in-Docker)
### Use Docker-in-Docker
"Docker-in-Docker" (`dind`) means:
- Your registered runner uses the [Docker executor](https://docs.gitlab.com/runner/executors/docker.html).
- Your registered runner uses the [Docker executor](https://docs.gitlab.com/runner/executors/docker.html) or the [Kubernetes executor](https://docs.gitlab.com/runner/executors/kubernetes.html).
- The executor uses a [container image of Docker](https://hub.docker.com/_/docker/), provided
by Docker, to run your CI/CD jobs.
The Docker image has all of the `docker` tools installed and can run
the job script in context of the image in privileged mode.
We recommend you use [Docker-in-Docker with TLS enabled](#docker-in-docker-with-tls-enabled),
We recommend you use Docker-in-Docker with TLS enabled,
which is supported by [GitLab.com shared runners](../runners/index.md).
You should always specify a specific version of the image, like `docker:19.03.12`.
If you use a tag like `docker:stable`, you have no control over which version is used.
Unpredictable behavior can result, especially when new versions are released.
#### Limitations of Docker-in-Docker
#### Use the Docker executor with Docker-in-Docker
Docker-in-Docker is the recommended configuration, but is
not without its own challenges:
You can use the Docker executor to run jobs in a Docker container.
- **The `docker-compose` command**: This command is not available in this configuration by default.
To use `docker-compose` in your job scripts, follow the `docker-compose`
[installation instructions](https://docs.docker.com/compose/install/).
- **Cache**: Each job runs in a new environment. Concurrent jobs work fine,
because every build gets its own instance of Docker engine and they don't conflict with each other.
However, jobs can be slower because there's no caching of layers.
- **Storage drivers**: By default, earlier versions of Docker use the `vfs` storage driver,
which copies the file system for each job. Docker 17.09 and later use `--storage-driver overlay2`, which is
the recommended storage driver. See [Using the OverlayFS driver](#use-the-overlayfs-driver) for details.
- **Root file system**: Because the `docker:19.03.12-dind` container and the runner container don't share their
root file system, you can use the job's working directory as a mount point for
child containers. For example, if you have files you want to share with a
child container, you might create a subdirectory under `/builds/$CI_PROJECT_PATH`
and use it as your mount point. For a more detailed explanation, view [issue
#41227](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/41227).
```yaml
variables:
MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
script:
- mkdir -p "$MOUNT_POINT"
- docker run -v "$MOUNT_POINT:/mnt" my-docker-image
```
#### Docker-in-Docker with TLS enabled
##### Docker-in-Docker with TLS enabled in the Docker executor
> Introduced in GitLab Runner 11.11.
@ -219,7 +194,72 @@ To use Docker-in-Docker with TLS enabled:
- docker run my-docker-image /script/to/run/tests
```
#### Docker-in-Docker with TLS enabled in Kubernetes
##### Docker-in-Docker with TLS disabled in the Docker executor
Sometimes you might have legitimate reasons to disable TLS.
For example, you have no control over the GitLab Runner configuration
that you are using.
Assuming that the runner's `config.toml` is similar to:
```toml
[[runners]]
url = "https://gitlab.com/"
token = TOKEN
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.12"
privileged = true
disable_cache = false
volumes = ["/cache"]
[runners.cache]
[runners.cache.s3]
[runners.cache.gcs]
```
You can now use `docker` in the job script. Note the inclusion of the
`docker:19.03.12-dind` service:
```yaml
image: docker:19.03.12
variables:
# When using dind service, you must instruct docker to talk with the
# daemon started inside of the service. The daemon is available with
# a network connection instead of the default /var/run/docker.sock socket.
#
# The 'docker' hostname is the alias of the service container as described at
# https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services
#
# If you're using GitLab Runner 12.7 or earlier with the Kubernetes executor and Kubernetes 1.6 or earlier,
# the variable must be set to tcp://localhost:2375 because of how the
# Kubernetes executor connects services to the job container
# DOCKER_HOST: tcp://localhost:2375
#
DOCKER_HOST: tcp://docker:2375
#
# This instructs Docker not to start over TLS.
DOCKER_TLS_CERTDIR: ""
services:
- docker:19.03.12-dind
before_script:
- docker info
build:
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
#### Use the Kubernetes executor with Docker-in-Docker
You can use the Kubernetes executor to run jobs in a Docker container.
##### Docker-in-Docker with TLS enabled in Kubernetes
> [Introduced](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/106) in GitLab Runner Helm Chart 0.23.0.
@ -287,66 +327,34 @@ To use Docker-in-Docker with TLS enabled in Kubernetes:
- docker run my-docker-image /script/to/run/tests
```
#### Docker-in-Docker with TLS disabled
#### Limitations of Docker-in-Docker
Sometimes you might have legitimate reasons to disable TLS.
For example, you have no control over the GitLab Runner configuration
that you are using.
Docker-in-Docker is the recommended configuration, but is
not without its own challenges:
Assuming that the runner's `config.toml` is similar to:
- **The `docker-compose` command**: This command is not available in this configuration by default.
To use `docker-compose` in your job scripts, follow the `docker-compose`
[installation instructions](https://docs.docker.com/compose/install/).
- **Cache**: Each job runs in a new environment. Concurrent jobs work fine,
because every build gets its own instance of Docker engine and they don't conflict with each other.
However, jobs can be slower because there's no caching of layers.
- **Storage drivers**: By default, earlier versions of Docker use the `vfs` storage driver,
which copies the file system for each job. Docker 17.09 and later use `--storage-driver overlay2`, which is
the recommended storage driver. See [Using the OverlayFS driver](#use-the-overlayfs-driver) for details.
- **Root file system**: Because the `docker:19.03.12-dind` container and the runner container don't share their
root file system, you can use the job's working directory as a mount point for
child containers. For example, if you have files you want to share with a
child container, you might create a subdirectory under `/builds/$CI_PROJECT_PATH`
and use it as your mount point. For a more detailed explanation, view [issue
#41227](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/41227).
```toml
[[runners]]
url = "https://gitlab.com/"
token = TOKEN
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.12"
privileged = true
disable_cache = false
volumes = ["/cache"]
[runners.cache]
[runners.cache.s3]
[runners.cache.gcs]
```
You can now use `docker` in the job script. Note the inclusion of the
`docker:19.03.12-dind` service:
```yaml
image: docker:19.03.12
variables:
# When using dind service, you must instruct docker to talk with the
# daemon started inside of the service. The daemon is available with
# a network connection instead of the default /var/run/docker.sock socket.
#
# The 'docker' hostname is the alias of the service container as described at
# https://docs.gitlab.com/ee/ci/services/#accessing-the-services
#
# If you're using GitLab Runner 12.7 or earlier with the Kubernetes executor and Kubernetes 1.6 or earlier,
# the variable must be set to tcp://localhost:2375 because of how the
# Kubernetes executor connects services to the job container
# DOCKER_HOST: tcp://localhost:2375
#
DOCKER_HOST: tcp://docker:2375
#
# This instructs Docker not to start over TLS.
DOCKER_TLS_CERTDIR: ""
services:
- docker:19.03.12-dind
before_script:
- docker info
build:
stage: build
```yaml
variables:
MOUNT_POINT: /builds/$CI_PROJECT_PATH/mnt
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
- mkdir -p "$MOUNT_POINT"
- docker run -v "$MOUNT_POINT:/mnt" my-docker-image
```
### Use Docker socket binding
@ -359,87 +367,50 @@ If you bind the Docker socket and you are
you can no longer use `docker:19.03.12-dind` as a service. Volume bindings
are done to the services as well, making these incompatible.
To make Docker available in the context of the image:
#### Use the Docker executor with Docker socket binding
1. Install [GitLab Runner](https://docs.gitlab.com/runner/install/).
1. From the command line, register a runner with the `docker` executor and share `/var/run/docker.sock`:
To make Docker available in the context of the image, you will need to mount
`/var/run/docker.sock` into the launched containers. To do this with the Docker
executor, you need to add `"/var/run/docker.sock:/var/run/docker.sock"` to the
[Volumes in the `[runners.docker]` section](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#volumes-in-the-runnersdocker-section).
```shell
sudo gitlab-runner register -n \
--url https://gitlab.com/ \
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:19.03.12" \
--docker-volumes /var/run/docker.sock:/var/run/docker.sock
```
Your configuration should look something like this:
This command registers a new runner to use the
`docker:19.03.12` image provided by Docker. The command uses
the Docker daemon of the runner itself. Any containers spawned by Docker
commands are siblings of the runner rather than children of the runner.
This may have complications and limitations that are unsuitable for your workflow.
```toml
[[runners]]
url = "https://gitlab.com/"
token = RUNNER_TOKEN
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.12"
privileged = false
disable_cache = false
volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
[runners.cache]
Insecure = false
```
Your `config.toml` file should now have an entry like this:
You can also do this while registering your runner by providing the following options:
```toml
[[runners]]
url = "https://gitlab.com/"
token = REGISTRATION_TOKEN
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.12"
privileged = false
disable_cache = false
volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
[runners.cache]
Insecure = false
```
```shell
sudo gitlab-runner register -n \
--url https://gitlab.com/ \
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:19.03.12" \
--docker-volumes /var/run/docker.sock:/var/run/docker.sock
```
1. Use `docker` in the job script. You don't need to
include the `docker:19.03.12-dind` service, like you do when you're using
the Docker-in-Docker executor:
```yaml
image: docker:19.03.12
before_script:
- docker info
build:
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
This method avoids using Docker in privileged mode. However,
the implications of this method are:
- By sharing the Docker daemon, you are effectively disabling all
the security mechanisms of containers and exposing your host to privilege
escalation, which can lead to container breakout. For example, if a project
ran `docker rm -f $(docker ps -a -q)` it would remove the GitLab Runner
containers.
- Concurrent jobs may not work; if your tests
create containers with specific names, they may conflict with each other.
- Sharing files and directories from the source repository into containers may not
work as expected. Volume mounting is done in the context of the host
machine, not the build container. For example:
```shell
docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests
```
#### Enable registry mirror for `docker:dind` service
##### Enable registry mirror for `docker:dind` service
When the Docker daemon starts inside of the service container, it uses
the default configuration. You may want to configure a [registry
mirror](https://docs.docker.com/registry/recipes/mirror/) for
performance improvements and to ensure you don't reach Docker Hub rate limits.
##### The service in the `.gitlab-ci.yml` file
###### The service in the `.gitlab-ci.yml` file
You can append extra CLI flags to the `dind` service to set the registry
mirror:
@ -450,7 +421,7 @@ services:
command: ["--registry-mirror", "https://registry-mirror.example.com"] # Specify the registry mirror to use
```
##### The service in the GitLab Runner configuration file
###### The service in the GitLab Runner configuration file
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173) in GitLab Runner 13.6.
@ -487,7 +458,7 @@ Kubernetes:
command = ["--registry-mirror", "https://registry-mirror.example.com"]
```
##### The Docker executor in the GitLab Runner configuration file
###### The Docker executor in the GitLab Runner configuration file
If you are a GitLab Runner administrator, you can use
the mirror for every `dind` service. Update the
@ -520,7 +491,7 @@ picked up by the `dind` service.
volumes = ["/opt/docker/daemon.json:/etc/docker/daemon.json:ro"]
```
##### The Kubernetes executor in the GitLab Runner configuration file
###### The Kubernetes executor in the GitLab Runner configuration file
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3223) in GitLab Runner 13.6.
@ -569,6 +540,45 @@ The configuration is picked up by the `dind` service.
sub_path = "daemon.json"
```
##### Limitations of Docker socket binding
When you use Docker socket binding, you avoid running Docker in privileged mode. However,
the implications of this method are:
- By sharing the Docker daemon, you are effectively disabling all
the security mechanisms of containers and exposing your host to privilege
escalation, which can lead to container breakout. For example, if a project
ran `docker rm -f $(docker ps -a -q)` it would remove the GitLab Runner
containers.
- Concurrent jobs may not work; if your tests
create containers with specific names, they may conflict with each other.
- Any containers spawned by Docker commands are siblings of the runner rather
than children of the runner. This may have complications and limitations that
are unsuitable for your workflow.
- Sharing files and directories from the source repository into containers may not
work as expected. Volume mounting is done in the context of the host
machine, not the build container. For example:
```shell
docker run --rm -t -i -v $(pwd)/src:/home/app/src test-image:latest run_app_tests
```
You don't need to include the `docker:19.03.12-dind` service, like you do when
you're using the Docker-in-Docker executor:
```yaml
image: docker:19.03.12
before_script:
- docker info
build:
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
## Authenticate with registry in Docker-in-Docker
When you use Docker-in-Docker, the
@ -831,13 +841,13 @@ After you've built a Docker image, you can push it up to the built-in
### `docker: Cannot connect to the Docker daemon at tcp://docker:2375. Is the docker daemon running?`
This is a common error when you are using
[Docker-in-Docker](#use-the-docker-executor-with-the-docker-image-docker-in-docker)
[Docker-in-Docker](#use-docker-in-docker)
v19.03 or later.
This issue occurs because Docker starts on TLS automatically.
- If this is your first time setting it up, read
[use the Docker executor with the Docker image](#use-the-docker-executor-with-the-docker-image-docker-in-docker).
[use the Docker executor with the Docker image](#use-docker-in-docker).
- If you are upgrading from v18.09 or earlier, read our
[upgrade guide](https://about.gitlab.com/blog/2019/07/31/docker-in-docker-with-docker-19-dot-03/).

View File

@ -13,7 +13,7 @@ type: howto
container images from a Dockerfile, inside a container or Kubernetes cluster.
kaniko solves two problems with using the
[Docker-in-Docker build](using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker)
[Docker-in-Docker build](using_docker_build.md#use-docker-in-docker)
method:
- Docker-in-Docker requires [privileged mode](https://docs.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities)

View File

@ -238,7 +238,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
**Deploy Now**
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Meterials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Materials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
- **[Deploy Now: AWS Quick Start for 2 AZs](https://us-east-2.console.aws.amazon.com/cloudformation/home?region=us-east-2#/stacks/quickcreate?templateUrl=https://aws-quickstart.s3.us-east-1.amazonaws.com/quickstart-eks-gitlab/templates/gitlab-entry-new-vpc.template.yaml&stackName=Gitlab-EKS-5K-Users-2AZs&param_NumberOfAZs=2&param_NodeInstanceType=c5.2xlarge&param_NumberOfNodes=5&param_MaxNumberOfNodes=5&param_DBInstanceClass=db.r6g.2xlarge&param_CacheNodes=2&param_CacheNodeType=cache.m6g.xlarge&param_GitalyInstanceType=m5.2xlarge&param_NumberOfGitalyReplicas=2&param_PraefectInstanceType=c5.large&param_NumberOfPraefectReplicas=2)**
- **[Deploy Now: AWS Quick Start for 3 AZs](https://us-east-2.console.aws.amazon.com/cloudformation/home?region=us-east-2#/stacks/quickcreate?templateUrl=https://aws-quickstart.s3.us-east-1.amazonaws.com/quickstart-eks-gitlab/templates/gitlab-entry-new-vpc.template.yaml&stackName=Gitlab-EKS-5K-Users-3AZs&param_NumberOfAZs=3&param_NodeInstanceType=c5.2xlarge&param_NumberOfNodes=5&param_MaxNumberOfNodes=5&param_DBInstanceClass=db.r6g.2xlarge&param_CacheNodes=3&param_CacheNodeType=cache.m6g.xlarge&param_GitalyInstanceType=m5.2xlarge&param_NumberOfGitalyReplicas=3&param_PraefectInstanceType=c5.large&param_NumberOfPraefectReplicas=3)**
@ -292,7 +292,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
**Deploy Now**
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Meterials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Materials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
- **[Deploy Now: AWS Quick Start for 3 AZs](https://us-east-2.console.aws.amazon.com/cloudformation/home?region=us-east-2#/stacks/quickcreate?templateUrl=https://aws-quickstart.s3.us-east-1.amazonaws.com/quickstart-eks-gitlab/templates/gitlab-entry-new-vpc.template.yaml&stackName=Gitlab-EKS-10K-Users-3AZs&param_NumberOfAZs=3&param_NodeInstanceType=c5.4xlarge&param_NumberOfNodes=9&param_MaxNumberOfNodes=9&param_DBInstanceClass=db.r6g.2xlarge&param_CacheNodes=3&param_CacheNodeType=cache.m6g.2xlarge&param_GitalyInstanceType=m5.4xlarge&param_NumberOfGitalyReplicas=3&param_PraefectInstanceType=c5.large&param_NumberOfPraefectReplicas=3)**
@ -345,7 +345,7 @@ If EKS node autoscaling is employed, it is likely that your average loading will
**Deploy Now**
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Meterials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
Deploy Now links leverage the AWS Quick Start automation and only prepopulate the number of instances and instance types for the Quick Start based on the Bill of Materials below. You must provide appropriate input for all other parameters by following the guidance in the [Quick Start documentation's Deployment steps](https://aws-quickstart.github.io/quickstart-eks-gitlab/#_deployment_steps) section.
- **[Deploy Now: AWS Quick Start for 3 AZs - 1/4 Scale EKS](https://us-east-2.console.aws.amazon.com/cloudformation/home?region=us-east-2#/stacks/quickcreate?templateUrl=https://aws-quickstart.s3.us-east-1.amazonaws.com/quickstart-eks-gitlab/templates/gitlab-entry-new-vpc.template.yaml&stackName=Gitlab-EKS-50K-Users-3AZs&param_NumberOfAZs=3&param_NodeInstanceType=c5.4xlarge&param_NumberOfNodes=7&param_MaxNumberOfNodes=9&param_DBInstanceClass=db.r6g.8xlarge&param_CacheNodes=3&param_CacheNodeType=cache.m6g.2xlarge&param_GitalyInstanceType=m5.16xlarge&param_NumberOfGitalyReplicas=3&param_PraefectInstanceType=c5.xlarge&param_NumberOfPraefectReplicas=3)**

View File

@ -1056,6 +1056,12 @@ For Maven builds, add the following to your `pom.xml` file:
</properties>
```
### SpotBugs Error: `Project couldn't be built`
If your job is failing at the build step with the message "Project couldn't be built", it's most likely because your job is asking SpotBugs to build with a tool that isn't part of its default tools. For a list of the SpotBugs default tools, see [SpotBugs' asdf dependencies](https://gitlab.com/gitlab-org/security-products/analyzers/spotbugs/-/raw/master/config/.tool-versions).
The solution is to use [pre-compilation](#pre-compilation). Pre-compilation ensures the images required by SpotBugs are available in the job's container.
### Flawfinder encoding error
This occurs when Flawfinder encounters an invalid UTF-8 character. To fix this, convert all source code in your project to UTF-8 character encoding. This can be done with [`cvt2utf`](https://github.com/x1angli/cvt2utf) or [`iconv`](https://www.gnu.org/software/libiconv/documentation/libiconv-1.13/iconv.1.html) either over the entire project or per job using the [`before_script`](../../../ci/yaml/index.md#before_script) feature.

View File

@ -35,6 +35,21 @@ Only members of the project or group can access a private project's Container Re
If a project is public, so is the Container Registry.
### View the tags of a specific image
You can view a list of tags associated with a given container image:
1. Go to your project or group.
1. Go to **Packages & Registries > Container Registry**.
1. Select the container image you are interested in.
This brings up the Container Registry **Tag Details** page. You can view details about each tag,
such as when it was published, how much storage it consumes, and the manifest and configuration
digests.
You can search, sort (by tag name), filter, and [delete](#delete-images-from-within-gitlab)
tags on this page. You can share a filtered view by copying the URL from your browser.
## Use images from the Container Registry
To download and run a container image hosted in the GitLab Container Registry:
@ -299,7 +314,7 @@ is set to `always`.
To use your own Docker images for Docker-in-Docker, follow these steps
in addition to the steps in the
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker) section:
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-docker-in-docker) section:
1. Update the `image` and `service` to point to your registry.
1. Add a service [alias](../../../ci/services/index.md#available-settings-for-services).
@ -329,7 +344,7 @@ error during connect: Get http://docker:2376/v1.39/info: dial tcp: lookup docker
To use your own Docker images with Dependency Proxy, follow these steps
in addition to the steps in the
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker) section:
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-docker-in-docker) section:
1. Update the `image` and `service` to point to your registry.
1. Add a service [alias](../../../ci/services/index.md#available-settings-for-services).

View File

@ -63,7 +63,7 @@ on your code by using GitLab CI/CD and [sitespeed.io](https://www.sitespeed.io)
using Docker-in-Docker.
1. First, set up GitLab Runner with a
[Docker-in-Docker build](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker).
[Docker-in-Docker build](../../../ci/docker/using_docker_build.md#use-docker-in-docker).
1. Configure the default Browser Performance Testing CI/CD job as follows in your `.gitlab-ci.yml` file:
```yaml

View File

@ -67,7 +67,7 @@ the merge request's diff view displays an indicator next to lines with new Code
This example shows how to run Code Quality on your code by using GitLab CI/CD and Docker.
- Using shared runners, the job should be configured For the [Docker-in-Docker workflow](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker).
- Using shared runners, the job should be configured For the [Docker-in-Docker workflow](../../../ci/docker/using_docker_build.md#use-docker-in-docker).
- Using private runners, there is an [alternative configuration](#set-up-a-private-runner-for-code-quality-without-docker-in-docker) recommended for running Code Quality analysis more efficiently.
In either configuration, the runner must have enough disk space to handle generated Code Quality files. For example on the [GitLab project](https://gitlab.com/gitlab-org/gitlab) the files are approximately 7 GB.
@ -226,7 +226,7 @@ are configured with `privileged=true`, and they do not expose `docker.sock` into
the job container. As a result, socket binding cannot be used to make `docker` available
in the context of the job script.
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker)
[Docker-in-Docker](../../../ci/docker/using_docker_build.md#use-docker-in-docker)
was chosen as an operational decision by the runner team, instead of exposing `docker.sock`.
### Disabling the code quality job

View File

@ -103,7 +103,7 @@ job.
An example configuration workflow:
1. Set up GitLab Runner to run Docker containers, like the
[Docker-in-Docker workflow](../../../ci/docker/using_docker_build.md#use-the-docker-executor-with-the-docker-image-docker-in-docker).
[Docker-in-Docker workflow](../../../ci/docker/using_docker_build.md#use-docker-in-docker).
1. Configure the default Load Performance Testing CI/CD job in your `.gitlab-ci.yml` file.
You need to include the template and configure it with CI/CD variables:

View File

@ -614,6 +614,7 @@ module API
source_project = Project.find_by_id(params[:project_id])
not_found!('Project') unless source_project && can?(current_user, :read_project, source_project)
forbidden!('Project') unless source_project && can?(current_user, :admin_project_member, source_project)
result = ::Members::ImportProjectTeamService.new(current_user, params).execute

View File

@ -5,16 +5,16 @@ module BulkImports
module Pipelines
class UploadsPipeline
include Pipeline
include Gitlab::ImportExport::CommandLineUtil
FILENAME = 'uploads.tar.gz'
AVATAR_PATTERN = %r{.*\/#{BulkImports::UploadsExportService::AVATAR_PATH}\/(?<identifier>.*)}.freeze
AvatarLoadingError = Class.new(StandardError)
def extract(context)
download_service(tmp_dir, context).execute
untar_zxf(archive: File.join(tmp_dir, FILENAME), dir: tmp_dir)
def extract(_context)
download_service.execute
decompression_service.execute
extraction_service.execute
upload_file_paths = Dir.glob(File.join(tmp_dir, '**', '*'))
BulkImports::Pipeline::ExtractedData.new(data: upload_file_paths)
@ -29,6 +29,7 @@ module BulkImports
return unless dynamic_path
return if File.directory?(file_path)
return if File.lstat(file_path).symlink?
named_captures = dynamic_path.named_captures.symbolize_keys
@ -36,20 +37,40 @@ module BulkImports
end
def after_run(_)
FileUtils.remove_entry(tmp_dir)
FileUtils.remove_entry(tmp_dir) if Dir.exist?(tmp_dir)
end
private
def download_service(tmp_dir, context)
def download_service
BulkImports::FileDownloadService.new(
configuration: context.configuration,
relative_url: context.entity.relation_download_url_path('uploads'),
relative_url: context.entity.relation_download_url_path(relation),
dir: tmp_dir,
filename: FILENAME
filename: targz_filename
)
end
def decompression_service
BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: targz_filename)
end
def extraction_service
BulkImports::ArchiveExtractionService.new(tmpdir: tmp_dir, filename: tar_filename)
end
def relation
BulkImports::FileTransfer::BaseConfig::UPLOADS_RELATION
end
def tar_filename
"#{relation}.tar"
end
def targz_filename
"#{tar_filename}.gz"
end
def tmp_dir
@tmp_dir ||= Dir.mktmpdir('bulk_imports')
end

View File

@ -18,6 +18,10 @@ module Gitlab
tar_with_options(archive: archive, dir: dir, options: 'cf')
end
def untar_xf(archive:, dir:)
untar_with_options(archive: archive, dir: dir, options: 'xf')
end
def gzip(dir:, filename:)
gzip_with_options(dir: dir, filename: filename)
end

View File

@ -48,10 +48,11 @@ module Gitlab
)
end
def authorize_url(redirect_uri)
def authorize_url(redirect_uri, state = nil)
client.auth_code.authorize_url({
redirect_uri: redirect_uri,
scope: "repo, user, user:email"
scope: "repo, user, user:email",
state: state
})
end

View File

@ -252,13 +252,13 @@ module Gitlab
def internal_web?(uri)
uri.scheme == config.gitlab.protocol &&
uri.hostname == config.gitlab.host &&
(uri.port.blank? || uri.port == config.gitlab.port)
get_port(uri) == config.gitlab.port
end
def internal_shell?(uri)
uri.scheme == 'ssh' &&
uri.hostname == config.gitlab_shell.ssh_host &&
(uri.port.blank? || uri.port == config.gitlab_shell.ssh_port)
get_port(uri) == config.gitlab_shell.ssh_port
end
def domain_allowed?(uri)

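The previous checks treated a URL with no explicit port as internal regardless of how GitLab was configured; the new comparison resolves the effective port first. `get_port` itself is not part of this hunk, so the helper below is a hypothetical stand-in for the implied behaviour (explicit port if given, otherwise the scheme's default):

```ruby
# Hypothetical helper, illustrating the intent of the change only.
DEFAULT_PORTS = { 'https' => 443, 'ssh' => 22 }.freeze

def effective_port(scheme, explicit_port)
  explicit_port || DEFAULT_PORTS[scheme]
end

configured_ssh_port = 2222 # e.g. gitlab_shell.ssh_port on a non-default setup

effective_port('ssh', nil) == configured_ssh_port   # => false (the old check matched here)
effective_port('ssh', 2222) == configured_ssh_port  # => true
```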
View File

@ -32,6 +32,19 @@ module QA
element :commit_changes_button
end
view 'app/assets/javascripts/pipeline_editor/components/header/validation_segment.vue' do
element :validation_message_content
end
view 'app/assets/javascripts/pipelines/components/pipeline_graph/pipeline_graph.vue' do
element :stage_container
element :job_container
end
view 'app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue' do
element :file_editor_container
end
def initialize
super
@ -80,8 +93,48 @@ module QA
find_element(:pipeline_id_content).text.delete!('#').to_i
end
def ci_syntax_validate_message
find_element(:validation_message_content).text
end
def go_to_visualize_tab
go_to_tab('Visualize')
end
def go_to_lint_tab
go_to_tab('Lint')
end
def go_to_view_merged_yaml_tab
go_to_tab('View merged YAML')
end
def has_source_editor?
has_element?(:source_editor_container)
end
def has_stage?(name)
all_elements(:stage_container, minimum: 1).any? { |item| item.text.match(/#{name}/i) }
end
def has_job?(name)
all_elements(:job_container, minimum: 1).any? { |item| item.text.match(/#{name}/i) }
end
def tab_alert_message
within_element(:file_editor_container) do
find('.gl-alert-body').text
end
end
private
def go_to_tab(name)
within_element(:file_editor_container) do
find('.nav-item', text: name).click
end
end
# If the page thinks user has never opened pipeline editor before
# It will expand pipeline editor sidebar by default
# Collapse the sidebar if it is expanded

View File

@ -0,0 +1,95 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Verify' do
describe 'Pipeline editor' do
let(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'pipeline-editor-project'
end
end
let!(:commit) do
Resource::Repository::Commit.fabricate_via_api! do |commit|
commit.project = project
commit.commit_message = 'Add .gitlab-ci.yml'
commit.add_files(
[
{
file_path: '.gitlab-ci.yml',
content: <<~YAML
stages:
- stage1
- stage2
job1:
stage: stage1
script: echo 'Done.'
job2:
stage: stage2
script: echo 'Done.'
YAML
}
]
)
end
end
before do
Flow::Login.sign_in
project.visit!
Page::Project::Menu.perform(&:go_to_pipeline_editor)
end
after do
project&.remove_via_api!
end
context 'when CI has valid syntax' do
it 'shows valid validations', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/349128' do
Page::Project::PipelineEditor::Show.perform do |show|
aggregate_failures do
expect(show.ci_syntax_validate_message).to have_content('CI configuration is valid')
show.go_to_visualize_tab
{ stage1: 'job1', stage2: 'job2' }.each_pair do |stage, job|
expect(show).to have_stage(stage), "Pipeline graph does not have stage #{stage}."
expect(show).to have_job(job), "Pipeline graph does not have job #{job}."
end
show.go_to_lint_tab
expect(show.tab_alert_message).to have_content('Syntax is correct')
show.go_to_view_merged_yaml_tab
expect(show).to have_source_editor
end
end
end
end
context 'when CI has invalid syntax' do
it 'shows invalid validations', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/349129' do
invalid_msg = 'syntax is invalid'
Page::Project::PipelineEditor::Show.perform do |show|
show.write_to_editor(SecureRandom.hex(10))
aggregate_failures do
show.go_to_visualize_tab
expect(show.tab_alert_message).to have_content(invalid_msg)
show.go_to_lint_tab
expect(show.tab_alert_message).to have_content('Syntax is incorrect')
show.go_to_view_merged_yaml_tab
expect(show.tab_alert_message).to have_content(invalid_msg)
expect(show.ci_syntax_validate_message).to have_content('CI configuration is invalid')
end
end
end
end
end
end
end

View File

@ -1,3 +1,3 @@
#!/usr/bin/env bash
bundle exec undercover -c "${CI_MERGE_REQUEST_DIFF_BASE_SHA:-$(git merge-base origin/master HEAD)}"
bundle exec undercover -c "${1:-$(git merge-base origin/master HEAD)}"

View File

@ -8,10 +8,6 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures do
let_it_be(:user) { create(:user) }
describe '#index' do
context 'user not logged in' do
it_behaves_like 'authenticates sessionless user', :index, :atom
end
context 'user logged in' do
let_it_be(:project) { create(:project, name: 'Project 1') }
let_it_be(:project2) { create(:project, name: 'Project Two') }

View File

@ -72,9 +72,6 @@ RSpec.describe DashboardController do
end
end
it_behaves_like 'authenticates sessionless user', :issues, :atom, author_id: User.first
it_behaves_like 'authenticates sessionless user', :issues_calendar, :ics
describe "#check_filters_presence!" do
let(:user) { create(:user) }

View File

@ -1209,26 +1209,6 @@ RSpec.describe GroupsController, factory_default: :keep do
end
end
context 'token authentication' do
it_behaves_like 'authenticates sessionless user', :show, :atom, public: true do
before do
default_params.merge!(id: group)
end
end
it_behaves_like 'authenticates sessionless user', :issues, :atom, public: true do
before do
default_params.merge!(id: group, author_id: user.id)
end
end
it_behaves_like 'authenticates sessionless user', :issues_calendar, :ics, public: true do
before do
default_params.merge!(id: group)
end
end
end
describe 'external authorization' do
before do
group.add_owner(user)

View File

@ -6,6 +6,7 @@ RSpec.describe Import::GithubController do
include ImportSpecHelper
let(:provider) { :github }
let(:new_import_url) { public_send("new_import_#{provider}_url") }
include_context 'a GitHub-ish import controller'
@ -50,13 +51,37 @@ RSpec.describe Import::GithubController do
stub_omniauth_provider('github')
end
it "updates access token" do
token = "asdasd12345"
context "when auth state param is missing from session" do
it "reports an error" do
get :callback
get :callback
expect(controller).to redirect_to(new_import_url)
expect(flash[:alert]).to eq('Access denied to your GitHub account.')
end
end
expect(session[:github_access_token]).to eq(token)
expect(controller).to redirect_to(status_import_github_url)
context "when auth state param is present in session" do
let(:valid_auth_state) { "secret-state" }
before do
session[:github_auth_state_key] = valid_auth_state
end
it "updates access token if state param is valid" do
token = "asdasd12345"
get :callback, params: { state: valid_auth_state }
expect(session[:github_access_token]).to eq(token)
expect(controller).to redirect_to(status_import_github_url)
end
it "reports an error if state param is invalid" do
get :callback, params: { state: "different-state" }
expect(controller).to redirect_to(new_import_url)
expect(flash[:alert]).to eq('Access denied to your GitHub account.')
end
end
end
@ -71,8 +96,6 @@ RSpec.describe Import::GithubController do
end
context 'when OAuth config is missing' do
let(:new_import_url) { public_send("new_import_#{provider}_url") }
before do
allow(controller).to receive(:oauth_config).and_return(nil)
end
@ -108,6 +131,16 @@ RSpec.describe Import::GithubController do
get :status
end
it 'gets authorization url using legacy client' do
allow(controller).to receive(:logged_in_with_provider?).and_return(true)
expect(controller).to receive(:go_to_provider_for_permissions).and_call_original
expect_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
expect(client).to receive(:authorize_url).and_call_original
end
get :new
end
end
context 'when feature remove_legacy_github_client is enabled' do
@ -130,6 +163,16 @@ RSpec.describe Import::GithubController do
get :status
end
it 'gets authorization url using oauth client' do
allow(controller).to receive(:logged_in_with_provider?).and_return(true)
expect(controller).to receive(:go_to_provider_for_permissions).and_call_original
expect_next_instance_of(OAuth2::Client) do |client|
expect(client.auth_code).to receive(:authorize_url).and_call_original
end
get :new
end
context 'pagination' do
context 'when no page is specified' do
it 'requests first page' do

View File

@ -162,27 +162,4 @@ RSpec.describe Projects::CommitsController do
end
end
end
context 'token authentication' do
context 'public project' do
it_behaves_like 'authenticates sessionless user', :show, :atom, { public: true, ignore_incrementing: true } do
before do
public_project = create(:project, :repository, :public)
default_params.merge!(namespace_id: public_project.namespace, project_id: public_project, id: "master.atom")
end
end
end
context 'private project' do
it_behaves_like 'authenticates sessionless user', :show, :atom, { public: false, ignore_incrementing: true } do
before do
private_project = create(:project, :repository, :private)
private_project.add_maintainer(user)
default_params.merge!(namespace_id: private_project.namespace, project_id: private_project, id: "master.atom")
end
end
end
end
end

View File

@ -1948,40 +1948,4 @@ RSpec.describe Projects::IssuesController do
end
end
end
context 'private project with token authentication' do
let_it_be(:private_project) { create(:project, :private) }
it_behaves_like 'authenticates sessionless user', :index, :atom, ignore_incrementing: true do
before do
default_params.merge!(project_id: private_project, namespace_id: private_project.namespace)
private_project.add_maintainer(user)
end
end
it_behaves_like 'authenticates sessionless user', :calendar, :ics, ignore_incrementing: true do
before do
default_params.merge!(project_id: private_project, namespace_id: private_project.namespace)
private_project.add_maintainer(user)
end
end
end
context 'public project with token authentication' do
let_it_be(:public_project) { create(:project, :public) }
it_behaves_like 'authenticates sessionless user', :index, :atom, public: true do
before do
default_params.merge!(project_id: public_project, namespace_id: public_project.namespace)
end
end
it_behaves_like 'authenticates sessionless user', :calendar, :ics, public: true do
before do
default_params.merge!(project_id: public_project, namespace_id: public_project.namespace)
end
end
end
end

View File

@ -128,6 +128,8 @@ RSpec.describe Projects::RawController do
let_it_be(:user) { create(:user, static_object_token: 'very-secure-token') }
let_it_be(:file_path) { 'master/README.md' }
let(:token) { user.static_object_token }
before do
project.add_developer(user)
end
@ -143,13 +145,36 @@ RSpec.describe Projects::RawController do
context 'when a token param is present' do
context 'when token is correct' do
let(:params) { { token: user.static_object_token } }
let(:params) { { token: token } }
it 'calls the action normally' do
get_show
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
it 'redirects to sign in page' do
get_show
expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
it 'calls the action normally' do
get_show
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when token is incorrect' do
@ -165,18 +190,45 @@ RSpec.describe Projects::RawController do
end
context 'when a token header is present' do
before do
request.headers['X-Gitlab-Static-Object-Token'] = token
end
context 'when token is correct' do
it 'calls the action normally' do
request.headers['X-Gitlab-Static-Object-Token'] = user.static_object_token
get_show
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
it 'redirects to sign in page' do
get_show
expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
it 'calls the action normally' do
get_show
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when token is incorrect' do
let(:token) { 'foobar' }
it 'redirects to sign in page' do
request.headers['X-Gitlab-Static-Object-Token'] = 'foobar'
get_show
expect(response).to have_gitlab_http_status(:found)

View File

@ -178,6 +178,29 @@ RSpec.describe Projects::RepositoriesController do
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
it 'redirects to sign in page' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master', token: user.static_object_token }, format: 'zip'
expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
it 'calls the action normally' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master', token: user.static_object_token }, format: 'zip'
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when token is incorrect' do
@ -197,6 +220,31 @@ RSpec.describe Projects::RepositoriesController do
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
it 'redirects to sign in page' do
request.headers['X-Gitlab-Static-Object-Token'] = user.static_object_token
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: 'zip'
expect(response).to have_gitlab_http_status(:found)
expect(response.location).to end_with('/users/sign_in')
end
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
it 'calls the action normally' do
request.headers['X-Gitlab-Static-Object-Token'] = user.static_object_token
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: 'zip'
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
context 'when token is incorrect' do

View File

@ -117,28 +117,6 @@ RSpec.describe Projects::TagsController do
end
end
context 'private project with token authentication' do
let(:private_project) { create(:project, :repository, :private) }
it_behaves_like 'authenticates sessionless user', :index, :atom, ignore_incrementing: true do
before do
default_params.merge!(project_id: private_project, namespace_id: private_project.namespace)
private_project.add_maintainer(user)
end
end
end
context 'public project with token authentication' do
let(:public_project) { create(:project, :repository, :public) }
it_behaves_like 'authenticates sessionless user', :index, :atom, public: true do
before do
default_params.merge!(project_id: public_project, namespace_id: public_project.namespace)
end
end
end
describe 'POST #create' do
before do
project.add_developer(user)

View File

@ -1568,28 +1568,6 @@ RSpec.describe ProjectsController do
end
end
context 'private project with token authentication' do
let_it_be(:private_project) { create(:project, :private) }
it_behaves_like 'authenticates sessionless user', :show, :atom, ignore_incrementing: true do
before do
default_params.merge!(id: private_project, namespace_id: private_project.namespace)
private_project.add_maintainer(user)
end
end
end
context 'public project with token authentication' do
let_it_be(:public_project) { create(:project, :public) }
it_behaves_like 'authenticates sessionless user', :show, :atom, public: true do
before do
default_params.merge!(id: public_project, namespace_id: public_project.namespace)
end
end
end
context 'GET show.atom' do
let_it_be(:public_project) { create(:project, :public) }
let_it_be(:event) { create(:event, :commented, project: public_project, target: create(:note, project: public_project)) }

BIN spec/fixtures/symlink_export.tar vendored Normal file (Binary file not shown.)

View File

@ -97,6 +97,18 @@ describe('gl_emoji', () => {
});
});
it('escapes gl-emoji name', async () => {
const glEmojiElement = markupToDomElement(
"<gl-emoji data-name='&#34;x=&#34y&#34 onload=&#34;alert(document.location.href)&#34;' data-unicode-version='x'>abc</gl-emoji>",
);
await waitForPromises();
expect(glEmojiElement.outerHTML).toBe(
'<gl-emoji data-name="&quot;x=&quot;y&quot; onload=&quot;alert(document.location.href)&quot;" data-unicode-version="x"><img class="emoji" title=":&quot;x=&quot;y&quot; onload=&quot;alert(document.location.href)&quot;:" alt=":&quot;x=&quot;y&quot; onload=&quot;alert(document.location.href)&quot;:" src="/-/emojis/2/grey_question.png" width="20" height="20" align="absmiddle"></gl-emoji>',
);
});
it('Adds sprite CSS if emojis are not supported', async () => {
const testPath = '/test-path.css';
jest.spyOn(EmojiUnicodeSupport, 'default').mockReturnValue(false);

View File

@ -560,13 +560,13 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iid', :request_store do
result = batch_sync(max_queries: 5) { resolve_issues(iid: issue1.iid).to_a }
result = batch_sync(max_queries: 6) { resolve_issues(iid: issue1.iid).to_a }
expect(result).to contain_exactly(issue1)
end
it 'batches queries that only include IIDs', :request_store do
result = batch_sync(max_queries: 5) do
result = batch_sync(max_queries: 6) do
[issue1, issue2]
.map { |issue| resolve_issues(iid: issue.iid.to_s) }
.flat_map(&:to_a)
@ -576,7 +576,7 @@ RSpec.describe Resolvers::IssuesResolver do
end
it 'finds a specific issue with iids', :request_store do
result = batch_sync(max_queries: 5) do
result = batch_sync(max_queries: 6) do
resolve_issues(iids: [issue1.iid]).to_a
end

View File

@ -4,6 +4,12 @@ require 'spec_helper'
RSpec.describe 'ActionCableSubscriptionAdapterIdentifier override' do
describe '#identifier' do
let!(:original_config) { ::ActionCable::Server::Base.config.cable }
after do
::ActionCable::Server::Base.config.cable = original_config
end
context 'when id key is nil on cable.yml' do
it 'does not override server config id with action cable pid' do
config = {

View File

@ -70,8 +70,10 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
describe '#extract' do
it 'downloads & extracts upload paths' do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
expect(pipeline).to receive(:untar_zxf)
file_download_service = instance_double("BulkImports::FileDownloadService")
download_service = instance_double("BulkImports::FileDownloadService")
decompression_service = instance_double("BulkImports::FileDecompressionService")
extraction_service = instance_double("BulkImports::ArchiveExtractionService")
expect(BulkImports::FileDownloadService)
.to receive(:new)
@ -80,9 +82,13 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
dir: tmpdir,
filename: 'uploads.tar.gz')
.and_return(file_download_service)
.and_return(download_service)
expect(BulkImports::FileDecompressionService).to receive(:new).with(dir: tmpdir, filename: 'uploads.tar.gz').and_return(decompression_service)
expect(BulkImports::ArchiveExtractionService).to receive(:new).with(tmpdir: tmpdir, filename: 'uploads.tar').and_return(extraction_service)
expect(file_download_service).to receive(:execute)
expect(download_service).to receive(:execute)
expect(decompression_service).to receive(:execute)
expect(extraction_service).to receive(:execute)
extracted_data = pipeline.extract(context)
@ -106,6 +112,16 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
end
end
context 'when path is a symlink' do
it 'does not upload the file' do
symlink = File.join(tmpdir, 'symlink')
FileUtils.ln_s(File.join(tmpdir, upload_file_path), symlink)
expect { pipeline.load(context, symlink) }.not_to change { portable.uploads.count }
end
end
end
end

View File

@ -101,4 +101,39 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
end
end
end
describe '#untar_xf' do
let(:archive_dir) { Dir.mktmpdir }
after do
FileUtils.remove_entry(archive_dir)
end
it 'extracts archive without decompression' do
filename = 'archive.tar.gz'
archive_file = File.join(archive_dir, 'archive.tar')
FileUtils.copy_file(archive, File.join(archive_dir, filename))
subject.gunzip(dir: archive_dir, filename: filename)
result = subject.untar_xf(archive: archive_file, dir: archive_dir)
expect(result).to eq(true)
expect(File.exist?(archive_file)).to eq(true)
expect(File.exist?(File.join(archive_dir, 'project.json'))).to eq(true)
expect(Dir.exist?(File.join(archive_dir, 'uploads'))).to eq(true)
end
context 'when something goes wrong' do
it 'raises an error' do
expect(Gitlab::Popen).to receive(:popen).and_return(['Error', 1])
klass = Class.new do
include Gitlab::ImportExport::CommandLineUtil
end.new
expect { klass.untar_xf(archive: 'test', dir: 'test') }.to raise_error(Gitlab::ImportExport::Error, 'System call failed')
end
end
end
end
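For reference, a minimal sketch of an untar_xf helper consistent with the expectations above (plain extraction via tar, raising when the system call fails). This is an assumption; the real helper in Gitlab::ImportExport::CommandLineUtil is not shown in this diff:

    def untar_xf(archive:, dir:)
      # Extract without decompressing; Gitlab::Popen.popen returns [output, exit_status].
      _output, status = Gitlab::Popen.popen(%W[tar -xf #{archive} -C #{dir}])
      raise Gitlab::ImportExport::Error, 'System call failed' unless status == 0

      true
    end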

View File

@ -531,24 +531,6 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
end
end
def stub_domain_resolv(domain, ip, port = 80, &block)
address = instance_double(Addrinfo,
ip_address: ip,
ipv4_private?: true,
ipv6_linklocal?: false,
ipv4_loopback?: false,
ipv6_loopback?: false,
ipv4?: false,
ip_port: port
)
allow(Addrinfo).to receive(:getaddrinfo).with(domain, port, any_args).and_return([address])
allow(address).to receive(:ipv6_v4mapped?).and_return(false)
yield
allow(Addrinfo).to receive(:getaddrinfo).and_call_original
end
end
context 'when enforce_user is' do
@ -611,6 +593,44 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url('http://foobar.x')
end
context 'when gitlab is running on a non-default port' do
let(:gitlab_port) { 3000 }
before do
stub_config(gitlab: { protocol: 'http', host: 'gitlab.local', port: gitlab_port })
end
it 'returns true for url targeting the wrong port' do
stub_domain_resolv('gitlab.local', '127.0.0.1') do
expect(described_class).to be_blocked_url("http://gitlab.local/foo")
end
end
it 'does not block url on gitlab port' do
stub_domain_resolv('gitlab.local', '127.0.0.1') do
expect(described_class).not_to be_blocked_url("http://gitlab.local:#{gitlab_port}/foo")
end
end
end
def stub_domain_resolv(domain, ip, port = 80, &block)
address = instance_double(Addrinfo,
ip_address: ip,
ipv4_private?: true,
ipv6_linklocal?: false,
ipv4_loopback?: false,
ipv6_loopback?: false,
ipv4?: false,
ip_port: port
)
allow(Addrinfo).to receive(:getaddrinfo).with(domain, port, any_args).and_return([address])
allow(address).to receive(:ipv6_v4mapped?).and_return(false)
yield
allow(Addrinfo).to receive(:getaddrinfo).and_call_original
end
end
describe '#validate_hostname' do

View File

@ -16,6 +16,8 @@ RSpec.describe Integrations::ChatMessage::AlertMessage do
}.merge(Gitlab::DataBuilder::Alert.build(alert))
end
it_behaves_like Integrations::ChatMessage
describe '#message' do
it 'returns the correct message' do
expect(subject.message).to eq("Alert firing in #{args[:project_name]}")

View File

@ -31,4 +31,22 @@ RSpec.describe Integrations::ChatMessage::BaseMessage do
it { is_expected.to eq('Check this out https://gitlab-domain.com/uploads/Screenshot1.png. And this https://gitlab-domain.com/uploads/Screenshot2.png') }
end
end
describe '#strip_markup' do
using RSpec::Parameterized::TableSyntax
where(:input, :output) do
nil | nil
'' | ''
'[label](url)' | 'label(url)'
'<url|label>' | 'urllabel'
'<a href="url">label</a>' | 'a href="url"label/a'
end
with_them do
it 'returns the expected output' do
expect(base_message.send(:strip_markup, input)).to eq(output)
end
end
end
end
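The table above pins down the intended behaviour: markup delimiters are dropped while the surrounding text is kept. A minimal sketch that reproduces those outputs, given as an assumption for illustration rather than the class's actual implementation:

    def strip_markup(string)
      # Remove Markdown/Slack/HTML link delimiters, keep everything else.
      string&.gsub(/[\[\]<>|]/, '')
    end

    strip_markup(nil)                       # => nil
    strip_markup('[label](url)')            # => "label(url)"
    strip_markup('<url|label>')             # => "urllabel"
    strip_markup('<a href="url">label</a>') # => 'a href="url"label/a'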

View File

@ -3,83 +3,79 @@
require 'spec_helper'
RSpec.describe Integrations::ChatMessage::DeploymentMessage do
subject { described_class.new(args) }
let_it_be(:user) { create(:user, name: 'John Smith', username: 'smith') }
let_it_be(:namespace) { create(:namespace, name: 'myspace') }
let_it_be(:project) { create(:project, :repository, namespace: namespace, name: 'myproject') }
let_it_be(:commit) { project.commit('HEAD') }
let_it_be(:ci_build) { create(:ci_build, project: project) }
let_it_be(:environment) { create(:environment, name: 'myenvironment', project: project) }
let_it_be(:deployment) { create(:deployment, status: :success, deployable: ci_build, environment: environment, project: project, user: user, sha: commit.sha) }
let(:args) do
Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
end
it_behaves_like Integrations::ChatMessage
describe '#pretext' do
it 'returns a message with the data returned by the deployment data builder' do
environment = create(:environment, name: "myenvironment")
project = create(:project, :repository)
commit = project.commit('HEAD')
deployment = create(:deployment, status: :success, environment: environment, project: project, sha: commit.sha)
data = Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
message = described_class.new(data)
expect(message.pretext).to eq("Deploy to myenvironment succeeded")
expect(subject.pretext).to eq("Deploy to myenvironment succeeded")
end
it 'returns a message for a successful deployment' do
data = {
args.merge!(
status: 'success',
environment: 'production'
}
)
message = described_class.new(data)
expect(message.pretext).to eq('Deploy to production succeeded')
expect(subject.pretext).to eq('Deploy to production succeeded')
end
it 'returns a message for a failed deployment' do
data = {
args.merge!(
status: 'failed',
environment: 'production'
}
)
message = described_class.new(data)
expect(message.pretext).to eq('Deploy to production failed')
expect(subject.pretext).to eq('Deploy to production failed')
end
it 'returns a message for a canceled deployment' do
data = {
args.merge!(
status: 'canceled',
environment: 'production'
}
)
message = described_class.new(data)
expect(message.pretext).to eq('Deploy to production canceled')
expect(subject.pretext).to eq('Deploy to production canceled')
end
it 'returns a message for a deployment to another environment' do
data = {
args.merge!(
status: 'success',
environment: 'staging'
}
)
message = described_class.new(data)
expect(message.pretext).to eq('Deploy to staging succeeded')
expect(subject.pretext).to eq('Deploy to staging succeeded')
end
it 'returns a message for a deployment with any other status' do
data = {
args.merge!(
status: 'unknown',
environment: 'staging'
}
)
message = described_class.new(data)
expect(message.pretext).to eq('Deploy to staging unknown')
expect(subject.pretext).to eq('Deploy to staging unknown')
end
it 'returns a message for a running deployment' do
data = {
status: 'running',
environment: 'production'
}
args.merge!(
status: 'running',
environment: 'production'
)
message = described_class.new(data)
expect(message.pretext).to eq('Starting deploy to production')
expect(subject.pretext).to eq('Starting deploy to production')
end
end
@ -108,21 +104,11 @@ RSpec.describe Integrations::ChatMessage::DeploymentMessage do
end
it 'returns attachments with the data returned by the deployment data builder' do
user = create(:user, name: "John Smith", username: "smith")
namespace = create(:namespace, name: "myspace")
project = create(:project, :repository, namespace: namespace, name: "myproject")
commit = project.commit('HEAD')
environment = create(:environment, name: "myenvironment", project: project)
ci_build = create(:ci_build, project: project)
deployment = create(:deployment, :success, deployable: ci_build, environment: environment, project: project, user: user, sha: commit.sha)
job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
commit_url = Gitlab::UrlBuilder.build(deployment.commit)
user_url = Gitlab::Routing.url_helpers.user_url(user)
data = Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
message = described_class.new(data)
expect(message.attachments).to eq([{
expect(subject.attachments).to eq([{
text: "[myspace/myproject](#{project.web_url}) with job [##{ci_build.id}](#{job_url}) by [John Smith (smith)](#{user_url})\n[#{deployment.short_sha}](#{commit_url}): #{commit.title}",
color: "good"
}])

View File

@ -28,6 +28,8 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
}
end
it_behaves_like Integrations::ChatMessage
context 'without markdown' do
let(:color) { '#C95823' }

View File

@ -29,6 +29,8 @@ RSpec.describe Integrations::ChatMessage::MergeMessage do
}
end
it_behaves_like Integrations::ChatMessage
context 'without markdown' do
let(:color) { '#345' }

View File

@ -19,6 +19,10 @@ RSpec.describe Integrations::ChatMessage::NoteMessage do
name: 'project_name',
url: 'http://somewhere.com'
},
commit: {
id: '5f163b2b95e6f53cbd428f5f0b103702a52b9a23',
message: "Added a commit message\ndetails\n123\n"
},
object_attributes: {
id: 10,
note: 'comment on a commit',
@ -28,16 +32,9 @@ RSpec.describe Integrations::ChatMessage::NoteMessage do
}
end
context 'commit notes' do
before do
args[:object_attributes][:note] = 'comment on a commit'
args[:object_attributes][:noteable_type] = 'Commit'
args[:commit] = {
id: '5f163b2b95e6f53cbd428f5f0b103702a52b9a23',
message: "Added a commit message\ndetails\n123\n"
}
end
it_behaves_like Integrations::ChatMessage
context 'commit notes' do
context 'without markdown' do
it 'returns a message regarding notes on commits' do
expect(subject.pretext).to eq("Test User (test.user) <http://url.com|commented on " \

View File

@ -40,6 +40,8 @@ RSpec.describe Integrations::ChatMessage::PipelineMessage do
let(:has_yaml_errors) { false }
it_behaves_like Integrations::ChatMessage
before do
test_commit = double("A test commit", committer: args[:user], title: "A test commit message")
test_project = double("A test project", commit_by: test_commit, name: args[:project][:name], web_url: args[:project][:web_url])

View File

@ -19,6 +19,8 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
let(:color) { '#345' }
it_behaves_like Integrations::ChatMessage
context 'push' do
before do
args[:commits] = [

View File

@ -33,6 +33,8 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
}
end
it_behaves_like Integrations::ChatMessage
context 'without markdown' do
describe '#pretext' do
context 'when :action == "create"' do

View File

@ -253,7 +253,7 @@ RSpec.describe 'GraphQL' do
end
context 'with token authentication' do
let(:token) { create(:personal_access_token) }
let(:token) { create(:personal_access_token, user: user) }
it 'authenticates users with a PAT' do
stub_authentication_activity_metrics(debug: false)
@ -276,6 +276,32 @@ RSpec.describe 'GraphQL' do
expect(graphql_errors).to include({ 'message' => /API not accessible/ })
end
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
it 'does not authenticate user' do
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
expect(response).to have_gitlab_http_status(:ok)
expect(graphql_data['echo']).to eq('nil says: Hello world')
end
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
it 'authenticates user' do
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
expect(response).to have_gitlab_http_status(:ok)
expect(graphql_data['echo']).to eq("\"#{token.user.username}\" says: Hello world")
end
end
end
context 'when the personal access token has no api scope' do
it 'does not log the user in' do
token.update!(scopes: [:read_user])

View File

@ -3138,6 +3138,29 @@ RSpec.describe API::Projects do
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns 404 if the source project members cannot be viewed by the requester' do
private_project = create(:project, :private)
expect do
post api("/projects/#{project.id}/import_project_members/#{private_project.id}", user)
end.not_to change { project.members.count }
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns 403 if the source project members cannot be administered by the requester' do
project.add_maintainer(user2)
project2.add_developer(user2)
expect do
post api("/projects/#{project.id}/import_project_members/#{project2.id}", user2)
end.not_to change { project.members.count }
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - Project')
end
it 'returns 422 if the import failed for valid projects' do
allow_next_instance_of(::ProjectTeam) do |project_team|
allow(project_team).to receive(:import).and_return(false)

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Dashboard::ProjectsController do
context 'token authentication' do
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false do
let(:url) { dashboard_projects_url(:atom) }
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe DashboardController do
context 'token authentication' do
it_behaves_like 'authenticates sessionless user for the request spec', 'issues atom', public_resource: false do
let(:url) { issues_dashboard_url(:atom, assignee_username: user.username) }
end
it_behaves_like 'authenticates sessionless user for the request spec', 'issues_calendar ics', public_resource: false do
let(:url) { issues_dashboard_url(:ics, assignee_username: user.username) }
end
end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GroupsController do
context 'token authentication' do
context 'when public group' do
let_it_be(:public_group) { create(:group, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: true do
let(:url) { group_path(public_group, format: :atom) }
end
it_behaves_like 'authenticates sessionless user for the request spec', 'issues atom', public_resource: true do
let(:url) { issues_group_path(public_group, format: :atom) }
end
it_behaves_like 'authenticates sessionless user for the request spec', 'issues_calendar ics', public_resource: true do
let(:url) { issues_group_calendar_url(public_group, format: :ics) }
end
end
context 'when private group' do
let_it_be(:private_group) { create(:group, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: false, ignore_metrics: true do
let(:url) { group_path(private_group, format: :atom) }
before do
private_group.add_maintainer(user)
end
end
it_behaves_like 'authenticates sessionless user for the request spec', 'issues atom', public_resource: false, ignore_metrics: true do
let(:url) { issues_group_path(private_group, format: :atom) }
before do
private_group.add_maintainer(user)
end
end
it_behaves_like 'authenticates sessionless user for the request spec', 'issues_calendar ics', public_resource: false, ignore_metrics: true do
let(:url) { issues_group_calendar_url(private_group, format: :ics) }
before do
private_group.add_maintainer(user)
end
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::CommitsController do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :repository, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: true do
let(:url) { project_commits_url(public_project, public_project.default_branch, format: :atom) }
end
end
context 'when private project' do
let_it_be(:private_project) { create(:project, :repository, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: false, ignore_metrics: true do
let(:url) { project_commits_url(private_project, private_project.default_branch, format: :atom) }
before do
private_project.add_maintainer(user)
end
end
end
end
end

View File

@ -8,11 +8,11 @@ RSpec.describe Projects::IssuesController do
let_it_be(:project) { issue.project }
let_it_be(:user) { issue.author }
before do
login_as(user)
end
describe 'GET #discussions' do
before do
login_as(user)
end
let_it_be(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
let_it_be(:discussion_reply) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, in_reply_to: discussion) }
let_it_be(:state_event) { create(:resource_state_event, issue: issue) }
@ -68,4 +68,38 @@ RSpec.describe Projects::IssuesController do
end
end
end
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: true do
let(:url) { project_issues_url(public_project, format: :atom) }
end
it_behaves_like 'authenticates sessionless user for the request spec', 'calendar ics', public_resource: true do
let(:url) { project_issues_url(public_project, format: :ics) }
end
end
context 'when private project' do
let_it_be(:private_project) { create(:project, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false, ignore_metrics: true do
let(:url) { project_issues_url(private_project, format: :atom) }
before do
private_project.add_maintainer(user)
end
end
it_behaves_like 'authenticates sessionless user for the request spec', 'calendar ics', public_resource: false, ignore_metrics: true do
let(:url) { project_issues_url(private_project, format: :ics) }
before do
private_project.add_maintainer(user)
end
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::MergeRequestsController do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: true do
let(:url) { project_merge_requests_url(public_project, format: :atom) }
end
end
context 'when private project' do
let_it_be(:private_project) { create(:project, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false, ignore_metrics: true do
let(:url) { project_merge_requests_url(private_project, format: :atom) }
before do
private_project.add_maintainer(user)
end
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::TagsController do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :repository, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: true do
let(:url) { project_tags_url(public_project, format: :atom) }
end
end
context 'when private project' do
let_it_be(:private_project) { create(:project, :repository, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false, ignore_metrics: true do
let(:url) { project_tags_url(private_project, format: :atom) }
before do
private_project.add_maintainer(user)
end
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ProjectsController do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: true do
let(:url) { project_url(public_project, format: :atom) }
end
end
context 'when private project' do
let_it_be(:private_project) { create(:project, :private) }
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: false, ignore_metrics: true do
let(:url) { project_url(private_project, format: :atom) }
before do
private_project.add_maintainer(user)
end
end
end
end
end

View File

@ -805,9 +805,9 @@ RSpec.describe UsersController do
end
context 'token authentication' do
let(:url) { user_url(user.username, format: :atom) }
it_behaves_like 'authenticates sessionless user for the request spec', public: true
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: true do
let(:url) { user_url(user, format: :atom) }
end
end
def user_moved_message(redirect_route, user)

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::ArchiveExtractionService do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filename) { 'symlink_export.tar' }
let_it_be(:filepath) { File.join(tmpdir, filename) }
before do
FileUtils.copy_file(File.join('spec', 'fixtures', filename), filepath)
end
after(:all) do
FileUtils.remove_entry(tmpdir)
end
subject(:service) { described_class.new(tmpdir: tmpdir, filename: filename) }
describe '#execute' do
it 'extracts files from archive and removes symlinks' do
file = File.join(tmpdir, 'project.json')
folder = File.join(tmpdir, 'uploads')
symlink = File.join(tmpdir, 'uploads', 'link.gitignore')
expect(service).to receive(:untar_xf).with(archive: filepath, dir: tmpdir).and_call_original
service.execute
expect(File.exist?(file)).to eq(true)
expect(Dir.exist?(folder)).to eq(true)
expect(File.exist?(symlink)).to eq(false)
end
context 'when dir is not in tmpdir' do
it 'raises an error' do
['/etc', '/usr', '/', '/home', '', '/some/other/path', Rails.root].each do |path|
expect { described_class.new(tmpdir: path, filename: 'filename').execute }
.to raise_error(BulkImports::Error, 'Invalid target directory')
end
end
end
context 'when archive file is a symlink' do
it 'raises an error' do
FileUtils.ln_s(File.join(tmpdir, filename), File.join(tmpdir, 'symlink'))
expect { described_class.new(tmpdir: tmpdir, filename: 'symlink').execute }
.to raise_error(BulkImports::Error, 'Invalid file')
end
end
context 'when filepath is being traversed' do
it 'raises an error' do
expect { described_class.new(tmpdir: File.join(tmpdir, '../../../'), filename: 'name').execute }
.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
end
end
end
end
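The expectations above outline the new service's contract. A minimal sketch of a shape consistent with them, offered as an assumption for illustration since the service itself is not part of this excerpt:

    module BulkImports
      class ArchiveExtractionService
        include Gitlab::ImportExport::CommandLineUtil

        def initialize(tmpdir:, filename:)
          @tmpdir = tmpdir.to_s
          @filename = filename
          @filepath = File.join(@tmpdir, @filename)
        end

        def execute
          Gitlab::Utils.check_path_traversal!(@tmpdir) # raises on '../..' style input
          raise BulkImports::Error, 'Invalid target directory' unless @tmpdir.start_with?(Dir.tmpdir)
          raise BulkImports::Error, 'Invalid file' if File.lstat(@filepath).symlink?

          untar_xf(archive: @filepath, dir: @tmpdir)

          # Drop any symlinks that were extracted from the archive.
          Dir.glob(File.join(@tmpdir, '**', '*')).each do |path|
            FileUtils.rm(path) if File.lstat(path).symlink?
          end
        end
      end
    end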

View File

@ -11,10 +11,8 @@ RSpec.describe Packages::Npm::CreatePackageService do
Gitlab::Json.parse(fixture_file('packages/npm/payload.json')
.gsub('@root/npm-test', package_name)
.gsub('1.0.1', version)).with_indifferent_access
.merge!(override)
end
let(:override) { {} }
let(:package_name) { "@#{namespace.path}/my-app" }
let(:version_data) { params.dig('versions', '1.0.1') }
@ -116,13 +114,53 @@ RSpec.describe Packages::Npm::CreatePackageService do
it { expect(subject[:message]).to be 'Package already exists.' }
end
context 'file size above maximum limit' do
before do
params['_attachments']["#{package_name}-#{version}.tgz"]['length'] = project.actual_limits.npm_max_file_size + 1
describe 'max file size validation' do
let(:max_file_size) { 5.bytes }
shared_examples_for 'max file size validation failure' do
it 'returns a 400 error', :aggregate_failures do
expect(subject[:http_status]).to eq 400
expect(subject[:message]).to be 'File is too large.'
end
end
it { expect(subject[:http_status]).to eq 400 }
it { expect(subject[:message]).to be 'File is too large.' }
before do
project.actual_limits.update!(npm_max_file_size: max_file_size)
end
context 'when max file size is exceeded' do
# NOTE: The base64 encoded package data in the fixture file is the "hello\n" string, whose byte size is 6.
it_behaves_like 'max file size validation failure'
end
context 'when file size is faked by setting the attachment length param to a lower size' do
let(:params) { super().deep_merge!({ _attachments: { "#{package_name}-#{version}.tgz" => { data: encoded_package_data, length: 1 } } }) }
# TODO (technical debt): Extract the package size calculation outside the service and add separate specs for it.
# Right now we have several contexts here to test the calculation's different scenarios.
context "when encoded package data is not padded" do
# 'Hello!' (size = 6 bytes) => 'SGVsbG8h'
let(:encoded_package_data) { 'SGVsbG8h' }
it_behaves_like 'max file size validation failure'
end
context "when encoded package data is padded with '='" do
let(:max_file_size) { 4.bytes }
# 'Hello' (size = 5 bytes) => 'SGVsbG8='
let(:encoded_package_data) { 'SGVsbG8=' }
it_behaves_like 'max file size validation failure'
end
context "when encoded package data is padded with '=='" do
let(:max_file_size) { 3.bytes }
# 'Hell' (size = 4 bytes) => 'SGVsbA=='
let(:encoded_package_data) { 'SGVsbA==' }
it_behaves_like 'max file size validation failure'
end
end
end
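The padding contexts above come down to simple base64 arithmetic: a base64 string of length n with p trailing '=' characters decodes to (n / 4) * 3 - p bytes. A small worked check, runnable on its own and not part of the service code:

    require 'base64'

    def decoded_size(encoded)
      (encoded.length / 4) * 3 - encoded.count('=')
    end

    decoded_size('SGVsbG8h')             # => 6 ('Hello!')
    decoded_size('SGVsbG8=')             # => 5 ('Hello')
    decoded_size('SGVsbA==')             # => 4 ('Hell')
    Base64.decode64('SGVsbG8=').bytesize # => 5 (same result via actual decoding)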
[
@ -141,7 +179,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
context 'with empty versions' do
let(:override) { { versions: {} } }
let(:params) { super().merge!({ versions: {} }) }
it { expect(subject[:http_status]).to eq 400 }
it { expect(subject[:message]).to eq 'Version is empty.' }

View File

@ -1,112 +0,0 @@
# frozen_string_literal: true
# This controller shared examples will be migrated to
# spec/support/shared_examples/requests/sessionless_auth_request_shared_examples.rb
# See also https://gitlab.com/groups/gitlab-org/-/epics/5076
RSpec.shared_examples 'authenticates sessionless user' do |path, format, params|
params ||= {}
before do
stub_authentication_activity_metrics(debug: false)
end
let(:user) { create(:user) }
let(:personal_access_token) { create(:personal_access_token, user: user) }
let(:default_params) { { format: format }.merge(params.except(:public) || {}) }
context "when the 'personal_access_token' param is populated with the personal access token" do
it 'logs the user in' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get path, params: default_params.merge(private_token: personal_access_token.token)
expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to eq(user)
end
it 'does not log the user in if page is public', if: params[:public] do
get path, params: default_params
expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to be_nil
end
end
context 'when the personal access token has no api scope', unless: params[:public] do
it 'does not log the user in' do
# Several instances of where these specs are shared route the request
# through ApplicationController#route_not_found which does not involve
# the usual auth code from Devise, so does not increment the
# :user_unauthenticated_counter
#
unless params[:ignore_incrementing]
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
end
personal_access_token.update!(scopes: [:read_user])
get path, params: default_params.merge(private_token: personal_access_token.token)
expect(response).not_to have_gitlab_http_status(:ok)
end
end
context "when the 'PERSONAL_ACCESS_TOKEN' header is populated with the personal access token" do
it 'logs the user in' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
@request.headers['PRIVATE-TOKEN'] = personal_access_token.token
get path, params: default_params
expect(response).to have_gitlab_http_status(:ok)
end
end
context "when the 'feed_token' param is populated with the feed token", if: format == :rss do
it "logs the user in" do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get path, params: default_params.merge(feed_token: user.feed_token)
expect(response).to have_gitlab_http_status(:ok)
end
end
context "when the 'feed_token' param is populated with an invalid feed token", if: format == :rss, unless: params[:public] do
it "logs the user" do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
get path, params: default_params.merge(feed_token: 'token')
expect(response).not_to have_gitlab_http_status(:ok)
end
end
it "doesn't log the user in otherwise", unless: params[:public] do
# Several instances of where these specs are shared route the request
# through ApplicationController#route_not_found which does not involve
# the usual auth code from Devise, so does not increment the
# :user_unauthenticated_counter
#
unless params[:ignore_incrementing]
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
end
get path, params: default_params.merge(private_token: 'token')
expect(response).not_to have_gitlab_http_status(:ok)
end
end

View File

@ -0,0 +1,56 @@
# frozen_string_literal: true
RSpec.shared_examples Integrations::ChatMessage do
context 'when input contains link markup' do
let(:evil_input) { '[Markdown](http://evil.com) <a href="http://evil.com">HTML</a> <http://evil.com|Slack>' }
# Attributes returned from #activity and #attributes which should be sanitized.
let(:sanitized_attributes) do
%i[title subtitle text fallback author_name]
end
# Attributes passed to #initialize which can contain user input.
before do
args.deep_merge!(
project_name: evil_input,
user_name: evil_input,
user_full_name: evil_input,
commit_title: evil_input,
environment: evil_input,
project: {
name: evil_input
},
user: {
name: evil_input,
username: evil_input
},
object_attributes: {
title: evil_input
}
)
end
# NOTE: The `include` matcher is used here so the RSpec error messages will tell us
# which method or attribute is failing, even though it makes the spec a bit less readable.
it 'strips all link markup characters', :aggregate_failures do
expect(subject).not_to have_attributes(
pretext: include(evil_input),
summary: include(evil_input)
)
begin
sanitized_attributes.each do |attribute|
expect(subject.activity).not_to include(attribute => include(evil_input))
end
rescue NotImplementedError
end
begin
sanitized_attributes.each do |attribute|
expect(subject.attachments).not_to include(include(attribute => include(evil_input)))
end
rescue NotImplementedError
end
end
end
end

View File

@ -1,84 +1,160 @@
# frozen_string_literal: true
RSpec.shared_examples 'authenticates sessionless user for the request spec' do |params|
params ||= {}
RSpec.shared_examples 'authenticates sessionless user for the request spec' do |name, public_resource:, ignore_metrics: false, params: {}|
before do
stub_authentication_activity_metrics(debug: false)
end
let(:user) { create(:user) }
let_it_be(:user) { create(:user) }
let(:personal_access_token) { create(:personal_access_token, user: user) }
let(:default_params) { params.except(:public) || {} }
context "when the 'personal_access_token' param is populated with the personal access token" do
it 'logs the user in' do
shared_examples 'authenticates user and returns response with ok status' do
it 'authenticates user and returns response with ok status' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
get url, params: default_params.merge(private_token: personal_access_token.token)
subject
expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to eq(user)
end
it 'does not log the user in if page is public', if: params[:public] do
get url, params: default_params
expect(response).to have_gitlab_http_status(:ok)
expect(controller.current_user).to be_nil
end
end
context 'when the personal access token has no api scope', unless: params[:public] do
it 'does not log the user in' do
shared_examples 'does not authenticate user and returns response with ok status' do
it 'does not authenticate user and returns response with ok status' do
subject
expect(controller.current_user).to be_nil
expect(response).to have_gitlab_http_status(:ok)
end
end
shared_examples 'does not return response with ok status' do
it 'does not return response with ok status' do
# Several instances of where these specs are shared route the request
# through ApplicationController#route_not_found which does not involve
# the usual auth code from Devise, so does not increment the
# :user_unauthenticated_counter
#
unless params[:ignore_incrementing]
unless ignore_metrics
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
end
personal_access_token.update!(scopes: [:read_user])
get url, params: default_params.merge(private_token: personal_access_token.token)
subject
expect(response).not_to have_gitlab_http_status(:ok)
end
end
context "when the 'PERSONAL_ACCESS_TOKEN' header is populated with the personal access token" do
it 'logs the user in' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_sessionless_authentication_counter)
shared_examples 'using valid token' do
context 'when resource is private', unless: public_resource do
include_examples 'authenticates user and returns response with ok status'
headers = { 'PRIVATE-TOKEN': personal_access_token.token }
get url, params: default_params, headers: headers
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
expect(response).to have_gitlab_http_status(:ok)
include_examples 'does not return response with ok status'
end
context 'when password expiration is not applicable' do
context 'when ldap user' do
let_it_be(:user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
include_examples 'authenticates user and returns response with ok status'
end
end
end
context 'when resource is public', if: public_resource do
include_examples 'authenticates user and returns response with ok status'
context 'when user with expired password' do
let_it_be(:user) { create(:user, password_expires_at: 2.minutes.ago) }
include_examples 'does not authenticate user and returns response with ok status'
end
end
end
it "doesn't log the user in otherwise", unless: params[:public] do
# Several instances of where these specs are shared route the request
# through ApplicationController#route_not_found which does not involve
# the usual auth code from Devise, so does not increment the
# :user_unauthenticated_counter
#
unless params[:ignore_incrementing]
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
shared_examples 'using invalid token' do
context 'when resource is private', unless: public_resource do
include_examples 'does not return response with ok status'
end
get url, params: default_params.merge(private_token: 'token')
context 'when resource is public', if: public_resource do
include_examples 'does not authenticate user and returns response with ok status'
end
end
expect(response).not_to have_gitlab_http_status(:ok)
shared_examples 'personal access token has no api scope' do
context 'when the personal access token has no api scope' do
before do
personal_access_token.update!(scopes: [:read_user])
end
context 'when resource is private', unless: public_resource do
include_examples 'does not return response with ok status'
end
context 'when resource is public', if: public_resource do
include_examples 'does not authenticate user and returns response with ok status'
end
end
end
describe name do
context "when the 'private_token' param is populated with the personal access token" do
context 'when valid token' do
subject { get url, params: params.merge(private_token: personal_access_token.token) }
include_examples 'using valid token'
include_examples 'personal access token has no api scope'
end
context 'when invalid token' do
subject { get url, params: params.merge(private_token: 'invalid token') }
include_examples 'using invalid token'
end
end
context "when the 'PRIVATE-TOKEN' header is populated with the personal access token" do
context 'when valid token' do
subject do
headers = { 'PRIVATE-TOKEN': personal_access_token.token }
get url, params: params, headers: headers
end
include_examples 'using valid token'
include_examples 'personal access token has no api scope'
end
context 'when invalid token' do
subject do
headers = { 'PRIVATE-TOKEN': 'invalid token' }
get url, params: params, headers: headers
end
include_examples 'using invalid token'
end
end
context "when the 'feed_token' param is populated with the feed token" do
context 'when valid token' do
subject { get url, params: params.merge(feed_token: user.feed_token) }
include_examples 'using valid token'
end
context 'when invalid token' do
subject { get url, params: params.merge(feed_token: 'invalid token') }
include_examples 'using invalid token'
end
end
end
end