Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-09-28 21:10:29 +00:00
parent dce5758796
commit e994d22d1d
25 changed files with 551 additions and 403 deletions

View file

@@ -1,5 +1,5 @@
## Actionable Insights
Actionable insights always have a follow-up action that needs to take place as a result of the research observation or data, and a clear recommendation or action associated with it. An actionable insight both defines the insight and clearly calls out the next step. These insights are tracked over time.
Actionable insights always have a follow-up action that needs to take place as a result of the research observation or data, and a clear recommendation or action associated with it. An actionable insight both defines the insight and clearly calls out the next step. These insights are tracked over time and at the group level.
#### Link
@@ -10,6 +10,10 @@ Actionable insights always have a follow-up action that needs to take place as a
- [ ] Assign this issue to the appropriate Product Manager, Product Designer, or UX Researcher
#### Group label
- [ ] Add the appropriate `Group` (such as `~"group::source code"`) label to the issue. This is done to identify and track actionable insights at the group level.
#### Description
- [ ] Provide some brief details on the actionable insight and the action to take

View file

@@ -16,7 +16,7 @@ function decodeUrlParameter(val) {
return decodeURIComponent(val.replace(/\+/g, '%20'));
}
function cleanLeadingSeparator(path) {
export function cleanLeadingSeparator(path) {
return path.replace(PATH_SEPARATOR_LEADING_REGEX, '');
}
@@ -435,3 +435,12 @@ export function getHTTPProtocol(url) {
const protocol = url.split(':');
return protocol.length > 1 ? protocol[0] : undefined;
}
/**
* Strips the filename from the given path by removing every non-slash character from the end of the
* passed parameter.
* @param {string} path
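* @returns {string} the path with the trailing filename removed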
*/
export function stripPathTail(path = '') {
return path.replace(/[^/]+$/, '');
}
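As a rough illustration of the two `url_utility` helpers this commit touches, here is a minimal usage sketch (the `~/lib/utils/url_utility` import path is assumed; expected outputs mirror the specs added further down):

```javascript
import { cleanLeadingSeparator, stripPathTail } from '~/lib/utils/url_utility';

// cleanLeadingSeparator strips leading slash separators, if any.
cleanLeadingSeparator('/foo/bar'); // => 'foo/bar'
cleanLeadingSeparator('foo/bar'); // => 'foo/bar'

// stripPathTail drops the trailing filename, keeping the directory part.
stripPathTail('/foo/bar/index.html'); // => '/foo/bar/'
stripPathTail('index.html'); // => ''
```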

View file

@@ -46,6 +46,12 @@ class MergeRequestPollCachedWidgetEntity < IssuableEntity
end
end
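# Flag-gated: when merge_request_cached_pipeline_serializer is enabled, the
# pipeline is serialized from this cached payload and the non-cached poll
# widget entity (below) skips it.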
expose :actual_head_pipeline, as: :pipeline, if: -> (mr, _) {
Feature.enabled?(:merge_request_cached_pipeline_serializer, mr.project) && presenter(mr).can_read_pipeline?
} do |merge_request, options|
MergeRequests::PipelineEntity.represent(merge_request.actual_head_pipeline, options)
end
# Paths
#
expose :target_branch_commits_path do |merge_request|

View file

@@ -19,7 +19,9 @@ class MergeRequestPollWidgetEntity < Grape::Entity
# User entities
expose :merge_user, using: UserEntity
expose :actual_head_pipeline, as: :pipeline, if: -> (mr, _) { presenter(mr).can_read_pipeline? } do |merge_request, options|
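# Mirror of the cached widget entity: the pipeline is serialized here only
# while the feature flag is disabled, so exactly one payload carries it.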
expose :actual_head_pipeline, as: :pipeline, if: -> (mr, _) {
Feature.disabled?(:merge_request_cached_pipeline_serializer, mr.project) && presenter(mr).can_read_pipeline?
} do |merge_request, options|
MergeRequests::PipelineEntity.represent(merge_request.actual_head_pipeline, options)
end

View file

@@ -32,11 +32,18 @@ module Ci
Gitlab::Routing.url_helpers.project_new_merge_request_path(project, format: :json)
end
def pipelines_project_merge_request_path(merge_request)
Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
end
def merge_request_widget_path(merge_request)
Gitlab::Routing.url_helpers.cached_widget_project_json_merge_request_path(merge_request.project, merge_request, format: :json)
end
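# Yields the pipelines-list path and the cached-widget path for each merge
# request, so the caller can expire both ETag caches in one pass.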
def each_pipelines_merge_request_path(pipeline)
pipeline.all_merge_requests.each do |merge_request|
path = Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
yield(path)
yield(pipelines_project_merge_request_path(merge_request))
yield(merge_request_widget_path(merge_request))
end
end

View file

@@ -0,0 +1,7 @@
---
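# When enabled, the MR head pipeline is serialized by the cached widget entity instead of the poll widget entity.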
name: merge_request_cached_pipeline_serializer
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38273
rollout_issue_url:
type: development
group: group::source code
default_enabled: false

View file

@@ -203,9 +203,9 @@ _The artifacts are stored by default in
enabled: true
object_store:
enabled: true
remote_directory: "artifacts" # The bucket name
remote_directory: "artifacts" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1

View file

@@ -589,7 +589,7 @@ In the examples below we set the Registry's port to `5001`.
[`http:addr`](https://docs.docker.com/registry/configuration/#http) value:
```yaml
http
http:
addr: localhost:5001
```
@@ -1065,7 +1065,7 @@ Start with a value between `25000000` (25MB) and `50000000` (50MB).
s3:
accesskey: 'AKIAKIAKI'
secretkey: 'secret123'
bucket: 'gitlab-registry-bucket-AKIAKIAKI'
bucket: 'gitlab-registry-bucket-AKIAKIAKI'
chunksize: 25000000
```

View file

@@ -135,28 +135,28 @@ This section describes the earlier configuration format.
##
## The location where build dependency_proxy are stored (default: shared/dependency_proxy).
##
#storage_path: shared/dependency_proxy
# storage_path: shared/dependency_proxy
object_store:
enabled: false
remote_directory: dependency_proxy # The bucket name.
#direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false).
#background_upload: true # Temporary option to limit automatic upload (Default: true).
#proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage.
remote_directory: dependency_proxy # The bucket name.
# direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false).
# background_upload: true # Temporary option to limit automatic upload (Default: true).
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage.
connection:
##
## If the provider is AWS S3, use the following
##
provider: AWS
region: us-east-1
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
##
## If the provider is AWS S3, uncomment the following
## If the provider is other than AWS (an S3-compatible one), comment out the previous 4 lines and use the following instead:
##
#provider: AWS
#region: us-east-1
#aws_access_key_id: AWS_ACCESS_KEY_ID
#aws_secret_access_key: AWS_SECRET_ACCESS_KEY
##
## If the provider is other than AWS (an S3-compatible one), uncomment the following
##
#host: 's3.amazonaws.com' # default: s3.amazonaws.com.
#aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
#endpoint: 'https://s3.amazonaws.com' # Useful for S3-compliant services such as DigitalOcean Spaces.
#path_style: false # If true, use 'host/bucket_name/object' instead of 'bucket_name.host/object'.
# host: 's3.amazonaws.com' # default: s3.amazonaws.com.
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
# endpoint: 'https://s3.amazonaws.com' # Useful for S3-compliant services such as DigitalOcean Spaces.
# path_style: false # If true, use 'host/bucket_name/object' instead of 'bucket_name.host/object'.
```
1. [Restart GitLab](../restart_gitlab.md#installations-from-source "How to restart GitLab") for the changes to take effect.

View file

@@ -142,33 +142,33 @@ We recommend using the [consolidated object storage settings](../object_storage.
1. Edit the `packages` section in `config/gitlab.yml` (uncomment where necessary):
```yaml
packages:
enabled: true
packages:
enabled: true
##
## The location where build packages are stored (default: shared/packages).
##
# storage_path: shared/packages
object_store:
enabled: false
remote_directory: packages # The bucket name.
# direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false).
# background_upload: true # Temporary option to limit automatic upload (Default: true).
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage.
connection:
##
## The location where build packages are stored (default: shared/packages).
## If the provider is AWS S3, use the following:
##
#storage_path: shared/packages
object_store:
enabled: false
remote_directory: packages # The bucket name.
#direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false).
#background_upload: true # Temporary option to limit automatic upload (Default: true).
#proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage.
connection:
##
## If the provider is AWS S3, uncomment the following
##
#provider: AWS
#region: us-east-1
#aws_access_key_id: AWS_ACCESS_KEY_ID
#aws_secret_access_key: AWS_SECRET_ACCESS_KEY
##
## If the provider is other than AWS (an S3-compatible one), uncomment the following
##
#host: 's3.amazonaws.com' # default: s3.amazonaws.com.
#aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
#endpoint: 'https://s3.amazonaws.com' # Useful for S3-compliant services such as DigitalOcean Spaces.
#path_style: false # If true, use 'host/bucket_name/object' instead of 'bucket_name.host/object'.
provider: AWS
region: us-east-1
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
##
## If the provider is other than AWS (an S3-compatible one), comment out the previous 4 lines and use the following instead:
##
# host: 's3.amazonaws.com' # default: s3.amazonaws.com.
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
# endpoint: 'https://s3.amazonaws.com' # Useful for S3-compliant services such as DigitalOcean Spaces.
# path_style: false # If true, use 'host/bucket_name/object' instead of 'bucket_name.host/object'.
```
1. Save the file and [restart GitLab](../restart_gitlab.md#installations-from-source) for the changes to take effect.

View file

@@ -103,7 +103,7 @@ the secrets stored in Vault by defining them with the `vault` keyword:
```yaml
secrets:
DATABASE_PASSWORD:
vault: production/db/password@ops # translates to secret `ops/data/production/db`, field `password`
vault: production/db/password@ops # translates to secret `ops/data/production/db`, field `password`
```
In this example:

View file

@@ -103,8 +103,8 @@ variables:
You can then call its value in your script:
```yaml
script:
- echo "$TEST"
script:
- echo "$TEST"
```
For more details, see [`.gitlab-ci.yml` defined variables](#gitlab-ciyml-defined-variables).

View file

@@ -1547,7 +1547,7 @@ docker build:
- Dockerfile
- docker/scripts/*
when: manual
# - when: never would be redundant here, this is implied any time rules are listed.
# - when: never would be redundant here, this is implied any time rules are listed.
```
Keywords such as `branches` or `refs` that are currently available for
@@ -4074,7 +4074,7 @@ field to fetch the value for:
job:
secrets:
DATABASE_PASSWORD:
vault: production/db/password # translates to secret `kv-v2/data/production/db`, field `password`
vault: production/db/password # translates to secret `kv-v2/data/production/db`, field `password`
```
You can specify a custom secrets engine path by adding a suffix starting with `@`:
@@ -4083,7 +4083,7 @@ You can specify a custom secrets engine path by adding a suffix starting with `@`
job:
secrets:
DATABASE_PASSWORD:
vault: production/db/password@ops # translates to secret `ops/data/production/db`, field `password`
vault: production/db/password@ops # translates to secret `ops/data/production/db`, field `password`
```
In the detailed form of the syntax, you can specify all details explicitly:

View file

@@ -251,7 +251,7 @@ The table below shows what kind of documentation goes where.
| `doc/legal/` | Legal documents about contributing to GitLab. |
| `doc/install/` | Contains instructions for installing GitLab. |
| `doc/update/` | Contains instructions for updating GitLab. |
| `doc/topics/` | Indexes per topic (`doc/topics/topic-name/index.md`): all resources for that topic. |
| `doc/topics/` | Indexes per topic (`doc/topics/topic_name/index.md`): all resources for that topic. |
### Work with directories and files

File diff suppressed because it is too large

View file

@@ -119,7 +119,7 @@ Once the remote is set, you can use the remote when running Conan commands by ad
For example:
```shell
conan search Hello* --all --remote=gitlab
conan search Hello* --remote=gitlab
```
### Instance level remote
@@ -285,9 +285,9 @@ The `conan search` command can be run searching by full or partial package name,
To search using a partial name, use the wildcard symbol `*`, which should be placed at the end of your search (for example, `my-packa*`):
```shell
conan search Hello --all --remote=gitlab
conan search He* --all --remote=gitlab
conan search Hello/0.1@mycompany/beta --all --remote=gitlab
conan search Hello --remote=gitlab
conan search He* --remote=gitlab
conan search Hello/0.1@mycompany/beta --remote=gitlab
```
The scope of your search includes all projects you have permission to access, including your private projects as well as all public projects.
@@ -330,7 +330,6 @@ create_package:
- conan new <package-name>/0.1 -t
- conan create . <group-name>+<project-name>/stable
- CONAN_LOGIN_USERNAME=ci_user CONAN_PASSWORD=${CI_JOB_TOKEN} conan upload <package-name>/0.1@<group-name>+<project-name>/stable --all --remote=gitlab
```
You can find additional Conan images to use as the base of your CI file

View file

@@ -307,15 +307,15 @@ in addition to the steps in the
Below is an example of what your `.gitlab-ci.yml` should look like:
```yaml
build:
image: $CI_REGISTRY/group/project/docker:19.03.12
services:
- name: $CI_REGISTRY/group/project/docker:19.03.12-dind
alias: docker
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
build:
image: $CI_REGISTRY/group/project/docker:19.03.12
services:
- name: $CI_REGISTRY/group/project/docker:19.03.12-dind
alias: docker
stage: build
script:
- docker build -t my-docker-image .
- docker run my-docker-image /script/to/run/tests
```
If you forget to set the service alias, the `docker:19.03.12` image is unable to find the

View file

@@ -250,21 +250,21 @@ is updated:
1. Add a `deploy` job to your `.gitlab-ci.yml` file:
```yaml
image: mcr.microsoft.com/dotnet/core/sdk:3.1
image: mcr.microsoft.com/dotnet/core/sdk:3.1
stages:
- deploy
stages:
- deploy
deploy:
stage: deploy
script:
- dotnet restore -p:Configuration=Release
- dotnet build -c Release
- dotnet pack -c Release
- dotnet nuget add source "$CI_SERVER_URL/api/v4/projects/$CI_PROJECT_ID/packages/nuget/index.json" --name gitlab --username gitlab-ci-token --password $CI_JOB_TOKEN --store-password-in-clear-text
- dotnet nuget push "bin/Release/*.nupkg" --source gitlab
only:
- master
deploy:
stage: deploy
script:
- dotnet restore -p:Configuration=Release
- dotnet build -c Release
- dotnet pack -c Release
- dotnet nuget add source "$CI_SERVER_URL/api/v4/projects/$CI_PROJECT_ID/packages/nuget/index.json" --name gitlab --username gitlab-ci-token --password $CI_JOB_TOKEN --store-password-in-clear-text
- dotnet nuget push "bin/Release/*.nupkg" --source gitlab
only:
- master
```
1. Commit the changes and push them to your GitLab repository to trigger a new CI build.

View file

@@ -76,6 +76,3 @@ If you've accidentally started the import process with the wrong account, follow
1. Revoke GitLab access to your Bitbucket account, essentially reversing the process in the following procedure: [Import your Bitbucket repositories](#import-your-bitbucket-repositories).
1. Sign out of the Bitbucket account. Follow the procedure linked from the previous step.
NOTE: **Note:**
To import a repository including LFS objects from a Bitbucket server repository, use the [Repo by URL](../import/repo_by_url.md) importer.

View file

@@ -30,12 +30,6 @@ repository is too large the import can timeout.
There is also the option of [connecting your external repository to get CI/CD benefits](../../../ci/ci_cd_for_external_repos/index.md). **(PREMIUM)**
## LFS authentication
When importing a project that contains LFS objects, if the project has an
`.lfsconfig` file with the user/password set on the URL, this authentication
information will take precedence over the import authentication data.
## Migrating from self-managed GitLab to GitLab.com
If you only need to migrate Git repos, you can [import each project by URL](repo_by_url.md). Issues and merge requests can't be imported.

View file

@@ -664,6 +664,19 @@ describe('URL utility', () => {
});
});
describe('cleanLeadingSeparator', () => {
it.each`
path | expected
${'/foo/bar'} | ${'foo/bar'}
${'foo/bar'} | ${'foo/bar'}
${'//foo/bar'} | ${'foo/bar'}
${'/./foo/bar'} | ${'./foo/bar'}
${''} | ${''}
`('$path becomes $expected', ({ path, expected }) => {
expect(urlUtils.cleanLeadingSeparator(path)).toBe(expected);
});
});
describe('joinPaths', () => {
it.each`
paths | expected
@@ -787,4 +800,18 @@ describe('URL utility', () => {
expect(urlUtils.getHTTPProtocol(url)).toBe(expectation);
});
});
describe('stripPathTail', () => {
it.each`
path | expected
${''} | ${''}
${'index.html'} | ${''}
${'/'} | ${'/'}
${'/foo/bar'} | ${'/foo/'}
${'/foo/bar/'} | ${'/foo/bar/'}
${'/foo/bar/index.html'} | ${'/foo/bar/'}
`('strips the filename from $path => $expected', ({ path, expected }) => {
expect(urlUtils.stripPathTail(path)).toBe(expected);
});
});
});

View file

@@ -213,4 +213,55 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
end
end
end
describe 'pipeline' do
let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
before do
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(can_access)
end
context 'when user has access to pipelines' do
let(:can_access) { true }
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload =
MergeRequests::PipelineEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
context 'when merge_request_cached_pipeline_serializer is disabled' do
it 'does not return pipeline' do
stub_feature_flags(merge_request_cached_pipeline_serializer: false)
expect(subject[:pipeline]).to be_nil
end
end
end
context 'when user does not have access to pipelines' do
let(:can_access) { false }
let(:req) { double('request', current_user: user, project: project) }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
end
context 'when is not up to date' do
it 'returns nil' do
pipeline.update!(sha: "not up to date")
expect(subject[:pipeline]).to eq(nil)
end
end
end
end
end

View file

@@ -222,13 +222,21 @@ RSpec.describe MergeRequestPollWidgetEntity do
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload =
MergeRequests::PipelineEntity
.represent(pipeline, request: req)
.as_json
it 'does not return pipeline' do
expect(subject[:pipeline]).to be_nil
end
expect(subject[:pipeline]).to eq(pipeline_payload)
context 'when merge_request_cached_pipeline_serializer is disabled' do
it 'returns detailed info about pipeline' do
stub_feature_flags(merge_request_cached_pipeline_serializer: false)
pipeline_payload =
MergeRequests::PipelineEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
end
it 'returns ci_status' do
@@ -249,10 +257,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
let(:result) { false }
let(:req) { double('request', current_user: user, project: project) }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
it 'does not return ci_status' do
expect(subject[:ci_status]).to eq(nil)
end

View file

@@ -26,9 +26,11 @@ RSpec.describe Ci::ExpirePipelineCacheService do
project = merge_request.target_project
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
subject.execute(merge_request.all_pipelines.last)
end

View file

@@ -5,19 +5,19 @@ require_relative 'workhorse_helpers'
module GitHttpHelpers
include WorkhorseHelpers
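# These helpers now take keyword arguments (**options) instead of an options
# hash, likely to avoid Ruby 2.7 keyword-argument deprecation warnings;
# callers keep passing user:, password:, and spnego_request_token:.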
def clone_get(project, options = {})
def clone_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-upload-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
def clone_post(project, options = {})
def clone_post(project, **options)
post "/#{project}/git-upload-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
def push_get(project, options = {})
def push_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-receive-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
def push_post(project, options = {})
def push_post(project, **options)
post "/#{project}/git-receive-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end