Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-05 21:08:27 +00:00
parent aee8d27430
commit 908a54b624
55 changed files with 1199 additions and 415 deletions

View File

@ -2,11 +2,14 @@ inherit_gem:
gitlab-styles:
- rubocop-default.yml
inherit_from: .rubocop_todo.yml
require:
- ./rubocop/rubocop
- rubocop-rspec
inherit_from:
- .rubocop_todo.yml
- ./rubocop/rubocop-migrations.yml
inherit_mode:
merge:
- Include

View File

@ -662,8 +662,8 @@ GEM
shellany (~> 0.0)
numerizer (0.2.0)
oauth (0.5.4)
oauth2 (1.4.1)
faraday (>= 0.8, < 0.16.0)
oauth2 (1.4.4)
faraday (>= 0.8, < 2.0)
jwt (>= 1.0, < 3.0)
multi_json (~> 1.3)
multi_xml (~> 0.5)

View File

@ -1,5 +1,5 @@
<script>
import { isEqual, pickBy } from 'lodash';
import { isEqual } from 'lodash';
import { __, sprintf, s__ } from '../../locale';
import createFlash from '../../flash';
import PipelinesService from '../services/pipelines_service';
@ -10,7 +10,8 @@ import NavigationControls from './nav_controls.vue';
import { getParameterByName } from '../../lib/utils/common_utils';
import CIPaginationMixin from '../../vue_shared/mixins/ci_pagination_api_mixin';
import PipelinesFilteredSearch from './pipelines_filtered_search.vue';
import { ANY_TRIGGER_AUTHOR, RAW_TEXT_WARNING, SUPPORTED_FILTER_PARAMETERS } from '../constants';
import { ANY_TRIGGER_AUTHOR, RAW_TEXT_WARNING } from '../constants';
import { validateParams } from '../utils';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
@ -225,14 +226,12 @@ export default {
return this.glFeatures.filterPipelinesSearch;
},
validatedParams() {
return pickBy(this.params, (val, key) => SUPPORTED_FILTER_PARAMETERS.includes(key) && val);
return validateParams(this.params);
},
},
created() {
this.service = new PipelinesService(this.endpoint);
this.requestData = { page: this.page, scope: this.scope };
Object.assign(this.requestData, this.validatedParams);
this.requestData = { page: this.page, scope: this.scope, ...this.validatedParams };
},
methods: {
successCallback(resp) {
@ -313,7 +312,6 @@ export default {
<pipelines-filtered-search
v-if="canFilterPipelines"
:pipelines="state.pipelines"
:project-id="projectId"
:params="validatedParams"
@filterPipelines="filterPipelines"

View File

@ -3,6 +3,7 @@ import { GlFilteredSearch } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import PipelineTriggerAuthorToken from './tokens/pipeline_trigger_author_token.vue';
import PipelineBranchNameToken from './tokens/pipeline_branch_name_token.vue';
import PipelineStatusToken from './tokens/pipeline_status_token.vue';
import { map } from 'lodash';
export default {
@ -10,10 +11,6 @@ export default {
GlFilteredSearch,
},
props: {
pipelines: {
type: Array,
required: true,
},
projectId: {
type: String,
required: true,
@ -44,6 +41,14 @@ export default {
operators: [{ value: '=', description: __('is'), default: 'true' }],
projectId: this.projectId,
},
{
type: 'status',
icon: 'status',
title: s__('Pipeline|Status'),
unique: true,
token: PipelineStatusToken,
operators: [{ value: '=', description: __('is'), default: 'true' }],
},
];
},
paramsValue() {

View File

@ -0,0 +1,104 @@
<script>
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlIcon } from '@gitlab/ui';
import { s__ } from '~/locale';
export default {
components: {
GlFilteredSearchToken,
GlFilteredSearchSuggestion,
GlIcon,
},
props: {
config: {
type: Object,
required: true,
},
value: {
type: Object,
required: true,
},
},
computed: {
statuses() {
return [
{
class: 'ci-status-icon-canceled',
icon: 'status_canceled',
text: s__('Pipeline|Canceled'),
value: 'canceled',
},
{
class: 'ci-status-icon-created',
icon: 'status_created',
text: s__('Pipeline|Created'),
value: 'created',
},
{
class: 'ci-status-icon-failed',
icon: 'status_failed',
text: s__('Pipeline|Failed'),
value: 'failed',
},
{
class: 'ci-status-icon-manual',
icon: 'status_manual',
text: s__('Pipeline|Manual'),
value: 'manual',
},
{
class: 'ci-status-icon-success',
icon: 'status_success',
text: s__('Pipeline|Passed'),
value: 'success',
},
{
class: 'ci-status-icon-pending',
icon: 'status_pending',
text: s__('Pipeline|Pending'),
value: 'pending',
},
{
class: 'ci-status-icon-running',
icon: 'status_running',
text: s__('Pipeline|Running'),
value: 'running',
},
{
class: 'ci-status-icon-skipped',
icon: 'status_skipped',
text: s__('Pipeline|Skipped'),
value: 'skipped',
},
];
},
findActiveStatus() {
return this.statuses.find(status => status.value === this.value.data);
},
},
};
</script>
<template>
<gl-filtered-search-token v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
<template #view>
<div class="gl-display-flex gl-align-items-center">
<div :class="findActiveStatus.class">
<gl-icon :name="findActiveStatus.icon" class="gl-mr-2 gl-display-block" />
</div>
<span>{{ findActiveStatus.text }}</span>
</div>
</template>
<template #suggestions>
<gl-filtered-search-suggestion
v-for="(status, index) in statuses"
:key="index"
:value="status.value"
>
<div class="gl-display-flex" :class="status.class">
<gl-icon :name="status.icon" class="gl-mr-3" />
<span>{{ status.text }}</span>
</div>
</gl-filtered-search-suggestion>
</template>
</gl-filtered-search-token>
</template>

View File

@ -5,7 +5,7 @@ export const PIPELINES_TABLE = 'PIPELINES_TABLE';
export const LAYOUT_CHANGE_DELAY = 300;
export const FILTER_PIPELINES_SEARCH_DELAY = 200;
export const ANY_TRIGGER_AUTHOR = 'Any';
export const SUPPORTED_FILTER_PARAMETERS = ['username', 'ref'];
export const SUPPORTED_FILTER_PARAMETERS = ['username', 'ref', 'status'];
export const TestStatus = {
FAILED: 'failed',

View File

@ -1,5 +1,6 @@
import axios from '../../lib/utils/axios_utils';
import Api from '~/api';
import { validateParams } from '../utils';
export default class PipelinesService {
/**
@ -19,18 +20,10 @@ export default class PipelinesService {
}
getPipelines(data = {}) {
const { scope, page, username, ref } = data;
const { scope, page } = data;
const { CancelToken } = axios;
const queryParams = { scope, page };
if (username) {
queryParams.username = username;
}
if (ref) {
queryParams.ref = ref;
}
const queryParams = { scope, page, ...validateParams(data) };
this.cancelationSource = CancelToken.source();

View File

@ -0,0 +1,8 @@
import { pickBy } from 'lodash';
import { SUPPORTED_FILTER_PARAMETERS } from './constants';
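// Keep only the params whose keys are supported filter parameters and whose values are truthy.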
export const validateParams = params => {
return pickBy(params, (val, key) => SUPPORTED_FILTER_PARAMETERS.includes(key) && val);
};
export default () => {};

View File

@ -5,6 +5,7 @@
* Components need to have `scope`, `page` and `requestData`
*/
import { historyPushState, buildUrlWithCurrentLocation } from '../../lib/utils/common_utils';
import { validateParams } from '~/pipelines/utils';
export default {
methods: {
@ -35,18 +36,7 @@ export default {
},
onChangeWithFilter(params) {
const { username, ref } = this.requestData;
const paramsData = params;
if (username) {
paramsData.username = username;
}
if (ref) {
paramsData.ref = ref;
}
return paramsData;
return { ...params, ...validateParams(this.requestData) };
},
updateInternalState(parameters) {

View File

@ -277,7 +277,7 @@ class Projects::PipelinesController < Projects::ApplicationController
end
def index_params
params.permit(:scope, :username, :ref)
params.permit(:scope, :username, :ref, :status)
end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
module LooksAhead
extend ActiveSupport::Concern
FEATURE_FLAG = :graphql_lookahead_support
included do
attr_accessor :lookahead
end
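# Strips the :lookahead extra out of the arguments and delegates to resolve_with_lookahead.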
def resolve(**args)
self.lookahead = args.delete(:lookahead)
resolve_with_lookahead(**args)
end
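# Preloads the associations from `unconditional_includes`, plus any `preloads` whose fields are selected (no-op unless the feature flag is enabled).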
def apply_lookahead(query)
return query unless Feature.enabled?(FEATURE_FLAG)
selection = node_selection
includes = preloads.each.flat_map do |name, requirements|
selection&.selects?(name) ? requirements : []
end
preloads = (unconditional_includes + includes).uniq
return query if preloads.empty?
query.preload(*preloads) # rubocop: disable CodeReuse/ActiveRecord
end
private
def unconditional_includes
[]
end
def preloads
{}
end
def node_selection
return unless lookahead
if lookahead.selects?(:nodes)
lookahead.selection(:nodes)
elsif lookahead.selects?(:edges)
lookahead.selection(:edges).selection(:nodes)
end
end
end

View File

@ -4,12 +4,13 @@
# that `MergeRequestsFinder` can handle, so you may need to use aliasing.
module ResolvesMergeRequests
extend ActiveSupport::Concern
include LooksAhead
included do
type Types::MergeRequestType, null: true
end
def resolve(**args)
def resolve_with_lookahead(**args)
args[:iids] = Array.wrap(args[:iids]) if args[:iids]
args.compact!
@ -18,7 +19,7 @@ module ResolvesMergeRequests
else
args[:project_id] ||= project
MergeRequestsFinder.new(current_user, args).execute
apply_lookahead(MergeRequestsFinder.new(current_user, args).execute)
end.then(&(single? ? :first : :itself))
end
@ -41,10 +42,26 @@ module ResolvesMergeRequests
# rubocop: disable CodeReuse/ActiveRecord
def batch_load(iid)
BatchLoader::GraphQL.for(iid.to_s).batch(key: project) do |iids, loader, args|
args[:key].merge_requests.where(iid: iids).each do |mr|
query = args[:key].merge_requests.where(iid: iids)
apply_lookahead(query).each do |mr|
loader.call(mr.iid.to_s, mr)
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
def unconditional_includes
[:target_project]
end
def preloads
{
assignees: [:assignees],
labels: [:labels],
author: [:author],
milestone: [:milestone],
head_pipeline: [:merge_request_diff, { head_pipeline: [:merge_request] }]
}
end
end

View File

@ -125,6 +125,7 @@ module Types
Types::MergeRequestType.connection_type,
null: true,
description: 'Merge requests of the project',
extras: [:lookahead],
resolver: Resolvers::MergeRequestsResolver
field :merge_request,

View File

@ -136,7 +136,7 @@ class DiffNote < Note
# As an extra benefit, the returned `diff_file` already
# has `highlighted_diff_lines` data set from Redis on
# `Diff::FileCollection::MergeRequestDiff`.
file = noteable.diffs(original_position.diff_options).diff_files.first
file = original_position.find_diff_file_from(noteable)
# if line is not found in persisted diffs, fallback and retrieve file from repository using gitaly
# This is required because of https://gitlab.com/gitlab-org/gitlab/issues/42676
file = nil if file&.line_for_position(original_position).nil? && importing?

View File

@ -271,7 +271,10 @@ class WikiPage
def title_changed?
if persisted?
old_title, old_dir = wiki.page_title_and_dir(self.class.unhyphenize(page.url_path))
# A page's `title` will be returned from Gollum/Gitaly with any +<>
# characters changed to -, whereas the `path` preserves these characters.
path_without_extension = Pathname(page.path).sub_ext('').to_s
old_title, old_dir = wiki.page_title_and_dir(self.class.unhyphenize(path_without_extension))
new_title, new_dir = wiki.page_title_and_dir(self.class.unhyphenize(title))
new_title != old_title || (title.include?('/') && new_dir != old_dir)

View File

@ -20,6 +20,9 @@
%link{ rel: 'dns-prefetch', href: ActionController::Base.asset_host }
%link{ rel: 'preconnect', href: ActionController::Base.asset_host, crossorigin: '' }
- if Gitlab::CurrentSettings.snowplow_enabled? && Gitlab::CurrentSettings.snowplow_collector_hostname
%link{ rel: 'preconnect', href: Gitlab::CurrentSettings.snowplow_collector_hostname, crossorigin: '' }
%meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' }
-# Open Graph - http://ogp.me/

View File

@ -0,0 +1,5 @@
---
title: Allow wiki pages with +<> characters in their title to be saved
merge_request: 33803
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Add GraphQL lookahead support
merge_request: 32373
author:
type: performance

View File

@ -0,0 +1,5 @@
---
title: Filter pipelines by status
merge_request: 32151
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Adjust wrong column reference for ResetMergeStatus (background job)
merge_request: 33899
author:
type: fixed

View File

@ -123,7 +123,7 @@ not without its own challenges:
- By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
the recommended storage driver. See [Using the overlayfs driver](#use-the-overlayfs-driver)
for details.
- Since the `docker:19.03.8-dind` container and the Runner container don't share their
- Since the `docker:19.03.11-dind` container and the Runner container don't share their
root filesystem, the job's working directory can be used as a mount point for
child containers. For example, if you have files you want to share with a
child container, you may create a subdirectory under `/builds/$CI_PROJECT_PATH`
@ -142,7 +142,7 @@ not without its own challenges:
An example project using this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.
In the examples below, we are using Docker image tags to specify a
specific version, such as `docker:19.03.8`. If tags like `docker:stable`
specific version, such as `docker:19.03.11`. If tags like `docker:stable`
are used, you have no control over what version is going to be used and this
can lead to unpredictable behavior, especially when new versions are
released.
@ -158,7 +158,7 @@ issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/83) for
details.
The Docker daemon supports connection over TLS and it's done by default
for Docker 19.03.8 or higher. This is the **suggested** way to use the
for Docker 19.03.11 or higher. This is the **suggested** way to use the
Docker-in-Docker service and
[GitLab.com Shared Runners](../../user/gitlab_com/index.md#shared-runners)
support this.
@ -174,13 +174,13 @@ support this.
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:19.03.8" \
--docker-image "docker:19.03.11" \
--docker-privileged \
--docker-volumes "/certs/client"
```
The above command will register a new Runner to use the special
`docker:19.03.8` image, which is provided by Docker. **Notice that it's
`docker:19.03.11` image, which is provided by Docker. **Notice that it's
using the `privileged` mode to start the build and service
containers.** If you want to use [Docker-in-Docker](https://www.docker.com/blog/docker-can-now-run-within-docker/) mode, you always
have to use `privileged = true` in your Docker containers.
@ -199,7 +199,7 @@ support this.
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.8"
image = "docker:19.03.11"
privileged = true
disable_cache = false
volumes = ["/certs/client", "/cache"]
@ -209,10 +209,10 @@ support this.
```
1. You can now use `docker` in the build script (note the inclusion of the
`docker:19.03.8-dind` service):
`docker:19.03.11-dind` service):
```yaml
image: docker:19.03.8
image: docker:19.03.11
variables:
# When using dind service, we need to instruct docker, to talk with
@ -237,7 +237,7 @@ support this.
DOCKER_TLS_CERTDIR: "/certs"
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
before_script:
- docker info
@ -264,7 +264,7 @@ Assuming that the Runner `config.toml` is similar to:
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.8"
image = "docker:19.03.11"
privileged = true
disable_cache = false
volumes = ["/cache"]
@ -274,10 +274,10 @@ Assuming that the Runner `config.toml` is similar to:
```
You can now use `docker` in the build script (note the inclusion of the
`docker:19.03.8-dind` service):
`docker:19.03.11-dind` service):
```yaml
image: docker:19.03.8
image: docker:19.03.11
variables:
# When using dind service we need to instruct docker, to talk with the
@ -298,7 +298,7 @@ variables:
DOCKER_TLS_CERTDIR: ""
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
before_script:
- docker info
@ -318,7 +318,7 @@ container so that Docker is available in the context of that image.
NOTE: **Note:**
If you bind the Docker socket [when using GitLab Runner 11.11 or
newer](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1261),
you can no longer use `docker:19.03.8-dind` as a service because volume bindings
you can no longer use `docker:19.03.11-dind` as a service because volume bindings
are done to the services as well, making these incompatible.
In order to do that, follow the steps:
@ -333,12 +333,12 @@ In order to do that, follow the steps:
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
--docker-image "docker:19.03.8" \
--docker-image "docker:19.03.11" \
--docker-volumes /var/run/docker.sock:/var/run/docker.sock
```
The above command will register a new Runner to use the special
`docker:19.03.8` image which is provided by Docker. **Notice that it's using
`docker:19.03.11` image which is provided by Docker. **Notice that it's using
the Docker daemon of the Runner itself, and any containers spawned by Docker
commands will be siblings of the Runner rather than children of the Runner.**
This may have complications and limitations that are unsuitable for your workflow.
@ -352,7 +352,7 @@ In order to do that, follow the steps:
executor = "docker"
[runners.docker]
tls_verify = false
image = "docker:19.03.8"
image = "docker:19.03.11"
privileged = false
disable_cache = false
volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
@ -361,11 +361,11 @@ In order to do that, follow the steps:
```
1. You can now use `docker` in the build script (note that you don't need to
include the `docker:19.03.8-dind` service as when using the Docker in Docker
include the `docker:19.03.11-dind` service as when using the Docker in Docker
executor):
```yaml
image: docker:19.03.8
image: docker:19.03.11
before_script:
- docker info
@ -419,10 +419,10 @@ any image that's used with the `--cache-from` argument must first be pulled
Here's a `.gitlab-ci.yml` file showing how Docker caching can be used:
```yaml
image: docker:19.03.8
image: docker:19.03.11
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
# Use TLS https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#tls-enabled

View File

@ -101,6 +101,7 @@ you can filter the pipeline list by:
- Trigger author
- Branch name
- Status ([since GitLab 13.1](https://gitlab.com/gitlab-org/gitlab/-/issues/217617))
### Run a pipeline manually

View File

@ -644,6 +644,59 @@ abstractions Resolvers should not be considered re-usable, finders are to be
preferred), remember to call the `ready?` method and check the boolean flag
before calling `resolve`! An example can be seen in our [`GraphQLHelpers`](https://gitlab.com/gitlab-org/gitlab/-/blob/2d395f32d2efbb713f7bc861f96147a2a67e92f2/spec/support/helpers/graphql_helpers.rb#L20-27).
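A minimal sketch of that pattern (assuming `resolver` is an already-instantiated resolver and `args` is its arguments hash):
```ruby
# `ready?` may return true, false, or [false, early_return_value].
ready, early_return = resolver.ready?(**args)
return early_return unless ready
resolver.resolve(**args)
```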
### Look-Ahead
The full query is known in advance during execution, which means we can make use
of [lookahead](https://graphql-ruby.org/queries/lookahead.html) to optimize our
queries, and batch load associations we know we will need. Consider adding
lookahead support in your resolvers to avoid `N+1` performance issues.
To enable support for common lookahead use-cases (pre-loading associations when
child fields are requested), you can
include [`LooksAhead`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/graphql/resolvers/concerns/looks_ahead.rb). For example:
```ruby
# Assuming a model `MyThing` with attributes `[child_attribute, other_attribute, nested]`,
# where nested has an attribute named `included_attribute`.
class MyThingResolver < BaseResolver
include LooksAhead
# Rather than defining `resolve(**args)`, we implement: `resolve_with_lookahead(**args)`
def resolve_with_lookahead(**args)
apply_lookahead(MyThingFinder.new(current_user).execute)
end
# We list things that should always be preloaded:
# For example, if child_attribute is always needed (during authorization
# perhaps), then we can include it here.
def unconditional_includes
[:child_attribute]
end
# We list things that should be included if a certain field is selected:
def preloads
{
field_one: [:other_attribute],
field_two: [{ nested: [:included_attribute] }]
}
end
end
```
Finally, every field that uses this resolver needs to advertise the need for lookahead:
```ruby
# in ParentType
field :my_things, MyThingType.connection_type, null: true,
extras: [:lookahead], # Necessary
resolver: MyThingResolver,
description: 'My things'
```
For an example of real world use, please
see [`ResolvesMergeRequests`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/graphql/resolvers/concerns/resolves_merge_requests.rb).
## Mutations
Mutations are used to change any stored values, or to trigger

View File

@ -728,6 +728,7 @@ We include guidance for links in the following categories:
- How to set up [criteria](#basic-link-criteria) for configuring a link.
- What to set up when [linking to a `help`](../documentation/index.md#linking-to-help) page.
- How to set up [links to internal documentation](#links-to-internal-documentation) for cross-references.
- How to set up [links to external documentation](#links-to-external-documentation) for authoritative sources.
- When to use [links requiring permissions](#links-requiring-permissions).
- How to set up a [link to a video](#link-to-video).
- How to [include links with version text](#text-for-documentation-requiring-version-text).
@ -780,6 +781,12 @@ To link to internal documentation:
NOTE: **Note**:
Using the Markdown extension is necessary for the [`/help`](index.md#gitlab-help) section of GitLab.
### Links to external documentation
When describing interactions with external software, it's often helpful to include links to external
documentation. When possible, make sure that you are linking to an **authoritative** source.
For example, if you're describing a feature in Microsoft's Active Directory, include a link to official Microsoft documentation.
### Links requiring permissions
Don't link directly to:

View File

@ -12,6 +12,8 @@ are very appreciative of the work done by translators and proofreaders!
- Tsegaselassie Tadesse - [GitLab](https://gitlab.com/tsega), [CrowdIn](https://crowdin.com/profile/tsegaselassi/activity)
- Arabic
- Proofreaders needed.
- Bosnian
- Proofreaders needed.
- Bulgarian
- Lyubomir Vasilev - [CrowdIn](https://crowdin.com/profile/lyubomirv)
- Catalan
@ -27,6 +29,8 @@ are very appreciative of the work done by translators and proofreaders!
- Chinese Traditional, Hong Kong 繁體中文 (香港)
- Victor Wu - [GitLab](https://gitlab.com/victorwuky), [CrowdIn](https://crowdin.com/profile/victorwu)
- Ivan Ip - [GitLab](https://gitlab.com/lifehome), [CrowdIn](https://crowdin.com/profile/lifehome)
- Croatian
- Proofreaders needed.
- Czech
- Jan Urbanec - [GitLab](https://gitlab.com/TatranskyMedved), [CrowdIn](https://crowdin.com/profile/Tatranskymedved)
- Danish
@ -51,6 +55,8 @@ are very appreciative of the work done by translators and proofreaders!
- Proofreaders needed.
- Hebrew
- Yaron Shahrabani - [GitLab](https://gitlab.com/yarons), [CrowdIn](https://crowdin.com/profile/YaronSh)
- Hindi
- Proofreaders needed.
- Hungarian
- Proofreaders needed.
- Indonesian
@ -76,7 +82,6 @@ are very appreciative of the work done by translators and proofreaders!
- Filip Mech - [GitLab](https://gitlab.com/mehenz), [CrowdIn](https://crowdin.com/profile/mehenz)
- Maksymilian Roman - [GitLab](https://gitlab.com/villaincandle), [CrowdIn](https://crowdin.com/profile/villaincandle)
- Portuguese
- Proofreaders needed.
- Diogo Trindade - [GitLab](https://gitlab.com/luisdiogo2071317), [CrowdIn](https://crowdin.com/profile/ldiogotrindade)
- Portuguese, Brazilian
- Paulo George Gomes Bezerra - [GitLab](https://gitlab.com/paulobezerra), [CrowdIn](https://crowdin.com/profile/paulogomes.rep)
@ -90,14 +95,14 @@ are very appreciative of the work done by translators and proofreaders!
- NickVolynkin - [Crowdin](https://crowdin.com/profile/NickVolynkin)
- Andrey Komarov - [GitLab](https://gitlab.com/elkamarado), [Crowdin](https://crowdin.com/profile/kamarado)
- Iaroslav Postovalov - [GitLab](https://gitlab.com/CMDR_Tvis), [Crowdin](https://crowdin.com/profile/CMDR_Tvis)
- Serbian (Cyrillic)
- Proofreaders needed.
- Serbian (Latin)
- Serbian (Latin and Cyrillic)
- Proofreaders needed.
- Slovak
- Proofreaders needed.
- Spanish
- Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [CrowdIn](https://crowdin.com/profile/breaking_pitt)
- Swedish
- Proofreaders needed.
- Turkish
- Ali Demirtaş - [GitLab](https://gitlab.com/alidemirtas), [CrowdIn](https://crowdin.com/profile/alidemirtas)
- Ukrainian

View File

@ -2,33 +2,27 @@
To enable the Microsoft Azure OAuth2 OmniAuth provider you must register your application with Azure. Azure will generate a client ID and secret key for you to use.
1. Sign in to the [Azure Management Portal](https://portal.azure.com).
1. Sign in to the [Azure Portal](https://portal.azure.com).
1. Select "Active Directory" on the left and choose the directory you want to use to register GitLab.
1. Select "All Services" from the hamburger menu located top left and select "Azure Active Directory" or use the search bar at the top of the page to search for "Azure Active Directory".
1. You can select alternative directories by clicking the "switch tenant" button at the top of the Azure AD page.
1. Select "Applications" at the top bar and click the "Add" button the bottom.
1. Select "App registrations" from the left hand menu, then select "New registration" from the top of the page.
1. Select "Add an application my organization is developing".
1. Provide the project information and click the "Next" button.
1. Provide the required information and click the "Register" button.
- Name: 'GitLab' works just fine here.
- Type: 'WEB APPLICATION AND/OR WEB API'
- Supported account types: Select the appropriate choice based on the descriptions provided.
- Redirect URI: Enter the URL to the Azure OAuth callback of your GitLab installation (e.g. `https://gitlab.mycompany.com/users/auth/azure_oauth2/callback`). The type dropdown should be set to "Web".
1. On the "App properties" page enter the needed URI's and click the "Complete" button.
- SIGN-IN URL: Enter the URL of your GitLab installation (e.g `https://gitlab.mycompany.com/`)
- APP ID URI: Enter the endpoint URL for Microsoft to use, just has to be unique (e.g `https://mycompany.onmicrosoft.com/gitlab`)
1. On the "App Registration" page for the app you've created. Select "Certificates & secrets" on the left.
- Create a new Client secret by clicking "New client secret" and selecting a duration. Provide a description if required to help identify the secret.
- Copy the secret and note it securely; it is shown when you click the "Add" button. (You will not be able to retrieve the secret when you perform the next step or leave that blade in the Azure Portal.)
1. Select "Configure" in the top menu.
1. Select "Overview" in the left hand menu.
1. Add a "Reply URL" pointing to the Azure OAuth callback of your GitLab installation (e.g. `https://gitlab.mycompany.com/users/auth/azure_oauth2/callback`).
1. Note the "Application (client) ID" from the section at the top of the displayed page.
1. Create a "Client secret" by selecting a duration, the secret will be generated as soon as you click the "Save" button in the bottom menu.
1. Note the "CLIENT ID" and the "CLIENT SECRET".
1. Select "View endpoints" from the bottom menu.
1. You will see lots of endpoint URLs in the form `https://login.microsoftonline.com/TENANT ID/...`, note down the TENANT ID part of one of those endpoints.
1. Note the "Directory (tenant) ID" from the section at the top of the page.
1. On your GitLab server, open the configuration file.
@ -84,4 +78,4 @@ To enable the Microsoft Azure OAuth2 OmniAuth provider you must register your ap
1. [Reconfigure](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) or [restart GitLab](../administration/restart_gitlab.md#installations-from-source) for the changes to take effect if you
installed GitLab via Omnibus or from source respectively.
On the sign in page there should now be a Microsoft icon below the regular sign in form. Click the icon to begin the authentication process. Microsoft will ask the user to sign in and authorize the GitLab application. If everything goes well the user will be returned to GitLab and will be signed in.
On the sign in page there should now be a Microsoft icon below the regular sign in form. Click the icon to begin the authentication process. Microsoft will ask the user to sign in and authorize the GitLab application. If everything goes well, the user will be returned to GitLab and signed in. See [Enable OmniAuth for an Existing User](omniauth.md#enable-omniauth-for-an-existing-user) for information on how existing GitLab users can connect their newly available Azure AD accounts to their existing GitLab accounts.

View File

@ -5,23 +5,23 @@ type: concepts
# GitLab Release and Maintenance Policy
GitLab has strict policies governing version naming, as well as release pace for major, minor,
patch and security releases. New releases are usually announced on the [GitLab blog](https://about.gitlab.com/releases/categories/releases/).
patch, and security releases. New releases are usually announced on the [GitLab blog](https://about.gitlab.com/releases/categories/releases/).
Our current policy is:
- Backporting bug fixes for **only the current stable release** at any given time, see [patch releases](#patch-releases).
- Backporting to **to the previous two monthly releases in addition to the current stable release**, see [security releases](#security-releases).
- Backporting bug fixes for **only the current stable release** at any given time. (See [patch releases](#patch-releases).)
- Backporting **to the previous two monthly releases in addition to the current stable release**. (See [security releases](#security-releases).)
## Versioning
GitLab uses [Semantic Versioning](https://semver.org/) for its releases:
`(Major).(Minor).(Patch)`.
For example, for GitLab version 10.5.7:
For example, for GitLab version 12.10.6:
- `10` represents the major version. The major release was 10.0.0, but often referred to as 10.0.
- `5` represents the minor version. The minor release was 10.5.0, but often referred to as 10.5.
- `7` represents the patch number.
- `12` represents the major version. The major release was 12.0.0, but often referred to as 12.0.
- `10` represents the minor version. The minor release was 12.10.0, but often referred to as 12.10.
- `6` represents the patch number.
Any part of the version number can increment into multiple digits, for example, 13.10.11.
@ -33,6 +33,114 @@ The following table describes the version types and their release cadence:
| Minor | For when new backward-compatible functionality is introduced to the public API, a minor feature is introduced, or when a set of smaller features is rolled out. | Monthly on the 22nd. |
| Patch | For backward-compatible bug fixes that fix incorrect behavior. See [Patch releases](#patch-releases). | As needed. |
## Upgrade recommendations
We encourage everyone to run the [latest stable release](https://about.gitlab.com/releases/categories/releases/)
to ensure that you can easily upgrade to the most secure and feature-rich GitLab experience.
In order to make sure you can easily run the most recent stable release, we are working
hard to keep the update process simple and reliable.
If you are unable to follow our monthly release cycle, there are a couple of
cases you need to consider.
It is considered safe to jump between patch versions and minor versions within
one major version. For example, it is safe to:
- Upgrade the *minor* version. For example:
- `12.7.5` -> `12.10.5`
- `11.3.4` -> `11.11.1`
- `10.6.6` -> `10.8.3`
- `11.3.4` -> `11.11.8`
- `10.6.6` -> `10.8.7`
- `9.2.3` -> `9.5.5`
- `8.9.4` -> `8.12.3`
- Upgrade the *patch* version. For example:
- `12.0.4` -> `12.0.12`
- `11.11.1` -> `11.11.8`
- `10.6.3` -> `10.6.6`
- `11.11.1` -> `11.11.8`
- `10.6.3` -> `10.6.6`
- `9.5.5` -> `9.5.9`
- `8.9.2` -> `8.9.6`
NOTE: **Note:** Version-specific changes in Omnibus GitLab Linux packages can be found in [the Omnibus GitLab documentation](https://docs.gitlab.com/omnibus/update/README.html#version-specific-changes).
NOTE: **Note:**
Instructions are available for downloading an Omnibus GitLab Linux package locally and [manually installing](https://docs.gitlab.com/omnibus/manual_install.html) it.
### Upgrading major versions
Upgrading the *major* version requires more attention.
Backward-incompatible changes and migrations are reserved for major versions.
We cannot guarantee that upgrading between major versions will be seamless.
We suggest upgrading to the latest available *minor* version within
your major version before proceeding to the next major version.
Doing this will address any backward-incompatible changes or deprecations
to help ensure a successful upgrade to the next major release.
It's also important to ensure that any background migrations have been fully completed
before upgrading to a new major version. To see the current size of the `background_migration` queue,
[Check for background migrations before upgrading](../update/README.md#checking-for-background-migrations-before-upgrading).
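For instance, on an Omnibus installation you can check the queue size from the command line (a quick sketch; the linked update guide has the full instructions):
```shell
# Prints the number of jobs still waiting in the background_migration queue.
sudo gitlab-rails runner "puts Sidekiq::Queue.new('background_migration').size"
```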
If your GitLab instance has any GitLab Runners associated with it, it is very
important to upgrade the GitLab Runners to match the GitLab minor version that was
upgraded to. This is to ensure [compatibility with GitLab versions](https://docs.gitlab.com/runner/#compatibility-with-gitlab-versions).
### Version 12 onwards: Extra step for major upgrades
From version 12 onwards, an additional step is required. More significant migrations
may occur during major release upgrades.
To ensure these are successful:
1. Increment to the first minor version (`x.0.x`) during the major version jump.
1. Proceed with upgrading to a newer release.
**For example: `11.5.x` -> `11.11.x` -> `12.0.x` -> `12.10.x` -> `13.0.x`**
### Example upgrade paths
Please see the table below for some examples:
| Target version | Your version | Recommended upgrade path | Note |
| --------------------- | ------------ | ------------------------ | ---- |
| `13.2.0` | `11.5.0` | `11.5.0` -> `11.11.8` -> `12.0.12` -> `12.10.6` -> `13.0.0` -> `13.2.0` | Four intermediate versions are required: the final 11.11, 12.0, and 12.10 releases, plus 13.0. |
| `13.0.1` | `11.10.8` | `11.10.8` -> `11.11.8` -> `12.0.12` -> `12.10.6` -> `13.0.1` | Three intermediate versions are required: `11.11`, `12.0`, and `12.10`. |
| `12.10.6` | `11.3.4` | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.10.6` | Two intermediate versions are required: `11.11` and `12.0` |
| `12.9.5` | `10.4.5` | `10.4.5` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.9.5` | Three intermediate versions are required: `10.8`, `11.11`, and `12.0`, then `12.9.5` |
| `12.2.5` | `9.2.6` | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.2.5` | Four intermediate versions are required: `9.5`, `10.8`, `11.11`, `12.0`, then `12.2`. |
| `11.3.4` | `8.13.4` | `8.13.4` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> `11.3.4` | `8.17.7` is the last version in version 8, `9.5.10` is the last version in version 9, `10.8.7` is the last version in version 10. |
### Upgrades from versions earlier than 8.12
- `8.11.x` and earlier: you might have to upgrade to `8.12.0` specifically before you can
upgrade to `8.17.7`. This was [reported in an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/207259).
- [CI changes prior to version 8.0](https://docs.gitlab.com/omnibus/update/README.html#updating-gitlab-ci-from-prior-540-to-version-714-via-omnibus-gitlab)
when it was merged into GitLab.
### Multi-step upgrade paths with GitLab all-in-one Linux package repository
Linux package managers default to installing the latest available version of a package for installation and upgrades.
Upgrading directly to the latest major version can be problematic for older GitLab versions that require a multi-stage upgrade path.
When following an upgrade path spanning multiple versions, for each upgrade, specify the intended GitLab version number in your package manager's install or upgrade command.
Examples:
```shell
# apt-get (Ubuntu/Debian)
sudo apt-get upgrade gitlab-ee=12.0.12-ee.0
# yum (RHEL/CentOS 6 and 7)
yum install gitlab-ee-12.0.12-ee.0.el7
# dnf (RHEL/CentOS 8)
dnf install gitlab-ee-12.0.12-ee.0.el8
# zypper (SUSE)
zypper install gitlab-ee=12.0.12-ee.0
```
## Patch releases
Patch releases **only include bug fixes** for the current stable released version of
@ -107,89 +215,12 @@ For very serious security issues, there is
to backport security fixes to even more monthly releases of GitLab.
This decision is made on a case-by-case basis.
## Upgrade recommendations
We encourage everyone to run the [latest stable release](https://about.gitlab.com/releases/categories/releases/) to ensure that you can
easily upgrade to the most secure and feature-rich GitLab experience. In order
to make sure you can easily run the most recent stable release, we are working
hard to keep the update process simple and reliable.
If you are unable to follow our monthly release cycle, there are a couple of
cases you need to consider.
It is considered safe to jump between patch versions and minor versions within
one major version. For example, it is safe to:
- Upgrade the patch version:
- `8.9.0` -> `8.9.7`
- `8.9.0` -> `8.9.1`
- `8.9.2` -> `8.9.6`
- `9.5.5` -> `9.5.9`
- `10.6.3` -> `10.6.6`
- `11.11.1` -> `11.11.8`
- `12.0.4` -> `12.0.12`
- Upgrade the minor version:
- `8.9.4` -> `8.12.3`
- `9.2.3` -> `9.5.5`
- `10.6.6` -> `10.8.7`
- `11.3.4` -> `11.11.8`
Upgrading the major version requires more attention.
We cannot guarantee that upgrading between major versions will be seamless. As previously mentioned, major versions are reserved for backwards incompatible changes.
We recommend that you first upgrade to the latest available minor version within
your major version. By doing this, you can address any deprecation messages
that could change behavior in the next major release.
It's also important to ensure that any background migrations have been fully completed
before upgrading to a new major version. To see the current size of the `background_migration` queue,
[Check for background migrations before upgrading](../update/README.md#checking-for-background-migrations-before-upgrading).
If your GitLab instance has any GitLab Runners associated with it, it is very
important to upgrade the GitLab Runners to match the GitLab minor version that was
upgraded to. This is to ensure [compatibility with GitLab versions](https://docs.gitlab.com/runner/#compatibility-with-gitlab-versions).
### Version 12 onwards: Extra step for major upgrades
From version 12 onwards, an additional step is required. More significant migrations
may occur during major release upgrades.
To ensure these are successful:
1. Increment to the first minor version (`x.0.x`) during the major version jump.
1. Proceed with upgrading to a newer release.
For example: `11.11.x` -> `12.0.x` -> `12.8.x`
### Upgrades from old versions
- `8.11.x` and earlier: you might have to upgrade to `8.12.0` specifically before you can
upgrade to `8.17.7`. This was [reported in an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/207259).
- [CI changes prior to version 8.0](https://docs.gitlab.com/omnibus/update/README.html#updating-gitlab-ci-from-prior-540-to-version-714-via-omnibus-gitlab)
when it was merged into GitLab.
- Version specific changes in
[the Omnibus documentation](https://docs.gitlab.com/omnibus/update/README.html#version-specific-changes).
### Example upgrade paths
Please see the table below for some examples:
| Latest stable version | Your version | Recommended upgrade path | Note |
| --------------------- | ------------ | ------------------------ | ---- |
| 11.3.4 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> `11.3.4` | `8.17.7` is the last version in version `8`, `9.5.10` is the last version in version `9`, `10.8.7` is the last version in version `10` |
| 12.5.10 | 11.3.4 | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.5.10` | `11.11.8` is the last version in version `11`. `12.0.x` [is a required step](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23211#note_272842444). |
| 12.8.5 | 9.2.6 | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.8.5` | Four intermediate versions are required: the final 9.5, 10.8, 11.11 releases, plus 12.0. |
| 13.2.0 | 11.5.0 | `11.5.0` -> `11.11.8` -> `12.0.12` -> `12.10.6` -> `13.0.0` -> `13.2.0` | Five intermediate versions are required: the final 11.11, 12.0, 12.10 releases, plus 13.0. |
NOTE: **Note:**
Instructions for installing a specific version of GitLab or downloading the package locally for installation can be found at [GitLab Repositories](https://packages.gitlab.com/gitlab).
## More information
Check [our release posts](https://about.gitlab.com/releases/categories/releases/).
Each month, we publish either a major or minor release of GitLab. At the end
of those release posts there are three sections to look for: deprecations, important notes,
and upgrade barometer. These will draw your attention to:
of those release posts there are three sections to look for: Deprecations, Removals, and Important notes on upgrading. These will include:
- Steps you need to perform as part of an upgrade.
For example [8.12](https://about.gitlab.com/releases/2016/09/22/gitlab-8-12-released/#upgrade-barometer)

View File

@ -58,10 +58,10 @@ To enable Container Scanning in your pipeline, you need the following:
```yaml
build:
image: docker:19.03.8
image: docker:19.03.11
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script:
@ -114,7 +114,7 @@ build:
image: docker:stable
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
IMAGE: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script:
@ -282,7 +282,7 @@ stages:
build_latest_vulnerabilities:
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
script:
- docker pull arminc/clair-db:latest
- docker tag arminc/clair-db:latest $CI_REGISTRY/namespace/clair-vulnerabilities-db

View File

@ -458,6 +458,38 @@ If you specify the `ADDITIONAL_CA_CERT_BUNDLE` [environment variable](#available
variable's X.509 certificates are installed in the Docker image's default trust store and Conan is
configured to use this as the default `CA_CERT_PATH`.
### Configuring Go projects
To configure [Go modules](https://github.com/golang/go/wiki/Modules)
based projects, specify [environment variables](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
in the `license_scanning` job's [variables](#available-variables) section in `.gitlab-ci.yml`.
If a project has [vendored](https://golang.org/pkg/cmd/go/#hdr-Vendor_Directories) its modules,
then the combination of the `vendor` directory and `go.sum` file is used to detect the software
licenses associated with the Go module dependencies.
#### Using private Go registries
You can use the [`GOPRIVATE`](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
and [`GOPROXY`](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
environment variables to control where modules are sourced from. Alternatively, you can use
[`go mod vendor`](https://golang.org/ref/mod#tmp_28) to vendor a project's modules.
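For example, these variables might be set in the `license_scanning` job's `variables` section (the hostname and proxy values below are placeholders):
```yaml
include:
  - template: License-Scanning.gitlab-ci.yml

license_scanning:
  variables:
    GOPRIVATE: "gitlab.example.com/*"
    GOPROXY: "https://proxy.golang.org,direct"
```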
#### Custom root certificates for Go
You can specify the [`-insecure`](https://golang.org/pkg/cmd/go/internal/get/) flag by exporting the
[`GOFLAGS`](https://golang.org/cmd/go/#hdr-Environment_variables)
environment variable. For example:
```yaml
include:
- template: License-Scanning.gitlab-ci.yml
license_scanning:
variables:
GOFLAGS: '-insecure'
```
### Migration from `license_management` to `license_scanning`
In GitLab 12.8, a new name for the `license_management` job was introduced. This change was made to improve clarity around the purpose of the scan, which is to scan and collect the types of licenses present in a project's dependencies.
@ -563,6 +595,7 @@ your code and generate security reports, without requiring internet access.
Additional configuration may be needed for connecting to
[private Bower registries](#using-private-bower-registries),
[private Conan registries](#using-private-conan-registries),
[private Go registries](#using-private-go-registries),
[private Maven repositories](#using-private-maven-repos),
[private NPM registries](#using-private-npm-registries),
[private Python repositories](#using-private-python-repos),

View File

@ -248,10 +248,10 @@ should look similar to this:
```yaml
build:
image: docker:19.03.8
image: docker:19.03.11
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $CI_REGISTRY/group/project/image:latest .
@ -262,10 +262,10 @@ You can also make use of [other variables](../../../ci/variables/README.md) to a
```yaml
build:
image: docker:19.03.8
image: docker:19.03.11
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
script:
@ -288,9 +288,9 @@ when needed. Changes to `master` also get tagged as `latest` and deployed using
an application-specific deploy script:
```yaml
image: docker:19.03.8
image: docker:19.03.11
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
stages:
- build
@ -363,9 +363,9 @@ Below is an example of what your `.gitlab-ci.yml` should look like:
```yaml
build:
image: $CI_REGISTRY/group/project/docker:19.03.8
image: $CI_REGISTRY/group/project/docker:19.03.11
services:
- name: $CI_REGISTRY/group/project/docker:19.03.8-dind
- name: $CI_REGISTRY/group/project/docker:19.03.11-dind
alias: docker
stage: build
script:
@ -373,7 +373,7 @@ Below is an example of what your `.gitlab-ci.yml` should look like:
- docker run my-docker-image /script/to/run/tests
```
If you forget to set the service alias, the `docker:19.03.8` image won't find the
If you forget to set the service alias, the `docker:19.03.11` image won't find the
`dind` service, and an error like the following will be thrown:
```plaintext
@ -443,10 +443,10 @@ stages:
- clean
build_image:
image: docker:19.03.8
image: docker:19.03.11
stage: build
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
script:
@ -459,10 +459,10 @@ build_image:
- master
delete_image:
image: docker:19.03.8
image: docker:19.03.11
stage: clean
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
IMAGE_TAG: $CI_PROJECT_PATH:$CI_COMMIT_REF_SLUG
REG_SHA256: ade837fc5224acd8c34732bf54a94f579b47851cc6a7fd5899a98386b782e228

View File

@ -7,7 +7,7 @@ module Gitlab
class ResetMergeStatus
def perform(from_id, to_id)
relation = MergeRequest.where(id: from_id..to_id,
state: 'opened',
state_id: 1, # opened
merge_status: 'can_be_merged')
relation.update_all(merge_status: 'unchecked')

View File

@ -1,11 +1,11 @@
performance:
stage: performance
image: docker:19.03.8
image: docker:19.03.11
allow_failure: true
variables:
DOCKER_TLS_CERTDIR: ""
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
script:
- |
if ! docker info &>/dev/null; then

View File

@ -1,10 +1,10 @@
build:
stage: build
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.2.2"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.2.3"
variables:
DOCKER_TLS_CERTDIR: ""
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
script:
- |
if [[ -z "$CI_COMMIT_TAG" ]]; then

View File

@ -1,9 +1,9 @@
code_quality:
stage: test
image: docker:19.03.8
image: docker:19.03.11
allow_failure: true
services:
- docker:19.03.8-dind
- docker:19.03.11-dind
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""

View File

@ -157,13 +157,23 @@ module Gitlab
position_type == 'text'
end
def find_diff_file_from(diffable)
diff_files = diffable.diffs(diff_options).diff_files
if Feature.enabled?(:file_identifier_hash) && file_identifier_hash.present?
diff_files.find { |df| df.file_identifier_hash == file_identifier_hash }
else
diff_files.first
end
end
private
def find_diff_file(repository)
return unless diff_refs.complete?
return unless comparison = diff_refs.compare_in(repository.project)
comparison.diffs(diff_options).diff_files.first
find_diff_file_from(comparison)
end
def get_formatter_class(type)

View File

@ -82,7 +82,7 @@ module Gitlab
def sliced_nodes
@sliced_nodes ||=
begin
OrderInfo.validate_ordering(ordered_items, order_list)
OrderInfo.validate_ordering(ordered_items, order_list) unless loaded?(ordered_items)
sliced = ordered_items
sliced = slice_nodes(sliced, before, :before) if before.present?
@ -113,16 +113,14 @@ module Gitlab
# grab one more than we need
paginated_nodes = sliced_nodes.last(limit_value + 1)
if paginated_nodes.count > limit_value
# there is an extra node, so there is a previous page
@has_previous_page = true
paginated_nodes = paginated_nodes.last(limit_value)
end
# there is an extra node, so there is a previous page
@has_previous_page = paginated_nodes.count > limit_value
@has_previous_page ? paginated_nodes.last(limit_value) : paginated_nodes
elsif loaded?(sliced_nodes)
sliced_nodes.take(limit_value) # rubocop: disable CodeReuse/ActiveRecord
else
paginated_nodes = sliced_nodes.limit(limit_value) # rubocop: disable CodeReuse/ActiveRecord
sliced_nodes.limit(limit_value) # rubocop: disable CodeReuse/ActiveRecord
end
paginated_nodes
end
end
@ -141,6 +139,15 @@ module Gitlab
@limit_value ||= [first, last, max_page_size].compact.min
end
def loaded?(items)
case items
when Array
true
else
items.loaded?
end
end
def ordered_items
strong_memoize(:ordered_items) do
unless items.primary_key.present?
@ -149,6 +156,16 @@ module Gitlab
list = OrderInfo.build_order_list(items)
if loaded?(items)
@order_list = list.presence || [items.primary_key]
# already sorted, or trivially sorted
next items if list.present? || items.size <= 1
pkey = items.primary_key.to_sym
next items.sort_by { |item| item[pkey] }.reverse
end
# ensure there is a primary key ordering
if list&.last&.attribute_name != items.primary_key
items.order(arel_table[items.primary_key].desc) # rubocop: disable CodeReuse/ActiveRecord

View File

@ -15981,6 +15981,9 @@ msgstr ""
msgid "Pipeline|Branch name"
msgstr ""
msgid "Pipeline|Canceled"
msgstr ""
msgid "Pipeline|Commit"
msgstr ""
@ -15990,6 +15993,9 @@ msgstr ""
msgid "Pipeline|Coverage"
msgstr ""
msgid "Pipeline|Created"
msgstr ""
msgid "Pipeline|Date"
msgstr ""
@ -16002,9 +16008,15 @@ msgstr ""
msgid "Pipeline|Existing branch name or tag"
msgstr ""
msgid "Pipeline|Failed"
msgstr ""
msgid "Pipeline|Key"
msgstr ""
msgid "Pipeline|Manual"
msgstr ""
msgid "Pipeline|Merge train pipeline"
msgstr ""
@ -16014,6 +16026,12 @@ msgstr ""
msgid "Pipeline|No pipeline has been run for this commit."
msgstr ""
msgid "Pipeline|Passed"
msgstr ""
msgid "Pipeline|Pending"
msgstr ""
msgid "Pipeline|Pipeline"
msgstr ""
@ -16029,9 +16047,15 @@ msgstr ""
msgid "Pipeline|Run for"
msgstr ""
msgid "Pipeline|Running"
msgstr ""
msgid "Pipeline|Search branches"
msgstr ""
msgid "Pipeline|Skipped"
msgstr ""
msgid "Pipeline|Specify variable values to be used in this run. The values specified in %{settings_link} will be used by default."
msgstr ""

View File

@ -1,49 +0,0 @@
require_relative '../../migration_helpers'
module RuboCop
module Cop
module Migration
# This cop checks for `add_column_with_default` on a table that's been
# explicitly blacklisted because of its size.
#
# Even though this helper performs the update in batches to avoid
# downtime, using it with tables with millions of rows still causes a
# significant delay in the deploy process and is best avoided.
#
# See https://gitlab.com/gitlab-com/infrastructure/issues/1602 for more
# information.
class UpdateLargeTable < RuboCop::Cop::Cop
include MigrationHelpers
MSG = 'Using `%s` on the `%s` table will take a long time to ' \
'complete, and should be avoided unless absolutely ' \
'necessary'.freeze
BATCH_UPDATE_METHODS = %w[
:add_column_with_default
:change_column_type_concurrently
:rename_column_concurrently
:update_column_in_batches
].join(' ').freeze
def_node_matcher :batch_update?, <<~PATTERN
(send nil? ${#{BATCH_UPDATE_METHODS}} $(sym ...) ...)
PATTERN
def on_send(node)
return unless in_migration?(node)
matches = batch_update?(node)
return unless matches
update_method = matches.first
table = matches.last.to_a.first
return unless BLACKLISTED_TABLES.include?(table)
add_offense(node, location: :expression, message: format(MSG, update_method, table))
end
end
end
end
end

View File

@ -6,45 +6,6 @@ module RuboCop
plan_limits
].freeze
# Blacklisted tables due to:
# - size in GB (>= 10 GB on GitLab.com as of 02/2020)
# - number of records
BLACKLISTED_TABLES = %i[
audit_events
ci_build_trace_sections
ci_builds
ci_builds_metadata
ci_job_artifacts
ci_pipeline_variables
ci_pipelines
ci_stages
deployments
events
issues
merge_request_diff_commits
merge_request_diff_files
merge_request_diffs
merge_request_metrics
merge_requests
namespaces
note_diff_files
notes
project_authorizations
projects
project_ci_cd_settings
project_features
push_event_payloads
resource_label_events
routes
sent_notifications
services
system_note_metadata
taggings
todos
users
web_hook_logs
].freeze
# Blacklisted tables due to:
# - number of columns (> 50 on GitLab.com as of 03/2020)
# - number of records

View File

@ -0,0 +1,40 @@
Migration/UpdateLargeTable:
Enabled: true
DeniedTables: &denied_tables # size in GB (>= 10 GB on GitLab.com as of 02/2020) and/or number of records
- :audit_events
- :ci_build_trace_sections
- :ci_builds
- :ci_builds_metadata
- :ci_job_artifacts
- :ci_pipeline_variables
- :ci_pipelines
- :ci_stages
- :deployments
- :events
- :issues
- :merge_request_diff_commits
- :merge_request_diff_files
- :merge_request_diffs
- :merge_request_metrics
- :merge_requests
- :namespaces
- :note_diff_files
- :notes
- :project_authorizations
- :projects
- :project_ci_cd_settings
- :project_features
- :push_event_payloads
- :resource_label_events
- :routes
- :sent_notifications
- :services
- :system_note_metadata
- :taggings
- :todos
- :users
- :web_hook_logs
DeniedMethods:
- :change_column_type_concurrently
- :rename_column_concurrently
- :update_column_in_batches

View File

@ -171,6 +171,40 @@ RSpec.describe Projects::PipelinesController do
end
end
context 'filter by status' do
context 'when pipelines with the status exists' do
it 'returns matched pipelines' do
get_pipelines_index_json(status: 'success')
check_pipeline_response(returned: 1, all: 1, running: 0, pending: 0, finished: 1)
end
context 'when filter by unrelated scope' do
it 'returns empty list' do
get_pipelines_index_json(status: 'success', scope: 'running')
check_pipeline_response(returned: 0, all: 1, running: 0, pending: 0, finished: 1)
end
end
end
context 'when no pipeline with the status exists' do
it 'returns empty list' do
get_pipelines_index_json(status: 'manual')
check_pipeline_response(returned: 0, all: 0, running: 0, pending: 0, finished: 0)
end
end
context 'when invalid status' do
it 'returns all list' do
get_pipelines_index_json(status: 'invalid-status')
check_pipeline_response(returned: 6, all: 6, running: 2, pending: 1, finished: 3)
end
end
end
def get_pipelines_index_json(params = {})
get :index, params: {
namespace_id: project.namespace,

View File

@ -109,6 +109,17 @@ FactoryBot.define do
end
end
trait :with_head_pipeline do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
:ci_pipeline,
:running,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
end
trait :with_test_reports do
after(:build) do |merge_request|
merge_request.head_pipeline = build(

View File

@ -3,7 +3,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import PipelinesFilteredSearch from '~/pipelines/components/pipelines_filtered_search.vue';
import { users, mockSearch, pipelineWithStages, branches } from '../mock_data';
import { users, mockSearch, branches } from '../mock_data';
import { GlFilteredSearch } from '@gitlab/ui';
describe('Pipelines filtered search', () => {
@ -19,7 +19,6 @@ describe('Pipelines filtered search', () => {
const createComponent = (params = {}) => {
wrapper = mount(PipelinesFilteredSearch, {
propsData: {
pipelines: [pipelineWithStages],
projectId: '21',
params,
},
@ -67,6 +66,14 @@ describe('Pipelines filtered search', () => {
projectId: '21',
operators: [expect.objectContaining({ value: '=' })],
});
expect(getSearchToken('status')).toMatchObject({
type: 'status',
icon: 'status',
title: 'Status',
unique: true,
operators: [expect.objectContaining({ value: '=' })],
});
});
it('emits filterPipelines on submit with correct filter', () => {

View File

@ -563,6 +563,7 @@ export const branches = [
export const mockSearch = [
{ type: 'username', value: { data: 'root', operator: '=' } },
{ type: 'ref', value: { data: 'master', operator: '=' } },
{ type: 'status', value: { data: 'pending', operator: '=' } },
];
export const mockBranchesAfterMap = ['branch-1', 'branch-10', 'branch-11'];

View File

@ -684,7 +684,13 @@ describe('Pipelines', () => {
});
it('updates request data and query params on filter submit', () => {
const expectedQueryParams = { page: '1', scope: 'all', username: 'root', ref: 'master' };
const expectedQueryParams = {
page: '1',
scope: 'all',
username: 'root',
ref: 'master',
status: 'pending',
};
findFilteredSearch().vm.$emit('submit', mockSearch);

View File

@ -0,0 +1,62 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import PipelineStatusToken from '~/pipelines/components/tokens/pipeline_status_token.vue';
describe('Pipeline Status Token', () => {
let wrapper;
const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
const findAllFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion);
const findAllGlIcons = () => wrapper.findAll(GlIcon);
const stubs = {
GlFilteredSearchToken: {
template: `<div><slot name="suggestions"></slot></div>`,
},
};
const defaultProps = {
config: {
type: 'status',
icon: 'status',
title: 'Status',
unique: true,
},
value: {
data: '',
},
};
const createComponent = options => {
wrapper = shallowMount(PipelineStatusToken, {
propsData: {
...defaultProps,
},
...options,
});
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
it('passes config correctly', () => {
expect(findFilteredSearchToken().props('config')).toEqual(defaultProps.config);
});
describe('shows statuses correctly', () => {
beforeEach(() => {
createComponent({ stubs });
});
it('renders all pipeline statuses available', () => {
expect(findAllFilteredSearchSuggestions()).toHaveLength(wrapper.vm.statuses.length);
expect(findAllGlIcons()).toHaveLength(wrapper.vm.statuses.length);
});
});
});

View File

@ -0,0 +1,177 @@
# frozen_string_literal: true
require 'spec_helper'
describe LooksAhead do
include GraphqlHelpers
let_it_be(:the_user) { create(:user) }
let_it_be(:label_a) { create(:label) }
let_it_be(:label_b) { create(:label) }
let_it_be(:issue_a) { create(:issue, author: the_user, labels: [label_a, label_b]) }
let_it_be(:issue_b) { create(:issue, author: the_user, labels: [label_a]) }
let_it_be(:issue_c) { create(:issue, author: the_user, labels: [label_b]) }
# Simplified schema to test lookahead
let_it_be(:schema) do
issues_resolver = Class.new(Resolvers::BaseResolver) do
include LooksAhead
def resolve_with_lookahead(**args)
apply_lookahead(object.issues)
end
def preloads
{ labels: [:labels] }
end
end
label = Class.new(GraphQL::Schema::Object) do
graphql_name 'Label'
field :id, Integer, null: false
end
issue = Class.new(GraphQL::Schema::Object) do
graphql_name 'Issue'
field :title, String, null: true
field :labels, label.connection_type, null: true
end
user = Class.new(GraphQL::Schema::Object) do
graphql_name 'User'
field :name, String, null: true
field :issues, issue.connection_type,
null: true
field :issues_with_lookahead, issue.connection_type,
extras: [:lookahead],
resolver: issues_resolver,
null: true
end
Class.new(GraphQL::Schema) do
query(Class.new(GraphQL::Schema::Object) do
graphql_name 'Query'
field :find_user, user, null: true do
argument :username, String, required: true
end
def find_user(username:)
context[:user_db].find { |u| u.username == username }
end
end)
end
end
def query(doc = document)
GraphQL::Query.new(schema,
document: doc,
context: { user_db: [the_user] },
variables: { username: the_user.username })
end
let(:document) do
GraphQL.parse <<-GRAPHQL
query($username: String!){
findUser(username: $username) {
name
issues {
nodes {
title
labels { nodes { id } }
}
}
issuesWithLookahead {
nodes {
title
labels { nodes { id } }
}
}
}
}
GRAPHQL
end
def run_query(gql_query)
query(GraphQL.parse(gql_query)).result
end
shared_examples 'a working query on the test schema' do
it 'has a good test setup', :aggregate_failures do
expected_label_ids = [label_a, label_b].cycle.take(4).map(&:id)
issue_titles = [issue_a, issue_b, issue_c].map(&:title)
res = query.result
expect(res['errors']).to be_blank
expect(res.dig('data', 'findUser', 'name')).to eq(the_user.name)
%w(issues issuesWithLookahead).each do |field|
expect(all_issue_titles(res, field)).to match_array(issue_titles)
expect(all_label_ids(res, field)).to match_array(expected_label_ids)
end
end
end
it_behaves_like 'a working query on the test schema'
it 'preloads labels on issues' do
expect(the_user.issues).to receive(:preload).with(:labels)
query.result
end
context 'the feature flag is off' do
before do
stub_feature_flags(described_class::FEATURE_FLAG => false)
end
it_behaves_like 'a working query on the test schema'
it 'does not preload labels on issues' do
expect(the_user.issues).not_to receive(:preload).with(:labels)
query.result
end
end
it 'issues fewer queries than the naive approach' do
the_user.reload # ensure no attributes are loaded before we begin
naive = <<-GQL
query($username: String!){
findUser(username: $username) {
name
issues {
nodes {
labels { nodes { id } }
}
}
}
}
GQL
with_lookahead = <<-GQL
query($username: String!){
findUser(username: $username) {
name
issuesWithLookahead {
nodes {
labels { nodes { id } }
}
}
}
}
GQL
expect { run_query(with_lookahead) }.to issue_fewer_queries_than { run_query(naive) }
end
private
def all_label_ids(result, field_name)
result.dig('data', 'findUser', field_name, 'nodes').flat_map do |node|
node.dig('labels', 'nodes').map { |n| n['id'] }
end
end
def all_issue_titles(result, field_name)
result.dig('data', 'findUser', field_name, 'nodes').map do |node|
node['title']
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::BackgroundMigration::ResetMergeStatus, schema: 20190528180441 do
describe Gitlab::BackgroundMigration::ResetMergeStatus do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
@ -23,24 +23,24 @@ describe Gitlab::BackgroundMigration::ResetMergeStatus, schema: 20190528180441 d
end
it 'correctly updates opened mergeable MRs to unchecked' do
create_merge_request(1, state: 'opened', merge_status: 'can_be_merged')
create_merge_request(2, state: 'opened', merge_status: 'can_be_merged')
create_merge_request(3, state: 'opened', merge_status: 'can_be_merged')
create_merge_request(4, state: 'merged', merge_status: 'can_be_merged')
create_merge_request(5, state: 'opened', merge_status: 'cannot_be_merged')
create_merge_request(1, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(2, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(3, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
create_merge_request(4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged')
create_merge_request(5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged')
subject.perform(1, 5)
expected_rows = [
{ id: 1, state: 'opened', merge_status: 'unchecked' },
{ id: 2, state: 'opened', merge_status: 'unchecked' },
{ id: 3, state: 'opened', merge_status: 'unchecked' },
{ id: 4, state: 'merged', merge_status: 'can_be_merged' },
{ id: 5, state: 'opened', merge_status: 'cannot_be_merged' }
{ id: 1, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 2, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 3, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
{ id: 4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged' },
{ id: 5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged' }
]
rows = merge_requests.order(:id).map do |row|
row.attributes.slice('id', 'state', 'merge_status').symbolize_keys
row.attributes.slice('id', 'state_id', 'merge_status').symbolize_keys
end
expect(rows).to eq(expected_rows)
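For context on the switch from state to state_id above: MergeRequest.available_states maps state names to the integer values stored in state_id. The mapping is roughly the sketch below, though the authoritative values live in the model rather than in this commit.

# Approximate mapping, shown for orientation only.
MergeRequest.available_states
# => { "opened" => 1, "closed" => 2, "merged" => 3, "locked" => 4 }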

View File

@ -574,6 +574,86 @@ describe Gitlab::Diff::Position do
end
end
describe '#find_diff_file_from' do
context "position for a diff file that has changed from symlink to regular file" do
let(:commit) { project.commit("81e6355ce4e1544a3524b230952c12455de0777b") }
let(:old_symlink_file_identifier_hash) { "bfa430463f33619872d52a6b85ced59c973e42dc" }
let(:new_regular_file_identifier_hash) { "e25b60c2e5ffb977d2b1431b96c6f7800c3c3529" }
let(:file_identifier_hash) { new_regular_file_identifier_hash }
let(:args) do
{
file_identifier_hash: file_identifier_hash,
old_path: "symlink",
new_path: "symlink",
old_line: nil,
new_line: 1,
diff_refs: commit.diff_refs
}
end
let(:diffable) { commit.diff_refs.compare_in(project) }
subject(:diff_file) { described_class.new(args).find_diff_file_from(diffable) }
context 'when file_identifier_hash is disabled' do
before do
stub_feature_flags(file_identifier_hash: false)
end
it "returns the first diff file" do
expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
end
end
context 'when file_identifier_hash is enabled' do
before do
stub_feature_flags(file_identifier_hash: true)
end
context 'for new regular file' do
it "returns the correct diff file" do
expect(diff_file.file_identifier_hash).to eq(new_regular_file_identifier_hash)
end
end
context 'for old symlink file' do
let(:args) do
{
file_identifier_hash: old_symlink_file_identifier_hash,
old_path: "symlink",
new_path: "symlink",
old_line: 1,
new_line: nil,
diff_refs: commit.diff_refs
}
end
it "returns the correct diff file" do
expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
end
end
context 'when file_identifier_hash is missing' do
let(:file_identifier_hash) { nil }
it "returns the first diff file" do
expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
end
end
context 'when file_identifier_hash cannot be found' do
let(:file_identifier_hash) { "missingidentifier" }
it "returns nil" do
expect(diff_file).to be_nil
end
end
end
end
end
describe '#==' do
let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }

View File

@ -238,12 +238,32 @@ describe DiffNote do
end
context 'when the discussion was created in the diff' do
it 'returns correct diff file' do
diff_file = subject.diff_file
context 'when file_identifier_hash is disabled' do
before do
stub_feature_flags(file_identifier_hash: false)
end
expect(diff_file.old_path).to eq(position.old_path)
expect(diff_file.new_path).to eq(position.new_path)
expect(diff_file.diff_refs).to eq(position.diff_refs)
it 'returns correct diff file' do
diff_file = subject.diff_file
expect(diff_file.old_path).to eq(position.old_path)
expect(diff_file.new_path).to eq(position.new_path)
expect(diff_file.diff_refs).to eq(position.diff_refs)
end
end
context 'when file_identifier_hash is enabled' do
before do
stub_feature_flags(file_identifier_hash: true)
end
it 'returns correct diff file' do
diff_file = subject.diff_file
expect(diff_file.old_path).to eq(position.old_path)
expect(diff_file.new_path).to eq(position.new_path)
expect(diff_file.diff_refs).to eq(position.diff_refs)
end
end
end

View File

@ -651,6 +651,7 @@ describe WikiPage do
let(:untitled_page) { described_class.new(wiki) }
let(:directory_page) { create(:wiki_page, title: 'parent directory/child page') }
let(:page_with_special_characters) { create(:wiki_page, title: 'test+page') }
where(:page, :title, :changed) do
:untitled_page | nil | false
@ -658,6 +659,8 @@ describe WikiPage do
:new_page | nil | true
:new_page | 'test page' | true
:new_page | 'test-page' | true
:new_page | 'test+page' | true
:new_page | 'new title' | true
:existing_page | nil | false
@ -665,6 +668,7 @@ describe WikiPage do
:existing_page | 'test-page' | false
:existing_page | '/test page' | false
:existing_page | '/test-page' | false
:existing_page | 'test+page' | true
:existing_page | ' test page ' | true
:existing_page | 'new title' | true
:existing_page | 'new-title' | true
@ -681,6 +685,11 @@ describe WikiPage do
:directory_page | 'parent-directory / child-page' | true
:directory_page | 'other directory/child page' | true
:directory_page | 'other-directory/child page' | true
:page_with_special_characters | nil | false
:page_with_special_characters | 'test+page' | false
:page_with_special_characters | 'test-page' | true
:page_with_special_characters | 'test page' | true
end
with_them do

View File

@ -62,6 +62,54 @@ describe 'getting project information' do
end
end
describe 'performance' do
before do
project.add_developer(current_user)
mrs = create_list(:merge_request, 10, :closed, :with_head_pipeline,
source_project: project,
author: current_user)
mrs.each do |mr|
mr.assignees << create(:user)
mr.assignees << current_user
end
end
def run_query(number)
q = <<~GQL
query {
project(fullPath: "#{project.full_path}") {
mergeRequests(first: #{number}) {
nodes {
assignees { nodes { username } }
headPipeline { status }
}
}
}
}
GQL
post_graphql(q, current_user: current_user)
end
it 'returns appropriate results' do
run_query(2)
mrs = graphql_data.dig('project', 'mergeRequests', 'nodes')
expect(mrs.size).to eq(2)
expect(mrs).to all(
match(
a_hash_including(
'assignees' => { 'nodes' => all(match(a_hash_including('username' => be_present))) },
'headPipeline' => { 'status' => be_present }
)))
end
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching, :request_store do
expect { run_query(10) }.to issue_same_number_of_queries_as { run_query(1) }.or_fewer.ignoring_cached_queries
end
end
context 'when the user does not have access to the project' do
it 'returns an empty field' do
post_graphql(query, current_user: current_user)

View File

@ -1,91 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/migration/update_large_table'
describe RuboCop::Cop::Migration::UpdateLargeTable do
include CopHelper
subject(:cop) { described_class.new }
context 'in migration' do
before do
allow(cop).to receive(:in_migration?).and_return(true)
end
shared_examples 'large tables' do |update_method|
described_class::BLACKLISTED_TABLES.each do |table|
it "registers an offense for the #{table} table" do
inspect_source("#{update_method} :#{table}, :column, default: true")
aggregate_failures do
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.map(&:line)).to eq([1])
end
end
end
end
context 'for the add_column_with_default method' do
include_examples 'large tables', 'add_column_with_default'
end
context 'for the change_column_type_concurrently method' do
include_examples 'large tables', 'change_column_type_concurrently'
end
context 'for the rename_column_concurrently method' do
include_examples 'large tables', 'rename_column_concurrently'
end
context 'for the update_column_in_batches method' do
include_examples 'large tables', 'update_column_in_batches'
end
it 'registers no offense for non-blacklisted tables' do
inspect_source("add_column_with_default :table, :column, default: true")
expect(cop.offenses).to be_empty
end
it 'registers no offense for non-blacklisted methods' do
table = described_class::BLACKLISTED_TABLES.sample
inspect_source("some_other_method :#{table}, :column, default: true")
expect(cop.offenses).to be_empty
end
end
context 'outside of migration' do
let(:table) { described_class::BLACKLISTED_TABLES.sample }
it 'registers no offense for add_column_with_default' do
inspect_source("add_column_with_default :#{table}, :column, default: true")
expect(cop.offenses).to be_empty
end
it 'registers no offense for change_column_type_concurrently' do
inspect_source("change_column_type_concurrently :#{table}, :column, default: true")
expect(cop.offenses).to be_empty
end
it 'registers no offense for rename_column_concurrently' do
inspect_source("rename_column_concurrently :#{table}, :column, default: true")
expect(cop.offenses).to be_empty
end
it 'registers no offense for update_column_concurrently' do
inspect_source("update_column_concurrently :#{table}, :column, default: true")
expect(cop.offenses).to be_empty
end
end
end

View File

@ -59,11 +59,15 @@ module ExceedQueryLimitHelpers
def verify_count(&block)
@subject_block = block
actual_count > expected_count + threshold
actual_count > maximum
end
def maximum
expected_count + threshold
end
def failure_message
threshold_message = threshold > 0 ? " (+#{@threshold})" : ''
threshold_message = threshold > 0 ? " (+#{threshold})" : ''
counts = "#{expected_count}#{threshold_message}"
"Expected a maximum of #{counts} queries, got #{actual_count}:\n\n#{log_message}"
end
@ -73,7 +77,7 @@ module ExceedQueryLimitHelpers
end
end
RSpec::Matchers.define :issue_same_number_of_queries_as do
RSpec::Matchers.define :issue_fewer_queries_than do
supports_block_expectations
include ExceedQueryLimitHelpers
@ -87,25 +91,110 @@ RSpec::Matchers.define :issue_same_number_of_queries_as do
end
def expected_count
@expected_count ||= control_recorder.count
control_recorder.count
end
def verify_count(&block)
@subject_block = block
(expected_count - actual_count).abs <= threshold
# These blocks need to be evaluated in an expected order, in case
# the events in expected affect the counts in actual
expected_count
actual_count
actual_count < expected_count
end
match do |block|
verify_count(&block)
end
def failure_message
<<~MSG
Expected to issue fewer than #{expected_count} queries, but got #{actual_count}
#{log_message}
MSG
end
failure_message_when_negated do |actual|
failure_message
<<~MSG
Expected query count of #{actual_count} to be less than #{expected_count}
#{log_message}
MSG
end
end
RSpec::Matchers.define :issue_same_number_of_queries_as do
supports_block_expectations
include ExceedQueryLimitHelpers
def control
block_arg
end
chain :or_fewer do
@or_fewer = true
end
chain :ignoring_cached_queries do
@skip_cached = true
end
def control_recorder
@control_recorder ||= ActiveRecord::QueryRecorder.new(&control)
end
def expected_count
control_recorder.count
end
def verify_count(&block)
@subject_block = block
# These blocks need to be evaluated in an expected order, in case
# the events in expected affect the counts in actual
expected_count
actual_count
if @or_fewer
actual_count <= expected_count
else
(expected_count - actual_count).abs <= threshold
end
end
match do |block|
verify_count(&block)
end
def failure_message
<<~MSG
Expected #{expected_count_message} queries, but got #{actual_count}
#{log_message}
MSG
end
failure_message_when_negated do |actual|
<<~MSG
Expected #{actual_count} not to equal #{expected_count_message}
#{log_message}
MSG
end
def expected_count_message
or_fewer_msg = "or fewer" if @or_fewer
threshold_msg = "(+/- #{threshold})" unless threshold.zero?
["#{expected_count}", or_fewer_msg, threshold_msg].compact.join(' ')
end
def skip_cached
false
@skip_cached || false
end
end
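For context, both matchers are used in block-expectation form; the sketch below is a contrived usage example, and the render_* helpers are placeholders rather than methods from this commit.

# Contrived usage sketch; helper names are placeholders.
it 'issues fewer queries with preloading' do
  expect { render_with_preloading }.to issue_fewer_queries_than { render_naively }
end

it 'does not regress query counts' do
  expect { render_page(10) }
    .to issue_same_number_of_queries_as { render_page(1) }.or_fewer.ignoring_cached_queries
end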

View File

@ -64,18 +64,25 @@ describe 'layouts/_head' do
context 'when an asset_host is set and snowplow url is set' do
let(:asset_host) { 'http://test.host' }
let(:snowplow_collector_hostname) { 'www.snow.plow' }
before do
allow(ActionController::Base).to receive(:asset_host).and_return(asset_host)
allow(Gitlab::CurrentSettings).to receive(:snowplow_enabled?).and_return(true)
allow(Gitlab::CurrentSettings).to receive(:snowplow_collector_hostname).and_return('www.snow.plow')
allow(Gitlab::CurrentSettings).to receive(:snowplow_collector_hostname).and_return(snowplow_collector_hostname)
end
it 'adds a snowplow script tag with asset host' do
render
expect(rendered).to match('http://test.host/assets/snowplow/')
expect(rendered).to match('window.snowplow')
expect(rendered).to match('www.snow.plow')
expect(rendered).to match(snowplow_collector_hostname)
end
it 'adds a link preconnect tag' do
render
expect(rendered).to match(%Q(<link crossorigin="" href="#{snowplow_collector_hostname}" rel="preconnect">))
end
end