Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-18 18:08:58 +00:00
parent 874c603d7a
commit ed16c9434e
48 changed files with 1175 additions and 230 deletions

View File

@ -61,6 +61,7 @@ variables:
DOCKER_VERSION: "19.03.0"
include:
- local: .gitlab/ci/build-images.gitlab-ci.yml
- local: .gitlab/ci/cache-repo.gitlab-ci.yml
- local: .gitlab/ci/cng.gitlab-ci.yml
- local: .gitlab/ci/docs.gitlab-ci.yml

View File

@ -0,0 +1,31 @@
# This image is used by the `review-qa-*` jobs. It is not currently used by the `omnibus-gitlab` pipelines, which instead rebuild this
# image themselves, e.g. https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror/-/jobs/587107399, which we could probably avoid.
# See https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5429.
build-qa-image:
extends:
- .use-kaniko
- .build-images:rules:build-qa-image
stage: build-images
needs: []
script:
- export QA_IMAGE="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_REF_SLUG}"
- /kaniko/executor --context=${CI_PROJECT_DIR} --dockerfile=${CI_PROJECT_DIR}/qa/Dockerfile --destination=${QA_IMAGE} --cache=true
retry: 2
# This image is used by:
# - The `CNG` pipelines (via the `review-build-cng` job): https://gitlab.com/gitlab-org/build/CNG/-/blob/cfc67136d711e1c8c409bf8e57427a644393da2f/.gitlab-ci.yml#L335
# - The `omnibus-gitlab` pipelines (via the `package-and-qa` job): https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/dfd1ad475868fc84e91ab7b5706aa03e46dc3a86/.gitlab-ci.yml#L130
build-assets-image:
extends:
- .use-kaniko
- .build-images:rules:build-assets-image
stage: build-images
needs: ["compile-production-assets"]
variables:
GIT_DEPTH: "1"
script:
# TODO: Change the image tag to be the MD5 of assets files and skip image building if the image exists
# We'll also need to pass GITLAB_ASSETS_TAG to the triggered omnibus-gitlab pipeline, similarly to how we do it for triggered CNG pipelines
# https://gitlab.com/gitlab-org/gitlab/issues/208389
- run_timed_command "scripts/build_assets_image"
retry: 2

View File

@ -90,21 +90,6 @@ update-yarn-cache:
cache:
policy: push
build-assets-image:
extends:
- .use-kaniko
- .frontend:rules:compile-production-assets
stage: build-images
needs: ["compile-production-assets"]
variables:
GIT_DEPTH: "1"
script:
# TODO: Change the image tag to be the MD5 of assets files and skip image building if the image exists
# We'll also need to pass GITLAB_ASSETS_TAG to the triggered omnibus-gitlab pipeline, similarly to how we do it for triggered CNG pipelines
# https://gitlab.com/gitlab-org/gitlab/issues/208389
- run_timed_command "scripts/build_assets_image"
retry: 2
.frontend-fixtures-base:
extends:
- .frontend-base

View File

@ -49,7 +49,6 @@ update-qa-cache:
.package-and-qa-base:
image: ruby:2.6-alpine
stage: qa
dependencies: []
retry: 0
script:
- source scripts/utils.sh

View File

@ -1,14 +1,3 @@
build-qa-image:
extends:
- .use-kaniko
- .review:rules:build-qa-image
stage: build-images
needs: []
script:
- export QA_IMAGE="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_REF_SLUG}"
- /kaniko/executor --context=${CI_PROJECT_DIR} --dockerfile=${CI_PROJECT_DIR}/qa/Dockerfile --destination=${QA_IMAGE} --cache=true
retry: 2
review-cleanup:
extends:
- .default-retry

View File

@ -71,6 +71,22 @@
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
.ci-build-images-patterns: &ci-build-images-patterns
- ".gitlab-ci.yml"
- ".gitlab/ci/build-images.gitlab-ci.yml"
.ci-review-patterns: &ci-review-patterns
- ".gitlab-ci.yml"
- ".gitlab/ci/frontend.gitlab-ci.yml"
- ".gitlab/ci/build-images.gitlab-ci.yml"
- ".gitlab/ci/review.gitlab-ci.yml"
.ci-qa-patterns: &ci-qa-patterns
- ".gitlab-ci.yml"
- ".gitlab/ci/frontend.gitlab-ci.yml"
- ".gitlab/ci/build-images.gitlab-ci.yml"
- ".gitlab/ci/qa.gitlab-ci.yml"
.yaml-patterns: &yaml-patterns
- "**/*.yml"
@ -208,6 +224,26 @@
- <<: *if-master-schedule-2-hourly
- <<: *if-merge-request-title-update-caches
######################
# Build images rules #
######################
.build-images:rules:build-qa-image:
rules:
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *ci-build-images-patterns
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *code-qa-patterns
- <<: *if-dot-com-gitlab-org-schedule
.build-images:rules:build-assets-image:
rules:
- <<: *if-not-canonical-namespace
when: never
- changes: *ci-build-images-patterns
- changes: *code-backstage-qa-patterns
####################
# Cache repo rules #
####################
@ -378,7 +414,7 @@
.qa:rules:package-and-qa:
rules:
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *ci-patterns
changes: *ci-qa-patterns
allow_failure: true
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *qa-patterns
@ -569,18 +605,12 @@
################
# Review rules #
################
.review:rules:build-qa-image:
.review:rules:review-build-cng:
rules:
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-and-security-merge-request
changes: *code-qa-patterns
- <<: *if-dot-com-gitlab-org-schedule
.review:rules:review-build-cng:
rules:
- <<: *if-dot-com-gitlab-org-merge-request
changes: *ci-patterns
changes: *ci-review-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *frontend-patterns
- <<: *if-dot-com-gitlab-org-merge-request
@ -594,7 +624,7 @@
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-merge-request
changes: *ci-patterns
changes: *ci-review-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *frontend-patterns
allow_failure: true
@ -617,7 +647,7 @@
- <<: *if-not-ee
when: never
- <<: *if-dot-com-gitlab-org-merge-request
changes: *ci-patterns
changes: *ci-review-patterns
- <<: *if-dot-com-gitlab-org-merge-request
changes: *frontend-patterns
allow_failure: true

View File

@ -2,7 +2,7 @@ import { slugify } from '~/lib/utils/text_utility';
import createGqClient, { fetchPolicies } from '~/lib/graphql';
import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { parseTemplatingVariables } from './variable_mapping';
import { mergeURLVariables, parseTemplatingVariables } from './variable_mapping';
import { DATETIME_RANGE_TYPES } from '~/lib/utils/constants';
import { timeRangeToParams, getRangeType } from '~/lib/utils/datetime_range';
import { isSafeURL, mergeUrlParams } from '~/lib/utils/url_utility';
@ -289,7 +289,7 @@ export const mapToDashboardViewModel = ({
}) => {
return {
dashboard,
variables: parseTemplatingVariables(templating),
variables: mergeURLVariables(parseTemplatingVariables(templating)),
links: links.map(mapLinksToViewModel),
panelGroups: panel_groups.map(mapToPanelGroupViewModel),
};

View File

@ -1,4 +1,5 @@
import { isString } from 'lodash';
import { templatingVariablesFromUrl } from '../utils';
import { VARIABLE_TYPES } from '../constants';
/**
@ -164,4 +165,39 @@ export const parseTemplatingVariables = ({ variables = {} } = {}) =>
return acc;
}, {});
/**
* Custom variables are defined in the dashboard yml file
* and their values can be passed through the URL.
*
* On component load, this method merges variables data
* from the yml file with URL data to store in the Vuex store.
* Not all params coming from the URL need to be stored. Only
* the ones that have a corresponding variable defined in the
* yml file.
*
* This ensures that there is always a single source of truth
* for variables.
*
* This method can be improved further. See the below issue
* https://gitlab.com/gitlab-org/gitlab/-/issues/217713
*
* @param {Object} varsFromYML template variables from yml file
* @returns {Object}
*/
export const mergeURLVariables = (varsFromYML = {}) => {
const varsFromURL = templatingVariablesFromUrl();
const variables = {};
Object.keys(varsFromYML).forEach(key => {
if (Object.prototype.hasOwnProperty.call(varsFromURL, key)) {
variables[key] = {
...varsFromYML[key],
value: varsFromURL[key],
};
} else {
variables[key] = varsFromYML[key];
}
});
return variables;
};
export default {};

View File

@ -170,11 +170,10 @@ export const convertVariablesForURL = variables =>
* begin with a constant prefix so that it doesn't collide with
* other URL params.
*
* @param {String} New URL
* @param {String} search URL
* @returns {Object} The custom variables defined by the user in the URL
*/
export const getPromCustomVariablesFromUrl = (search = window.location.search) => {
export const templatingVariablesFromUrl = (search = window.location.search) => {
const params = queryToObject(search);
// pick the params with variable prefix
const paramsWithVars = pickBy(params, (val, key) => key.startsWith(VARIABLE_PREFIX));
@ -353,39 +352,4 @@ export const barChartsDataParser = (data = []) =>
{},
);
/**
* Custom variables are defined in the dashboard yml file
* and their values can be passed through the URL.
*
* On component load, this method merges variables data
* from the yml file with URL data to store in the Vuex store.
* Not all params coming from the URL need to be stored. Only
* the ones that have a corresponding variable defined in the
* yml file.
*
* This ensures that there is always a single source of truth
* for variables
*
* This method can be improved further. See the below issue
* https://gitlab.com/gitlab-org/gitlab/-/issues/217713
*
* @param {Object} varsFromYML template variables from yml file
* @returns {Object}
*/
export const mergeURLVariables = (varsFromYML = {}) => {
const varsFromURL = getPromCustomVariablesFromUrl();
const variables = {};
Object.keys(varsFromYML).forEach(key => {
if (Object.prototype.hasOwnProperty.call(varsFromURL, key)) {
variables[key] = {
...varsFromYML[key],
value: varsFromURL[key],
};
} else {
variables[key] = varsFromYML[key];
}
});
return variables;
};
export default {};

View File

@ -1,11 +1,11 @@
<script>
import { GlDeprecatedButton, GlLoadingIcon } from '@gitlab/ui';
import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
export default {
components: {
GlDeprecatedButton,
GlButton,
GlLoadingIcon,
Icon,
},
@ -58,16 +58,17 @@ export default {
</div>
<template v-else>
<gl-deprecated-button
<button
class="btn-blank btn s32 square append-right-default"
type="button"
:aria-label="ariaLabel"
:disabled="isLoading"
@click="toggleCollapsed"
>
<gl-loading-icon v-if="isLoading" />
<icon v-else :name="arrowIconName" class="js-icon" />
</gl-deprecated-button>
<gl-deprecated-button
</button>
<gl-button
variant="link"
class="js-title"
:disabled="isLoading"
@ -76,7 +77,7 @@ export default {
>
<template v-if="isCollapsed">{{ title }}</template>
<template v-else>{{ __('Collapse') }}</template>
</gl-deprecated-button>
</gl-button>
</template>
</div>

View File

@ -2,19 +2,34 @@
module KnownSignIn
include Gitlab::Utils::StrongMemoize
include CookiesHelper
KNOWN_SIGN_IN_COOKIE = :known_sign_in
KNOWN_SIGN_IN_COOKIE_EXPIRY = 14.days
private
def verify_known_sign_in
return unless current_user
notify_user unless known_remote_ip?
notify_user unless known_device? || known_remote_ip?
update_cookie
end
def known_remote_ip?
known_ip_addresses.include?(request.remote_ip)
end
def known_device?
cookies.encrypted[KNOWN_SIGN_IN_COOKIE] == current_user.id
end
def update_cookie
set_secure_cookie(KNOWN_SIGN_IN_COOKIE, current_user.id,
type: COOKIE_TYPE_ENCRYPTED, httponly: true, expires: KNOWN_SIGN_IN_COOKIE_EXPIRY)
end
def sessions
strong_memoize(:session) do
ActiveSession.list(current_user).reject(&:is_impersonated)

View File

@ -82,7 +82,7 @@ class Projects::ApplicationController < ApplicationController
end
def apply_diff_view_cookie!
set_secure_cookie(:diff_view, params.delete(:view), permanent: true) if params[:view].present?
set_secure_cookie(:diff_view, params.delete(:view), type: COOKIE_TYPE_PERMANENT) if params[:view].present?
end
def require_pages_enabled!

View File

@ -8,6 +8,7 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
before_action :commit
before_action :define_diff_vars
before_action :define_diff_comment_vars, except: [:diffs_batch, :diffs_metadata]
before_action :update_diff_discussion_positions!
around_action :allow_gitaly_ref_name_caching
@ -171,4 +172,12 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
@notes.concat(draft_notes)
end
def update_diff_discussion_positions!
return unless Feature.enabled?(:merge_ref_head_comments, @merge_request.target_project, default_enabled: true)
return unless Feature.enabled?(:merge_red_head_comments_position_on_demand, @merge_request.target_project, default_enabled: true)
return if @merge_request.has_any_diff_note_positions?
Discussions::CaptureDiffNotePositionsService.new(@merge_request).execute
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module Types
class ReleaseLinksType < BaseObject
graphql_name 'ReleaseLinks'
authorize :download_code
alias_method :release, :object
present_using ReleasePresenter
field :self_url, GraphQL::STRING_TYPE, null: true,
description: 'HTTP URL of the release'
field :merge_requests_url, GraphQL::STRING_TYPE, null: true,
description: 'HTTP URL of the merge request page filtered by this release'
field :issues_url, GraphQL::STRING_TYPE, null: true,
description: 'HTTP URL of the issues page filtered by this release'
field :edit_url, GraphQL::STRING_TYPE, null: true,
description: "HTTP URL of the release's edit page",
authorize: :update_release
end
end

View File

@ -28,6 +28,8 @@ module Types
description: 'Timestamp of when the release was released'
field :assets, Types::ReleaseAssetsType, null: true, method: :itself,
description: 'Assets of the release'
field :links, Types::ReleaseLinksType, null: true, method: :itself,
description: 'Links of the release'
field :milestones, Types::MilestoneType.connection_type, null: true,
description: 'Milestones associated to the release'
field :evidences, Types::EvidenceType.connection_type, null: true,

View File

@ -1,9 +1,19 @@
# frozen_string_literal: true
module CookiesHelper
def set_secure_cookie(key, value, httponly: false, permanent: false)
cookie_jar = permanent ? cookies.permanent : cookies
COOKIE_TYPE_PERMANENT = :permanent
COOKIE_TYPE_ENCRYPTED = :encrypted
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly }
def set_secure_cookie(key, value, httponly: false, expires: nil, type: nil)
cookie_jar = case type
when COOKIE_TYPE_PERMANENT
cookies.permanent
when COOKIE_TYPE_ENCRYPTED
cookies.encrypted
else
cookies
end
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly, expires: expires }
end
end

View File

@ -67,6 +67,10 @@ module Noteable
false
end
def has_any_diff_note_positions?
notes.any? && DiffNotePosition.where(note: notes).exists?
end
def discussion_notes
notes
end

View File

@ -1,7 +1,7 @@
- page_title _("GitLab.com import")
- header_title _("Projects"), root_path
%h3.page-title
%i.fa.fa-heart
= sprite_icon('heart', size: 16, css_class: 'gl-vertical-align-middle')
= _('Import projects from GitLab.com')
- if Feature.enabled?(:new_import_ui)

View File

@ -0,0 +1,5 @@
---
title: Allow policies to override parent rules
merge_request: 33990
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Use IP or cookie in known sign-in check
merge_request: 34102
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Fix missing templating vars set from URL in metrics dashboard
merge_request: 34668
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Update diff discussion positions on demand
merge_request: 34148
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Update heart icon from FontAwesome to GitLab SVG
merge_request: 34777
author:
type: other

New binary file (image, 50 KiB) added; not shown.

View File

@ -47,6 +47,19 @@ project. If you create the project again, it will be created in its default stat
It can take a few seconds for it to be deleted.
1. After the project is deleted, GitLab displays a message confirming your action.
## Dashboards available in Omnibus GitLab
Omnibus GitLab provides a dashboard that displays CPU and memory usage
of each GitLab server. To select the servers to be displayed in the
panels, provide a regular expression in the **Instance label regex** field.
The dashboard uses metrics available in
[Omnibus GitLab](https://docs.gitlab.com/omnibus/) installations.
![GitLab self monitoring default dashboard](img/self_monitoring_default_dashboard.png)
You can also
[create your own dashboards](../../../user/project/integrations/prometheus.md#defining-custom-dashboards-per-project).
## Connection to Prometheus
The project will be automatically configured to connect to the

View File

@ -10161,6 +10161,11 @@ type Release {
last: Int
): ReleaseEvidenceConnection
"""
Links of the release
"""
links: ReleaseLinks
"""
Milestones associated to the release
"""
@ -10452,6 +10457,28 @@ type ReleaseEvidenceEdge {
node: ReleaseEvidence
}
type ReleaseLinks {
"""
HTTP URL of the release's edit page
"""
editUrl: String
"""
HTTP URL of the issues page filtered by this release
"""
issuesUrl: String
"""
HTTP URL of the merge request page filtered by this release
"""
mergeRequestsUrl: String
"""
HTTP URL of the release
"""
selfUrl: String
}
"""
Represents the source code attached to a release in a particular format
"""

View File

@ -29727,6 +29727,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "links",
"description": "Links of the release",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "ReleaseLinks",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "milestones",
"description": "Milestones associated to the release",
@ -30508,6 +30522,75 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ReleaseLinks",
"description": null,
"fields": [
{
"name": "editUrl",
"description": "HTTP URL of the release's edit page",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "issuesUrl",
"description": "HTTP URL of the issues page filtered by this release",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "mergeRequestsUrl",
"description": "HTTP URL of the merge request page filtered by this release",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "selfUrl",
"description": "HTTP URL of the release",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "ReleaseSource",

View File

@ -1410,6 +1410,7 @@ Represents a release
| `createdAt` | Time | Timestamp of when the release was created |
| `description` | String | Description (also known as "release notes") of the release |
| `descriptionHtml` | String | The GitLab Flavored Markdown rendering of `description` |
| `links` | ReleaseLinks | Links of the release |
| `name` | String | Name of the release |
| `releasedAt` | Time | Timestamp of when the release was released |
| `tagName` | String | Name of the tag associated with the release |
@ -1446,6 +1447,15 @@ Evidence for a release
| `id` | ID! | ID of the evidence |
| `sha` | String | SHA1 ID of the evidence hash |
## ReleaseLinks
| Name | Type | Description |
| --- | ---- | ---------- |
| `editUrl` | String | HTTP URL of the release's edit page |
| `issuesUrl` | String | HTTP URL of the issues page filtered by this release |
| `mergeRequestsUrl` | String | HTTP URL of the merge request page filtered by this release |
| `selfUrl` | String | HTTP URL of the release |
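
For example, these fields can be requested through the `links` field on `Release` (the project path and tag name below are placeholders):

```graphql
query {
  project(fullPath: "my-group/my-project") {
    release(tagName: "v1.0.0") {
      links {
        selfUrl
        mergeRequestsUrl
        issuesUrl
        editUrl
      }
    }
  }
}
```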
## ReleaseSource
Represents the source code attached to a release in a particular format

View File

@ -158,6 +158,89 @@ end
will include all rules from `ProjectPolicy`. The delegated conditions will be evaluated with the correct delegated subject, and will be sorted along with the regular rules in the policy. Note that only the relevant rules for a particular ability will actually be considered.
### Overrides
We allow policies to opt out of delegated abilities.
Delegated policies may define some abilities in a way that is incorrect for the
delegating policy. Take for example a child/parent relationship, where some
abilities can be inferred, and some cannot:
```ruby
class ParentPolicy < BasePolicy
condition(:speaks_spanish) { @subject.spoken_languages.include?(:es) }
condition(:has_license) { @subject.driving_license.present? }
condition(:enjoys_broccoli) { @subject.enjoyment_of(:broccoli) > 0 }
rule { speaks_spanish }.enable :read_spanish
rule { has_license }.enable :drive_car
rule { enjoys_broccoli }.enable :eat_broccoli
rule { ~enjoys_broccoli }.prevent :eat_broccoli
end
```
Here, if we delegated the child policy to the parent policy, some values would be
incorrect - we might correctly infer that the child can speak their parent's
language, but it would be incorrect to infer that the child can drive or would
eat broccoli just because the parent can and does.
Some of these things we can deal with - we can forbid driving universally in the
child policy, for example:
```ruby
class ChildPolicy < BasePolicy
delegate { @subject.parent }
rule { default }.prevent :drive_car
end
```
But the food preferences one is harder - because of the `prevent` call in the
parent policy, if the parent dislikes it, even calling `enable` in the child
will not enable `:eat_broccoli`.
We could remove the `prevent` call in the parent policy, but that still doesn't
help us, since the rules are different: parents get to eat what they like, and
children eat what they are given, provided they are well behaved. Allowing
delegation would mean that only children whose parents enjoy green vegetables
would ever eat broccoli. But a parent may well give their child broccoli, even if
they dislike it themselves, because it is good for their child.
The solution is to override the `:eat_broccoli` ability in the child policy:
```ruby
class ChildPolicy < BasePolicy
delegate { @subject.parent }
overrides :eat_broccoli
condition(:good_kid) { @subject.behavior_level >= Child::GOOD }
rule { good_kid }.enable :eat_broccoli
end
```
With this definition, the `ChildPolicy` will _never_ look in the `ParentPolicy` to
satisfy `:eat_broccoli`, but it _will_ use it for any other abilities. The child
policy can then define `:eat_broccoli` in a way that makes sense for `Child` and not
`Parent`.
### Alternatives to using `overrides`
Overriding policy delegation is complex, for the same reason delegation is
complex - it involves reasoning about logical inference, and being clear about
semantics. Misuse of `overrides` has the potential to duplicate code and to
introduce security bugs, allowing things that should be prevented.
For this reason, it should be used only when other approaches are not feasible.
Other approaches include, for example, using different ability names. Choosing
to eat a food and eating food you are given are semantically distinct, and they
could be named differently (perhaps `chooses_to_eat_broccoli` and
`eats_what_is_given` in this case). Whether this works depends on how polymorphic
the call site is: if we know that the policy will always be checked with a `Parent`
or a `Child`, we can choose the appropriate ability name; if the call site is
polymorphic, we cannot.
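
For illustration, a minimal sketch of this alternative, reusing the hypothetical parent/child example above (none of these classes or ability names exist in the GitLab codebase):

```ruby
class ParentPolicy < BasePolicy
  condition(:enjoys_broccoli) { @subject.enjoyment_of(:broccoli) > 0 }

  # Parents eat what they choose to eat.
  rule { enjoys_broccoli }.enable :chooses_to_eat_broccoli
end

class ChildPolicy < BasePolicy
  delegate { @subject.parent }

  condition(:good_kid) { @subject.behavior_level >= Child::GOOD }

  # Children eat what they are given, provided they are well behaved.
  # Because the ability names no longer collide, no `overrides` call is needed.
  rule { good_kid }.enable :eats_what_is_given
end
```

Call sites that know they are dealing with a `Child` check `can?(:eats_what_is_given)`, while call sites that know they have a `Parent` check `can?(:chooses_to_eat_broccoli)`.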
## Specifying Policy Class
You can also override the Policy used for a given subject:

View File

@ -306,10 +306,12 @@ stages:
deploy:
stage: deploy
script:
- echo '//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken=${CI_JOB_TOKEN}'>.npmrc
- echo '//gitlab.com/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}'>.npmrc
- npm publish
```
Learn more about [using `CI_JOB_TOKEN` to authenticate to the GitLab NPM registry](#authenticating-with-a-ci-job-token).
## Troubleshooting
### Error running yarn with NPM registry

View File

@ -22,7 +22,7 @@ See the [authentication topic](../../topics/authentication/index.md) for more de
### Unknown sign-in
GitLab will notify you if a sign-in occurs that is from an unknown IP address.
GitLab will notify you if a sign-in occurs that is from an unknown IP address or device.
See [Unknown Sign-In Notification](unknown_sign_in_notification.md) for more details.
## User profile

View File

@ -9,16 +9,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/27211) in GitLab 13.0.
When a user successfully signs in from a previously unknown IP address,
When a user successfully signs in from a previously unknown IP address or device,
GitLab notifies the user by email. In this way, GitLab proactively alerts users of potentially
malicious or unauthorized sign-ins.
There are two methods used to identify a known sign-in:
There are several methods used to identify a known sign-in. All methods must fail
for a notification email to be sent.
- Last sign-in IP: The current sign-in IP address is checked against the last sign-in
IP address.
- Current active sessions: Whether the user has an existing active session from the
same IP address. See [Active Sessions](active_sessions.md).
- Cookie: After a successful sign-in, an encrypted cookie is stored in the browser.
This cookie is set to expire 14 days after the last successful sign-in.
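
Conceptually, the check works like the following simplified Ruby sketch, based on the `KnownSignIn` concern shown earlier in this change (a sketch only, not the literal implementation):

```ruby
# A sign-in is "known" if ANY check passes; the notification email is
# sent only when ALL of them fail.
def verify_known_sign_in
  return unless current_user

  known = known_device? ||  # the encrypted known_sign_in cookie matches the user
          known_remote_ip?  # request IP matches the last sign-in IP or an active session IP

  notify_user unless known
  update_cookie # refresh the 14-day known_sign_in cookie
end
```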
## Example email

View File

@ -11,7 +11,7 @@ type: howto
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/199370) from **Settings > Repository** in GitLab 12.9.
> - [Added `write_registry` scope](https://gitlab.com/gitlab-org/gitlab/-/issues/22743) in GitLab 12.10.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/29280) from **Settings > CI / CD** in GitLab 12.10.1.
> - [Added package registry scopes](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) from **Settings > CI / CD** in GitLab 13.0.
> - [Added package registry scopes](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
Deploy tokens allow you to download (`git clone`) or push and pull packages and container registry images of a project without a username and password.
@ -46,15 +46,17 @@ respective **Revoke** button under the 'Active deploy tokens' area.
## Limiting scopes of a deploy token
Deploy tokens can be created with two different scopes that allow various
Deploy tokens can be created with different scopes that allow various
actions that a given token can perform. The available scopes are depicted in
the following table.
the following table, along with the GitLab version in which each was introduced.
| Scope | Description |
| ----- | ----------- |
| `read_repository` | Allows read-access to the repository through `git clone` |
| `read_registry` | Allows read-access to [container registry](../../packages/container_registry/index.md) images if a project is private and authorization is required. |
| `write_registry` | Allows write-access (push) to [container registry](../../packages/container_registry/index.md). |
| Scope | Description | Introduced in GitLab Version |
| ----- | ----------- | ------ |
| `read_repository` | Allows read-access to the repository through `git clone` | 10.7 |
| `read_registry` | Allows read-access to [container registry](../../packages/container_registry/index.md) images if a project is private and authorization is required. | 10.7 |
| `write_registry` | Allows write-access (push) to [container registry](../../packages/container_registry/index.md). | 12.10 |
| `read_package_registry` | Allows read access to the package registry. | 13.0 |
| `write_package_registry` | Allows write access to the package registry. | 13.0 |
## Deploy token custom username
@ -96,6 +98,8 @@ pull images from your Container Registry.
### Push Container Registry images
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/22743) in GitLab 12.10.
To push the container registry images, you'll need to:
1. Create a Deploy Token with `write_registry` as a scope.
@ -111,6 +115,8 @@ push images to your Container Registry.
### Read or pull packages
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
To pull packages in the GitLab package registry, you'll need to:
1. Create a Deploy Token with `read_package_registry` as a scope.
@ -119,6 +125,8 @@ To pull packages in the GitLab package registry, you'll need to:
### Push or upload packages
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213566) in GitLab 13.0.
To upload packages in the GitLab package registry, you'll need to:
1. Create a Deploy Token with `write_package_registry` as a scope.
@ -151,8 +159,7 @@ apply consistently when cloning the repository of related projects.
There's a special case when it comes to Deploy Tokens. If a user creates one
named `gitlab-deploy-token`, the username and token of the Deploy Token will be
automatically exposed to the CI/CD jobs as environment variables: `CI_DEPLOY_USER` and
`CI_DEPLOY_PASSWORD`, respectively. With the GitLab Deploy Token, the
`read_registry` and `write_registry` scopes are implied.
`CI_DEPLOY_PASSWORD`, respectively.
After you create the token, you can log in to the Container Registry using
those variables:
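
A minimal example, assuming a Docker-based job and the predefined `CI_REGISTRY` variable (shown here as an illustration):

```shell
# Illustrative login; in practice --password-stdin is preferred over -p.
docker login -u "$CI_DEPLOY_USER" -p "$CI_DEPLOY_PASSWORD" "$CI_REGISTRY"
```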

View File

@ -313,6 +313,11 @@ Here is an example of a Release Evidence object:
"created_at": "2019-04-17 15:45:12 UTC",
"issues": []
}
],
"report_artifacts": [
{
"url":"https://gitlab.example.com/root/project-name/-/jobs/111/artifacts/download"
}
]
}
}
@ -341,6 +346,37 @@ Releases page.
Evidence collection can be initiated by using an [API call](../../../api/releases/index.md#collect-release-evidence-premium-only) at any time. Evidence snapshots are visible on
the Release page, along with the timestamp the Evidence was collected.
### Include report artifacts as release evidence **(ULTIMATE ONLY)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/32773) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 13.2.
When you create a release, if [job artifacts](../../../ci/pipelines/job_artifacts.md#artifactsreports) are included in the last pipeline that ran, they are automatically included in the release as release evidence.
Although job artifacts normally expire, artifacts included in release evidence do not expire.
To enable job artifact collection, you need to specify both:
1. [`artifacts:paths`](../../../ci/yaml/README.md#artifactspaths)
1. [`artifacts:reports`](../../../ci/pipelines/job_artifacts.md#artifactsreports)
```yaml
ruby:
script:
- gem install bundler
- bundle install
- bundle exec rspec --format progress --format RspecJunitFormatter --out rspec.xml
artifacts:
paths:
- rspec.xml
reports:
junit: rspec.xml
```
If the pipeline ran successfully, when you create your release, the `rspec.xml` file is saved as release evidence.
NOTE: **Note:**
If you [schedule release evidence collection](#schedule-release-evidence-collection), some artifacts may have already expired by the time evidence is collected. To avoid this, you can use the [`artifacts:expire_in`](../../../ci/yaml/README.md#artifactsexpire_in) keyword. Learn more in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/222351).
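
For instance, the `ruby` job above could keep its report available longer by adding an `expire_in` value (the duration shown is only an example):

```yaml
  artifacts:
    expire_in: 1 week
    paths:
      - rspec.xml
    reports:
      junit: rspec.xml
```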
### Schedule release evidence collection
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23697) in GitLab 12.8.

View File

@ -117,6 +117,23 @@ module DeclarativePolicy
own_delegations[name] = delegation_block
end
# Declare that the given abilities should not be read from delegates.
#
# This is useful if you have an ability that you want to define
# differently in a policy than in a delegated policy, but still want to
# delegate all other abilities.
#
# example:
#
# delegate { @subject.parent }
#
# overrides :drive_car, :watch_tv
#
def overrides(*names)
@overrides ||= [].to_set
@overrides.merge(names)
end
# Declares a rule, constructed using RuleDsl, and returns
# a PolicyDsl which is used for registering the rule with
# this class. PolicyDsl will call back into Base.enable_when,
@ -265,9 +282,13 @@ module DeclarativePolicy
@runners ||= {}
@runners[ability] ||=
begin
delegated_runners = delegated_policies.values.compact.map { |p| p.runner(ability) }
own_runner = Runner.new(own_steps(ability))
delegated_runners.inject(own_runner, &:merge_runner)
if self.class.overrides.include?(ability)
own_runner
else
delegated_runners = delegated_policies.values.compact.map { |p| p.runner(ability) }
delegated_runners.inject(own_runner, &:merge_runner)
end
end
end

View File

@ -75,7 +75,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do
subject
expect(cookies['issue_sort']).to eq(value: 'popularity', secure: false, httponly: false)
expect(cookies['issue_sort']).to eq(expires: nil, value: 'popularity', secure: false, httponly: false)
end
end
@ -86,7 +86,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do
subject
expect(cookies['issue_sort']).to eq(value: 'created_asc', secure: false, httponly: false)
expect(cookies['issue_sort']).to eq(expires: nil, value: 'created_asc', secure: false, httponly: false)
end
end
end

View File

@ -91,6 +91,17 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
end
shared_examples "diff note on-demand position creation" do
it "updates diff discussion positions" do
service = double("service")
expect(Discussions::CaptureDiffNotePositionsService).to receive(:new).with(merge_request).and_return(service)
expect(service).to receive(:execute)
go
end
end
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
@ -146,6 +157,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'persisted preferred diff view cookie'
it_behaves_like 'cached diff collection'
it_behaves_like 'diff note on-demand position creation'
end
describe 'GET diffs_metadata' do

View File

@ -10,6 +10,7 @@ import {
addDashboardMetaDataToLink,
normalizeCustomDashboardPath,
} from '~/monitoring/stores/utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
@ -399,6 +400,118 @@ describe('mapToDashboardViewModel', () => {
});
});
});
describe('templating variables mapping', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('sets variables as-is from yml file if URL has no variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce();
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('sets variables as-is from yml file if URL has no matching variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('merges variables from URL with the ones from yml file', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
'var-pod': 'POD1',
'var-pod_2': 'POD2',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'POD1',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'POD2',
},
},
});
});
});
});
describe('uniqMetricsId', () => {

View File

@ -1,4 +1,5 @@
import { parseTemplatingVariables } from '~/monitoring/stores/variable_mapping';
import { parseTemplatingVariables, mergeURLVariables } from '~/monitoring/stores/variable_mapping';
import * as urlUtils from '~/lib/utils/url_utility';
import { mockTemplatingData, mockTemplatingDataResponses } from '../mock_data';
describe('parseTemplatingVariables', () => {
@ -21,3 +22,73 @@ describe('parseTemplatingVariables', () => {
expect(parseTemplatingVariables(input?.dashboard?.templating)).toEqual(expected);
});
});
describe('mergeURLVariables', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('returns empty object if variables are not defined in yml or URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
expect(mergeURLVariables({})).toEqual({});
});
it('returns empty object if variables are defined in URL but not in yml', () => {
urlUtils.queryToObject.mockReturnValueOnce({
'var-env': 'one',
'var-instance': 'localhost',
});
expect(mergeURLVariables({})).toEqual({});
});
it('returns yml variables if variables defined in yml but not in the URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
const params = {
env: 'one',
instance: 'localhost',
};
expect(mergeURLVariables(params)).toEqual(params);
});
it('returns yml variables if variables defined in URL do not match with yml variables', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost',
};
const ymlParams = {
pod: { value: 'one' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(ymlParams);
});
it('returns merged yml and URL variables if there is some match', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost:8080',
};
const ymlParams = {
instance: { value: 'localhost' },
service: { value: 'database' },
};
const merged = {
instance: { value: 'localhost:8080' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(merged);
});
});

View File

@ -169,8 +169,8 @@ describe('monitoring/utils', () => {
});
});
describe('getPromCustomVariablesFromUrl', () => {
const { getPromCustomVariablesFromUrl } = monitoringUtils;
describe('templatingVariablesFromUrl', () => {
const { templatingVariablesFromUrl } = monitoringUtils;
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
@ -195,7 +195,7 @@ describe('monitoring/utils', () => {
'var-pod': 'POD',
});
expect(getPromCustomVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' }));
expect(templatingVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' }));
});
it('returns an empty object when no custom variables are present', () => {
@ -203,7 +203,7 @@ describe('monitoring/utils', () => {
dashboard: '.gitlab/dashboards/custom_dashboard.yml',
});
expect(getPromCustomVariablesFromUrl()).toStrictEqual({});
expect(templatingVariablesFromUrl()).toStrictEqual({});
});
});
@ -427,76 +427,6 @@ describe('monitoring/utils', () => {
});
});
describe('mergeURLVariables', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('returns empty object if variables are not defined in yml or URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
expect(monitoringUtils.mergeURLVariables({})).toEqual({});
});
it('returns empty object if variables are defined in URL but not in yml', () => {
urlUtils.queryToObject.mockReturnValueOnce({
'var-env': 'one',
'var-instance': 'localhost',
});
expect(monitoringUtils.mergeURLVariables({})).toEqual({});
});
it('returns yml variables if variables defined in yml but not in the URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
const params = {
env: 'one',
instance: 'localhost',
};
expect(monitoringUtils.mergeURLVariables(params)).toEqual(params);
});
it('returns yml variables if variables defined in URL do not match with yml variables', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost',
};
const ymlParams = {
pod: { value: 'one' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(monitoringUtils.mergeURLVariables(ymlParams)).toEqual(ymlParams);
});
it('returns merged yml and URL variables if there is some match', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost:8080',
};
const ymlParams = {
instance: { value: 'localhost' },
service: { value: 'database' },
};
const merged = {
instance: { value: 'localhost:8080' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(monitoringUtils.mergeURLVariables(ymlParams)).toEqual(merged);
});
});
describe('convertVariablesForURL', () => {
it.each`
input | expected

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'spec_helper'
describe GitlabSchema.types['ReleaseLinks'] do
it { expect(described_class).to require_graphql_authorizations(:download_code) }
it 'has the expected fields' do
expected_fields = %w[
selfUrl
mergeRequestsUrl
issuesUrl
editUrl
]
expect(described_class).to include_graphql_fields(*expected_fields)
end
end

View File

@ -9,7 +9,8 @@ describe GitlabSchema.types['Release'] do
expected_fields = %w[
tag_name tag_path
description description_html
name assets milestones evidences author commit
name milestones evidences author commit
assets links
created_at released_at
]
@ -22,6 +23,12 @@ describe GitlabSchema.types['Release'] do
it { is_expected.to have_graphql_type(Types::ReleaseAssetsType) }
end
describe 'links field' do
subject { described_class.fields['links'] }
it { is_expected.to have_graphql_type(Types::ReleaseLinksType) }
end
describe 'milestones field' do
subject { described_class.fields['milestones'] }

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe CookiesHelper do
describe '#set_secure_cookie' do
it 'creates an encrypted cookie with expected attributes' do
stub_config_setting(https: true)
expiration = 1.month.from_now
key = :secure_cookie
value = 'secure value'
expect_next_instance_of(ActionDispatch::Cookies::EncryptedKeyRotatingCookieJar) do |instance|
expect(instance).to receive(:[]=).with(key, httponly: true, secure: true, expires: expiration, value: value)
end
helper.set_secure_cookie(key, value, httponly: true, expires: expiration, type: CookiesHelper::COOKIE_TYPE_ENCRYPTED)
end
it 'creates a permanent cookie with expected attributes' do
key = :permanent_cookie
value = 'permanent value'
expect_next_instance_of(ActionDispatch::Cookies::PermanentCookieJar) do |instance|
expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
end
helper.set_secure_cookie(key, value, type: CookiesHelper::COOKIE_TYPE_PERMANENT)
end
it 'creates a regular cookie with expected attributes' do
key = :regular_cookie
value = 'regular value'
expect_next_instance_of(ActionDispatch::Cookies::CookieJar) do |instance|
expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
end
helper.set_secure_cookie(key, value)
end
end
end

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require_dependency 'rspec-parameterized'
RSpec.describe 'DeclarativePolicy overrides' do
let(:foo_policy) do
Class.new(DeclarativePolicy::Base) do
condition(:foo_prop_cond) { @subject.foo_prop }
rule { foo_prop_cond }.policy do
enable :common_ability
enable :foo_prop_ability
end
end
end
let(:bar_policy) do
Class.new(DeclarativePolicy::Base) do
delegate { @subject.foo }
overrides :common_ability
condition(:bar_prop_cond) { @subject.bar_prop }
rule { bar_prop_cond }.policy do
enable :common_ability
enable :bar_prop_ability
end
rule { bar_prop_cond & can?(:foo_prop_ability) }.policy do
enable :combined_ability
end
end
end
before do
stub_const('Foo', Struct.new(:foo_prop))
stub_const('FooPolicy', foo_policy)
stub_const('Bar', Struct.new(:foo, :bar_prop))
stub_const('BarPolicy', bar_policy)
end
where(:foo_prop, :bar_prop) do
[
[true, true],
[true, false],
[false, true],
[false, false]
]
end
with_them do
let(:foo) { Foo.new(foo_prop) }
let(:bar) { Bar.new(foo, bar_prop) }
it 'determines the correct bar_prop_ability (non-delegated) permissions for bar' do
policy = DeclarativePolicy.policy_for(nil, bar)
expect(policy.allowed?(:bar_prop_ability)).to eq(bar_prop)
end
it 'determines the correct foo_prop (non-overridden) permissions for bar' do
policy = DeclarativePolicy.policy_for(nil, bar)
expect(policy.allowed?(:foo_prop_ability)).to eq(foo_prop)
end
it 'determines the correct common_ability (overridden) permissions for bar' do
policy = DeclarativePolicy.policy_for(nil, bar)
expect(policy.allowed?(:common_ability)).to eq(bar_prop)
end
it 'determines the correct common_ability permissions for foo' do
policy = DeclarativePolicy.policy_for(nil, foo)
expect(policy.allowed?(:common_ability)).to eq(foo_prop)
end
it 'allows combinations of overridden and inherited values' do
policy = DeclarativePolicy.policy_for(nil, bar)
expect(policy.allowed?(:combined_ability)).to eq(foo_prop && bar_prop)
end
end
end

View File

@ -262,4 +262,44 @@ describe Noteable do
end
end
end
describe "#has_any_diff_note_positions?" do
let(:source_branch) { "compare-with-merge-head-source" }
let(:target_branch) { "compare-with-merge-head-target" }
let(:merge_request) { create(:merge_request, source_branch: source_branch, target_branch: target_branch) }
let!(:note) do
path = "files/markdown/ruby-style-guide.md"
position = Gitlab::Diff::Position.new(
old_path: path,
new_path: path,
new_line: 508,
diff_refs: merge_request.diff_refs
)
create(:diff_note_on_merge_request, project: merge_request.project, position: position, noteable: merge_request)
end
before do
MergeRequests::MergeToRefService.new(merge_request.project, merge_request.author).execute(merge_request)
Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
end
it "returns true when it has diff note positions" do
expect(merge_request.has_any_diff_note_positions?).to be(true)
end
it "returns false when it has notes but no diff note positions" do
DiffNotePosition.where(note: note).find_each(&:delete)
expect(merge_request.has_any_diff_note_positions?).to be(false)
end
it "returns false when it has no notes" do
merge_request.notes.find_each(&:destroy)
expect(merge_request.has_any_diff_note_positions?).to be(false)
end
end
end

View File

@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'spec_helper'
require 'pp'
describe 'Query.project(fullPath).release(tagName)' do
include GraphqlHelpers
@ -12,15 +11,19 @@ describe 'Query.project(fullPath).release(tagName)' do
let_it_be(:reporter) { create(:user) }
let_it_be(:stranger) { create(:user) }
let(:params_for_issues_and_mrs) { { scope: 'all', state: 'opened', release_tag: release.tag } }
let(:post_query) { post_graphql(query, current_user: current_user) }
let(:path_prefix) { %w[project release] }
let(:data) { graphql_data.dig(*path) }
def query(rq = release_fields)
graphql_query_for(:project, { fullPath: project.full_path },
query_graphql_field(:release, { tagName: release.tag }, rq))
end
let(:post_query) { post_graphql(query, current_user: current_user) }
let(:path_prefix) { %w[project release] }
let(:data) { graphql_data.dig(*path) }
before do
stub_default_url_options(host: 'www.example.com')
end
shared_examples 'full access to the release field' do
describe 'scalar fields' do
@ -83,10 +86,10 @@ describe 'Query.project(fullPath).release(tagName)' do
it 'finds the author of the release' do
post_query
expect(data).to eq({
expect(data).to eq(
'id' => global_id_of(release.author),
'username' => release.author.username
})
)
end
end
@ -100,7 +103,7 @@ describe 'Query.project(fullPath).release(tagName)' do
it 'finds the commit associated with the release' do
post_query
expect(data).to eq({ 'sha' => release.commit.sha })
expect(data).to eq('sha' => release.commit.sha)
end
end
@ -115,7 +118,7 @@ describe 'Query.project(fullPath).release(tagName)' do
it 'returns the number of assets associated to the release' do
post_query
expect(data).to eq({ 'count' => release.sources.size + release.links.size })
expect(data).to eq('count' => release.sources.size + release.links.size)
end
end
@ -166,6 +169,28 @@ describe 'Query.project(fullPath).release(tagName)' do
end
end
describe 'links' do
let(:path) { path_prefix + %w[links] }
let(:release_fields) do
query_graphql_field(:links, nil, %{
selfUrl
mergeRequestsUrl
issuesUrl
})
end
it 'finds all release links' do
post_query
expect(data).to eq(
'selfUrl' => project_release_url(project, release),
'mergeRequestsUrl' => project_merge_requests_url(project, params_for_issues_and_mrs),
'issuesUrl' => project_issues_url(project, params_for_issues_and_mrs)
)
end
end
describe 'evidences' do
let(:path) { path_prefix + %w[evidences] }
@ -177,14 +202,13 @@ describe 'Query.project(fullPath).release(tagName)' do
post_query
evidence = release.evidences.first.present
expected = {
expect(data["nodes"].first).to eq(
'id' => global_id_of(evidence),
'sha' => evidence.sha,
'filepath' => evidence.filepath,
'collectedAt' => evidence.collected_at.utc.iso8601
}
expect(data["nodes"].first).to eq(expected)
)
end
end
end
@ -207,6 +231,38 @@ describe 'Query.project(fullPath).release(tagName)' do
end
end
shared_examples 'access to editUrl' do
let(:path) { path_prefix + %w[links] }
let(:release_fields) do
query_graphql_field(:links, nil, 'editUrl')
end
before do
post_query
end
it 'returns editUrl' do
expect(data).to eq('editUrl' => edit_project_release_url(project, release))
end
end
shared_examples 'no access to editUrl' do
let(:path) { path_prefix + %w[links] }
let(:release_fields) do
query_graphql_field(:links, nil, 'editUrl')
end
before do
post_query
end
it 'does not return editUrl' do
expect(data).to eq('editUrl' => nil)
end
end
describe "ensures that the correct data is returned based on the project's visibility and the user's access level" do
context 'when the project is private' do
let_it_be(:project) { create(:project, :repository, :private) }
@ -238,12 +294,14 @@ describe 'Query.project(fullPath).release(tagName)' do
let(:current_user) { reporter }
it_behaves_like 'full access to the release field'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Developer permissions' do
let(:current_user) { developer }
it_behaves_like 'full access to the release field'
it_behaves_like 'access to editUrl'
end
end
@ -265,12 +323,21 @@ describe 'Query.project(fullPath).release(tagName)' do
let(:current_user) { stranger }
it_behaves_like 'full access to the release field'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Guest permissions' do
let(:current_user) { guest }
it_behaves_like 'full access to the release field'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Reporter permissions' do
let(:current_user) { reporter }
it_behaves_like 'full access to the release field'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Reporter permissions' do
@ -283,6 +350,7 @@ describe 'Query.project(fullPath).release(tagName)' do
let(:current_user) { developer }
it_behaves_like 'full access to the release field'
it_behaves_like 'access to editUrl'
end
end
end

View File

@ -5,9 +5,10 @@ require 'spec_helper'
describe 'Query.project(fullPath).releases()' do
include GraphqlHelpers
let_it_be(:stranger) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:stranger) { create(:user) }
let_it_be(:developer) { create(:user) }
let(:query) do
graphql_query_for(:project, { fullPath: project.full_path },
@ -33,15 +34,25 @@ describe 'Query.project(fullPath).releases()' do
sha
}
}
links {
selfUrl
mergeRequestsUrl
issuesUrl
}
}
}
})
end
let(:params_for_issues_and_mrs) { { scope: 'all', state: 'opened', release_tag: release.tag } }
let(:post_query) { post_graphql(query, current_user: current_user) }
let(:data) { graphql_data.dig('project', 'releases', 'nodes', 0) }
before do
stub_default_url_options(host: 'www.example.com')
end
shared_examples 'full access to all repository-related fields' do
describe 'repository-related fields' do
before do
@ -57,7 +68,7 @@ describe 'Query.project(fullPath).releases()' do
{ 'sha' => e.sha }
end
expect(data).to eq({
expect(data).to eq(
'tagName' => release.tag,
'tagPath' => project_tag_path(project, release.tag),
'name' => release.name,
@ -72,8 +83,13 @@ describe 'Query.project(fullPath).releases()' do
},
'evidences' => {
'nodes' => expected_evidences
},
'links' => {
'selfUrl' => project_release_url(project, release),
'mergeRequestsUrl' => project_merge_requests_url(project, params_for_issues_and_mrs),
'issuesUrl' => project_issues_url(project, params_for_issues_and_mrs)
}
})
)
end
end
end
@ -85,7 +101,7 @@ describe 'Query.project(fullPath).releases()' do
end
it 'does not return data for fields that expose repository information' do
expect(data).to eq({
expect(data).to eq(
'tagName' => nil,
'tagPath' => nil,
'name' => "Release-#{release.id}",
@ -98,9 +114,76 @@ describe 'Query.project(fullPath).releases()' do
},
'evidences' => {
'nodes' => []
},
'links' => nil
)
end
end
end
# editUrl is tested separately because its permissions
# are slightly different from those of other release fields
shared_examples 'access to editUrl' do
let(:query) do
graphql_query_for(:project, { fullPath: project.full_path },
%{
releases {
nodes {
links {
editUrl
}
}
}
})
end
end
before do
post_query
end
it 'returns editUrl' do
expect(data).to eq(
'links' => {
'editUrl' => edit_project_release_url(project, release)
}
)
end
end
shared_examples 'no access to editUrl' do
let(:query) do
graphql_query_for(:project, { fullPath: project.full_path },
%{
releases {
nodes {
links {
editUrl
}
}
}
})
end
before do
post_query
end
it 'does not return editUrl' do
expect(data).to eq(
'links' => {
'editUrl' => nil
}
)
end
end
shared_examples 'no access to any release data' do
before do
post_query
end
it 'returns nil' do
expect(data).to eq(nil)
end
end
@ -112,6 +195,13 @@ describe 'Query.project(fullPath).releases()' do
before_all do
project.add_guest(guest)
project.add_reporter(reporter)
project.add_developer(developer)
end
context 'when the user is not logged in' do
let(:current_user) { stranger }
it_behaves_like 'no access to any release data'
end
context 'when the user has Guest permissions' do
@ -124,6 +214,14 @@ describe 'Query.project(fullPath).releases()' do
let(:current_user) { reporter }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Developer permissions' do
let(:current_user) { developer }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'access to editUrl'
end
end
@ -134,18 +232,35 @@ describe 'Query.project(fullPath).releases()' do
before_all do
project.add_guest(guest)
project.add_reporter(reporter)
project.add_developer(developer)
end
context 'when the user is not logged in' do
let(:current_user) { stranger }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Guest permissions' do
let(:current_user) { guest }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Reporter permissions' do
let(:current_user) { reporter }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'no access to editUrl'
end
context 'when the user has Developer permissions' do
let(:current_user) { developer }
it_behaves_like 'full access to all repository-related fields'
it_behaves_like 'access to editUrl'
end
end
end

View File

@ -9,14 +9,57 @@ RSpec.shared_examples 'known sign in' do
user.update!(current_sign_in_ip: ip)
end
context 'with a valid post' do
context 'when remote IP does not match user last sign in IP' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('169.0.0.1')
def stub_cookie(value = user.id)
cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE] = {
value: value, expires: KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY
}
end
context 'when the remote IP and the last sign in IP match' do
before do
stub_user_ip('169.0.0.1')
stub_remote_ip('169.0.0.1')
end
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
it 'sets/updates the encrypted cookie' do
post_action
expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end
end
context 'when the remote IP and the last sign in IP do not match' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('169.0.0.1')
end
context 'when the cookie is not previously set' do
it 'notifies the user' do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
post_action
end
it 'notifies the user' do
it 'sets the encrypted cookie' do
post_action
expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end
end
it 'notifies the user when the cookie is expired' do
stub_cookie
Timecop.freeze((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
@ -25,35 +68,30 @@ RSpec.shared_examples 'known sign in' do
end
end
context 'when remote IP matches an active session' do
before do
existing_sessions = ActiveSession.session_ids_for_user(user.id)
existing_sessions.each { |sessions| ActiveSession.destroy(user, sessions) }
it 'notifies the user when the cookie is for another user' do
stub_cookie(create(:user).id)
stub_user_ip('169.0.0.1')
stub_remote_ip('127.0.0.1')
ActiveSession.set(user, request)
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
post_action
end
context 'when remote IP address matches last sign in IP' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('127.0.0.1')
end
it 'does not notify the user when remote IP matches an active session' do
ActiveSession.set(user, request)
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
post_action
end
it 'does not notify the user when the cookie is present and not expired' do
stub_cookie
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
end
end