Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2020-09-28 12:10:02 +00:00
parent 15b3452054
commit effda22b3e
69 changed files with 970 additions and 466 deletions

View file

@ -942,7 +942,6 @@ Rails/SaveBang:
- 'spec/lib/gitlab/ci/ansi2json/style_spec.rb'
- 'spec/lib/gitlab/ci/status/build/common_spec.rb'
- 'spec/lib/gitlab/cycle_analytics/base_event_fetcher_spec.rb'
- 'spec/lib/gitlab/cycle_analytics/events_spec.rb'
- 'spec/lib/gitlab/database/custom_structure_spec.rb'
- 'spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb'
- 'spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb'

View file

@ -71,7 +71,7 @@ export default {
:action-icon="action.icon"
:tooltip-text="action.title"
:link="action.path"
class="js-stage-action stage-action position-absolute position-top-0 rounded"
class="js-stage-action stage-action rounded"
@pipelineActionRequestComplete="pipelineActionRequestComplete"
/>
</div>

View file

@ -1,11 +1,11 @@
<script>
import { GlDeprecatedButton, GlLink } from '@gitlab/ui';
import { GlButton, GlLink } from '@gitlab/ui';
import { mapState } from 'vuex';
import { s__ } from '../../locale';
export default {
components: {
GlDeprecatedButton,
GlButton,
GlLink,
},
props: {
@ -47,9 +47,9 @@ export default {
</p>
<div v-if="!missingData" class="text-left">
<gl-deprecated-button :href="clustersPath" variant="success">
<gl-button :href="clustersPath" variant="success" category="primary">
{{ s__('ServerlessDetails|Install Prometheus') }}
</gl-deprecated-button>
</gl-button>
</div>
</div>
</div>

View file

@ -136,7 +136,7 @@ class Import::FogbugzController < Import::BaseController
def verify_blocked_uri
Gitlab::UrlBlocker.validate!(
params[:uri],
{
**{
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)

View file

@ -108,7 +108,7 @@ class Import::GithubController < Import::BaseController
@client ||= if Feature.enabled?(:remove_legacy_github_client)
Gitlab::GithubImport::Client.new(session[access_token_key])
else
Gitlab::LegacyGithubImport::Client.new(session[access_token_key], client_options)
Gitlab::LegacyGithubImport::Client.new(session[access_token_key], **client_options)
end
end

View file

@ -148,6 +148,7 @@ class Issue < ApplicationRecord
after_commit :expire_etag_cache, unless: :importing?
after_save :ensure_metrics, unless: :importing?
after_create_commit :record_create_action, unless: :importing?
attr_spammable :title, spam_title: true
attr_spammable :description, spam_description: true
@ -429,6 +430,10 @@ class Issue < ApplicationRecord
metrics.record!
end
def record_create_action
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_created_action(author: author)
end
# Returns `true` if the given User can read the current Issue.
#
# This method duplicates the same check of issue_policy.rb

View file

@ -15,6 +15,7 @@ class ResourceLabelEvent < ResourceEvent
validate :exactly_one_issuable
after_save :expire_etag_cache
after_save :usage_metrics
after_destroy :expire_etag_cache
enum action: {
@ -113,6 +114,16 @@ class ResourceLabelEvent < ResourceEvent
def discussion_id_key
[self.class.name, created_at, user_id]
end
def for_issue?
issue_id.present?
end
def usage_metrics
return unless for_issue?
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user)
end
end
ResourceLabelEvent.prepend_if_ee('EE::ResourceLabelEvent')

View file

@ -11,6 +11,8 @@ class ResourceStateEvent < ResourceEvent
# state is used for issue and merge request states.
enum state: Issue.available_states.merge(MergeRequest.available_states).merge(reopened: 5)
after_save :usage_metrics
def self.issuable_attrs
%i(issue merge_request).freeze
end
@ -18,6 +20,29 @@ class ResourceStateEvent < ResourceEvent
def issuable
issue || merge_request
end
def for_issue?
issue_id.present?
end
private
def usage_metrics
return unless for_issue?
case state
when 'closed'
issue_usage_counter.track_issue_closed_action(author: user)
when 'reopened'
issue_usage_counter.track_issue_reopened_action(author: user)
else
# no-op, nothing to do, not a state we're tracking
end
end
def issue_usage_counter
Gitlab::UsageDataCounters::IssueActivityUniqueCounter
end
end
ResourceStateEvent.prepend_if_ee('EE::ResourceStateEvent')

View file

@ -13,6 +13,8 @@ class ResourceTimeboxEvent < ResourceEvent
remove: 2
}
after_save :usage_metrics
def self.issuable_attrs
%i(issue merge_request).freeze
end
@ -20,4 +22,17 @@ class ResourceTimeboxEvent < ResourceEvent
def issuable
issue || merge_request
end
private
def usage_metrics
case self
when ResourceMilestoneEvent
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_milestone_changed_action(author: user)
when ResourceIterationEvent
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_iteration_changed_action(author: user)
else
# no-op
end
end
end

View file

@ -1,7 +1,15 @@
# frozen_string_literal: true
class ResourceWeightEvent < ResourceEvent
include IssueResourceEvent
validates :issue, presence: true
include IssueResourceEvent
after_save :usage_metrics
private
def usage_metrics
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_weight_changed_action(author: user)
end
end

View file

@ -8,7 +8,11 @@ class SnippetInputActionCollection
delegate :empty?, :any?, :[], to: :actions
def initialize(actions = [], allowed_actions: nil)
@actions = actions.map { |action| SnippetInputAction.new(action.merge(allowed_actions: allowed_actions)) }
@actions = actions.map do |action|
params = action.merge(allowed_actions: allowed_actions)
SnippetInputAction.new(**params)
end
end
def to_commit_actions

View file

@ -34,7 +34,7 @@ module IncidentManagement
strong_memoize(:pager_duty_processable_events) do
::PagerDuty::WebhookPayloadParser
.call(params.to_h)
.filter { |msg| msg['event'].in?(PAGER_DUTY_PROCESSABLE_EVENT_TYPES) }
.filter { |msg| msg['event'].to_s.in?(PAGER_DUTY_PROCESSABLE_EVENT_TYPES) }
end
end

View file

@ -12,7 +12,7 @@ module Lfs
def execute
lfs_objects_relation.each_batch(of: BATCH_SIZE) do |objects|
push_objects(objects)
push_objects!(objects)
end
success
@ -30,8 +30,8 @@ module Lfs
project.lfs_objects_for_repository_types(nil, :project)
end
def push_objects(objects)
rsp = lfs_client.batch('upload', objects)
def push_objects!(objects)
rsp = lfs_client.batch!('upload', objects)
objects = objects.index_by(&:oid)
rsp.fetch('objects', []).each do |spec|
@ -53,14 +53,14 @@ module Lfs
return
end
lfs_client.upload(object, upload, authenticated: authenticated)
lfs_client.upload!(object, upload, authenticated: authenticated)
end
def verify_object!(object, spec)
# TODO: the remote has requested that we make another call to verify that
# the object has been sent correctly.
# https://gitlab.com/gitlab-org/gitlab/-/issues/250654
log_error("LFS upload verification requested, but not supported for #{object.oid}")
authenticated = spec['authenticated']
verify = spec.dig('actions', 'verify')
lfs_client.verify!(object, verify, authenticated: authenticated)
end
def url

View file

@ -83,7 +83,7 @@ module Snippets
def create_commit
attrs = commit_attrs(@snippet, INITIAL_COMMIT_MSG)
@snippet.snippet_repository.multi_files_action(current_user, files_to_commit(@snippet), attrs)
@snippet.snippet_repository.multi_files_action(current_user, files_to_commit(@snippet), **attrs)
end
def move_temporary_files

View file

@ -100,7 +100,7 @@ module Snippets
attrs = commit_attrs(snippet, INITIAL_COMMIT_MSG)
actions = [{ file_path: snippet.file_name, content: snippet.content }]
snippet.snippet_repository.multi_files_action(current_user, actions, attrs)
snippet.snippet_repository.multi_files_action(current_user, actions, **attrs)
end
def create_commit(snippet)
@ -108,7 +108,7 @@ module Snippets
attrs = commit_attrs(snippet, UPDATE_COMMIT_MSG)
snippet.snippet_repository.multi_files_action(current_user, files_to_commit(snippet), attrs)
snippet.snippet_repository.multi_files_action(current_user, files_to_commit(snippet), **attrs)
end
# Because we are removing repositories we don't want to remove

View file

@ -0,0 +1,5 @@
---
title: Make git lfs for push mirrors work to GitHub.com
merge_request: 43321
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Modify time_period for last 28 days to improve batch counting performance
merge_request: 42972
author:
type: performance

View file

@ -0,0 +1,5 @@
---
title: Fix button placement on pipeline graph
merge_request: 43419
author:
type: fixed

View file

@ -2,6 +2,6 @@
name: increased_diff_limits
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40357
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/241185
group: group::source_code
group: group::source code
type: development
default_enabled: false
default_enabled: false

View file

@ -1,7 +1,7 @@
---
name: limit_projects_in_groups_api
introduced_by_url:
rollout_issue_url:
group:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20023
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/257829
group: group::access
type: development
default_enabled: true

View file

@ -1,7 +1,7 @@
---
name: similarity_search
introduced_by_url:
rollout_issue_url:
group:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37300/
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38675
group: group::analytics
type: development
default_enabled: true

View file

@ -1,7 +1,7 @@
---
name: sql_set_operators
introduced_by_url:
rollout_issue_url:
group:
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39786#f99799ae4964b7650b877e081b669379d71bcca8
group: group::access
type: development
default_enabled: false

View file

@ -1,7 +1,7 @@
---
name: track_unique_visits
introduced_by_url:
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/33146
rollout_issue_url:
group:
group: group::analytics
type: development
default_enabled: false

View file

@ -1348,3 +1348,93 @@ You can switch an existing database cluster to use Patroni instead of repmgr with
1. Repeat the last two steps for all replica nodes. `gitlab.rb` should look the same on all nodes.
1. Optional: You can remove `gitlab_repmgr` database and role on the primary.
### Upgrading PostgreSQL major version in a Patroni cluster
As of GitLab 13.3, PostgreSQL 11.7 and 12.3 are both shipped with Omnibus GitLab. GitLab still
uses PostgreSQL 11 by default. Therefore `gitlab-ctl pg-upgrade` does not automatically upgrade
to PostgreSQL 12. If you want to upgrade to PostgreSQL 12, you must ask for it explicitly.
CAUTION: **Warning:**
The procedure for upgrading PostgreSQL in a Patroni cluster is different than when upgrading using repmgr.
The following outlines the key differences and important considerations that need to be accounted for when
upgrading PostgreSQL.
Here are a few key facts that you must consider before upgrading PostgreSQL:
- The main point is that you will have to **shut down the Patroni cluster**. This means that your
GitLab deployment will be down for the duration of database upgrade or, at least, as long as your leader
node is upgraded. This can be **a significant downtime depending on the size of your database**.
- Upgrading PostgreSQL creates a new data directory with a new control data. From Patroni's perspective
this is a new cluster that needs to be bootstrapped again. Therefore, as part of the upgrade procedure,
the cluster state, which is stored in Consul, will be wiped out. Once the upgrade is completed, Patroni
will be instructed to bootstrap a new cluster. **Note that this will change your _cluster ID_**.
- The procedures for upgrading leader and replicas are not the same. That is why it is important to use the
right procedure on each node.
- Upgrading a replica node **deletes the data directory and resynchronizes it** from the leader using the
configured replication method (currently `pg_basebackup` is the only available option). It might take some
time for replica to catch up with the leader, depending on the size of your database.
- An overview of the upgrade procedure is outlined in [Patroni's documentation](https://patroni.readthedocs.io/en/latest/existing_data.html#major-upgrade-of-postgresql-version).
You can still use `gitlab-ctl pg-upgrade` which implements this procedure with a few adjustments.
Considering these, you should carefully plan your PostgreSQL upgrade:
1. Find out which node is the leader and which node is a replica:
```shell
gitlab-ctl patroni members
```
NOTE: **Note:**
`gitlab-ctl pg-upgrade` tries to detect the role of the node. If for any reason the auto-detection
does not work or you believe it did not detect the role correctly, you can use the `--leader` or `--replica`
arguments to manually override it.
1. Stop Patroni **only on replicas**.
```shell
sudo gitlab-ctl stop patroni
```
1. Enable the maintenance mode on the **application node**:
```shell
sudo gitlab-ctl deploy-page up
```
1. Upgrade PostgreSQL on **the leader node** and make sure that the upgrade is completed successfully:
```shell
sudo gitlab-ctl pg-upgrade -V 12
```
1. Check the status of the leader and cluster. You can only proceed if you have a healthy leader:
```shell
gitlab-ctl patroni check-leader
# OR
gitlab-ctl patroni members
```
1. You can now disable the maintenance mode on the **application node**:
```shell
sudo gitlab-ctl deploy-page down
```
1. Upgrade PostgreSQL **on replicas** (you can do this in parallel on all of them):
```shell
sudo gitlab-ctl pg-upgrade -V 12
```
CAUTION: **Warning:**
Reverting PostgreSQL upgrade with `gitlab-ctl revert-pg-upgrade` has the same considerations as
`gitlab-ctl pg-upgrade`. It can be complicated and may involve deletion of the data directory.
If you need to do that, please contact GitLab support.

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
comments: false
description: Read through the GitLab installation methods.
type: index

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: howto
---

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: index
---

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
description: 'Learn how to install a GitLab instance on Google Cloud Platform.'
type: howto
---

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: howto
---

View file

@ -1,6 +1,8 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: howto
date: 2016-06-28
---
# How to install GitLab on OpenShift Origin 3

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference
---

View file

@ -1,4 +1,7 @@
---
stage: Enablement
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 41 KiB

After

Width:  |  Height:  |  Size: 133 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 144 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 152 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 78 KiB

After

Width:  |  Height:  |  Size: 161 KiB

View file

@ -70,7 +70,7 @@ Critical, High, Medium, Low, Info, Unknown). Below this, a table shows each vuln
and description. Clicking a vulnerability takes you to its [Vulnerability Details](../vulnerabilities)
page to view more information about that vulnerability.
![Project Security Dashboard](img/project_security_dashboard_v13_3.png)
![Project Security Dashboard](img/project_security_dashboard_v13_4.png)
You can filter the vulnerabilities by one or more of the following:
@ -83,7 +83,7 @@ You can also dismiss vulnerabilities in the table:
1. Select the checkbox for each vulnerability you want to dismiss.
1. In the menu that appears, select the reason for dismissal and click **Dismiss Selected**.
![Project Security Dashboard](img/project_security_dashboard_v13_2.png)
![Project Security Dashboard](img/project_security_dashboard_dismissal_v13_4.png)
## Group Security Dashboard
@ -232,8 +232,14 @@ To create an issue associated with the vulnerability, click the **Create Issue**
![Create an issue for the vulnerability](img/vulnerability_page_v13_1.png)
Once you create the issue, the vulnerability list contains a link to the issue and an icon whose
color indicates the issue's status (green for open issues, blue for closed issues).
Once you create the issue, the linked issue icon in the vulnerability list:
- Indicates that an issue has been created for that vulnerability.
- Shows a tooltip that contains a link to the issue and an icon whose
color indicates the issue's status:
- Open issues: green
- Closed issues: blue
![Display attached issues](img/vulnerability_list_table_v13_4.png)

View file

@ -4,172 +4,165 @@ group: Package
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# GitLab Composer Repository
# Composer packages in the Package Registry
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15886) in [GitLab Premium](https://about.gitlab.com/pricing/) 13.2.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/221259) to GitLab Core in 13.3.
With the GitLab Composer Repository, every project can have its own space to store [Composer](https://getcomposer.org/) packages.
Publish [Composer](https://getcomposer.org/) packages in your project's Package Registry.
Then, install the packages whenever you need to use them as a dependency.
## Enabling the Composer Repository
## Create a Composer package
NOTE: **Note:**
This option is available only if your GitLab administrator has
[enabled support for the Package Registry](../../../administration/packages/index.md).
If you do not have a Composer package, create one and check it in to
a repository. This example shows a GitLab repository, but the repository
can be any public or private repository.
When the Composer Repository is enabled, it is available for all new projects
by default. To enable it for existing projects, or if you want to disable it:
1. Create a directory called `my-composer-package` and change to that directory:
1. Navigate to your project's **Settings > General > Visibility, project features, permissions**.
1. Find the Packages feature and enable or disable it.
1. Click on **Save changes** for the changes to take effect.
```shell
mkdir my-composer-package && cd my-composer-package
```
You should then be able to see the **Packages & Registries** section on the left sidebar.
1. Run [`composer init`](https://getcomposer.org/doc/03-cli.md#init) and answer the prompts.
## Getting started
For namespace, enter your unique [namespace](../../../user/group/index.md#namespaces), like your GitLab username or group name.
This section covers creating a new example Composer package to publish. This is a
quickstart to test out the **GitLab Composer Registry**.
A file called `composer.json` is created:
To complete this section, you need a recent version of [Composer](https://getcomposer.org/).
```json
{
"name": "<namespace>/composer-test",
"type": "library",
"license": "GPL-3.0-only",
"version": "1.0.0"
}
```
### Creating a package project
1. Run Git commands to tag the changes and push them to your repository:
Understanding how to create a full Composer project is outside the scope of this
guide, but you can create a small package to test out the registry. Start by
creating a new directory called `my-composer-package`:
```shell
git init
git add composer.json
git commit -m 'Composer package test'
git tag v1.0.0
git remote add origin git@gitlab.example.com:<namespace>/<project-name>.git
git push --set-upstream origin master
git push origin v1.0.0
```
```shell
mkdir my-composer-package && cd my-composer-package
```
The package is now in your GitLab Package Registry.
Create a new `composer.json` file inside this directory to set up the basic project:
## Publish a Composer package by using the API
```shell
touch composer.json
```
Publish a Composer package to the Package Registry,
so that anyone who can access the project can use the package as a dependency.
Inside `composer.json`, add the following code:
Prerequisites:
```json
{
"name": "<namespace>/composer-test",
"type": "library",
"license": "GPL-3.0-only",
"version": "1.0.0"
}
```
- A package in a GitLab repository.
- The project ID, which is on the project's home page.
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api`.
Replace `<namespace>` with a unique namespace like your GitLab username or group name.
NOTE: **Note:**
[Deploy tokens](./../../project/deploy_tokens/index.md) are
[not yet supported](https://gitlab.com/gitlab-org/gitlab/-/issues/240897) for use with Composer.
After this basic package structure is created, we need to tag it in Git and push it to the repository.
To publish the package:
```shell
git init
git add composer.json
git commit -m 'Composer package test'
git tag v1.0.0
git remote add origin git@gitlab.com:<namespace>/<project-name>.git
git push --set-upstream origin master
git push origin v1.0.0
```
- Send a `POST` request to the [Packages API](../../../api/packages.md).
For example, you can use `curl`:
### Publishing the package
```shell
curl --data tag=<tag> "https://__token__:<personal-access-token>@gitlab.example.com/api/v4/projects/<project_id>/packages/composer"
```
Now that the basics of our project is completed, we can publish the package.
To publish the package, you need:
- `<personal-access-token>` is your personal access token.
- `<project_id>` is your project ID.
- `<tag>` is the Git tag name of the version you want to publish.
To publish a branch, use `branch=<branch>` instead of `tag=<tag>`.
- A personal access token or `CI_JOB_TOKEN`.
You can view the published package by going to **Packages & Registries > Package Registry** and
selecting the **Composer** tab.
([Deploy tokens](./../../project/deploy_tokens/index.md) are not yet supported for use with Composer.)
## Publish a Composer package by using CI/CD
- Your project ID which can be found on the home page of your project.
You can publish a Composer package to the Package Registry as part of your CI/CD process.
To publish the package hosted on GitLab, make a `POST` request to the GitLab package API.
A tool like `curl` can be used to make this request:
1. Specify a `CI_JOB_TOKEN` in your `.gitlab-ci.yml` file:
You can generate a [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api` for repository authentication. For example:
```yaml
stages:
- deploy
```shell
curl --data tag=<tag> 'https://__token__:<personal-access-token>@gitlab.com/api/v4/projects/<project_id>/packages/composer'
```
deploy:
stage: deploy
script:
- 'curl --header "Job-Token: $CI_JOB_TOKEN" --data tag=<tag> "https://gitlab.example.com/api/v4/projects/$CI_PROJECT_ID/packages/composer"'
```
Where:
1. Run the pipeline.
- `<personal-access-token>` is your personal access token.
- `<project_id>` is your project ID.
- `<tag>` is the Git tag name of the version you want to publish. In this example it should be `v1.0.0`. Notice that instead of `tag=<tag>` you can also use `branch=<branch>` to publish branches.
You can view the published package by going to **Packages & Registries > Package Registry** and selecting the **Composer** tab.
If the above command succeeds, you now should be able to see the package under the **Packages & Registries** section of your project page.
### Use a CI/CD template
### Publishing the package with CI/CD
A more detailed Composer CI/CD file is also available as a `.gitlab-ci.yml` template:
To work with Composer commands within [GitLab CI/CD](./../../../ci/README.md), you can
publish Composer packages by using `CI_JOB_TOKEN` in your `.gitlab-ci.yml` file:
1. On the left sidebar, click **Project overview**.
1. Above the file list, click **Set up CI/CD**. If this button is not available, select **CI/CD Configuration** and then **Edit**.
1. From the **Apply a template** list, select **Composer**.
```yaml
stages:
- deploy
CAUTION: **Warning:**
Do not save unless you want to overwrite the existing CI/CD file.
deploy:
stage: deploy
script:
- 'curl --header "Job-Token: $CI_JOB_TOKEN" --data tag=<tag> "https://gitlab.example.com/api/v4/projects/$CI_PROJECT_ID/packages/composer"'
```
## Install a Composer package
### Installing a package
Install a package from the Package Registry so you can use it as a dependency.
To install your package, you need:
Prerequisites:
- A personal access token. You can generate a [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api` for repository authentication.
- Your group ID which can be found on the home page of your project's group.
- A package in the Package Registry.
- The group ID, which is on the group's home page.
- A [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api`.
Add the GitLab Composer package repository to your existing project's `composer.json` file, along with the package name and version you want to install like so:
NOTE: **Note:**
[Deploy tokens](./../../project/deploy_tokens/index.md) are
[not yet supported](https://gitlab.com/gitlab-org/gitlab/-/issues/240897) for use with Composer.
```json
{
...
"repositories": [
{ "type": "composer", "url": "https://gitlab.com/api/v4/group/<group_id>/-/packages/composer/packages.json" }
],
"require": {
...
"<package_name>": "<version>"
},
...
}
```
To install a package:
Where:
1. Add the Package Registry URL to your project's `composer.json` file, along with the package name and version you want to install:
- `<group_id>` is the group ID found under your project's group page.
- `<package_name>` is your package name as defined in your package's `composer.json` file.
- `<version>` is your package version (`1.0.0` in this example).
```json
{
...
"repositories": [
{ "type": "composer", "url": "https://gitlab.example.com/api/v4/group/<group_id>/-/packages/composer/packages.json" }
],
"require": {
...
"<package_name>": "<version>"
},
...
}
```
You also need to create a `auth.json` file with your GitLab credentials:
- `<group_id>` is the group ID.
- `<package_name>` is the package name defined in your package's `composer.json` file.
- `<version>` is the package version.
```json
{
"gitlab-token": {
"gitlab.com": "<personal_access_token>"
}
}
```
1. Create an `auth.json` file with your GitLab credentials:
Where:
```shell
composer config gitlab-token.<DOMAIN-NAME> <personal_access_token>
```
- `<personal_access_token>` is your personal access token.
With the `composer.json` and `auth.json` files configured, you can install the package by running `composer`:
```shell
composer update
```
If successful, you should be able to see the output indicating that the package has been successfully installed.
Output indicates that the package has been successfully installed.
CAUTION: **Important:**
Make sure to never commit the `auth.json` file to your repository. To install packages from a CI job,
Never commit the `auth.json` file to your repository. To install packages from a CI/CD job,
consider using the [`composer config`](https://getcomposer.org/doc/articles/handling-private-packages-with-satis.md#authentication) tool with your personal access token
stored in a [GitLab CI/CD environment variable](../../../ci/variables/README.md) or in
[Hashicorp Vault](../../../ci/secrets/index.md).
[HashiCorp Vault](../../../ci/secrets/index.md).

View file

@ -31,7 +31,7 @@ authenticate with GitLab by using the `CI_JOB_TOKEN`.
CI/CD templates, which you can use to get started, are in [this repo](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/ci/templates).
Learn more about [using CI/CD to build Maven packages](../maven_repository/index.md#creating-maven-packages-with-gitlab-cicd), [NPM packages](../npm_registry/index.md#publishing-a-package-with-cicd), [Composer packages](../composer_repository/index.md#publishing-the-package-with-cicd), [NuGet Packages](../nuget_repository/index.md#publishing-a-nuget-package-with-cicd), [Conan Packages](../conan_repository/index.md#using-gitlab-ci-with-conan-packages), and [PyPI packages](../pypi_repository/index.md#using-gitlab-ci-with-pypi-packages).
Learn more about [using CI/CD to build Maven packages](../maven_repository/index.md#creating-maven-packages-with-gitlab-cicd), [NPM packages](../npm_registry/index.md#publishing-a-package-with-cicd), [Composer packages](../composer_repository/index.md#publish-a-composer-package-by-using-cicd), [NuGet Packages](../nuget_repository/index.md#publishing-a-nuget-package-with-cicd), [Conan Packages](../conan_repository/index.md#using-gitlab-ci-with-conan-packages), and [PyPI packages](../pypi_repository/index.md#using-gitlab-ci-with-pypi-packages).
If you use CI/CD to build a package, extended activity
information is displayed when you view the package details:

View file

@ -109,7 +109,7 @@ module Gitlab
end
def create_commit(snippet)
snippet.snippet_repository.multi_files_action(commit_author(snippet), snippet_action(snippet), commit_attrs)
snippet.snippet_repository.multi_files_action(commit_author(snippet), snippet_action(snippet), **commit_attrs)
end
# If the user is not allowed to access git or update the snippet

View file

@ -95,7 +95,7 @@ module Gitlab
run_block_with_transaction
rescue ActiveRecord::LockWaitTimeout
if retry_with_lock_timeout?
disable_idle_in_transaction_timeout
disable_idle_in_transaction_timeout if ActiveRecord::Base.connection.transaction_open?
wait_until_next_retry
reset_db_settings
@ -149,7 +149,7 @@ module Gitlab
log(message: "Couldn't acquire lock to perform the migration", current_iteration: current_iteration)
log(message: "Executing the migration without lock timeout", current_iteration: current_iteration)
execute("SET LOCAL lock_timeout TO '0'")
disable_lock_timeout if ActiveRecord::Base.connection.transaction_open?
run_block
@ -184,6 +184,10 @@ module Gitlab
execute("SET LOCAL idle_in_transaction_session_timeout TO '0'")
end
def disable_lock_timeout
execute("SET LOCAL lock_timeout TO '0'")
end
def reset_db_settings
execute('RESET idle_in_transaction_session_timeout; RESET lock_timeout')
end

View file

@ -6,6 +6,12 @@ module Gitlab
# * https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
# * https://github.com/git-lfs/git-lfs/blob/master/docs/api/basic-transfers.md
class Client
GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs+json'
DEFAULT_HEADERS = {
'Accept' => GIT_LFS_CONTENT_TYPE,
'Content-Type' => GIT_LFS_CONTENT_TYPE
}.freeze
attr_reader :base_url
def initialize(base_url, credentials:)
@ -13,19 +19,19 @@ module Gitlab
@credentials = credentials
end
def batch(operation, objects)
def batch!(operation, objects)
body = {
operation: operation,
transfers: ['basic'],
# We don't know `ref`, so can't send it
objects: objects.map { |object| { oid: object.oid, size: object.size } }
objects: objects.as_json(only: [:oid, :size])
}
rsp = Gitlab::HTTP.post(
batch_url,
basic_auth: basic_auth,
body: body.to_json,
headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
headers: build_request_headers
)
raise BatchSubmitError unless rsp.success?
@ -40,7 +46,7 @@ module Gitlab
body
end
def upload(object, upload_action, authenticated:)
def upload!(object, upload_action, authenticated:)
file = object.file.open
params = {
@ -60,8 +66,25 @@ module Gitlab
file&.close
end
def verify!(object, verify_action, authenticated:)
params = {
body: object.to_json(only: [:oid, :size]),
headers: build_request_headers(verify_action['header'])
}
params[:basic_auth] = basic_auth unless authenticated
rsp = Gitlab::HTTP.post(verify_action['href'], params)
raise ObjectVerifyError unless rsp.success?
end
private
def build_request_headers(extra_headers = nil)
DEFAULT_HEADERS.merge(extra_headers || {})
end
attr_reader :credentials
def batch_url
@ -96,6 +119,12 @@ module Gitlab
"Failed to upload object"
end
end
class ObjectVerifyError < StandardError
def message
"Failed to verify object"
end
end
end
end
end

View file

@ -473,7 +473,7 @@ module Gitlab
end
def last_28_days_time_period(column: :created_at)
{ column => 28.days.ago..Time.current }
{ column => 30.days.ago..2.days.ago }
end
# Source: https://gitlab.com/gitlab-data/analytics/blob/master/transform/snowflake-dbt/data/ping_metrics_to_stage_mapping_data.csv
@ -702,10 +702,10 @@ module Gitlab
counter = Gitlab::UsageDataCounters::EditorUniqueCounter
{
action_monthly_active_users_web_ide_edit: redis_usage_data { counter.count_web_ide_edit_actions(date_range) },
action_monthly_active_users_sfe_edit: redis_usage_data { counter.count_sfe_edit_actions(date_range) },
action_monthly_active_users_snippet_editor_edit: redis_usage_data { counter.count_snippet_editor_edit_actions(date_range) },
action_monthly_active_users_ide_edit: redis_usage_data { counter.count_edit_using_editor(date_range) }
action_monthly_active_users_web_ide_edit: redis_usage_data { counter.count_web_ide_edit_actions(**date_range) },
action_monthly_active_users_sfe_edit: redis_usage_data { counter.count_sfe_edit_actions(**date_range) },
action_monthly_active_users_snippet_editor_edit: redis_usage_data { counter.count_snippet_editor_edit_actions(**date_range) },
action_monthly_active_users_ide_edit: redis_usage_data { counter.count_edit_using_editor(**date_range) }
}
end

View file

@ -3,14 +3,26 @@
module Gitlab
module UsageDataCounters
module IssueActivityUniqueCounter
ISSUE_TITLE_CHANGED = 'g_project_management_issue_title_changed'
ISSUE_DESCRIPTION_CHANGED = 'g_project_management_issue_description_changed'
ISSUE_ASSIGNEE_CHANGED = 'g_project_management_issue_assignee_changed'
ISSUE_MADE_CONFIDENTIAL = 'g_project_management_issue_made_confidential'
ISSUE_MADE_VISIBLE = 'g_project_management_issue_made_visible'
ISSUE_CATEGORY = 'issues_edit'
ISSUE_ASSIGNEE_CHANGED = 'g_project_management_issue_assignee_changed'
ISSUE_CREATED = 'g_project_management_issue_created'
ISSUE_CLOSED = 'g_project_management_issue_closed'
ISSUE_DESCRIPTION_CHANGED = 'g_project_management_issue_description_changed'
ISSUE_ITERATION_CHANGED = 'g_project_management_issue_iteration_changed'
ISSUE_LABEL_CHANGED = 'g_project_management_issue_label_changed'
ISSUE_MADE_CONFIDENTIAL = 'g_project_management_issue_made_confidential'
ISSUE_MADE_VISIBLE = 'g_project_management_issue_made_visible'
ISSUE_MILESTONE_CHANGED = 'g_project_management_issue_milestone_changed'
ISSUE_REOPENED = 'g_project_management_issue_reopened'
ISSUE_TITLE_CHANGED = 'g_project_management_issue_title_changed'
ISSUE_WEIGHT_CHANGED = 'g_project_management_issue_weight_changed'
class << self
def track_issue_created_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_CREATED, author, time)
end
def track_issue_title_changed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_TITLE_CHANGED, author, time)
end
@ -31,6 +43,30 @@ module Gitlab
track_unique_action(ISSUE_MADE_VISIBLE, author, time)
end
def track_issue_closed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_CLOSED, author, time)
end
def track_issue_reopened_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_REOPENED, author, time)
end
def track_issue_label_changed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_LABEL_CHANGED, author, time)
end
def track_issue_milestone_changed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_MILESTONE_CHANGED, author, time)
end
def track_issue_iteration_changed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_ITERATION_CHANGED, author, time)
end
def track_issue_weight_changed_action(author:, time: Time.zone.now)
track_unique_action(ISSUE_WEIGHT_CHANGED, author, time)
end
private
def track_unique_action(action, author, time)

View file

@ -206,3 +206,31 @@
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_created
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_closed
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_reopened
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_label_changed
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_milestone_changed
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_iteration_changed
category: issues_edit
redis_slot: project_management
aggregation: daily
- name: g_project_management_issue_weight_changed
category: issues_edit
redis_slot: project_management
aggregation: daily

View file

@ -0,0 +1,47 @@
{
"type": "object",
"required": ["event", "incident"],
"properties": {
"event": { "type": "string" },
"incident": {
"type": "object",
"required": [
"html_url",
"incident_number",
"title",
"status",
"created_at",
"urgency",
"incident_key"
],
"properties": {
"html_url": { "type": "string" },
"incindent_number": { "type": "integer" },
"title": { "type": "string" },
"status": { "type": "string" },
"created_at": { "type": "string" },
"urgency": { "type": "string", "enum": ["high", "low"] },
"incident_key": { "type": ["string", "null"] },
"assignments": {
"type": "array",
"items": {
"assignee": {
"type": "array",
"items": {
"summary": { "type": "string" },
"html_url": { "type": "string" }
}
}
}
},
"impacted_services": {
"type": "array",
"items": {
"summary": { "type": "string" },
"html_url": { "type": "string" }
}
}
}
}
}
}

View file

@ -2,6 +2,8 @@
module PagerDuty
class WebhookPayloadParser
SCHEMA_PATH = File.join('lib', 'pager_duty', 'validator', 'schemas', 'message.json')
def initialize(payload)
@payload = payload
end
@ -11,7 +13,7 @@ module PagerDuty
end
def call
Array(payload['messages']).map { |msg| parse_message(msg) }
Array(payload['messages']).map { |msg| parse_message(msg) }.reject(&:empty?)
end
private
@ -19,6 +21,8 @@ module PagerDuty
attr_reader :payload
def parse_message(message)
return {} unless valid_message?(message)
{
'event' => message['event'],
'incident' => parse_incident(message['incident'])
@ -26,8 +30,6 @@ module PagerDuty
end
def parse_incident(incident)
return {} if incident.blank?
{
'url' => incident['html_url'],
'incident_number' => incident['incident_number'],
@ -62,5 +64,9 @@ module PagerDuty
def reject_empty(entities)
Array(entities).reject { |e| e['summary'].blank? && e['url'].blank? }
end
def valid_message?(message)
::JSONSchemer.schema(Pathname.new(SCHEMA_PATH)).valid?(message)
end
end
end

View file

@ -213,6 +213,10 @@ module QA
run("cat #{file}").to_s
end
def delete_netrc
File.delete(netrc_file_path) if File.exist?(netrc_file_path)
end
private
attr_reader :uri, :username, :password, :known_hosts_file,

View file

@ -28,6 +28,13 @@ module QA
end
end
after do
# Delete the .netrc file created during this test so that subsequent tests don't try to use the logins
Git::Repository.perform do |repository|
repository.delete_netrc
end
end
it 'download archives of each user project then check they are different', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/427' do
archive_checksums = {}

View file

@ -15,7 +15,7 @@ module QA
disable_optional_jobs(project)
end
describe 'Auto DevOps support', :orchestrated, :kubernetes, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/230927', type: :stale } do
describe 'Auto DevOps support', :orchestrated, :kubernetes, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/251090', type: :stale } do
context 'when rbac is enabled' do
let(:cluster) { Service::KubernetesCluster.new.create! }
@ -24,6 +24,8 @@ module QA
end
it 'runs auto devops', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/702' do
skip('Test requires tunnel: see https://gitlab.com/gitlab-org/gitlab/-/issues/251090')
Flow::Login.sign_in
# Set an application secret CI variable (prefixed with K8S_SECRET_)

View file

@ -97,9 +97,9 @@ FactoryBot.define do
create(:grafana_integration, project: projects[1], enabled: true)
create(:grafana_integration, project: projects[2], enabled: false)
create(:package, project: projects[0])
create(:package, project: projects[0])
create(:package, project: projects[1])
create(:package, project: projects[0], created_at: 3.days.ago)
create(:package, project: projects[0], created_at: 3.days.ago)
create(:package, project: projects[1], created_at: 3.days.ago)
create(:package, created_at: 2.months.ago, project: projects[1])
# User Preferences
@ -109,7 +109,7 @@ FactoryBot.define do
# Create fresh & a month (28-days SMAU) old data
env = create(:environment, project: projects[3])
[2, 29].each do |n|
[3, 31].each do |n|
deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
create(:deployment, :failed, deployment_options)
create(:deployment, :success, deployment_options)

View file

@ -1,4 +1,4 @@
import { GlDeprecatedButton } from '@gitlab/ui';
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/serverless/store';
import missingPrometheusComponent from '~/serverless/components/missing_prometheus.vue';
@ -24,7 +24,7 @@ describe('missingPrometheusComponent', () => {
'Function invocation metrics require Prometheus to be installed first.',
);
expect(wrapper.find(GlDeprecatedButton).attributes('variant')).toBe('success');
expect(wrapper.find(GlButton).attributes('variant')).toBe('success');
});
it('should render no prometheus data message', () => {

View file

@ -187,7 +187,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'test-ref' }
let(:args) { {} }
subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, args) }
subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw path'
@ -222,7 +222,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'snippet-test-ref' }
let(:args) { {} }
subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, args) }
subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw url'

View file

@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:project) { create(:project, :repository) }
let(:head_sha) { project.repository.head_commit.id }
let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: head_sha) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:head_sha) { project.repository.head_commit.id }
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
@ -503,7 +503,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
using RSpec::Parameterized
let(:pipeline) do
build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source)
build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source, project: project)
end
context 'matches' do
@ -766,7 +766,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'with a matching changes: rule' do
let(:pipeline) do
create(:ci_pipeline, project: project).tap do |pipeline|
build(:ci_pipeline, project: project).tap do |pipeline|
stub_pipeline_modified_paths(pipeline, %w[app/models/ci/pipeline.rb spec/models/ci/pipeline_spec.rb .gitlab-ci.yml])
end
end

View file

@ -2,16 +2,20 @@
require 'spec_helper'
RSpec.describe 'cycle analytics events' do
let(:project) { create(:project, :repository) }
RSpec.describe 'cycle analytics events', :aggregate_failures do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, :admin) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let(:events) do
CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user })[stage].events
CycleAnalytics::ProjectLevel
.new(project, options: { from: from_date, current_user: user })[stage]
.events
end
let(:event) { events.first }
before do
setup(context)
end
@ -19,36 +23,15 @@ RSpec.describe 'cycle analytics events' do
describe '#issue_events' do
let(:stage) { :issue }
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
end
it 'has a title' do
expect(events.first[:title]).to eq(context.title)
end
it 'has the URL' do
expect(events.first[:url]).not_to be_nil
end
it 'has an iid' do
expect(events.first[:iid]).to eq(context.iid.to_s)
end
it 'has a created_at timestamp' do
expect(events.first[:created_at]).to end_with('ago')
end
it "has the author's URL" do
expect(events.first[:author][:web_url]).not_to be_nil
end
it "has the author's avatar URL" do
expect(events.first[:author][:avatar_url]).not_to be_nil
end
it "has the author's name" do
expect(events.first[:author][:name]).to eq(context.author.name)
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq(context.title)
expect(event[:url]).not_to be_nil
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(context.author.name)
end
end
@ -59,36 +42,15 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
end
it 'has a title' do
expect(events.first[:title]).to eq(context.title)
end
it 'has the URL' do
expect(events.first[:url]).not_to be_nil
end
it 'has an iid' do
expect(events.first[:iid]).to eq(context.iid.to_s)
end
it 'has a created_at timestamp' do
expect(events.first[:created_at]).to end_with('ago')
end
it "has the author's URL" do
expect(events.first[:author][:web_url]).not_to be_nil
end
it "has the author's avatar URL" do
expect(events.first[:author][:avatar_url]).not_to be_nil
end
it "has the author's name" do
expect(events.first[:author][:name]).to eq(context.author.name)
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq(context.title)
expect(event[:url]).not_to be_nil
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(context.author.name)
end
end
@ -100,32 +62,14 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
end
it 'has a title' do
expect(events.first[:title]).to eq('Awesome merge_request')
end
it 'has an iid' do
expect(events.first[:iid]).to eq(context.iid.to_s)
end
it 'has a created_at timestamp' do
expect(events.first[:created_at]).to end_with('ago')
end
it "has the author's URL" do
expect(events.first[:author][:web_url]).not_to be_nil
end
it "has the author's avatar URL" do
expect(events.first[:author][:avatar_url]).not_to be_nil
end
it "has the author's name" do
expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq('Awesome merge_request')
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:created_at]).to end_with('ago')
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@ -152,40 +96,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
it 'has the name' do
expect(events.first[:name]).not_to be_nil
end
it 'has the ID' do
expect(events.first[:id]).not_to be_nil
end
it 'has the URL' do
expect(events.first[:url]).not_to be_nil
end
it 'has the branch name' do
expect(events.first[:branch]).not_to be_nil
end
it 'has the branch URL' do
expect(events.first[:branch][:url]).not_to be_nil
end
it 'has the short SHA' do
expect(events.first[:short_sha]).not_to be_nil
end
it 'has the commit URL' do
expect(events.first[:commit_url]).not_to be_nil
end
it 'has the date' do
expect(events.first[:date]).not_to be_nil
end
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
it 'has correct attributes' do
expect(event[:name]).not_to be_nil
expect(event[:id]).not_to be_nil
expect(event[:url]).not_to be_nil
expect(event[:branch]).not_to be_nil
expect(event[:branch][:url]).not_to be_nil
expect(event[:short_sha]).not_to be_nil
expect(event[:commit_url]).not_to be_nil
expect(event[:date]).not_to be_nil
expect(event[:total_time]).not_to be_empty
end
end
@ -197,40 +117,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
end
it 'has a title' do
expect(events.first[:title]).to eq('Awesome merge_request')
end
it 'has an iid' do
expect(events.first[:iid]).to eq(context.iid.to_s)
end
it 'has the URL' do
expect(events.first[:url]).not_to be_nil
end
it 'has a state' do
expect(events.first[:state]).not_to be_nil
end
it 'has a created_at timestamp' do
expect(events.first[:created_at]).not_to be_nil
end
it "has the author's URL" do
expect(events.first[:author][:web_url]).not_to be_nil
end
it "has the author's avatar URL" do
expect(events.first[:author][:avatar_url]).not_to be_nil
end
it "has the author's name" do
expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
it 'has correct attributes' do
expect(event[:total_time]).not_to be_empty
expect(event[:title]).to eq('Awesome merge_request')
expect(event[:iid]).to eq(context.iid.to_s)
expect(event[:url]).not_to be_nil
expect(event[:state]).not_to be_nil
expect(event[:created_at]).not_to be_nil
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@ -257,58 +153,25 @@ RSpec.describe 'cycle analytics events' do
deploy_master(user, project)
end
it 'has the name' do
expect(events.first[:name]).not_to be_nil
end
it 'has the ID' do
expect(events.first[:id]).not_to be_nil
end
it 'has the URL' do
expect(events.first[:url]).not_to be_nil
end
it 'has the branch name' do
expect(events.first[:branch]).not_to be_nil
end
it 'has the branch URL' do
expect(events.first[:branch][:url]).not_to be_nil
end
it 'has the short SHA' do
expect(events.first[:short_sha]).not_to be_nil
end
it 'has the commit URL' do
expect(events.first[:commit_url]).not_to be_nil
end
it 'has the date' do
expect(events.first[:date]).not_to be_nil
end
it 'has the total time' do
expect(events.first[:total_time]).not_to be_empty
end
it "has the author's URL" do
expect(events.first[:author][:web_url]).not_to be_nil
end
it "has the author's avatar URL" do
expect(events.first[:author][:avatar_url]).not_to be_nil
end
it "has the author's name" do
expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
it 'has correct attributes' do
expect(event[:name]).not_to be_nil
expect(event[:id]).not_to be_nil
expect(event[:url]).not_to be_nil
expect(event[:branch]).not_to be_nil
expect(event[:branch][:url]).not_to be_nil
expect(event[:short_sha]).not_to be_nil
expect(event[:commit_url]).not_to be_nil
expect(event[:date]).not_to be_nil
expect(event[:total_time]).not_to be_empty
expect(event[:author][:web_url]).not_to be_nil
expect(event[:author][:avatar_url]).not_to be_nil
expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
def setup(context)
milestone = create(:milestone, project: project)
context.update(milestone: milestone)
context.update!(milestone: milestone)
mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)

View file

@ -104,9 +104,69 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
context 'after 3 iterations' do
let(:retry_count) { 4 }
it_behaves_like 'retriable exclusive lock on `projects`' do
let(:retry_count) { 4 }
end
it_behaves_like 'retriable exclusive lock on `projects`'
context 'setting the idle transaction timeout' do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the idle transaction timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
allow(subject).to receive(:run_block_with_transaction).once
expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
subject.run {}
end
end
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the idle transaction timeout so the code can sleep and retry' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
n = 0
allow(subject).to receive(:run_block_with_transaction).twice do
n += 1
raise(ActiveRecord::LockWaitTimeout) if n == 1
end
expect(subject).to receive(:disable_idle_in_transaction_timeout).once
subject.run {}
end
end
end
end
context 'after the retries are exhausted' do
let(:timing_configuration) do
[
[1.second, 1.second]
]
end
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).not_to receive(:disable_lock_timeout)
subject.run {}
end
end
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).to receive(:disable_lock_timeout)
subject.run {}
end
end
end
context 'after the retries, without setting lock_timeout' do

View file

@ -7,6 +7,7 @@ RSpec.describe Gitlab::Lfs::Client do
let(:username) { 'user' }
let(:password) { 'password' }
let(:credentials) { { user: username, password: password, auth_method: 'password' } }
let(:git_lfs_content_type) { 'application/vnd.git-lfs+json' }
let(:basic_auth_headers) do
{ 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
@ -21,6 +22,15 @@ RSpec.describe Gitlab::Lfs::Client do
}
end
let(:verify_action) do
{
"href" => "#{base_url}/some/file/verify",
"header" => {
"Key" => "value"
}
}
end
subject(:lfs_client) { described_class.new(base_url, credentials: credentials) }
describe '#batch' do
@ -34,10 +44,10 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'objects' => 'anything', 'transfer' => 'basic' }.to_json,
headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
headers: { 'Content-Type' => git_lfs_content_type }
)
result = lfs_client.batch('upload', objects)
result = lfs_client.batch!('upload', objects)
expect(stub).to have_been_requested
expect(result).to eq('objects' => 'anything', 'transfer' => 'basic')
@ -48,7 +58,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@ -56,7 +66,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@ -68,17 +78,22 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'transfer' => 'carrier-pigeon' }.to_json,
headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
headers: { 'Content-Type' => git_lfs_content_type }
)
expect { lfs_client.batch('upload', objects) }.to raise_error(/Unsupported transfer/)
expect { lfs_client.batch!('upload', objects) }.to raise_error(/Unsupported transfer/)
end
end
def stub_batch(objects:, headers:, operation: 'upload', transfer: 'basic')
objects = objects.map { |o| { oid: o.oid, size: o.size } }
objects = objects.as_json(only: [:oid, :size])
body = { operation: operation, 'transfers': [transfer], objects: objects }.to_json
headers = {
'Accept' => git_lfs_content_type,
'Content-Type' => git_lfs_content_type
}.merge(headers)
stub_request(:post, base_url + '/info/lfs/objects/batch').with(body: body, headers: headers)
end
end
@ -90,7 +105,7 @@ RSpec.describe Gitlab::Lfs::Client do
it "makes an HTTP PUT with expected parameters" do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 200)
lfs_client.upload(object, upload_action, authenticated: true)
lfs_client.upload!(object, upload_action, authenticated: true)
end
end
@ -101,7 +116,7 @@ RSpec.describe Gitlab::Lfs::Client do
headers: basic_auth_headers.merge(upload_action['header'])
).to_return(status: 200)
lfs_client.upload(object, upload_action, authenticated: false)
lfs_client.upload!(object, upload_action, authenticated: false)
expect(stub).to have_been_requested
end
@ -110,13 +125,13 @@ RSpec.describe Gitlab::Lfs::Client do
context 'LFS object has no file' do
let(:object) { LfsObject.new }
it 'makes an HJTT PUT with expected parameters' do
it 'makes an HTTP PUT with expected parameters' do
stub = stub_upload(
object: object,
headers: upload_action['header']
).to_return(status: 200)
lfs_client.upload(object, upload_action, authenticated: true)
lfs_client.upload!(object, upload_action, authenticated: true)
expect(stub).to have_been_requested
end
@ -126,7 +141,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 400)
expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
@ -134,15 +149,73 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 500)
expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
def stub_upload(object:, headers:)
headers = {
'Content-Type' => 'application/octet-stream',
'Content-Length' => object.size.to_s
}.merge(headers)
stub_request(:put, upload_action['href']).with(
body: object.file.read,
headers: headers.merge('Content-Length' => object.size.to_s)
)
end
end
describe "#verify" do
let_it_be(:object) { create(:lfs_object) }
context 'server returns 200 OK to an authenticated request' do
it "makes an HTTP POST with expected parameters" do
stub_verify(object: object, headers: verify_action['header']).to_return(status: 200)
lfs_client.verify!(object, verify_action, authenticated: true)
end
end
context 'server returns 200 OK to an unauthenticated request' do
it "makes an HTTP POST with expected parameters" do
stub = stub_verify(
object: object,
headers: basic_auth_headers.merge(upload_action['header'])
).to_return(status: 200)
lfs_client.verify!(object, verify_action, authenticated: false)
expect(stub).to have_been_requested
end
end
context 'server returns 400 error' do
it 'raises an error' do
stub_verify(object: object, headers: verify_action['header']).to_return(status: 400)
expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
end
end
context 'server returns 500 error' do
it 'raises an error' do
stub_verify(object: object, headers: verify_action['header']).to_return(status: 500)
expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
end
end
def stub_verify(object:, headers:)
headers = {
'Accept' => git_lfs_content_type,
'Content-Type' => git_lfs_content_type
}.merge(headers)
stub_request(:post, verify_action['href']).with(
body: object.to_json(only: [:oid, :size]),
headers: headers
)
end
end
end

View file

@ -92,6 +92,46 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
context 'for Issue created actions' do
it_behaves_like 'tracks and counts action' do
let(:action) { described_class::ISSUE_CREATED }
def track_action(params)
described_class.track_issue_created_action(params)
end
end
end
context 'for Issue closed actions' do
it_behaves_like 'tracks and counts action' do
let(:action) { described_class::ISSUE_CLOSED }
def track_action(params)
described_class.track_issue_closed_action(params)
end
end
end
context 'for Issue reopened actions' do
it_behaves_like 'tracks and counts action' do
let(:action) { described_class::ISSUE_REOPENED }
def track_action(params)
described_class.track_issue_reopened_action(params)
end
end
end
context 'for Issue label changed actions' do
it_behaves_like 'tracks and counts action' do
let(:action) { described_class::ISSUE_LABEL_CHANGED }
def track_action(params)
described_class.track_issue_label_changed_action(params)
end
end
end
it 'can return the count of actions per user deduplicated', :aggregate_failures do
described_class.track_issue_title_changed_action(author: user1)
described_class.track_issue_description_changed_action(author: user1)

View file

@ -1020,7 +1020,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
def for_defined_days_back(days: [29, 2])
def for_defined_days_back(days: [31, 3])
days.each do |n|
Timecop.travel(n.days.ago) do
yield

View file

@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'json_schemer'
RSpec.describe PagerDuty::WebhookPayloadParser do
describe '.call' do
@ -8,36 +9,36 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
File.read(File.join(File.dirname(__FILE__), '../../fixtures/pager_duty/webhook_incident_trigger.json'))
end
let(:triggered_event) do
{
'event' => 'incident.trigger',
'incident' => {
'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
'incident_number' => 33,
'title' => 'My new incident',
'status' => 'triggered',
'created_at' => '2017-09-26T15:14:36Z',
'urgency' => 'high',
'incident_key' => nil,
'assignees' => [{
'summary' => 'Laura Haley',
'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
}],
'impacted_services' => [{
'summary' => 'Production XDB Cluster',
'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
}]
}
}
end
subject(:parse) { described_class.call(payload) }
context 'when payload is a correct PagerDuty payload' do
let(:payload) { Gitlab::Json.parse(fixture_file) }
it 'returns parsed payload' do
is_expected.to eq(
[
{
'event' => 'incident.trigger',
'incident' => {
'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
'incident_number' => 33,
'title' => 'My new incident',
'status' => 'triggered',
'created_at' => '2017-09-26T15:14:36Z',
'urgency' => 'high',
'incident_key' => nil,
'assignees' => [{
'summary' => 'Laura Haley',
'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
}],
'impacted_services' => [{
'summary' => 'Production XDB Cluster',
'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
}]
}
}
]
)
is_expected.to eq([triggered_event])
end
context 'when assignments summary and html_url are blank' do
@ -69,11 +70,42 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
end
end
context 'when payload has no incidents' do
context 'when payload schema is invalid' do
let(:payload) { { 'messages' => [{ 'event' => 'incident.trigger' }] } }
it 'returns payload with blank incidents' do
is_expected.to eq([{ 'event' => 'incident.trigger', 'incident' => {} }])
is_expected.to eq([])
end
end
context 'when payload consists of two messages' do
context 'when one of the messages has no incident data' do
let(:payload) do
valid_payload = Gitlab::Json.parse(fixture_file)
event = { 'event' => 'incident.trigger' }
valid_payload['messages'] = valid_payload['messages'].append(event)
valid_payload
end
it 'returns parsed payload with valid events only' do
is_expected.to eq([triggered_event])
end
end
context 'when one of the messages has unknown event' do
let(:payload) do
valid_payload = Gitlab::Json.parse(fixture_file)
event = { 'event' => 'incident.unknown', 'incident' => valid_payload['messages'].first['incident'] }
valid_payload['messages'] = valid_payload['messages'].append(event)
valid_payload
end
it 'returns parsed payload' do
unknown_event = triggered_event.dup
unknown_event['event'] = 'incident.unknown'
is_expected.to contain_exactly(triggered_event, unknown_event)
end
end
end
end

View file

@ -105,6 +105,14 @@ RSpec.describe Issue do
create(:issue, project: reusable_project)
end
end
describe '#record_create_action' do
it 'records the creation action after saving' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
create(:issue)
end
end
end
describe '.with_alert_management_alerts' do

View file

@ -50,26 +50,36 @@ RSpec.describe ResourceLabelEvent, type: :model do
end
end
describe '#expire_etag_cache' do
def expect_expiration(issue)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:touch)
.with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
context 'callbacks' do
describe '#usage_metrics' do
it 'tracks changed labels' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
subject.save!
end
end
it 'expires resource note etag cache on event save' do
expect_expiration(subject.issuable)
describe '#expire_etag_cache' do
def expect_expiration(issue)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:touch)
.with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
end
end
subject.save!
end
it 'expires resource note etag cache on event save' do
expect_expiration(subject.issuable)
it 'expires resource note etag cache on event destroy' do
subject.save!
subject.save!
end
expect_expiration(subject.issuable)
it 'expires resource note etag cache on event destroy' do
subject.save!
subject.destroy!
expect_expiration(subject.issuable)
subject.destroy!
end
end
end

View file

@ -11,6 +11,7 @@ RSpec.describe ResourceMilestoneEvent, type: :model do
it_behaves_like 'timebox resource event validations'
it_behaves_like 'timebox resource event states'
it_behaves_like 'timebox resource event actions'
it_behaves_like 'timebox resource tracks issue metrics', :milestone
describe 'associations' do
it { is_expected.to belong_to(:milestone) }

View file

@ -39,4 +39,20 @@ RSpec.describe ResourceStateEvent, type: :model do
end
end
end
context 'callbacks' do
describe '#usage_metrics' do
it 'tracks closed issues' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_closed_action)
create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:closed])
end
it 'tracks reopened issues' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_reopened_action)
create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:reopened])
end
end
end
end

View file

@ -73,4 +73,14 @@ RSpec.describe ResourceWeightEvent, type: :model do
expect(event.discussion_id).to eq('73d167c478')
end
end
context 'callbacks' do
describe '#usage_metrics' do
it 'tracks changed weights' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_weight_changed_action).with(author: user1)
create(:resource_weight_event, issue: issue1, user: user1)
end
end
end
end

View file

@ -67,7 +67,7 @@ RSpec.describe SnippetInputAction do
let(:options) { { action: action, file_path: file_path, content: content, previous_path: previous_path } }
let(:expected_options) { options.merge(action: action.to_sym) }
subject { described_class.new(options).to_commit_action }
subject { described_class.new(**options).to_commit_action }
it 'transforms attributes to commit action' do
expect(subject).to eq(expected_options)

View file

@ -19,7 +19,7 @@ RSpec.describe Lfs::PushService do
stub_lfs_batch(lfs_object)
expect(lfs_client)
.to receive(:upload)
.to receive(:upload!)
.with(lfs_object, upload_action_spec(lfs_object), authenticated: true)
expect(service.execute).to eq(status: :success)
@ -28,7 +28,7 @@ RSpec.describe Lfs::PushService do
it 'does nothing if there are no LFS objects' do
lfs_object.destroy!
expect(lfs_client).not_to receive(:upload)
expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
@ -36,20 +36,39 @@ RSpec.describe Lfs::PushService do
it 'does not upload the object when upload is not requested' do
stub_lfs_batch(lfs_object, upload: false)
expect(lfs_client).not_to receive(:upload)
expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
it 'verifies the upload if requested' do
stub_lfs_batch(lfs_object, verify: true)
expect(lfs_client).to receive(:upload!)
expect(lfs_client)
.to receive(:verify!)
.with(lfs_object, verify_action_spec(lfs_object), authenticated: true)
expect(service.execute).to eq(status: :success)
end
it 'skips verification if requested but upload fails' do
stub_lfs_batch(lfs_object, verify: true)
expect(lfs_client).to receive(:upload!) { raise 'failed' }
expect(lfs_client).not_to receive(:verify!)
expect(service.execute).to eq(status: :error, message: 'failed')
end
it 'returns a failure when submitting a batch fails' do
expect(lfs_client).to receive(:batch) { raise 'failed' }
expect(lfs_client).to receive(:batch!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
it 'returns a failure when submitting an upload fails' do
stub_lfs_batch(lfs_object)
expect(lfs_client).to receive(:upload) { raise 'failed' }
expect(lfs_client).to receive(:upload!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
@ -71,23 +90,28 @@ RSpec.describe Lfs::PushService do
create(:lfs_objects_project, project: project, repository_type: type).lfs_object
end
def stub_lfs_batch(*objects, upload: true)
def stub_lfs_batch(*objects, upload: true, verify: false)
expect(lfs_client)
.to receive(:batch).with('upload', containing_exactly(*objects))
.and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload) })
.to receive(:batch!).with('upload', containing_exactly(*objects))
.and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload, verify: verify) })
end
def batch_spec(*objects, upload: true)
def batch_spec(*objects, upload: true, verify: false)
{ 'transfer' => 'basic', 'objects' => objects.map {|o| object_spec(o, upload: upload) } }
end
def object_spec(object, upload: true)
{ 'oid' => object.oid, 'size' => object.size, 'authenticated' => true }.tap do |spec|
spec['actions'] = { 'upload' => upload_action_spec(object) } if upload
def object_spec(object, upload: true, verify: false)
{ 'oid' => object.oid, 'size' => object.size, 'authenticated' => true, 'actions' => {} }.tap do |spec|
spec['actions']['upload'] = upload_action_spec(object) if upload
spec['actions']['verify'] = verify_action_spec(object) if verify
end
end
def upload_action_spec(object)
{ 'href' => "https://example.com/#{object.oid}/#{object.size}", 'header' => { 'Key' => 'value' } }
end
def verify_action_spec(object)
{ 'href' => "https://example.com/#{object.oid}/#{object.size}/verify", 'header' => { 'Key' => 'value' } }
end
end

View file

@ -229,7 +229,7 @@ module UsageDataHelpers
receive_matchers.each { |m| expect(prometheus_client).to m }
end
def for_defined_days_back(days: [29, 2])
def for_defined_days_back(days: [31, 3])
days.each do |n|
Timecop.travel(n.days.ago) do
yield

View file

@ -73,3 +73,13 @@ RSpec.shared_examples 'timebox resource event actions' do
end
end
end
RSpec.shared_examples 'timebox resource tracks issue metrics' do |type|
describe '#usage_metrics' do
it 'tracks usage' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:"track_issue_#{type}_changed_action")
create(described_class.name.underscore.to_sym, issue: create(:issue))
end
end
end