Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-05-27 18:10:52 +00:00
parent f719944dee
commit 479221aa79
56 changed files with 3278 additions and 3454 deletions


@@ -54,7 +54,7 @@ export default {
       if (e.target.closest('.js-no-trigger')) return;
 
       const isMultiSelect = e.ctrlKey || e.metaKey;
-      if (isMultiSelect) {
+      if (isMultiSelect && gon?.features?.boardMultiSelect) {
        this.toggleBoardItemMultiSelection(this.item);
      } else {
        this.toggleBoardItem({ boardItem: this.item });


@@ -45,6 +45,7 @@ input[type='checkbox']:hover {
   margin: 0 8px;
 
   form {
+    display: block;
     margin: 0;
     padding: 4px;
     width: $search-input-width;

(Three file diffs suppressed because they are too large.)


@@ -8,6 +8,7 @@ class Groups::BoardsController < Groups::ApplicationController
   before_action :assign_endpoint_vars
 
   before_action do
     push_frontend_feature_flag(:graphql_board_lists, group, default_enabled: false)
+    push_frontend_feature_flag(:board_multi_select, group, default_enabled: :yaml)
     push_frontend_feature_flag(:swimlanes_buffered_rendering, group, default_enabled: :yaml)
   end


@@ -9,6 +9,7 @@ class Projects::BoardsController < Projects::ApplicationController
   before_action do
     push_frontend_feature_flag(:swimlanes_buffered_rendering, project, default_enabled: :yaml)
     push_frontend_feature_flag(:graphql_board_lists, project, default_enabled: :yaml)
+    push_frontend_feature_flag(:board_multi_select, project, default_enabled: :yaml)
   end
 
   feature_category :boards


@@ -4,6 +4,10 @@ module BulkImports
   class Export < ApplicationRecord
     include Gitlab::Utils::StrongMemoize
 
+    STARTED = 0
+    FINISHED = 1
+    FAILED = -1
+
     self.table_name = 'bulk_import_exports'
 
     belongs_to :project, optional: true
@@ -18,9 +22,9 @@ module BulkImports
     validate :portable_relation?
 
     state_machine :status, initial: :started do
-      state :started, value: 0
-      state :finished, value: 1
-      state :failed, value: -1
+      state :started, value: STARTED
+      state :finished, value: FINISHED
+      state :failed, value: FAILED
 
       event :start do
         transition any => :started

@@ -0,0 +1,47 @@
# frozen_string_literal: true
module BulkImports
class ExportStatus
include Gitlab::Utils::StrongMemoize
def initialize(pipeline_tracker, relation)
@pipeline_tracker = pipeline_tracker
@relation = relation
@entity = @pipeline_tracker.entity
@configuration = @entity.bulk_import.configuration
@client = Clients::Http.new(uri: @configuration.url, token: @configuration.access_token)
end
def started?
export_status['status'] == Export::STARTED
end
def failed?
export_status['status'] == Export::FAILED
end
def error
export_status['error']
end
private
attr_reader :client, :entity, :relation
def export_status
strong_memoize(:export_status) do
fetch_export_status.find { |item| item['relation'] == relation }
end
rescue StandardError => e
{ 'status' => Export::FAILED, 'error' => e.message }
end
def fetch_export_status
client.get(status_endpoint).parsed_response
end
def status_endpoint
"/groups/#{entity.encoded_source_full_path}/export_relations/status"
end
end
end
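
A minimal sketch of how this class is intended to be used, mirroring the `PipelineWorker` change further down; `pipeline_tracker` is assumed to be a `BulkImports::Tracker` whose pipeline class defines a `RELATION` constant:

```ruby
# Sketch only - mirrors the usage added to BulkImports::PipelineWorker in this commit.
status = BulkImports::ExportStatus.new(pipeline_tracker, 'labels')

if status.failed?
  # The source instance could not generate the export; surface its error message.
  raise BulkImports::Pipeline::FailedError, status.error
elsif status.started?
  # The export is still being prepared on the source instance; try again later.
else
  # The export is finished and the relation can be downloaded and imported.
end
```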


@@ -13,6 +13,14 @@ module BulkImports
       attributes_finder.find_root(portable_class_sym)
     end
 
+    def top_relation_tree(relation)
+      portable_relations_tree[relation.to_s]
+    end
+
+    def relation_excluded_keys(relation)
+      attributes_finder.find_excluded_keys(relation)
+    end
+
     def export_path
       strong_memoize(:export_path) do
         relative_path = File.join(base_export_path, SecureRandom.hex)
@@ -47,6 +55,10 @@ module BulkImports
       @portable_class_sym ||= portable_class.to_s.demodulize.underscore.to_sym
     end
 
+    def portable_relations_tree
+      @portable_relations_tree ||= attributes_finder.find_relations_tree(portable_class_sym).deep_stringify_keys
+    end
+
     def import_export_yaml
       raise NotImplementedError
     end


@@ -3,7 +3,7 @@
 module BulkImports
   class FileDownloadService
     FILE_SIZE_LIMIT = 5.gigabytes
-    ALLOWED_CONTENT_TYPES = ['application/octet-stream'].freeze
+    ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
 
     ServiceError = Class.new(StandardError)


@@ -36,7 +36,7 @@
     = favicon_link_tag favicon, id: 'favicon', data: { original_href: favicon }, type: 'image/png'
 
-  = render 'layouts/startup_css'
+  = render 'layouts/startup_css', { startup_filename: local_assigns.fetch(:startup_filename, nil) }
   - if user_application_theme == 'gl-dark'
     = stylesheet_link_tag_defer "application_dark"
   = yield :page_specific_styles


@@ -1,4 +1,5 @@
-- startup_filename = current_path?("sessions#new") ? 'signin' : user_application_theme == 'gl-dark' ? 'dark' : 'general'
+- startup_filename_default = user_application_theme == 'gl-dark' ? 'dark' : 'general'
+- startup_filename = local_assigns.fetch(:startup_filename, nil) || startup_filename_default
 
 %style
   = Rails.application.assets_manifest.find_sources("themes/#{user_application_theme_css_filename}.css").first.to_s.html_safe if user_application_theme_css_filename


@@ -1,6 +1,6 @@
 !!! 5
 %html.devise-layout-html{ class: system_message_class }
-  = render "layouts/head"
+  = render "layouts/head", { startup_filename: 'signin' }
   %body.ui-indigo.login-page.application.navless{ class: "#{client_class_list}", data: { page: body_data_page, qa_selector: 'login_page' } }
     = header_message
     = render "layouts/init_client_detection_flags"


@@ -1087,15 +1087,6 @@
   :weight: 2
   :idempotent: true
   :tags: []
-- :name: incident_management:incident_management_process_prometheus_alert
-  :worker_name: IncidentManagement::ProcessPrometheusAlertWorker
-  :feature_category: :incident_management
-  :has_external_dependencies:
-  :urgency: :low
-  :resource_boundary: :cpu
-  :weight: 2
-  :idempotent:
-  :tags: []
 - :name: jira_connect:jira_connect_sync_branch
   :worker_name: JiraConnect::SyncBranchWorker
   :feature_category: :integrations


@@ -4,6 +4,8 @@ module BulkImports
   class PipelineWorker # rubocop:disable Scalability/IdempotentWorker
     include ApplicationWorker
 
+    NDJSON_PIPELINE_PERFORM_DELAY = 1.minute
+
     feature_category :importers
 
     tags :exclude_from_kubernetes
@@ -40,6 +42,15 @@ module BulkImports
     private
 
     def run(pipeline_tracker)
+      if ndjson_pipeline?(pipeline_tracker)
+        status = ExportStatus.new(pipeline_tracker, pipeline_tracker.pipeline_class::RELATION)
+
+        raise(Pipeline::ExpiredError, 'Pipeline timeout') if job_timeout?(pipeline_tracker)
+        raise(Pipeline::FailedError, status.error) if status.failed?
+
+        return reenqueue(pipeline_tracker) if status.started?
+      end
+
       pipeline_tracker.update!(status_event: 'start', jid: jid)
 
       context = ::BulkImports::Pipeline::Context.new(pipeline_tracker)
@@ -48,7 +59,7 @@ module BulkImports
 
       pipeline_tracker.finish!
     rescue StandardError => e
-      pipeline_tracker.fail_op!
+      pipeline_tracker.update!(status_event: 'fail_op', jid: jid)
 
       logger.error(
         worker: self.class.name,
@@ -67,5 +78,17 @@ module BulkImports
     def logger
       @logger ||= Gitlab::Import::Logger.build
     end
+
+    def ndjson_pipeline?(pipeline_tracker)
+      pipeline_tracker.pipeline_class.ndjson_pipeline?
+    end
+
+    def job_timeout?(pipeline_tracker)
+      (Time.zone.now - pipeline_tracker.entity.created_at) > Pipeline::NDJSON_EXPORT_TIMEOUT
+    end
+
+    def reenqueue(pipeline_tracker)
+      self.class.perform_in(NDJSON_PIPELINE_PERFORM_DELAY, pipeline_tracker.id, pipeline_tracker.stage, pipeline_tracker.entity.id)
+    end
   end
 end


@@ -1,23 +0,0 @@
# frozen_string_literal: true
module IncidentManagement
class ProcessPrometheusAlertWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
sidekiq_options retry: 3
queue_namespace :incident_management
feature_category :incident_management
worker_resource_boundary :cpu
def perform(project_id, alert_hash)
# no-op
#
# This worker is not scheduled anymore since
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35943
# and will be removed completely via
# https://gitlab.com/gitlab-org/gitlab/-/issues/227146
# in 14.0.
end
end
end


@@ -0,0 +1,8 @@
---
name: board_multi_select
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/61955
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/331189
milestone: '14.0'
type: development
group: group::product planning
default_enabled: false


@@ -2175,6 +2175,25 @@ Input type: `EscalationPolicyCreateInput`
 | <a id="mutationescalationpolicycreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
 | <a id="mutationescalationpolicycreateescalationpolicy"></a>`escalationPolicy` | [`EscalationPolicyType`](#escalationpolicytype) | The escalation policy. |
 
+### `Mutation.escalationPolicyDestroy`
+
+Input type: `EscalationPolicyDestroyInput`
+
+#### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationescalationpolicydestroyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationescalationpolicydestroyid"></a>`id` | [`IncidentManagementEscalationPolicyID!`](#incidentmanagementescalationpolicyid) | The escalation policy internal ID to remove. |
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationescalationpolicydestroyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationescalationpolicydestroyerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+| <a id="mutationescalationpolicydestroyescalationpolicy"></a>`escalationPolicy` | [`EscalationPolicyType`](#escalationpolicytype) | The escalation policy. |
+
 ### `Mutation.exportRequirements`
 
 Input type: `ExportRequirementsInput`


@@ -79,8 +79,17 @@ EE: true
 ## What warrants a changelog entry?
 
-- Any user-facing change **should** have a changelog entry. Example: "GitLab now
+- Any change that introduces a database migration, whether it's regular, post,
+  or data migration, **must** have a changelog entry, even if it is behind a
+  disabled feature flag.
+- [Security fixes](https://gitlab.com/gitlab-org/release/docs/blob/master/general/security/developer.md)
+  **must** have a changelog entry, with `Changelog` trailer set to `security`.
+- Any user-facing change **must** have a changelog entry. Example: "GitLab now
   uses system fonts for all text."
+- Any client-facing change to our REST and GraphQL APIs **must** have a changelog entry.
+  See the [complete list what comprises a GraphQL breaking change](api_graphql_styleguide.md#breaking-changes).
+- Any change that introduces an [Advanced Search migration](elasticsearch.md#creating-a-new-advanced-search-migration)
+  **must** have a changelog entry.
 - A fix for a regression introduced and then fixed in the same release (such as
   fixing a bug introduced during a monthly release candidate) **should not**
   have a changelog entry.


@@ -0,0 +1,16 @@
---
stage:
group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Build your application **(FREE)**
Add your source code to a repository, create merge requests to check in
code, and use CI/CD to generate your application. Include packages in your app and output it to a variety of environments.
- [Repositories](../user/project/repository/index.md)
- [Merge requests](../user/project/merge_requests/index.md)
- [CI/CD](../ci/README.md)
- [Packages & Registries](../user/packages/index.md)
- [Application infrastructure](../user/project/clusters/index.md)


@@ -161,6 +161,34 @@ Feature.disable(:include_lfs_blobs_in_archive)
 
 ## Troubleshooting
 
+### Encountered `n` file(s) that should have been pointers, but weren't
+
+This error indicates the file (or files) are expected to be tracked by LFS, but for
+some reason the repository is not tracking them as LFS. This issue can be one
+potential reason for this error:
+[Files not tracked with LFS when uploaded through the web interface](https://gitlab.com/gitlab-org/gitlab/-/issues/326342#note_586820485)
+
+To resolve the problem, migrate the affected file (or files) and push back to the repository:
+
+1. Migrate the file to LFS:
+
+   ```shell
+   git lfs migrate import --yes --no-rewrite "<your-file>"
+   ```
+
+1. Push back to your repository:
+
+   ```shell
+   git push
+   ```
+
+1. (Optional) Clean up your `.git` folder:
+
+   ```shell
+   git reflog expire --expire-unreachable=now --all
+   git gc --prune=now
+   ```
+
 ### error: Repository or object not found
 
 There are a couple of reasons why this error can occur:


@@ -0,0 +1,28 @@
---
stage:
group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Plan and track work **(FREE)**
Plan your work by creating requirements, issues, and epics. Schedule work
with milestones and track your team's time. Learn how to save time with
quick actions, see how GitLab renders Markdown text, and learn how to
use Git to interact with GitLab.
- [Epics](../user/group/epics/index.md)
- [Issues](../user/project/issues/index.md)
- [Labels](../user/project/labels.md)
- [Discussions](../user/discussions/index.md)
- [Iterations](../user/group/iterations/index.md)
- [Milestones](../user/project/milestones/index.md)
- [Requirements](../user/project/requirements/index.md)
- [Roadmaps](../user/group/roadmap/index.md)
- [Time tracking](../user/project/time_tracking.md)
- [Wikis](../user/project/wiki/index.md)
- [Keyboard shortcuts](../user/shortcuts.md)
- [Quick actions](../user/project/quick_actions.md)
- [Markdown](../user/markdown.md)
- [To-Do lists](../user/todos.md)
- [Using Git](../topics/git/index.md)


@@ -0,0 +1,13 @@
---
stage:
group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Release your application **(FREE)**
Release your application internally or to the public. Use
flags to release features incrementally.
- [Releases](../user/project/releases/index.md)
- [Feature flags](../operations/feature_flags.md)


@@ -0,0 +1,16 @@
---
stage:
group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Set up your organization **(FREE)**
Configure your organization and its users. Determine user roles
and give everyone access to the projects they need.
- [Members](../user/project/members/index.md)
- [Groups](../user/group/index.md)
- [User account options](../user/profile/index.md)
- [SSH keys](../ssh/README.md)
- [GitLab.com settings](../user/gitlab_com/index.md)

doc/topics/use_gitlab.md (new file)

@@ -0,0 +1,19 @@
---
stage:
group:
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Use GitLab **(FREE)**
Get to know the GitLab end-to-end workflow. Configure permissions,
organize your work, create and secure your application, and analyze its performance. Report on team productivity throughout the process.
- [Set up your organization](set_up_organization.md)
- [Organize work with projects](../user/project/index.md)
- [Plan and track work](plan_and_track.md)
- [Build your application](build_your_application.md)
- [Secure your application](../user/application_security/index.md)
- [Release your application](release_your_application.md)
- [Monitor application performance](../operations/index.md)
- [Analyze GitLab usage](../user/analytics/index.md)


@@ -4,7 +4,7 @@ group: Optimize
 info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
 ---
 
-# Analytics **(FREE)**
+# Analyze GitLab usage **(FREE)**
 
 ## Definitions


@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
 # Value Stream Analytics **(FREE)**
 
 > - Introduced as Cycle Analytics prior to GitLab 12.3 at the project level.
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12077) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.3 at the group level.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12077) in GitLab Premium 12.3 at the group level.
 > - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23427) from Cycle Analytics to Value Stream Analytics in GitLab 12.8.
 
 Value Stream Analytics measures the time spent to go from an
@@ -15,20 +15,20 @@ Value Stream Analytics measures the time spent to go from an
 (also known as cycle time) for each of your projects or groups. Value Stream Analytics displays the median time
 spent in each stage defined in the process.
 
-Value Stream Analytics is useful in order to quickly determine the velocity of a given
+You can use Value Stream Analytics to determine the velocity of a given
 project. It points to bottlenecks in the development process, enabling management
 to uncover, triage, and identify the root cause of slowdowns in the software development life cycle.
 
-For information on how to contribute to the development of Value Stream Analytics, see our [contributor documentation](../../development/value_stream_analytics.md).
+For information about how to contribute to the development of Value Stream Analytics, see our [contributor documentation](../../development/value_stream_analytics.md).
 
-Project-level Value Stream Analytics is available via **Project > Analytics > Value Stream**.
+Project-level Value Stream Analytics is available by using **Project > Analytics > Value Stream**.
 
 NOTE:
 [Group-level Value Stream Analytics](../group/value_stream_analytics) is also available.
 
 ## Default stages
 
-The stages tracked by Value Stream Analytics by default represent the [GitLab flow](../../topics/gitlab_flow.md). These stages can be customized in Group Level Value Stream Analytics.
+The stages tracked by Value Stream Analytics by default represent the [GitLab flow](../../topics/gitlab_flow.md). You can customize these stages in group-level Value Stream Analytics.
 
 - **Issue** (Tracker)
   - Time to schedule an issue (by milestone or by adding it to an issue board)
@@ -38,55 +38,51 @@ The stages tracked by Value Stream Analytics by default represent the [GitLab fl
   - Time to create a merge request
 - **Test** (CI)
   - Time it takes GitLab CI/CD to test your code
-- **Review** (Merge Request/MR)
+- **Review** (Merge request)
   - Time spent on code review
 - **Staging** (Continuous Deployment)
   - Time between merging and deploying to production
 
 ### Date ranges
 
-> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/36300) in GitLab 10.0.
-
-GitLab provides the ability to filter analytics based on a date range. To filter results, select one of these options:
+To filter analytics results based on a date range, select one of these options:
 
-1. Last 7 days
-1. Last 30 days (default)
-1. Last 90 days
+- **Last 7 days**
+- **Last 30 days** (default)
+- **Last 90 days**
 
 ## How Time metrics are measured
 
-The "Time" metrics near the top of the page are measured as follows:
+The **Time** metrics near the top of the page are measured as follows:
 
-- **Lead time**: median time from issue created to issue closed.
-- **Cycle time**: median time from first commit to issue closed. (You can associate a commit with an issue by [crosslinking in the commit message](../project/issues/crosslinking_issues.md#from-commit-messages).)
+- **Lead time**: Median time from issue created to issue closed.
+- **Cycle time**: Median time from first commit to issue closed. (You can associate a commit with an issue by [crosslinking in the commit message](../project/issues/crosslinking_issues.md#from-commit-messages).)
 
 ## How the stages are measured
 
-Value Stream Analytics uses start events and stop events to measure the time that an Issue or MR spends in each stage.
-For example, a stage might start when one label is added to an issue, and end when another label is added.
-Items are not included in the stage time calculation if they have not reached the stop event.
+Value Stream Analytics uses start events and stop events to measure the time that an issue or merge request spends in each stage.
+For example, a stage might start when one label is added to an issue and end when another label is added.
+Items aren't included in the stage time calculation if they have not reached the stop event.
 
-Each stage of Value Stream Analytics is further described in the table below.
+| Stage | Description |
+|---------|---------------|
+| Issue | Measures the median time between creating an issue and taking action to solve it, by either labeling it or adding it to a milestone, whichever comes first. The label is tracked only if it already includes an [Issue Board list](../project/issue_board.md) created for it. |
+| Plan | Measures the median time between the action you took for the previous stage, and pushing the first commit to the branch. That first branch commit triggers the separation between **Plan** and **Code**, and at least one of the commits in the branch must include the related issue number (such as `#42`). If the issue number is *not* included in a commit, that data is not included in the measurement time of the stage. |
+| Code | Measures the median time between pushing a first commit (previous stage) and creating a merge request (MR). The process is tracked with the [issue closing pattern](../project/issues/managing_issues.md#closing-issues-automatically) in the description of the merge request. For example, if the issue is closed with `Closes #xxx`, it's assumed that `xxx` is issue number for the merge request). If there is no closing pattern, the start time is set to the create time of the first commit. |
+| Test | Essentially the start to finish time for all pipelines. Measures the median time to run the entire pipeline for that project. Related to the time required by GitLab CI/CD to run every job for the commits pushed to that merge request, as defined in the previous stage. |
+| Review | Measures the median time taken to review merge requests with a closing issue pattern, from creation to merge. |
+| Staging | Measures the median time between merging the merge request (with a closing issue pattern) to the first deployment to a [production environment](#how-the-production-environment-is-identified). Data not collected without a production environment. |
 
-| **Stage** | **Description** |
-| --------- | --------------- |
-| Issue | Measures the median time between creating an issue and taking action to solve it, by either labeling it or adding it to a milestone, whichever comes first. The label is tracked only if it already includes an [Issue Board list](../project/issue_board.md) created for it. |
-| Plan | Measures the median time between the action you took for the previous stage, and pushing the first commit to the branch. That first branch commit triggers the separation between **Plan** and **Code**, and at least one of the commits in the branch must include the related issue number (such as `#42`). If the issue number is *not* included in a commit, that data is not included in the measurement time of the stage. |
-| Code | Measures the median time between pushing a first commit (previous stage) and creating a merge request (MR). The process is tracked with the [issue closing pattern](../project/issues/managing_issues.md#closing-issues-automatically) in the description of the merge request. For example, if the issue is closed with `Closes #xxx`, it's assumed that `xxx` is issue number for the merge request). If there is no closing pattern, the start time is set to the create time of the first commit. |
-| Test | Essentially the start to finish time for all pipelines. Measures the median time to run the entire pipeline for that project. Related to the time required by GitLab CI/CD to run every job for the commits pushed to that merge request, as defined in the previous stage. |
-| Review | Measures the median time taken to review merge requests with a closing issue pattern, from creation to merge. |
-| Staging | Measures the median time between merging the merge request (with a closing issue pattern) to the first deployment to a [production environment](#how-the-production-environment-is-identified). Data not collected without a production environment. |
-
-How this works, behind the scenes:
+How this works:
 
 1. Issues and merge requests are grouped in pairs, where the merge request has the
    [closing pattern](../project/issues/managing_issues.md#closing-issues-automatically)
-   for the corresponding issue. Issue/merge request pairs without closing patterns are
-   **not** included.
-1. Issue/merge request pairs are filtered by the last XX days, specified through the UI
-   (default = 90 days). Pairs outside the filtered range are not included.
+   for the corresponding issue. Issue and merge request pairs without closing patterns are
+   not included.
+1. Issue and merge request pairs are filtered by the last XX days, specified through the UI
+   (default is `90` days). Pairs outside the filtered range are not included.
 1. For the remaining pairs, review information needed for stages, including
-   issue creation date, merge request merge time, and so on.
+   issue creation date and merge request merge time.
 
 In short, the Value Stream Analytics dashboard tracks data related to [GitLab flow](../../topics/gitlab_flow.md). It does not include data for:
@@ -97,67 +93,69 @@ In short, the Value Stream Analytics dashboard tracks data related to [GitLab fl
 
 ## How the production environment is identified
 
-Value Stream Analytics identifies production environments based on
-[the deployment tier of environments](../../ci/environments/index.md#deployment-tier-of-environments).
+Value Stream Analytics identifies production environments based on the
+[deployment tier of environments](../../ci/environments/index.md#deployment-tier-of-environments).
 
 ## Example workflow
 
-Below is a simple fictional workflow of a single cycle that happens in a
-single day passing through all seven stages. Note that if a stage does not have
-a start and a stop mark, it is not measured and hence not calculated in the median
-time. It is assumed that milestones are created and CI for testing and setting
+Here's a fictional workflow of a single cycle that happens in a
+single day, passing through all seven stages. If a stage doesn't have
+a start and a stop mark, it isn't measured and hence isn't calculated in the median
+time. It's assumed that milestones are created, and CI for testing and setting
 environments is configured.
 
 1. Issue is created at 09:00 (start of **Issue** stage).
-1. Issue is added to a milestone at 11:00 (stop of **Issue** stage / start of
+1. Issue is added to a milestone at 11:00 (stop of **Issue** stage and start of
    **Plan** stage).
-1. Start working on the issue, create a branch locally and make one commit at
+1. Start working on the issue, create a branch locally, and make one commit at
    12:00.
-1. Make a second commit to the branch which mentions the issue number at 12.30
-   (stop of **Plan** stage / start of **Code** stage).
-1. Push branch and create a merge request that contains the [issue closing pattern](../project/issues/managing_issues.md#closing-issues-automatically)
-   in its description at 14:00 (stop of **Code** stage / start of **Test** and
+1. Make a second commit to the branch that mentions the issue number at 12:30
+   (stop of **Plan** stage and start of **Code** stage).
+1. Push branch, and create a merge request that contains the [issue closing pattern](../project/issues/managing_issues.md#closing-issues-automatically)
+   in its description at 14:00 (stop of **Code** stage and start of **Test** and
   **Review** stages).
 1. The CI starts running your scripts defined in [`.gitlab-ci.yml`](../../ci/yaml/README.md) and
-   takes 5min (stop of **Test** stage).
-1. Review merge request, ensure that everything is OK and merge the merge
-   request at 19:00. (stop of **Review** stage / start of **Staging** stage).
-1. Now that the merge request is merged, a deployment to the `production`
+   takes 5 minutes (stop of **Test** stage).
+1. Review merge request, ensure that everything is okay, and then merge the merge
+   request at 19:00 (stop of **Review** stage and start of **Staging** stage).
+1. The merge request is merged, and a deployment to the `production`
   environment starts and finishes at 19:30 (stop of **Staging** stage).
 
-From the above example we see the time used for each stage:
+From the previous example we see the time used for each stage:
 
-- **Issue**: 2h (11:00 - 09:00)
-- **Plan**: 1h (12:00 - 11:00)
-- **Code**: 2h (14:00 - 12:00)
-- **Test**: 5min
-- **Review**: 5h (19:00 - 14:00)
-- **Staging**: 30min (19:30 - 19:00)
+- **Issue**: 2 hrs (09:00 to 11:00)
+- **Plan**: 1 hr (11:00 to 12:00)
+- **Code**: 2 hrs (12:00 to 14:00)
+- **Test**: 5 mins
+- **Review**: 5 hrs (14:00 to 19:00)
+- **Staging**: 30 mins (19:00 to 19:30)
 
 More information:
 
-- The above example specifies the issue number in a latter commit. The process
-  still collects analytics data for that issue.
-- The time required in the **Test** stage is not included in the overall time of
-  the cycle. It is included in the **Review** process, as every MR should be
+- Although the previous example specifies the issue number in a later commit, the process
+  still collects analytics data for the issue.
+- The time required in the **Test** stage isn't included in the overall time of
+  the cycle. The time is included in the **Review** process, as every merge request should be
   tested.
-- The example above illustrates only **one cycle** of the multiple stages. Value
+- The previous example illustrates only one cycle of the multiple stages. Value
   Stream Analytics, on its dashboard, shows the calculated median elapsed time
   for these issues.
 
 ## Permissions
 
-The current permissions on the Project-level Value Stream Analytics dashboard are:
+The permissions for the project-level Value Stream Analytics dashboard include:
 
-- Public projects - anyone can access.
-- Internal projects - any authenticated user can access.
-- Private projects - any member Guest and above can access.
+| Project type | Permissions |
+|--------------|---------------------------------------|
+| Public | Anyone can access |
+| Internal | Any authenticated user can access |
+| Private | Any member Guest and above can access |
 
 You can [read more about permissions](../../user/permissions.md) in general.
 
 ## More resources
 
-Learn more about Value Stream Analytics in the following resources:
+Learn more about Value Stream Analytics with the following resources:
 
 - [Value Stream Analytics feature page](https://about.gitlab.com/stages-devops-lifecycle/value-stream-analytics/).
 - [Value Stream Analytics feature preview](https://about.gitlab.com/blog/2016/09/16/feature-preview-introducing-cycle-analytics/).


@@ -5,7 +5,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
 type: reference, howto
 ---
 
-# Application security **(ULTIMATE)**
+# Secure your application **(ULTIMATE)**
 
 GitLab can check your application for security vulnerabilities including:


@@ -61,6 +61,9 @@ Kubernetes version to any supported version at any time:
 
 Some GitLab features may support versions outside the range provided here.
 
+NOTE:
+[GKE Cluster creation](add_remove_clusters.md#create-new-cluster) by GitLab is currently not supported for Kubernetes 1.19+. For these versions you can create the cluster through GCP, then [Add existing cluster](add_remove_clusters.md#add-existing-cluster). See [the related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/331922) for more information.
+
 ### Adding and removing clusters
 
 See [Adding and removing Kubernetes clusters](add_remove_clusters.md) for details on how


@@ -5,7 +5,7 @@ info: "To determine the technical writer assigned to the Stage/Group associated
 type: reference
 ---
 
-# Projects **(FREE)**
+# Organize work with projects **(FREE)**
 
 In GitLab, you can create projects to host
 your codebase. You can also use projects to track issues, plan work,


@@ -583,7 +583,15 @@ When dragging issues between lists, different behavior occurs depending on the s
 
 ### Multi-select issue cards
 
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/18954) in GitLab 12.4.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/18954) in GitLab 12.4.
+> - [Placed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/61955) behind a [feature flag](../feature_flags.md), disabled by default in GitLab 14.0.
+> - Disabled on GitLab.com.
+> - Not recommended for production use.
+> - To use in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-multi-selecting-issue-cards). **(FREE SELF)**
+
+This in-development feature might not be available for your use. There can be
+[risks when enabling features still in development](../feature_flags.md#risks-when-enabling-features-still-in-development).
+Refer to this feature's version history for more details.
 
 You can select multiple issue cards, then drag the group to another position within the list, or to
 another list. This makes it faster to reorder many issues at once.
@@ -685,3 +693,22 @@ To disable it:
 
 ```ruby
 Feature.disable(:iteration_board_lists)
 ```
+
+### Enable or disable multi-selecting issue cards **(FREE SELF)**
+
+Multi-selecting issue cards is under development and not ready for production use. It is
+deployed behind a feature flag that is **disabled by default**.
+[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
+can enable it.
+
+To enable it:
+
+```ruby
+Feature.enable(:board_multi_select)
+```
+
+To disable it:
+
+```ruby
+Feature.disable(:board_multi_select)
+```


@@ -0,0 +1,68 @@
# frozen_string_literal: true
module BulkImports
module Common
module Extractors
class NdjsonExtractor
include Gitlab::ImportExport::CommandLineUtil
include Gitlab::Utils::StrongMemoize
EXPORT_DOWNLOAD_URL_PATH = "/%{resource}/%{full_path}/export_relations/download?relation=%{relation}"
def initialize(relation:)
@relation = relation
@tmp_dir = Dir.mktmpdir
end
def extract(context)
download_service(tmp_dir, context).execute
decompression_service(tmp_dir).execute
relations = ndjson_reader(tmp_dir).consume_relation('', relation)
BulkImports::Pipeline::ExtractedData.new(data: relations)
end
def remove_tmp_dir
FileUtils.remove_entry(tmp_dir)
end
private
attr_reader :relation, :tmp_dir
def filename
@filename ||= "#{relation}.ndjson.gz"
end
def download_service(tmp_dir, context)
@download_service ||= BulkImports::FileDownloadService.new(
configuration: context.configuration,
relative_url: relative_resource_url(context),
dir: tmp_dir,
filename: filename
)
end
def decompression_service(tmp_dir)
@decompression_service ||= BulkImports::FileDecompressionService.new(
dir: tmp_dir,
filename: filename
)
end
def ndjson_reader(tmp_dir)
@ndjson_reader ||= Gitlab::ImportExport::JSON::NdjsonReader.new(tmp_dir)
end
def relative_resource_url(context)
strong_memoize(:relative_resource_url) do
resource = context.portable.class.name.downcase.pluralize
encoded_full_path = context.entity.encoded_source_full_path
EXPORT_DOWNLOAD_URL_PATH % { resource: resource, full_path: encoded_full_path, relation: relation }
end
end
end
end
end
end
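
For reference, a rough sketch of how this extractor would be exercised in isolation; normally it is wired up through the pipeline DSL (as in `LabelsPipeline` below), and `context` is assumed to be a `BulkImports::Pipeline::Context`:

```ruby
# Sketch only - stand-alone use of the extractor added above.
extractor = BulkImports::Common::Extractors::NdjsonExtractor.new(relation: 'labels')

extracted = extractor.extract(context)  # => BulkImports::Pipeline::ExtractedData
extracted.data.each do |relation_hash, relation_index|
  # Each NDJSON row arrives as an [attributes_hash, row_index] pair.
end

extractor.remove_tmp_dir # clean up the downloaded and decompressed export
```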


@@ -1,53 +0,0 @@
# frozen_string_literal: true
module BulkImports
module Groups
module Graphql
module GetLabelsQuery
extend self
def to_s
<<-'GRAPHQL'
query ($full_path: ID!, $cursor: String, $per_page: Int) {
group(fullPath: $full_path) {
labels(first: $per_page, after: $cursor, onlyGroupLabels: true) {
page_info: pageInfo {
next_page: endCursor
has_next_page: hasNextPage
}
nodes {
title
description
color
created_at: createdAt
updated_at: updatedAt
}
}
}
}
GRAPHQL
end
def variables(context)
{
full_path: context.entity.source_full_path,
cursor: context.tracker.next_page,
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
}
end
def base_path
%w[data group labels]
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
end
end
end
end


@@ -4,6 +4,10 @@ module BulkImports
   module Groups
     module Pipelines
       class EntityFinisher
+        def self.ndjson_pipeline?
+          false
+        end
+
         def initialize(context)
           @context = context
         end


@@ -4,15 +4,35 @@ module BulkImports
   module Groups
     module Pipelines
       class LabelsPipeline
-        include Pipeline
+        include NdjsonPipeline
 
-        extractor BulkImports::Common::Extractors::GraphqlExtractor,
-            query: BulkImports::Groups::Graphql::GetLabelsQuery
+        RELATION = 'labels'
 
-        transformer Common::Transformers::ProhibitedAttributesTransformer
+        extractor ::BulkImports::Common::Extractors::NdjsonExtractor, relation: RELATION
 
-        def load(context, data)
-          Labels::CreateService.new(data).execute(group: context.group)
+        def transform(context, data)
+          relation_hash = data.first
+          relation_index = data.last
+          relation_definition = import_export_config.top_relation_tree(RELATION)
+
+          deep_transform_relation!(relation_hash, RELATION, relation_definition) do |key, hash|
+            Gitlab::ImportExport::Group::RelationFactory.create(
+              relation_index: relation_index,
+              relation_sym: key.to_sym,
+              relation_hash: hash,
+              importable: context.portable,
+              members_mapper: nil,
+              object_builder: object_builder,
+              user: context.current_user,
+              excluded_keys: import_export_config.relation_excluded_keys(key)
+            )
+          end
+        end
+
+        def load(_, label)
+          return unless label
+
+          label.save! unless label.persisted?
         end
       end
     end


@@ -0,0 +1,63 @@
# frozen_string_literal: true
module BulkImports
module NdjsonPipeline
extend ActiveSupport::Concern
include Pipeline
included do
ndjson_pipeline!
def deep_transform_relation!(relation_hash, relation_key, relation_definition, &block)
relation_key = relation_key_override(relation_key)
relation_definition.each do |sub_relation_key, sub_relation_definition|
sub_relation = relation_hash[sub_relation_key]
next unless sub_relation
current_item =
if sub_relation.is_a?(Array)
sub_relation
.map { |entry| deep_transform_relation!(entry, sub_relation_key, sub_relation_definition, &block) }
.tap { |entry| entry.compact! }
.presence
else
deep_transform_relation!(sub_relation, sub_relation_key, sub_relation_definition, &block)
end
if current_item
relation_hash[sub_relation_key] = current_item
else
relation_hash.delete(sub_relation_key)
end
end
yield(relation_key, relation_hash)
end
def after_run(_)
extractor.remove_tmp_dir if extractor.respond_to?(:remove_tmp_dir)
end
def relation_class(relation_key)
relation_key.to_s.classify.constantize
rescue NameError
relation_key.to_s.constantize
end
def relation_key_override(relation_key)
relation_key_overrides[relation_key.to_sym]&.to_s || relation_key
end
def relation_key_overrides
"Gitlab::ImportExport::#{portable.class}::RelationFactory::OVERRIDES".constantize
end
def object_builder
"Gitlab::ImportExport::#{portable.class}::ObjectBuilder".constantize
end
end
end
end
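
To make the helper names above concrete, here is a small hypothetical illustration of the key overrides; it relies only on behaviour covered by the `NdjsonPipeline` spec further down, and `group_pipeline` is assumed to be an instance of a pipeline that includes `BulkImports::NdjsonPipeline` with a `Group` portable:

```ruby
# Hypothetical illustration; not part of this commit.
group_pipeline.relation_key_override('labels')          # => 'group_labels'
group_pipeline.relation_class('MergeRequest::Metrics')  # => MergeRequest::Metrics
group_pipeline.relation_class('Badge')                  # => Badge

# deep_transform_relation! walks the relation tree and yields each (key, hash) pair,
# dropping sub-relations for which the block returns nil.
group_pipeline.deep_transform_relation!({ 'title' => 'Label 1' }, 'labels', {}) do |key, hash|
  hash.merge('resolved_key' => key)
end
# => { 'title' => 'Label 1', 'resolved_key' => 'group_labels' }
```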


@@ -8,8 +8,11 @@ module BulkImports
     include Runner
 
     NotAllowedError = Class.new(StandardError)
+    ExpiredError = Class.new(StandardError)
+    FailedError = Class.new(StandardError)
 
     CACHE_KEY_EXPIRATION = 2.hours
+    NDJSON_EXPORT_TIMEOUT = 30.minutes
 
     def initialize(context)
       @context = context
@@ -19,6 +22,14 @@ module BulkImports
       @tracker ||= context.tracker
     end
 
+    def portable
+      @portable ||= context.portable
+    end
+
+    def import_export_config
+      @import_export_config ||= context.import_export_config
+    end
+
     included do
       private
@@ -111,7 +122,7 @@ module BulkImports
         options = class_config[:options]
 
         if options
-          class_config[:klass].new(class_config[:options])
+          class_config[:klass].new(**class_config[:options])
         else
           class_config[:klass].new
         end
@@ -155,6 +166,14 @@ module BulkImports
         class_attributes[:abort_on_failure]
       end
 
+      def ndjson_pipeline!
+        class_attributes[:ndjson_pipeline] = true
+      end
+
+      def ndjson_pipeline?
+        class_attributes[:ndjson_pipeline]
+      end
+
       private
 
       def add_attribute(sym, klass, options)


@@ -16,6 +16,14 @@ module BulkImports
       @entity ||= tracker.entity
     end
 
+    def portable
+      @portable ||= entity.group || entity.project
+    end
+
+    def import_export_config
+      @import_export_config ||= ::BulkImports::FileTransfer.config_for(portable)
+    end
+
     def group
       @group ||= entity.group
     end


@@ -6,7 +6,7 @@ module BulkImports
       attr_reader :data
 
       def initialize(data: nil, page_info: {})
-        @data = Array.wrap(data)
+        @data = data.is_a?(Enumerator) ? data : Array.wrap(data)
         @page_info = page_info
       end
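
A quick sketch of the behavioural difference this change introduces (hypothetical values, assuming the class is otherwise unchanged):

```ruby
# Sketch only - illustrates why Enumerators are no longer wrapped.
array_data = BulkImports::Pipeline::ExtractedData.new(data: { 'title' => 'Label 1' })
array_data.data   # => [{ 'title' => 'Label 1' }]  (Array.wrap, as before)

lazy_rows = Enumerator.new { |y| y << [{ 'title' => 'Label 1' }, 0] }
ndjson_data = BulkImports::Pipeline::ExtractedData.new(data: lazy_rows)
ndjson_data.data  # => the Enumerator itself, so NDJSON rows can be streamed lazily
```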


@@ -37506,6 +37506,9 @@ msgstr ""
 msgid "You have insufficient permissions to create an on-call schedule for this project"
 msgstr ""
 
+msgid "You have insufficient permissions to remove an escalation policy from this project"
+msgstr ""
+
 msgid "You have insufficient permissions to remove an on-call rotation from this project"
 msgstr ""


@@ -41,9 +41,9 @@ RSpec.describe 'Multi Select Issue', :js do
   before do
     project.add_maintainer(user)
 
-    # multi-drag disabled with feature flag for now
+    # Multi select drag&drop support is temporarily disabled
     # https://gitlab.com/gitlab-org/gitlab/-/issues/289797
-    stub_feature_flags(graphql_board_lists: false)
+    stub_feature_flags(graphql_board_lists: false, board_multi_select: project)
 
     sign_in(user)
   end


@@ -72,6 +72,10 @@ describe('Board card', () => {
     await wrapper.vm.$nextTick();
   };
 
+  beforeEach(() => {
+    window.gon = { features: {} };
+  });
+
   afterEach(() => {
     wrapper.destroy();
     wrapper = null;
@@ -140,6 +144,10 @@ describe('Board card', () => {
   });
 
   describe('when using multi-select', () => {
+    beforeEach(() => {
+      window.gon = { features: { boardMultiSelect: true } };
+    });
+
     it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => {
       await multiSelectCard();

@@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { 'spec/fixtures/bulk_imports/labels.ndjson.gz' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(relation: 'labels') }
before do
allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
subject.instance_variable_set(:@tmp_dir, tmpdir)
end
after(:all) do
FileUtils.remove_entry(tmpdir) if File.directory?(tmpdir)
end
describe '#extract' do
before do
FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
allow_next_instance_of(BulkImports::FileDownloadService) do |service|
allow(service).to receive(:execute)
end
end
it 'returns ExtractedData' do
extracted_data = subject.extract(context)
label = extracted_data.data.first.first
expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
expect(label['title']).to include('Label')
expect(label['description']).to include('Label')
expect(label['type']).to eq('GroupLabel')
end
end
describe '#remove_tmp_dir' do
it 'removes tmp dir' do
expect(FileUtils).to receive(:remove_entry).with(tmpdir).once
subject.remove_tmp_dir
end
end
end


@@ -1,35 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
it 'has a valid query' do
tracker = create(:bulk_import_tracker)
context = BulkImports::Pipeline::Context.new(tracker)
query = GraphQL::Query.new(
GitlabSchema,
described_class.to_s,
variables: described_class.variables(context)
)
result = GitlabSchema.static_validator.validate(query)
expect(result[:errors]).to be_empty
end
describe '#data_path' do
it 'returns data path' do
expected = %w[data group labels nodes]
expect(described_class.data_path).to eq(expected)
end
end
describe '#page_info_path' do
it 'returns pagination information path' do
expected = %w[data group labels page_info]
expect(described_class.page_info_path).to eq(expected)
end
end
end


@@ -5,98 +5,87 @@ require 'spec_helper'
 RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
   let_it_be(:user) { create(:user) }
   let_it_be(:group) { create(:group) }
-  let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
+  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+  let_it_be(:filepath) { 'spec/fixtures/bulk_imports/labels.ndjson.gz' }
 
   let_it_be(:entity) do
     create(
       :bulk_import_entity,
+      group: group,
+      bulk_import: bulk_import,
       source_full_path: 'source/full/path',
       destination_name: 'My Destination Group',
-      destination_namespace: group.full_path,
-      group: group
+      destination_namespace: group.full_path
     )
   end
 
   let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
   let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
 
+  let(:tmpdir) { Dir.mktmpdir }
+
+  before do
+    FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
+    group.add_owner(user)
+  end
+
   subject { described_class.new(context) }
 
   describe '#run' do
-    it 'imports a group labels' do
-      first_page = extracted_data(title: 'label1', has_next_page: true)
-      last_page = extracted_data(title: 'label2')
-
-      allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
-        allow(extractor)
-          .to receive(:extract)
-          .and_return(first_page, last_page)
+    it 'imports group labels into destination group and removes tmpdir' do
+      allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+      allow_next_instance_of(BulkImports::FileDownloadService) do |service|
+        allow(service).to receive(:execute)
       end
 
-      expect { subject.run }.to change(Label, :count).by(2)
+      expect { subject.run }.to change(::GroupLabel, :count).by(1)
 
-      label = group.labels.order(:created_at).last
+      label = group.labels.first
 
-      expect(label.title).to eq('label2')
-      expect(label.description).to eq('desc')
-      expect(label.color).to eq('#428BCA')
-      expect(label.created_at).to eq(timestamp)
-      expect(label.updated_at).to eq(timestamp)
+      expect(label.title).to eq('Label 1')
+      expect(label.description).to eq('Label 1')
+      expect(label.color).to eq('#6699cc')
+      expect(File.directory?(tmpdir)).to eq(false)
     end
   end
 
   describe '#load' do
-    it 'creates the label' do
-      data = label_data('label')
+    context 'when label is not persisted' do
+      it 'saves the label' do
+        label = build(:group_label, group: group)
 
-      expect { subject.load(context, data) }.to change(Label, :count).by(1)
+        expect(label).to receive(:save!)
 
-      label = group.labels.first
+        subject.load(context, label)
+      end
+    end
 
-      data.each do |key, value|
-        expect(label[key]).to eq(value)
+    context 'when label is persisted' do
+      it 'does not save label' do
+        label = create(:group_label, group: group)
+
+        expect(label).not_to receive(:save!)
+
+        subject.load(context, label)
+      end
+    end
+
+    context 'when label is missing' do
+      it 'returns' do
+        expect(subject.load(context, nil)).to be_nil
       end
     end
   end
 
   describe 'pipeline parts' do
-    it { expect(described_class).to include_module(BulkImports::Pipeline) }
+    it { expect(described_class).to include_module(BulkImports::NdjsonPipeline) }
     it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
 
-    it 'has extractors' do
+    it 'has extractor' do
       expect(described_class.get_extractor)
         .to eq(
-          klass: BulkImports::Common::Extractors::GraphqlExtractor,
-          options: {
-            query: BulkImports::Groups::Graphql::GetLabelsQuery
-          }
+          klass: BulkImports::Common::Extractors::NdjsonExtractor,
+          options: { relation: described_class::RELATION }
         )
     end
-
-    it 'has transformers' do
-      expect(described_class.transformers)
-        .to contain_exactly(
-          { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
-        )
-    end
-  end
-
-  def label_data(title)
-    {
-      'title' => title,
-      'description' => 'desc',
-      'color' => '#428BCA',
-      'created_at' => timestamp.to_s,
-      'updated_at' => timestamp.to_s
-    }
-  end
-
-  def extracted_data(title:, has_next_page: false)
-    page_info = {
-      'has_next_page' => has_next_page,
-      'next_page' => has_next_page ? 'cursor' : nil
-    }
-
-    BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
   end
 end


@@ -0,0 +1,123 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::NdjsonPipeline do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:klass) do
Class.new do
include BulkImports::NdjsonPipeline
attr_reader :portable
def initialize(portable)
@portable = portable
end
end
end
subject { klass.new(group) }
it 'marks pipeline as ndjson' do
expect(klass.ndjson_pipeline?).to eq(true)
end
describe '#deep_transform_relation!' do
it 'transforms relation hash' do
transformed = subject.deep_transform_relation!({}, 'test', {}) do |key, hash|
hash.merge(relation_key: key)
end
expect(transformed[:relation_key]).to eq('test')
end
context 'when subrelations is an array' do
it 'transforms each element of the array' do
relation_hash = {
'key' => 'value',
'labels' => [
{ 'title' => 'label 1' },
{ 'title' => 'label 2' },
{ 'title' => 'label 3' }
]
}
relation_definition = { 'labels' => {} }
transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
hash.merge(relation_key: key)
end
transformed['labels'].each do |label|
expect(label[:relation_key]).to eq('group_labels')
end
end
end
context 'when subrelation is a hash' do
it 'transforms subrelation hash' do
relation_hash = {
'key' => 'value',
'label' => { 'title' => 'label' }
}
relation_definition = { 'label' => {} }
transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
hash.merge(relation_key: key)
end
expect(transformed['label'][:relation_key]).to eq('group_label')
end
end
context 'when subrelation is nil' do
it 'removes subrelation' do
relation_hash = {
'key' => 'value',
'label' => { 'title' => 'label' }
}
relation_definition = { 'label' => {} }
transformed = subject.deep_transform_relation!(relation_hash, 'test', relation_definition) do |key, hash|
if key == 'group_label'
nil
else
hash
end
end
expect(transformed['label']).to be_nil
end
end
end
describe '#relation_class' do
context 'when relation name is pluralized' do
it 'returns constantized class' do
expect(subject.relation_class('MergeRequest::Metrics')).to eq(MergeRequest::Metrics)
end
end
context 'when relation name is singularized' do
it 'returns constantized class' do
expect(subject.relation_class('Badge')).to eq(Badge)
end
end
end
describe '#relation_key_override' do
context 'when portable is group' do
it 'returns group relation name override' do
expect(subject.relation_key_override('labels')).to eq('group_labels')
end
end
context 'when portable is project' do
subject { klass.new(project) }
it 'returns project relation name override' do
expect(subject.relation_key_override('labels')).to eq('project_labels')
end
end
end
end
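
Reading aid only: the examples above pin down the contract of deep_transform_relation! (yield every relation hash together with its possibly overridden relation key, recurse into array and hash subrelations, drop subrelations the block rejects). A minimal sketch that would satisfy these expectations follows; the override table and the `portable` reader are assumptions for illustration, not the shipped implementation.

# Hypothetical sketch inferred from the spec above; not the code under test.
module NdjsonRelationTransform
  # The including class is assumed to expose `portable` (a Group or a Project).
  def relation_key_override(key)
    prefix = portable.is_a?(::Project) ? 'project' : 'group'
    { 'labels' => "#{prefix}_labels", 'label' => "#{prefix}_label" }.fetch(key, key)
  end

  # Yields (relation_key, hash) for the top-level hash and every nested subrelation,
  # replacing each subrelation with whatever the block returns (nil results are
  # dropped from arrays and left as nil for single subrelations).
  def deep_transform_relation!(relation_hash, relation_key, relation_definition, &block)
    relation_definition.each_key do |sub_key|
      subrelation = relation_hash[sub_key]
      next unless subrelation

      relation_hash[sub_key] =
        case subrelation
        when Array
          subrelation
            .map { |entry| deep_transform_relation!(entry, sub_key, relation_definition[sub_key], &block) }
            .compact
        when Hash
          deep_transform_relation!(subrelation, sub_key, relation_definition[sub_key], &block)
        end
    end

    yield(relation_key_override(relation_key), relation_hash)
  end
end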


@ -6,6 +6,9 @@ RSpec.describe BulkImports::Pipeline::Context do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_entity) { create(:bulk_import_entity, :project_entity, project: project) }
+ let_it_be(:project_tracker) { create(:bulk_import_tracker, entity: project_entity) }

  let_it_be(:entity) do
    create(
@ -51,4 +54,24 @@ RSpec.describe BulkImports::Pipeline::Context do
  describe '#extra' do
    it { expect(subject.extra).to eq(extra: :data) }
  end
+
+ describe '#portable' do
+   it { expect(subject.portable).to eq(group) }
+
+   context 'when portable is project' do
+     subject { described_class.new(project_tracker) }
+
+     it { expect(subject.portable).to eq(project) }
+   end
+ end
+
+ describe '#import_export_config' do
+   it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::GroupConfig) }
+
+   context 'when portable is project' do
+     subject { described_class.new(project_tracker) }
+
+     it { expect(subject.import_export_config).to be_instance_of(BulkImports::FileTransfer::ProjectConfig) }
+   end
+ end
end


@ -63,6 +63,7 @@ RSpec.describe BulkImports::Pipeline do
      BulkImports::MyPipeline.transformer(klass, options)
      BulkImports::MyPipeline.loader(klass, options)
      BulkImports::MyPipeline.abort_on_failure!
+     BulkImports::MyPipeline.ndjson_pipeline!

      expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: klass, options: options })
@ -74,6 +75,7 @@ RSpec.describe BulkImports::Pipeline do
      expect(BulkImports::MyPipeline.get_loader).to eq({ klass: klass, options: options })

      expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
+     expect(BulkImports::MyPipeline.ndjson_pipeline?).to eq(true)
    end
  end
end


@ -0,0 +1,77 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::ExportStatus do
let_it_be(:relation) { 'labels' }
let_it_be(:import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import, source_full_path: 'foo') }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:response_double) do
double(parsed_response: [{ 'relation' => 'labels', 'status' => status, 'error' => 'error!' }])
end
subject { described_class.new(tracker, relation) }
before do
allow_next_instance_of(BulkImports::Clients::Http) do |client|
allow(client).to receive(:get).and_return(response_double)
end
end
describe '#started?' do
context 'when export status is started' do
let(:status) { BulkImports::Export::STARTED }
it 'returns true' do
expect(subject.started?).to eq(true)
end
end
context 'when export status is not started' do
let(:status) { BulkImports::Export::FAILED }
it 'returns false' do
expect(subject.started?).to eq(false)
end
end
end
describe '#failed' do
context 'when export status is failed' do
let(:status) { BulkImports::Export::FAILED }
it 'returns true' do
expect(subject.failed?).to eq(true)
end
end
context 'when export status is not failed' do
let(:status) { BulkImports::Export::STARTED }
it 'returns false' do
expect(subject.failed?).to eq(false)
end
end
end
describe '#error' do
let(:status) { BulkImports::Export::FAILED }
it 'returns error message' do
expect(subject.error).to eq('error!')
end
context 'when something goes wrong during export status fetch' do
it 'returns exception class as error' do
allow_next_instance_of(BulkImports::Clients::Http) do |client|
allow(client).to receive(:get).and_raise(StandardError, 'Error!')
end
expect(subject.error).to eq('Error!')
end
end
end
end
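
For orientation, here is a minimal sketch of predicates that would behave as these examples expect; it assumes the relation statuses are fetched once through BulkImports::Clients::Http, and the `client`, `relation` and `status_endpoint` helpers are placeholders, not the real private API of the class under test.

# Hypothetical sketch consistent with the spec above; not the shipped implementation.
module ExportStatusSketch
  # Assumed to be mixed into something exposing `client`, `relation` and `status_endpoint`.
  def started?
    export_status['status'] == BulkImports::Export::STARTED
  end

  def failed?
    export_status['status'] == BulkImports::Export::FAILED
  end

  def error
    export_status['error']
  end

  private

  # Fetches all relation statuses once and picks the entry for the tracked relation.
  def export_status
    @export_status ||= client
      .get(status_endpoint)
      .parsed_response
      .find { |status| status['relation'] == relation }
  rescue StandardError => e
    # Treat a fetch problem as a failed export and surface the message via #error.
    { 'status' => BulkImports::Export::FAILED, 'error' => e.message }
  end
end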


@ -12,8 +12,8 @@ RSpec.describe BulkImports::FileTransfer::GroupConfig do
  subject { described_class.new(exportable) }

- describe '#exportable_tree' do
-   it 'returns exportable tree' do
+ describe '#portable_tree' do
+   it 'returns portable tree' do
      expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
        expect(finder).to receive(:find_root).with(:group).and_call_original
      end
@ -30,9 +30,21 @@ RSpec.describe BulkImports::FileTransfer::GroupConfig do
    end
  end

- describe '#exportable_relations' do
+ describe '#portable_relations' do
    it 'returns a list of top level exportable relations' do
      expect(subject.portable_relations).to include('milestones', 'badges', 'boards', 'labels')
    end
  end
+
+ describe '#top_relation_tree' do
+   it 'returns relation tree of a top level relation' do
+     expect(subject.top_relation_tree('labels')).to eq('priorities' => {})
+   end
+ end
+
+ describe '#relation_excluded_keys' do
+   it 'returns excluded keys for relation' do
+     expect(subject.relation_excluded_keys('group')).to include('owner_id')
+   end
+ end
end


@ -12,8 +12,8 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
  subject { described_class.new(exportable) }

- describe '#exportable_tree' do
-   it 'returns exportable tree' do
+ describe 'portable_tree' do
+   it 'returns portable tree' do
      expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
        expect(finder).to receive(:find_root).with(:project).and_call_original
      end
@ -30,9 +30,21 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
    end
  end

- describe '#exportable_relations' do
+ describe '#portable_relations' do
    it 'returns a list of top level exportable relations' do
      expect(subject.portable_relations).to include('issues', 'labels', 'milestones', 'merge_requests')
    end
  end
+
+ describe '#top_relation_tree' do
+   it 'returns relation tree of a top level relation' do
+     expect(subject.top_relation_tree('labels')).to eq('priorities' => {})
+   end
+ end
+
+ describe '#relation_excluded_keys' do
+   it 'returns excluded keys for relation' do
+     expect(subject.relation_excluded_keys('project')).to include('creator_id')
+   end
+ end
end


@ -32,11 +32,21 @@ RSpec.describe BulkImports::FileDownloadService do
    end
  end

- it 'downloads file' do
-   subject.execute
+ shared_examples 'downloads file' do
+   it 'downloads file' do
+     subject.execute

      expect(File.exist?(filepath)).to eq(true)
      expect(File.read(filepath)).to include('chunk')
+   end
+ end
+
+ include_examples 'downloads file'
+
+ context 'when content-type is application/gzip' do
+   let_it_be(:content_type) { 'application/gzip' }
+
+   include_examples 'downloads file'
  end

  context 'when url is not valid' do


@ -8,10 +8,16 @@ RSpec.describe BulkImports::PipelineWorker do
      def initialize(_); end

      def run; end
+
+     def self.ndjson_pipeline?
+       false
+     end
    end
  end

- let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }

  before do
    stub_const('FakePipeline', pipeline_class)
@ -27,6 +33,7 @@ RSpec.describe BulkImports::PipelineWorker do
      expect(BulkImports::Stage)
        .to receive(:pipeline_exists?)
        .with('FakePipeline')
+       .twice
        .and_return(true)

      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
@ -122,4 +129,114 @@ RSpec.describe BulkImports::PipelineWorker do
      expect(pipeline_tracker.jid).to eq('jid')
    end
  end
context 'when ndjson pipeline' do
let(:ndjson_pipeline) do
Class.new do
def initialize(_); end
def run; end
def self.ndjson_pipeline?
true
end
end
end
let(:pipeline_tracker) do
create(
:bulk_import_tracker,
entity: entity,
pipeline_name: 'NdjsonPipeline'
)
end
before do
stub_const('NdjsonPipeline', ndjson_pipeline)
stub_const('NdjsonPipeline::RELATION', 'test')
allow(BulkImports::Stage)
.to receive(:pipeline_exists?)
.with('NdjsonPipeline')
.and_return(true)
end
it 'runs the pipeline successfully' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:started?).and_return(false)
allow(status).to receive(:failed?).and_return(false)
end
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
expect(pipeline_tracker.reload.status_name).to eq(:finished)
end
context 'when export status is started' do
it 'reenqueues pipeline worker' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:started?).and_return(true)
allow(status).to receive(:failed?).and_return(false)
end
expect(described_class)
.to receive(:perform_in)
.with(
described_class::NDJSON_PIPELINE_PERFORM_DELAY,
pipeline_tracker.id,
pipeline_tracker.stage,
entity.id
)
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end
context 'when job reaches timeout' do
it 'marks as failed and logs the error' do
old_created_at = entity.created_at
entity.update!(created_at: (BulkImports::Pipeline::NDJSON_EXPORT_TIMEOUT + 1.hour).ago)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:error)
.with(
worker: described_class.name,
pipeline_name: 'NdjsonPipeline',
entity_id: entity.id,
message: 'Pipeline timeout'
)
end
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
expect(pipeline_tracker.reload.status_name).to eq(:failed)
entity.update!(created_at: old_created_at)
end
end
context 'when export status is failed' do
it 'marks as failed and logs the error' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:failed?).and_return(true)
allow(status).to receive(:error).and_return('Error!')
end
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:error)
.with(
worker: described_class.name,
pipeline_name: 'NdjsonPipeline',
entity_id: entity.id,
message: 'Error!'
)
end
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
expect(pipeline_tracker.reload.status_name).to eq(:failed)
end
end
end
end
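
The four ndjson examples above describe a small amount of gating around the remote export before the pipeline runs. A hedged sketch of that flow follows; helper names such as `fail_tracker`, `export_timed_out?` and `pipeline_tracker.pipeline_class` are assumptions for illustration, not the worker's real private API.

# Hypothetical sketch of the flow the examples above exercise; not the shipped worker code.
def process_ndjson_pipeline(pipeline_tracker)
  export_status = BulkImports::ExportStatus.new(pipeline_tracker, pipeline_tracker.pipeline_class::RELATION)

  # Source instance is still exporting this relation: check again after the delay.
  if export_status.started?
    return self.class.perform_in(
      NDJSON_PIPELINE_PERFORM_DELAY,
      pipeline_tracker.id,
      pipeline_tracker.stage,
      pipeline_tracker.entity.id
    )
  end

  # Remote export failed, or we have waited past the export timeout: mark the tracker
  # as failed and log the reason (assumed `fail_tracker` helper does both).
  return fail_tracker(pipeline_tracker, export_status.error) if export_status.failed?
  return fail_tracker(pipeline_tracker, 'Pipeline timeout') if export_timed_out?(pipeline_tracker)

  run(pipeline_tracker)
end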


@ -306,7 +306,6 @@ RSpec.describe 'Every Sidekiq worker' do
    'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob' => 3,
    'IncidentManagement::OncallRotations::PersistShiftsJob' => 3,
    'IncidentManagement::PagerDuty::ProcessIncidentWorker' => 3,
-   'IncidentManagement::ProcessPrometheusAlertWorker' => 3,
    'InvalidGpgSignatureUpdateWorker' => 3,
    'IrkerWorker' => 3,
    'IssuableExportCsvWorker' => 3,


@ -1,28 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
let(:payload_key) { Gitlab::AlertManagement::Payload::Prometheus.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let!(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
let(:alert_params) do
{
startsAt: prometheus_alert.created_at.rfc3339,
labels: {
gitlab_alert_id: prometheus_alert.prometheus_metric_id
}
}.with_indifferent_access
end
it 'does nothing' do
expect { subject.perform(project.id, alert_params) }
.not_to change(Issue, :count)
end
end
end