Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-09-27 18:12:38 +00:00
parent 28b15b6b1c
commit 223359576d
28 changed files with 259 additions and 138 deletions

View file

@@ -20,6 +20,8 @@
class BulkImports::Entity < ApplicationRecord
self.table_name = 'bulk_import_entities'
EXPORT_RELATIONS_URL = '/%{resource}/%{full_path}/export_relations'
belongs_to :bulk_import, optional: false
belongs_to :parent, class_name: 'BulkImports::Entity', optional: true
@@ -102,6 +104,14 @@ class BulkImports::Entity < ApplicationRecord
end
end
def pluralized_name
source_type.gsub('_entity', '').pluralize
end
def export_relations_url_path
@export_relations_url_path ||= EXPORT_RELATIONS_URL % { resource: pluralized_name, full_path: encoded_source_full_path }
end
private
def validate_parent_is_a_group

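For context, a minimal standalone sketch of what the two new helpers compute, assuming the `activesupport` gem is available; `CGI.escape` stands in for the model's `encoded_source_full_path`, and the `foo/bar` paths are placeholders:

```ruby
require 'cgi'
require 'active_support/core_ext/string/inflections' # for String#pluralize

EXPORT_RELATIONS_URL = '/%{resource}/%{full_path}/export_relations'

# Mirrors Entity#pluralized_name: 'group_entity' => 'groups', 'project_entity' => 'projects'
def pluralized_name(source_type)
  source_type.gsub('_entity', '').pluralize
end

# Mirrors Entity#export_relations_url_path; CGI.escape stands in for encoded_source_full_path
def export_relations_url_path(source_type, source_full_path)
  EXPORT_RELATIONS_URL % {
    resource: pluralized_name(source_type),
    full_path: CGI.escape(source_full_path)
  }
end

export_relations_url_path('group_entity', 'foo/bar')   # => "/groups/foo%2Fbar/export_relations"
export_relations_url_path('project_entity', 'foo/bar') # => "/projects/foo%2Fbar/export_relations"
```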
View file

@@ -41,7 +41,7 @@ module BulkImports
end
def status_endpoint
"/groups/#{entity.encoded_source_full_path}/export_relations/status"
File.join(entity.export_relations_url_path, 'status')
end
end
end

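With the same placeholder path, the rewritten `status_endpoint` simply appends `status` to whatever path the entity reports, so a project entity no longer ends up with a `/groups/...` URL:

```ruby
# export_path is a stand-in for entity.export_relations_url_path
export_path = '/projects/foo%2Fbar/export_relations'
File.join(export_path, 'status') # => "/projects/foo%2Fbar/export_relations/status"
```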
View file

@@ -26,7 +26,7 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
created_entities.first(next_batch_size).each do |entity|
entity.create_pipeline_trackers!
BulkImports::ExportRequestWorker.perform_async(entity.id) if entity.group_entity?
BulkImports::ExportRequestWorker.perform_async(entity.id)
BulkImports::EntityWorker.perform_async(entity.id)
entity.start!

View file

@@ -10,8 +10,6 @@ module BulkImports
worker_has_external_dependencies!
feature_category :importers
GROUP_EXPORTED_URL_PATH = "/groups/%s/export_relations"
def perform(entity_id)
entity = BulkImports::Entity.find(entity_id)
@@ -21,8 +19,7 @@ module BulkImports
private
def request_export(entity)
http_client(entity.bulk_import.configuration)
.post(GROUP_EXPORTED_URL_PATH % entity.encoded_source_full_path)
http_client(entity.bulk_import.configuration).post(entity.export_relations_url_path)
end
def http_client(configuration)

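A rough sketch of what the worker's request now amounts to, with plain `Net::HTTP` standing in for `BulkImports::Clients::HTTP`; the base URL, auth header, and token handling here are assumptions, since the real client manages configuration itself:

```ruby
require 'net/http'
require 'uri'

# Stand-in for http_client(entity.bulk_import.configuration).post(entity.export_relations_url_path)
def request_export(api_base_url, export_relations_url_path, token)
  uri = URI("#{api_base_url}#{export_relations_url_path}")
  request = Net::HTTP::Post.new(uri)
  request['Authorization'] = "Bearer #{token}" # assumption: the real client sets its own auth headers

  Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    http.request(request)
  end
end

# request_export('https://gitlab.example.com/api/v4', '/projects/foo%2Fbar/export_relations', '<token>')
```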
View file

@@ -10,15 +10,14 @@ Please check the ~"product intelligence" [guide](https://docs.gitlab.com/ee/deve
MSG
# exit if not matching files
# exit if not matching files or if no product intelligence labels
matching_changed_files = product_intelligence.matching_changed_files
return unless matching_changed_files.any?
labels = product_intelligence.missing_labels
return if matching_changed_files.empty? || labels.empty?
warn format(CHANGED_FILES_MESSAGE, changed_files: helper.markdown_list(matching_changed_files))
labels = product_intelligence.missing_labels
return unless labels.any?
gitlab.api.update_merge_request(gitlab.mr_json['project_id'],
gitlab.mr_json['iid'],
add_labels: labels)

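For illustration, the new control flow boiled down to a plain function, with the Danger plugin calls replaced by arguments; everything below is a stand-in rather than Danger's API, and the file path and label are just examples:

```ruby
# Returns what the Dangerfile would do for a given MR state.
def product_intelligence_outcome(matching_changed_files, missing_labels)
  return :nothing if matching_changed_files.empty? || missing_labels.empty?

  { warn_about_files: matching_changed_files, add_labels: missing_labels }
end

product_intelligence_outcome([], ['product intelligence'])     # => :nothing
product_intelligence_outcome(['app/models/usage_data.rb'], []) # => :nothing (labels already applied)
product_intelligence_outcome(['app/models/usage_data.rb'], ['product intelligence'])
# => { warn_about_files: [...], add_labels: ["product intelligence"] }
```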
View file

@@ -99,6 +99,9 @@ categories << :database if helper.mr_labels.include?('database')
# Ensure to spin for Product Intelligence reviewer when ~"product intelligence::review pending" is applied
categories << :product_intelligence if helper.mr_labels.include?("product intelligence::review pending")
# Skip Product intelligence reviews for growth experiment MRs
categories.delete(:product_intelligence) unless helper.mr_labels.include?("growth experiment")
if changes.any?
project = project_helper.project_name

View file

@@ -73,7 +73,7 @@ Example response:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68384) in GitLab 14.3.
For [integrated error tracking](https://gitlab.com/gitlab-org/gitlab/-/issues/329596) feature that is behind a disabled feature flag. Only for project maintainers.
For [integrated error tracking](https://gitlab.com/gitlab-org/gitlab/-/issues/329596) feature. Only for project maintainers.
### List project client keys

View file

@@ -134,13 +134,9 @@ You must use the GitLab API to enable it.
![Error Tracking Settings](img/error_tracking_setting_v14_3.png)
1. Create a client key (DSN) to use with Sentry SDK in your application. Make sure to save the
response, as it contains a DSN:
1. Select **Save changes**. After page reload you should see a text field with the DSN string. Copy it.
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/projects/PROJECT_ID/error_tracking/client_keys"
```
![Error Tracking Settings DSN](img/error_tracking_setting_dsn_v14_4.png)
1. Take the DSN from the previous step and configure your Sentry SDK with it. Errors are now
reported to the GitLab collector and are visible in the [GitLab UI](#error-tracking-list).

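The new docs end with "configure your Sentry SDK with the DSN"; as a hedged illustration, this is roughly what that looks like with the `sentry-ruby` gem (the environment variable name below is just a placeholder for wherever you keep the DSN copied from the settings page):

```ruby
require 'sentry-ruby'

Sentry.init do |config|
  # The DSN string copied from the error tracking settings page described above.
  config.dsn = ENV['GITLAB_ERROR_TRACKING_DSN']
end

# Captured errors are now reported to the GitLab collector and show up in the
# project's error tracking list.
Sentry.capture_message('integrated error tracking smoke test')
```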
Binary file not shown (new image added; 38 KiB).

View file

@@ -63,16 +63,6 @@ You can also use the [certificate-based method](../../../project/clusters/multip
but, for [security implications](#security-implications-for-clusters-connected-with-certificates),
we don't recommend using this method.
## Cluster levels
Choose your cluster's level according to its purpose:
| Level | Purpose |
|--|--|
| [Project level](../../../project/clusters/index.md) | Use your cluster for a single project. |
| [Group level](../../../group/clusters/index.md) | Use the same cluster across multiple projects within your group. |
| [Instance level](../../../instance/clusters/index.md) **(FREE SELF)** | Use the same cluster across groups and projects within your instance. |
## Supported cluster versions
GitLab is committed to support at least two production-ready Kubernetes minor
@@ -92,23 +82,33 @@ Kubernetes version to any supported version at any time:
Some GitLab features may support versions outside the range provided here.
## View your clusters
## Cluster levels
Choose your cluster's level according to its purpose:
| Level | Purpose |
|--|--|
| [Project level](../../../project/clusters/index.md) | Use your cluster for a single project. |
| [Group level](../../../group/clusters/index.md) | Use the same cluster across multiple projects within your group. |
| [Instance level](../../../instance/clusters/index.md) | Use the same cluster across groups and projects within your instance. |
### View your clusters
To view the Kubernetes clusters connected to your project,
group, or instance, open the cluster's page according to the
[level](#cluster-levels) of your cluster.
group, or instance, open the cluster's page according to
your cluster's level.
**Project-level clusters:**
1. On the top bar, select **Menu > Projects** and find your project.
1. On the left sidebar, select **Infrastructure > Kubernetes clusters**.
**[Group-level clusters](../../../group/clusters/index.md):**
**Group-level clusters:**
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Kubernetes**.
**[Instance-level clusters](../../../instance/clusters/index.md):**
**Instance-level clusters:**
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Kubernetes**.

View file

@@ -26,25 +26,46 @@ You can then modify the project files according to your needs.
**Steps:**
1. [Import the example project](#import-the-example-project).
1. [Add your GCP credentials to GitLab](#add-your-gcp-credentials-to-gitlab).
1. [Create your GCP and GitLab credentials](#create-your-gcp-and-gitlab-credentials).
1. [Configure your project](#configure-your-project).
1. [Deploy your cluster](#deploy-your-cluster).
## Import the example project
To create a new group-level cluster from GitLab using Infrastructure as Code, it is necessary
to create a project to manage the cluster from. In this tutorial, we import a pre-configured
sample project to help you get started.
Start by [importing the example project by URL](../../../project/import/repo_by_url.md). Use `https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-gke.git` as URL.
## Add your GCP credentials to GitLab
This project provides you with the following resources:
After importing the project, you need to set up [CI environment variables](../../../../ci/variables/index.md) to associate your cluster on GCP to your group in GitLab.
- A [cluster on Google Cloud Platform (GCP)](https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-gke/-/blob/master/gke.tf)
with defaults for name, location, node count, and Kubernetes version.
- A [`gitlab-admin` K8s service account](https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-gke/-/blob/master/gitlab-admin.tf) with `cluster-admin` privileges.
- The new group-level cluster connected to GitLab.
- Pre-configured Terraform files:
We advise that you [set environment variables through the GitLab UI](../../../../ci/variables/index.md#add-a-cicd-variable-to-a-project)
so that your credentials are not exposed through the code. To do so, follow the steps below.
```plaintext
├── backend.tf # State file Location Configuration
├── gke.tf # Google GKE Configuration
├── gitlab-admin.tf # Adding kubernetes service account
└── group_cluster.tf # Registering kubernetes cluster to GitLab `apps` Group
```
### Prepare your credentials on GCP
## Create your GCP and GitLab credentials
1. Create a [GCP service account](https://cloud.google.com/docs/authentication/getting-started) to authenticate GCP with GitLab. It needs the following roles: `Computer Network Viewer`, `Kubernetes Engine Admin`, and `Service Account User`.
1. Download the JSON file with the service account key.
To set up your project to communicate to GCP and the GitLab API:
1. Create a [GitLab personal access token](../../../profile/personal_access_tokens.md) with
`api` scope. The Terraform script uses it to connect the cluster to your GitLab group. Take note of the generated token. You will
need it when you [configure your project](#configure-your-project).
1. To authenticate GCP with GitLab, create a [GCP service account](https://cloud.google.com/docs/authentication/getting-started)
with following roles: `Compute Network Viewer`, `Kubernetes Engine Admin`, `Service Account User`, and `Service Account Admin`. Both User and Admin
service accounts are necessary. The User role impersonates the [default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account)
when [creating the node pool](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/using_gke_with_terraform#node-pool-management).
The Admin role creates a service account in the `kube-system` namespace.
1. Download the JSON file with the service account key you created in the previous step.
1. On your computer, encode the JSON file to `base64` (replace `/path/to/sa-key.json` with the path to your key):
```shell
@@ -53,36 +74,38 @@ so that your credentials are not exposed through the code. To do so, follow the
1. Use the output of this command as the `BASE64_GOOGLE_CREDENTIALS` environment variable in the next step.
### Add your credentials to GitLab as environment variables
1. On GitLab, from your project's sidebar, go to **Settings > CI/CD** and expand **Variables**.
1. Add your `GITLAB_TOKEN` ([personal access token](../../../profile/personal_access_tokens.md)).
1. Add the variable `BASE64_GOOGLE_CREDENTIALS` from the previous step.
## Configure your project
After authenticating with GCP, replace the project's defaults from the example
project with your own. To do so, edit the files as described below.
**Required configuration:**
Edit `gke.tf`:
Use CI/CD environment variables to configure your project as detailed below.
1. **(Required)** Replace the GCP `project` with a unique project name.
1. **(Optional)** Choose the `name` of your cluster.
1. **(Optional)** Choose the `region` and `zone` that you would like to deploy your cluster to.
1. Push the changes to your project's default branch.
**Required configuration:**
Edit `group_cluster.tf`:
1. On the left sidebar, select **Settings > CI/CD**.
1. Expand **Variables**.
1. Set the variable `TF_VAR_gitlab_token` to the GitLab personal access token you just created.
1. Set the variable `BASE64_GOOGLE_CREDENTIALS` to the `base64` encoded JSON file you just created.
1. Set the variable `TF_VAR_gcp_project` to your GCP's `project` name.
1. Set the variable `TF_VAR_gitlab_group` to the name of the group you want to connect your cluster to. If your group's URL is `https://gitlab.example.com/my-example-group`, `my-example-group` is your group's name.
1. **(Required)**: Replace the `full_path` with the path to your group.
1. **(Optional)**: Choose your cluster base domain through `domain`.
1. **(Optional)**: Choose your environment through `environment_scope`.
1. Push the changes to your project's default branch.
**Optional configuration:**
The file [`variables.tf`](https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-gke/-/blob/master/variables.tf)
contains other variables that you can override according to your needs:
- `TF_VAR_gcp_region`: Set your cluster's region.
- `TF_VAR_cluster_name`: Set your cluster's name.
- `TF_VAR_machine_type`: Set the machine type for the Kubernetes nodes.
- `TF_VAR_cluster_description`: Set a description for the cluster. We recommend setting this to `$CI_PROJECT_URL` to create a reference to your GitLab project on your GCP cluster detail page. This way you know which project was responsible for provisioning the cluster you see on the GCP dashboard.
- `TF_VAR_base_domain`: Set to the base domain to provision resources under.
- `TF_VAR_environment_scope`: Set to the environment scope for your cluster.
Refer to the [GitLab Terraform provider](https://registry.terraform.io/providers/gitlabhq/gitlab/latest/docs) and the [Google Terraform provider](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) documentation for further resource options.
## Deploy your cluster
After adjusting the files in the previous step, manually trigger the deployment of your cluster. In GitLab:
After configuring your project, manually trigger the deployment of your cluster. In GitLab:
1. From your project's sidebar, go to **CI/CD > Pipelines**.
1. Select the dropdown icon (**{angle-down}**) next to the play icon (**{play}**).

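The shell command elided by the hunk boundary above base64-encodes the GCP service-account key before it is stored as `BASE64_GOOGLE_CREDENTIALS`; a rough Ruby equivalent of that single step, with a placeholder path:

```ruby
require 'base64'

# Placeholder path; point this at the JSON key downloaded from GCP.
key_json = File.read('/path/to/sa-key.json')

# Use this value for the BASE64_GOOGLE_CREDENTIALS CI/CD variable.
puts Base64.strict_encode64(key_json)
```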
View file

@@ -60,7 +60,7 @@ module BulkImports
def relative_resource_url(context)
strong_memoize(:relative_resource_url) do
resource = context.portable.class.name.downcase.pluralize
resource = context.entity.pluralized_name
encoded_full_path = context.entity.encoded_source_full_path
EXPORT_DOWNLOAD_URL_PATH % { resource: resource, full_path: encoded_full_path, relation: relation }

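For illustration only: the exact `EXPORT_DOWNLOAD_URL_PATH` format string is outside this hunk, so the template below is an assumption shaped like the entity's `EXPORT_RELATIONS_URL`; the point is that `pluralized_name` now yields `projects` as well as `groups`:

```ruby
# Assumed template, for illustration; the real constant is defined elsewhere in this class.
EXPORT_DOWNLOAD_URL_PATH = '/%{resource}/%{full_path}/export_relations/download?relation=%{relation}'

EXPORT_DOWNLOAD_URL_PATH % { resource: 'projects', full_path: 'foo%2Fbar', relation: 'labels' }
# => "/projects/foo%2Fbar/export_relations/download?relation=labels"
```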
View file

@@ -1,7 +1,7 @@
# frozen_string_literal: true
module BulkImports
module Groups
module Common
module Pipelines
class LabelsPipeline
include NdjsonPipeline

View file

@@ -24,7 +24,7 @@ module BulkImports
stage: 1
},
labels: {
pipeline: BulkImports::Groups::Pipelines::LabelsPipeline,
pipeline: BulkImports::Common::Pipelines::LabelsPipeline,
stage: 1
},
milestones: {

View file

@@ -11,9 +11,13 @@ module BulkImports
pipeline: BulkImports::Projects::Pipelines::ProjectPipeline,
stage: 0
},
labels: {
pipeline: BulkImports::Common::Pipelines::LabelsPipeline,
stage: 1
},
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 1
stage: 2
}
}
end

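A toy sketch (not the real BulkImports runner) of how a stage map like the one above implies execution order; labels now run in stage 1, after the project itself and before the finisher:

```ruby
# Simplified stand-in for the project import stage config shown above.
config = {
  project:  { pipeline: 'BulkImports::Projects::Pipelines::ProjectPipeline', stage: 0 },
  labels:   { pipeline: 'BulkImports::Common::Pipelines::LabelsPipeline',    stage: 1 },
  finisher: { pipeline: 'BulkImports::Common::Pipelines::EntityFinisher',    stage: 2 }
}

config.values.group_by { |entry| entry[:stage] }.sort.each do |stage, entries|
  puts "stage #{stage}: #{entries.map { |e| e[:pipeline] }.join(', ')}"
end
# stage 0: BulkImports::Projects::Pipelines::ProjectPipeline
# stage 1: BulkImports::Common::Pipelines::LabelsPipeline
# stage 2: BulkImports::Common::Pipelines::EntityFinisher
```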
View file

@@ -19,13 +19,6 @@ module Gitlab
projects_counter.increment
end
def projects_counter
@projects_counter ||= Gitlab::Metrics.counter(
:"#{importer}_imported_projects_total",
'The number of imported projects'
)
end
def issues_counter
@issues_counter ||= Gitlab::Metrics.counter(
:"#{importer}_imported_issues_total",
@@ -50,6 +43,13 @@ module Gitlab
IMPORT_DURATION_BUCKETS
)
end
def projects_counter
@projects_counter ||= Gitlab::Metrics.counter(
:"#{importer}_imported_projects_total",
'The number of imported projects'
)
end
end
end
end

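The hunk above only moves `projects_counter` below the other counter helpers; for reference, the metric names these helpers register are interpolated from the importer symbol, for example:

```ruby
importer = :test_importer
:"#{importer}_imported_projects_total" # => :test_importer_imported_projects_total
```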
View file

@@ -57,58 +57,58 @@ RSpec.describe JiraConnect::EventsController do
expect(response).to have_gitlab_http_status(:ok)
end
end
end
describe '#uninstalled' do
let!(:installation) { create(:jira_connect_installation) }
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/uninstalled', 'POST', 'https://gitlab.test') }
describe '#uninstalled' do
let!(:installation) { create(:jira_connect_installation) }
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/uninstalled', 'POST', 'https://gitlab.test') }
before do
request.headers['Authorization'] = "JWT #{auth_token}"
before do
request.headers['Authorization'] = "JWT #{auth_token}"
end
subject(:post_uninstalled) { post :uninstalled }
context 'when JWT is invalid' do
let(:auth_token) { 'invalid_token' }
it 'returns 403' do
post_uninstalled
expect(response).to have_gitlab_http_status(:forbidden)
end
subject(:post_uninstalled) { post :uninstalled }
it 'does not delete the installation' do
expect { post_uninstalled }.not_to change { JiraConnectInstallation.count }
end
end
context 'when JWT is invalid' do
let(:auth_token) { 'invalid_token' }
it 'returns 403' do
post_uninstalled
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'does not delete the installation' do
expect { post_uninstalled }.not_to change { JiraConnectInstallation.count }
end
context 'when JWT is valid' do
let(:auth_token) do
Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret)
end
context 'when JWT is valid' do
let(:auth_token) do
Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret)
let(:jira_base_path) { '/-/jira_connect' }
let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
it 'calls the DestroyService and returns ok in case of success' do
expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
expect(destroy_service).to receive(:execute).and_return(true)
end
let(:jira_base_path) { '/-/jira_connect' }
let(:jira_event_path) { '/-/jira_connect/events/uninstalled' }
post_uninstalled
it 'calls the DestroyService and returns ok in case of success' do
expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
expect(destroy_service).to receive(:execute).and_return(true)
end
expect(response).to have_gitlab_http_status(:ok)
end
post_uninstalled
expect(response).to have_gitlab_http_status(:ok)
it 'calls the DestroyService and returns unprocessable_entity in case of failure' do
expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
expect(destroy_service).to receive(:execute).and_return(false)
end
it 'calls the DestroyService and returns unprocessable_entity in case of failure' do
expect_next_instance_of(JiraConnectInstallations::DestroyService, installation, jira_base_path, jira_event_path) do |destroy_service|
expect(destroy_service).to receive(:execute).and_return(false)
end
post_uninstalled
post_uninstalled
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end

View file

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }

View file

@@ -9,7 +9,7 @@ RSpec.describe BulkImports::Groups::Stage do
[1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
[2, BulkImports::Groups::Pipelines::BoardsPipeline]

View file

@@ -6,7 +6,8 @@ RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Common::Pipelines::EntityFinisher]
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::EntityFinisher]
]
end

View file

@@ -2,20 +2,22 @@
require 'spec_helper'
RSpec.describe Gitlab::Import::Metrics do
RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do
let(:importer) { :test_importer }
let(:project) { create(:project) }
let(:project) { double(:project, created_at: Time.current) }
let(:histogram) { double(:histogram) }
let(:counter) { double(:counter) }
subject { described_class.new(importer, project) }
describe '#report_import_time' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
allow(counter).to receive(:increment)
end
describe '#track_finished_import' do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
allow(Gitlab::Metrics).to receive(:histogram) { histogram }
allow(counter).to receive(:increment)
allow(histogram).to receive(:observe)
end
it 'emits importer metrics' do
@@ -37,4 +39,26 @@ RSpec.describe Gitlab::Import::Metrics do
subject.track_finished_import
end
end
describe '#issues_counter' do
it 'creates a counter for issues' do
expect(Gitlab::Metrics).to receive(:counter).with(
:test_importer_imported_issues_total,
'The number of imported issues'
)
subject.issues_counter
end
end
describe '#merge_requests_counter' do
it 'creates a counter for merge requests' do
expect(Gitlab::Metrics).to receive(:counter).with(
:test_importer_imported_merge_requests_total,
'The number of imported merge (pull) requests'
)
subject.merge_requests_counter
end
end
end

View file

@@ -207,4 +207,40 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
end
end
describe '#pluralized_name' do
context 'when entity is group' do
it 'returns groups' do
entity = build(:bulk_import_entity, :group_entity)
expect(entity.pluralized_name).to eq('groups')
end
end
context 'when entity is project' do
it 'returns projects' do
entity = build(:bulk_import_entity, :project_entity)
expect(entity.pluralized_name).to eq('projects')
end
end
end
describe '#export_relations_url_path' do
context 'when entity is group' do
it 'returns group export relations url' do
entity = build(:bulk_import_entity, :group_entity)
expect(entity.export_relations_url_path).to eq("/groups/#{entity.encoded_source_full_path}/export_relations")
end
end
context 'when entity is project' do
it 'returns project export relations url' do
entity = build(:bulk_import_entity, :project_entity)
expect(entity.export_relations_url_path).to eq("/projects/#{entity.encoded_source_full_path}/export_relations")
end
end
end
end

View file

@@ -1,11 +1,14 @@
# frozen_string_literal: true
# Expects `subject` to be a job/worker instance
# Expects `subject` to be a job/worker instance and
# `job_args` to be arguments to #perform if it takes arguments
RSpec.shared_examples 'reenqueuer' do
before do
allow(subject).to receive(:sleep) # faster tests
end
let(:subject_perform) { defined?(job_args) ? subject.perform(job_args) : subject.perform }
it 'implements lease_timeout' do
expect(subject.lease_timeout).to be_a(ActiveSupport::Duration)
end
@@ -18,12 +21,13 @@ RSpec.shared_examples 'reenqueuer' do
it 'tries to obtain a lease' do
expect_to_obtain_exclusive_lease(subject.lease_key)
subject.perform
subject_perform
end
end
end
# Expects `subject` to be a job/worker instance
# Expects `subject` to be a job/worker instance and
# `job_args` to be arguments to #perform if it takes arguments
RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_duration|
before do
# Allow Timecop freeze and travel without the block form
@@ -38,13 +42,15 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
Timecop.safe_mode = true
end
let(:subject_perform) { defined?(job_args) ? subject.perform(job_args) : subject.perform }
context 'when the work finishes in 0 seconds' do
let(:actual_duration) { 0 }
it 'sleeps exactly the minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(minimum_duration))
subject.perform
subject_perform
end
end
@@ -54,7 +60,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'sleeps 90% of minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.9 * minimum_duration))
subject.perform
subject_perform
end
end
@@ -64,7 +70,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'sleeps 10% of minimum duration' do
expect(subject).to receive(:sleep).with(a_value_within(0.01).of(0.1 * minimum_duration))
subject.perform
subject_perform
end
end
@@ -74,7 +80,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
subject.perform
subject_perform
end
end
@@ -84,7 +90,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
subject.perform
subject_perform
end
end
@@ -94,7 +100,7 @@ RSpec.shared_examples '#perform is rate limited to 1 call per' do |minimum_durat
it 'does not sleep' do
expect(subject).not_to receive(:sleep)
subject.perform
subject_perform
end
end

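A hypothetical spec showing how the updated shared example is meant to be consumed when `#perform` takes an argument; the worker name and the argument value below are made up:

```ruby
# Hypothetical worker spec; the `job_args` override is the only point being shown.
RSpec.describe SomeReenqueuingWorker do
  subject(:worker) { described_class.new }

  it_behaves_like 'reenqueuer' do
    # Because #perform takes an argument, the shared example calls
    # subject.perform(job_args) instead of subject.perform.
    let(:job_args) { 42 }
  end
end
```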
View file

@@ -59,6 +59,14 @@ RSpec.describe Tooling::Danger::ProductIntelligence do
it { is_expected.to be_empty }
end
context 'with growth experiment label' do
before do
allow(fake_helper).to receive(:mr_has_labels?).with('growth experiment').and_return(true)
end
it { is_expected.to be_empty }
end
end
describe '#matching_changed_files' do

View file

@@ -111,10 +111,10 @@ RSpec.describe BulkImportWorker do
end
context 'when there are project entities to process' do
it 'does not enqueue ExportRequestWorker' do
it 'enqueues ExportRequestWorker' do
create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
subject.perform(bulk_import.id)
end

View file

@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe BulkImports::ExportRequestWorker do
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
let_it_be(:version_url) { 'https://gitlab.example/api/v4/version' }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
@@ -20,16 +19,30 @@ RSpec.describe BulkImports::ExportRequestWorker do
allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
end
include_examples 'an idempotent worker' do
it 'requests relations export' do
expected = "/groups/foo%2Fbar/export_relations"
shared_examples 'requests relations export for api resource' do
include_examples 'an idempotent worker' do
it 'requests relations export' do
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(expected).twice
end
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(expected).twice
perform_multiple(job_args)
end
perform_multiple(job_args)
end
end
context 'when entity is group' do
let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
let(:expected) { '/groups/foo%2Fbar/export_relations'}
include_examples 'requests relations export for api resource'
end
context 'when entity is project' do
let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
let(:expected) { '/projects/foo%2Fbar/export_relations' }
include_examples 'requests relations export for api resource'
end
end
end

View file

@@ -257,6 +257,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Geo::Scheduler::SchedulerWorker' => 3,
'Geo::Scheduler::Secondary::SchedulerWorker' => 3,
'Geo::VerificationBatchWorker' => 0,
'Geo::VerificationStateBackfillWorker' => false,
'Geo::VerificationTimeoutWorker' => false,
'Geo::VerificationWorker' => 3,
'GeoRepositoryDestroyWorker' => 3,

View file

@@ -30,7 +30,7 @@ module Tooling
].freeze
def missing_labels
return [] unless helper.ci?
return [] if !helper.ci? || helper.mr_has_labels?('growth experiment')
labels = []
labels << 'product intelligence' unless helper.mr_has_labels?('product intelligence')