Add latest changes from gitlab-org/gitlab@master
Parent: 7dd006b7ab
Commit: 6eaa834284
30 changed files with 963 additions and 1600 deletions
@@ -48,9 +48,13 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
 - [ ] Create the migration file in `ee/db/geo/migrate`:

   ```shell
-  bin/rails generate geo_migration CreateCoolWidgetRegistry
+  bin/rails generate migration CreateCoolWidgetRegistry --database geo
   ```

+  Geo should continue using `Gitlab::Database::Migration[1.0]` until the `gitlab_geo` schema is supported, and is for the time being exempt from being validated by `Gitlab::Database::Migration[2.0]`. This requires a developer to manually amend the migration file to change from `[2.0]` to `[1.0]` due to the migration defaults being 2.0.
+
+  For more information, see the [Enable Geo migrations to use Migration[2.0]](https://gitlab.com/gitlab-org/gitlab/-/issues/363491) issue.
+
 - [ ] Replace the contents of the migration file with the following. Note that we cannot add a foreign key constraint on `cool_widget_id` because the `cool_widgets` table is in a different database. The application code must handle logic such as propagating deletions.

   ```ruby
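The registry migration itself is truncated in this extract. As a rough sketch only, under the checklist's own assumptions (the `CoolWidget` placeholder, no foreign key on `cool_widget_id`, and the `Migration[1.0]` requirement quoted above), the generated file might look like this; the column list is illustrative, not the verbatim template:

```ruby
# Illustrative sketch — not the verbatim template contents.
class CreateCoolWidgetRegistry < Gitlab::Database::Migration[1.0]
  def change
    create_table :cool_widget_registry do |t|
      # No foreign key constraint: cool_widgets lives in the main database,
      # so the application code must propagate deletions itself.
      t.bigint :cool_widget_id, null: false
      t.datetime :created_at, null: false
      t.integer :state, default: 0, null: false, limit: 2
      t.integer :retry_count, default: 0, limit: 2
      t.text :last_sync_failure

      t.index :cool_widget_id, unique: true
    end
  end
end
```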
@@ -50,9 +50,13 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
 - [ ] Create the migration file in `ee/db/geo/migrate`:

   ```shell
-  bin/rails generate geo_migration CreateCoolWidgetRegistry
+  bin/rails generate migration CreateCoolWidgetRegistry --database geo
   ```

+  Geo should continue using `Gitlab::Database::Migration[1.0]` until the `gitlab_geo` schema is supported, and is for the time being exempt from being validated by `Gitlab::Database::Migration[2.0]`. This requires a developer to manually amend the migration file to change from `[2.0]` to `[1.0]` due to the migration defaults being 2.0.
+
+  For more information, see the [Enable Geo migrations to use Migration[2.0]](https://gitlab.com/gitlab-org/gitlab/-/issues/363491) issue.
+
 - [ ] Replace the contents of the migration file with the following. Note that we cannot add a foreign key constraint on `cool_widget_id` because the `cool_widgets` table is in a different database. The application code must handle logic such as propagating deletions.

   ```ruby
@@ -31,7 +31,6 @@ Rails/FilePath:
     - 'ee/db/fixtures/development/32_compliance_report_violations.rb'
     - 'ee/lib/ee/feature/definition.rb'
    - 'ee/lib/ee/gitlab/usage/metric_definition.rb'
-    - 'ee/lib/generators/geo_migration/geo_migration_generator.rb'
     - 'ee/lib/gitlab/geo/health_check.rb'
     - 'ee/lib/tasks/gitlab/seed/metrics.rake'
     - 'ee/spec/db/production/license_spec.rb'
@@ -1,8 +1,8 @@
 <script>
 import { GlAlert, GlDropdown, GlDropdownItem, GlSprintf } from '@gitlab/ui';
 import { GlAreaChart } from '@gitlab/ui/dist/charts';
-import dateFormat from 'dateformat';
 import { get } from 'lodash';
+import { formatDate } from '~/lib/utils/datetime_utility';
 import axios from '~/lib/utils/axios_utils';
 import { __ } from '~/locale';
@@ -38,7 +38,10 @@ export default {
     },
     xAxis: {
       name: '',
-      type: 'category',
+      type: 'time',
+      axisLabel: {
+        formatter: (value) => formatDate(value, 'mmm dd'),
+      },
     },
   },
 };
@@ -74,7 +77,7 @@ export default {
       );
     },
     formattedData() {
-      return this.sortedData.map((value) => [dateFormat(value.date, 'mmm dd'), value.coverage]);
+      return this.sortedData.map((value) => [value.date, value.coverage]);
     },
     chartData() {
       return [
@@ -106,7 +109,7 @@ export default {
       this.selectedCoverageIndex = index;
     },
     formatTooltipText(params) {
-      this.tooltipTitle = params.value;
+      this.tooltipTitle = formatDate(params.value, 'mmm dd');
       this.coveragePercentage = get(params, 'seriesData[0].data[1]', '');
     },
   },
@@ -17,19 +17,8 @@ module ContainerRegistry
       idempotent!

       def perform
-        migration.enqueuer_loop? ? perform_with_loop : perform_without_loop
-      end
-
-      def self.enqueue_a_job
-        perform_async
-        perform_in(7.seconds) if ::ContainerRegistry::Migration.enqueue_twice?
-      end
-
-      private
-
-      def perform_with_loop
         try_obtain_lease do
-          while runnable? && Time.zone.now < loop_deadline && migration.enqueuer_loop?
+          while runnable? && Time.zone.now < loop_deadline
             repository_handled = handle_aborted_migration || handle_next_migration

             # no repository was found: stop the loop
@@ -43,40 +32,29 @@ module ContainerRegistry
           end
         end

-      def perform_without_loop
-        re_enqueue = false
-        try_obtain_lease do
-          break unless runnable?
-
-          re_enqueue = handle_aborted_migration || handle_next_migration
-        end
-        re_enqueue_if_capacity if re_enqueue
+      def self.enqueue_a_job
+        perform_async
       end

+      private
+
       def handle_aborted_migration
         return unless next_aborted_repository

-        log_on_done(:import_type, 'retry')
-        log_repository(next_aborted_repository)
-
         next_aborted_repository.retry_aborted_migration

         true
       rescue StandardError => e
         Gitlab::ErrorTracking.log_exception(e, next_aborted_repository_id: next_aborted_repository&.id)

-        migration.enqueuer_loop? ? false : true
+        false
       ensure
-        log_repository_migration_state(next_aborted_repository)
         log_repository_info(next_aborted_repository, import_type: 'retry')
       end

       def handle_next_migration
         return unless next_repository

-        log_on_done(:import_type, 'next')
-        log_repository(next_repository)
-
         # We return true because the repository was successfully processed (migration_state is changed)
         return true if tag_count_too_high?
         return unless next_repository.start_pre_import
@@ -88,7 +66,6 @@ module ContainerRegistry

         false
       ensure
-        log_repository_migration_state(next_repository)
         log_repository_info(next_repository, import_type: 'next')
       end

@@ -97,8 +74,6 @@ module ContainerRegistry
         return false unless next_repository.tags_count > migration.max_tags_count

         next_repository.skip_import(reason: :too_many_tags)
-        log_on_done(:tags_count_too_high, true)
-        log_on_done(:max_tags_count_setting, migration.max_tags_count)

         true
       end
@@ -180,29 +155,11 @@ module ContainerRegistry
         self.class.enqueue_a_job
       end

-      def log_repository(repository)
-        log_on_done(:container_repository_id, repository&.id)
-        log_on_done(:container_repository_path, repository&.path)
-      end
-
-      def log_repository_migration_state(repository)
-        return unless repository
-
-        log_on_done(:container_repository_migration_state, repository.migration_state)
-      end
-
-      def log_on_done(key, value)
-        return if migration.enqueuer_loop?
-
-        log_extra_metadata_on_done(key, value)
-      end
-
       def log_info(extras)
         logger.info(structured_payload(extras))
       end

       def log_repository_info(repository, extras = {})
-        return unless migration.enqueuer_loop?
-
         return unless repository

         repository_info = {
@@ -1,8 +0,0 @@
----
-name: container_registry_migration_phase2_enqueue_twice
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/86596
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543
-milestone: '15.0'
-type: development
-group: group::package
-default_enabled: false
@@ -1,8 +0,0 @@
----
-name: container_registry_migration_phase2_enqueuer_loop
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87168
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/350543
-milestone: '15.0'
-type: development
-group: group::package
-default_enabled: false
@@ -4,15 +4,7 @@ group: none
 info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
 ---

-# Deprecations by milestone
+# Deprecations by version

-DISCLAIMER:
-This page contains information related to upcoming products, features, and functionality.
-It is important to note that the information presented is for informational purposes only.
-Please do not rely on this information for purchasing or planning purposes.
-As with all projects, the items mentioned on this page are subject to change or delay.
-The development, release, and timing of any products, features, or functionality remain at the
-sole discretion of GitLab Inc.
-
 <!-- vale off -->
@@ -38,33 +30,38 @@ For deprecation reviewers (Technical Writers only):

 {::options parse_block_html="true" /}

-View deprecations by the product versions in which they were announced.
+In each release, GitLab announces features that are deprecated and no longer recommended for use.
+Each deprecated feature will be removed in a future release.
+Some features cause breaking changes when they are removed.

-Each deprecation has a **planned removal milestone** and indicates whether it is a breaking change.
-Most of the deprecations are **planned for removal in 15.0**, and many of them are **breaking changes**.
+DISCLAIMER:
+This page contains information related to upcoming products, features, and functionality.
+It is important to note that the information presented is for informational purposes only.
+Please do not rely on this information for purchasing or planning purposes.
+As with all projects, the items mentioned on this page are subject to change or delay.
+The development, release, and timing of any products, features, or functionality remain at the
+sole discretion of GitLab Inc.

 <div class="js-deprecation-filters"></div>
 <% if milestones.any? -%>
   <%- milestones.each do |milestone| %>
 <div class="announcement-milestone">

-## <%= milestone %>
+## Announced in <%= milestone %>

 <%- entries.select{|d| d["announcement_milestone"] == milestone}.each do |deprecation| %>
 <div class="deprecation removal-<%= deprecation["removal_milestone"].gsub('.', '') %><% if deprecation["breaking_change"] -%> breaking-change<% end %>">

 ### <%= deprecation["name"]%>

+Planned removal: GitLab <span class="removal-milestone"><%= deprecation["removal_milestone"]%></span> (<%= deprecation["removal_date"]%>)
 <% if deprecation["breaking_change"] -%>

 WARNING:
-This feature will be changed or removed in <%= deprecation["removal_milestone"]%>
-as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
-Before updating GitLab, review the details carefully to determine if you need to make any
-changes to your code, settings, or workflow.
+This is a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
+Review the details carefully before upgrading.

 <%= deprecation["body"] -%><% else %>
 <%= deprecation["body"] -%><% end %><%- if deprecation["removal_milestone"] -%>

-**Planned removal milestone: <span class="removal-milestone"><%= deprecation["removal_milestone"]%></span> (<%= deprecation["removal_date"]%>)**
 </div>
 <%- end -%>
 <%- end -%>
@@ -4,7 +4,7 @@ group: none
 info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
 ---

-# Removals by milestone
+# Removals by version

 <!-- vale off -->
@@ -35,13 +35,11 @@ For removal reviewers (Technical Writers only):
 <% if removal["breaking_change"] -%>

 WARNING:
-This feature was changed or removed in <%= removal["removal_milestone"]%>
-as a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
-Before updating GitLab, review the details carefully to determine if you need to make any
-changes to your code, settings, or workflow.
+This is a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
+Review the details carefully before upgrading.

 <%= removal["body"] -%><% else %>
 <%= removal["body"] -%><% end %><%- end -%><%- end -%>
 <%- else -%>
-Features scheduled for removal will be listed here, sorted by GitLab milestone.
+Features scheduled for removal will be listed here, sorted by GitLab version.
 <% end -%>
@@ -381,11 +381,12 @@ memory, disk, and CPU utilization.

 [Read more about the node exporter](node_exporter.md).

-### Puma exporter
+### Web exporter

-The Puma exporter allows you to measure various Puma metrics.
+The web exporter is a dedicated metrics server that allows splitting end-user and Prometheus traffic
+into two separate applications to improve performance and availability.

-[Read more about the Puma exporter](puma_exporter.md).
+[Read more about the web exporter](puma_exporter.md).

 ### Redis exporter
@@ -1,15 +1,37 @@
 ---
-stage: Monitor
-group: Respond
+stage: Data Stores
+group: Memory
 info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
 ---

-# Puma exporter **(FREE SELF)**
+# Web exporter (dedicated metrics server) **(FREE SELF)**

-You can use the [Puma exporter](https://github.com/sapcc/puma-exporter)
-to measure various Puma metrics.
+When [monitoring GitLab with Prometheus](index.md), GitLab runs various collectors that
+sample the application for data related to usage, load and performance. GitLab can then make
+this data available to a Prometheus scraper by running one or more Prometheus exporters.
+A Prometheus exporter is an HTTP server that serializes metric data into a format the
+Prometheus scraper understands.

-To enable the Puma exporter:
+NOTE:
+This page is about web application metrics.
+To export background job metrics, learn how to [configure the Sidekiq metrics server](../../sidekiq.md#configure-the-sidekiq-metrics-server).
+
+We provide two mechanisms by which web application metrics can be exported:
+
+- Through the main Rails application. This means [Puma](../../operations/puma.md), the application server we use,
+  makes metric data available via its own `/-/metrics` endpoint. This is the default,
+  and is described in [GitLab Metrics](index.md#gitlab-metrics). We recommend this
+  default for small GitLab installations where the amount of metrics collected is small.
+- Through a dedicated metrics server. Enabling this server will cause Puma to launch an
+  additional process whose sole responsibility is to serve metrics. This approach leads
+  to better fault isolation and performance for very large GitLab installations, but
+  comes with additional memory use. We recommend this approach for medium to large
+  GitLab installations that seek high performance and availability.
+
+Both the dedicated server and the Rails `/-/metrics` endpoint serve the same data, so
+they are functionally equivalent and differ merely in their performance characteristics.
+
+To enable the dedicated server:

 1. [Enable Prometheus](index.md#configuring-prometheus).
 1. Edit `/etc/gitlab/gitlab.rb` to add (or find and uncomment) the following lines. Make sure
|
||||||
puma['exporter_port'] = 8083
|
puma['exporter_port'] = 8083
|
||||||
```
|
```
|
||||||
|
|
||||||
|
1. When using the GitLab-bundled Prometheus, make sure that its `scrape_config` is pointing
|
||||||
|
to `localhost:8083/metrics`. Refer to the [Adding custom scrape configurations](index.md#adding-custom-scrape-configurations) page
|
||||||
|
for how to configure scraper targets. For external Prometheus setup, refer to
|
||||||
|
[Using an external Prometheus server](index.md#using-an-external-prometheus-server) instead.
|
||||||
1. Save the file and [reconfigure GitLab](../../restart_gitlab.md#omnibus-gitlab-reconfigure)
|
1. Save the file and [reconfigure GitLab](../../restart_gitlab.md#omnibus-gitlab-reconfigure)
|
||||||
for the changes to take effect.
|
for the changes to take effect.
|
||||||
|
|
||||||
Prometheus begins collecting performance data from the Puma exporter exposed at `localhost:8083`.
|
Metrics can now be served and scraped from `localhost:8083/metrics`.
|
||||||
|
|
||||||
For more information on using Puma with GitLab, see [Puma](../../operations/puma.md).
|
|
||||||
|
|
|
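For orientation, the `/etc/gitlab/gitlab.rb` fragment these steps edit might look like the following. Only `puma['exporter_port'] = 8083` appears verbatim in the diff; the other keys follow common Omnibus-style naming and should be treated as assumptions:

```ruby
# /etc/gitlab/gitlab.rb — sketch of the dedicated metrics server settings.
puma['exporter_enabled'] = true        # assumed key: start the separate metrics process
puma['exporter_address'] = '127.0.0.1' # assumed key: bind locally for the bundled Prometheus
puma['exporter_port'] = 8083           # from the diff; scraped at localhost:8083/metrics
```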
@@ -281,4 +281,4 @@ To switch from Unicorn to Puma:

 ## Related topics

-- [Use the Puma exporter to measure various Puma metrics](../monitoring/prometheus/puma_exporter.md)
+- [Use a dedicated metrics server to export web metrics](../monitoring/prometheus/puma_exporter.md)
@@ -31,7 +31,7 @@ by default:
 | GitLab Exporter | Yes | Port | X | 9168 |
 | Sidekiq exporter | Yes | Port | X | 8082 |
 | Sidekiq health check | No | Port | X | 8092[^Sidekiq-health] |
-| Puma exporter | No | Port | X | 8083 |
+| Web exporter | No | Port | X | 8083 |
 | Geo PostgreSQL | No | Socket | Port (5431) | X |
 | Redis Sentinel | No | Port | X | 26379 |
 | Incoming email | No | Port | X | 143 |
@@ -17,7 +17,7 @@ To use GitLab CI/CD with a Bitbucket Cloud repository:
 1. In GitLab, create a project:
    1. On the top menu, select **Projects > Create new project**.
    1. Select **Run CI/CD for external repository**.
-   1. Select **Repo by URL**.
+   1. Select **Repository by URL**.

    ![Create project](img/external_repository.png)
@@ -62,7 +62,7 @@ To manually enable GitLab CI/CD for your repository:
    `repo` so that GitLab can access your project and update commit statuses.
 1. In GitLab, create a project:
    1. On the top menu, select **Projects > Create new project**.
-   1. Select **Run CI/CD for external repository** and **Repo by URL**.
+   1. Select **Run CI/CD for external repository** and **Repository by URL**.
    1. In the **Git repository URL** field, enter the HTTPS URL for your GitHub repository.
       If your project is private, use the personal access token you just created for authentication.
    1. Fill in all the other fields and select **Create project**.
@@ -24,15 +24,11 @@ snippets disabled. These features

 To connect to an external repository:

-<!-- vale gitlab.Spelling = NO -->
-
 1. On the top bar, select **Menu > Projects > Create new project**.
 1. Select **Run CI/CD for external repository**.
-1. Select **GitHub** or **Repo by URL**.
+1. Select **GitHub** or **Repository by URL**.
 1. Complete the fields.

-<!-- vale gitlab.Spelling = YES -->
-
 ## Pipelines for external pull requests

 > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/65139) in GitLab 12.3.
@@ -192,12 +192,16 @@ The database configuration is set in [`config/database.yml`](https://gitlab.com/
 The directory [`ee/db/geo`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/db/geo)
 contains the schema and migrations for this database.

-To write a migration for the database, use the `GeoMigrationGenerator`:
+To write a migration for the database, run:

 ```shell
-rails g geo_migration [args] [options]
+rails g migration [args] [options] --database geo
 ```

+Geo should continue using `Gitlab::Database::Migration[1.0]` until the `gitlab_geo` schema is supported, and is for the time being exempt from being validated by `Gitlab::Database::Migration[2.0]`. This requires a developer to manually amend the migration file to change from `[2.0]` to `[1.0]` due to the migration defaults being 2.0.
+
+For more information, see the [Enable Geo migrations to use Migration[2.0]](https://gitlab.com/gitlab-org/gitlab/-/issues/363491) issue.
+
 To migrate the tracking database, run:

 ```shell
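As a concrete illustration of the manual amendment described above, a freshly generated Geo migration would be edited roughly as follows; the class, table, and column names are placeholders, not part of the diff:

```ruby
# The generator emits Gitlab::Database::Migration[2.0] by default.
# For the Geo tracking database, change it to [1.0] until the gitlab_geo
# schema is supported.
class AddPlaceholderColumnToWidgetRegistry < Gitlab::Database::Migration[1.0]
  def change
    add_column :widget_registry, :placeholder_column, :integer, default: 0, null: false
  end
end
```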
(Two file diffs suppressed because they are too large.)
@@ -49,7 +49,7 @@ To create a project from the cluster management project template:
 1. Select **Create project**.

 If you use self-managed GitLab, your instance might not include the latest version of the template.
-In that case, select **Import project**, **Repo by URL** and for the **Git repository URL**, enter
+In that case, select **Import project**, **Repository by URL** and for the **Git repository URL**, enter
 `https://gitlab.com/gitlab-org/project-templates/cluster-management.git`.

 ## Configure the project
@@ -37,7 +37,7 @@ To import the project:

 1. On the top bar, select **Menu > Create new project**.
 1. Select **Import project**.
-1. Select **Repo by URL**.
+1. Select **Repository by URL**.
 1. For the **Git repository URL**, enter `https://gitlab.com/civocloud/gitlab-terraform-civo.git`.
 1. Complete the fields and select **Create project**.
@@ -36,7 +36,7 @@ To import the project:

 1. On the top bar, select **Menu > Create new project**.
 1. Select **Import project**.
-1. Select **Repo by URL**.
+1. Select **Repository by URL**.
 1. For the **Git repository URL**, enter `https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-eks.git`.
 1. Complete the fields and select **Create project**.
@@ -43,7 +43,7 @@ To import the project:

 1. On the top bar, select **Menu > Create new project**.
 1. Select **Import project**.
-1. Select **Repo by URL**.
+1. Select **Repository by URL**.
 1. For the **Git repository URL**, enter `https://gitlab.com/gitlab-org/configure/examples/gitlab-terraform-gke.git`.
 1. Complete the fields and select **Create project**.
@@ -9,19 +9,13 @@ info: To determine the technical writer assigned to the Stage/Group associated w

 You can import your existing repositories by providing the Git URL:

-<!-- vale gitlab.Spelling = NO -->
-<!-- vale gitlab.SubstitutionWarning = NO -->
-
 1. From your GitLab dashboard select **New project**.
 1. Switch to the **Import project** tab.
-1. Select **Repo by URL**.
+1. Select **Repository by URL**.
 1. Fill in the "Git repository URL" and the remaining project fields.
 1. Select **Create project** to begin the import process.
 1. Once complete, you are redirected to your newly created project.

-<!-- vale gitlab.Spelling = YES -->
-<!-- vale gitlab.SubstitutionWarning = YES -->
-
 ![Import project by repository URL](img/import_projects_from_repo_url.png)

 ## Automate group and project import **(PREMIUM)**
@@ -73,13 +73,5 @@ module ContainerRegistry
     def self.all_plans?
       Feature.enabled?(:container_registry_migration_phase2_all_plans)
     end
-
-    def self.enqueue_twice?
-      Feature.enabled?(:container_registry_migration_phase2_enqueue_twice)
-    end
-
-    def self.enqueuer_loop?
-      Feature.enabled?(:container_registry_migration_phase2_enqueuer_loop)
-    end
   end
 end
@@ -50,8 +50,11 @@ module QA

         # 500 error page in header surrounded by newlines, try to match
         five_hundred_test = Nokogiri::HTML.parse(page.html).xpath("//h1/text()").map.first
+        five_hundred_title = Nokogiri::HTML.parse(page.html).xpath("//head/title/text()").map.first
         unless five_hundred_test.nil?
-          error_code = 500 if five_hundred_test.text.include?('500')
+          error_code = 500 if
+            five_hundred_test.text.include?('500') &&
+              five_hundred_title.text.eql?('Something went wrong (500)')
         end
         # GDK shows backtrace rather than error page
         error_code = 500 if Nokogiri::HTML.parse(page.html).xpath("//body//section").map { |t| t[:class] }.first.eql?('backtrace')
@@ -182,9 +182,10 @@ RSpec.describe QA::Support::PageErrorChecker do
       "</div>"
     end

-    let(:error_500_str) { "<h1> 500 </h1>"}
-    let(:backtrace_str) {"<body><section class=\"backtrace\">foo</section></body>"}
-    let(:no_error_str) {"<body>no 404 or 500 or backtrace</body>"}
+    let(:error_500_str) { "<head><title>Something went wrong (500)</title></head><body><h1> 500 </h1></body>"}
+    let(:project_name_500_str) {"<head><title>Project</title></head><h1 class=\"home-panel-title gl-mt-3 gl-mb-2\" itemprop=\"name\">qa-test-2022-05-25-12-12-16-d4500c2e79c37289</h1>"}
+    let(:backtrace_str) {"<head><title>Error::Backtrace</title></head><body><section class=\"backtrace\">foo</section></body>"}
+    let(:no_error_str) {"<head><title>Nothing wrong here</title></head><body>no 404 or 500 or backtrace</body>"}

     it 'calls report with 404 if 404 found' do
       allow(page).to receive(:html).and_return(error_404_str)
@@ -207,6 +208,13 @@ RSpec.describe QA::Support::PageErrorChecker do
       expect(QA::Support::PageErrorChecker).to receive(:report!).with(page, 500)
       QA::Support::PageErrorChecker.check_page_for_error_code(page)
     end
+    it 'does not call report if 500 found in project name' do
+      allow(page).to receive(:html).and_return(project_name_500_str)
+      allow(Nokogiri::HTML).to receive(:parse).with(project_name_500_str).and_return(NokogiriParse.parse(project_name_500_str))
+
+      expect(QA::Support::PageErrorChecker).not_to receive(:report!)
+      QA::Support::PageErrorChecker.check_page_for_error_code(page)
+    end
     it 'does not call report if no 404, 500 or backtrace found' do
       allow(page).to receive(:html).and_return(no_error_str)
       allow(Nokogiri::HTML).to receive(:parse).with(no_error_str).and_return(NokogiriParse.parse(no_error_str))
@@ -229,32 +229,4 @@ RSpec.describe ContainerRegistry::Migration do
       it { is_expected.to eq(false) }
     end
   end
-
-  describe '.enqueue_twice?' do
-    subject { described_class.enqueue_twice? }
-
-    it { is_expected.to eq(true) }
-
-    context 'feature flag disabled' do
-      before do
-        stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
-      end
-
-      it { is_expected.to eq(false) }
-    end
-  end
-
-  describe '.enqueue_loop?' do
-    subject { described_class.enqueuer_loop? }
-
-    it { is_expected.to eq(true) }
-
-    context 'feature flag disabled' do
-      before do
-        stub_feature_flags(container_registry_migration_phase2_enqueuer_loop: false)
-      end
-
-      it { is_expected.to eq(false) }
-    end
-  end
 end
@@ -208,23 +208,9 @@ RSpec.describe ContainerRepository, :aggregate_failures do
   shared_examples 'queueing the next import' do
     it 'starts the worker' do
       expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
-      expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_in)

       subject
     end
-
-    context 'enqueue_twice feature flag disabled' do
-      before do
-        stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
-      end
-
-      it 'starts the worker only once' do
-        expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
-        expect(::ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
-
-        subject
-      end
-    end
   end

   describe '#start_pre_import' do
@ -32,525 +32,156 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
context 'with container_registry_migration_phase2_enqueuer_loop disabled' do
|
context 'migrations are disabled' do
|
||||||
before do
|
before do
|
||||||
stub_feature_flags(container_registry_migration_phase2_enqueuer_loop: false)
|
allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
|
||||||
end
|
end
|
||||||
|
|
||||||
shared_examples 're-enqueuing based on capacity' do |capacity_limit: 4|
|
it_behaves_like 'no action' do
|
||||||
context 'below capacity' do
|
|
||||||
before do
|
|
||||||
allow(ContainerRegistry::Migration).to receive(:capacity).and_return(capacity_limit)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 're-enqueues the worker' do
|
|
||||||
expect(described_class).to receive(:perform_async)
|
|
||||||
expect(described_class).to receive(:perform_in).with(7.seconds)
|
|
||||||
|
|
||||||
subject
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'enqueue_twice feature flag disabled' do
|
|
||||||
before do
|
|
||||||
stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'only enqueues the worker once' do
|
|
||||||
expect(described_class).to receive(:perform_async)
|
|
||||||
expect(described_class).not_to receive(:perform_in)
|
|
||||||
|
|
||||||
subject
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'above capacity' do
|
|
||||||
before do
|
|
||||||
allow(ContainerRegistry::Migration).to receive(:capacity).and_return(-1)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'does not re-enqueue the worker' do
|
|
||||||
expect(described_class).not_to receive(:perform_async)
|
|
||||||
expect(described_class).not_to receive(:perform_in).with(7.seconds)
|
|
||||||
|
|
||||||
subject
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'with qualified repository' do
|
|
||||||
         before do
-        allow_worker(on: :next_repository) do |repository|
+          expect_log_extra_metadata(migration_enabled: false)
-          allow(repository).to receive(:migration_pre_import).and_return(:ok)
-        end
-      end
-
-      shared_examples 'starting the next import' do
-        it 'starts the pre-import for the next qualified repository' do
-          expect_log_extra_metadata(
-            import_type: 'next',
-            container_repository_id: container_repository.id,
-            container_repository_path: container_repository.path,
-            container_repository_migration_state: 'pre_importing'
-          )
-
-          expect { subject }.to make_queries_matching(/LIMIT 2/)
-
-          expect(container_repository.reload).to be_pre_importing
-        end
-      end
-
-      it_behaves_like 'starting the next import'
-
-      context 'when the new pre-import maxes out the capacity' do
-        before do
-          # set capacity to 10
-          stub_feature_flags(
-            container_registry_migration_phase2_capacity_25: false,
-            container_registry_migration_phase2_capacity_40: false
-          )
-
-          # Plus 2 created above gives 9 importing repositories
-          create_list(:container_repository, 7, :importing)
-        end
-
-        it 'does not re-enqueue the worker' do
-          expect(described_class).not_to receive(:perform_async)
-          expect(described_class).not_to receive(:perform_in)
-
-          subject
-        end
-      end
-
-      it_behaves_like 're-enqueuing based on capacity'
-
-      context 'max tag count is 0' do
-        before do
-          stub_application_setting(container_registry_import_max_tags_count: 0)
-          # Add 8 tags to the next repository
-          stub_container_registry_tags(
-            repository: container_repository.path, tags: %w(a b c d e f g h), with_manifest: true
-          )
-        end
-
-        it_behaves_like 'starting the next import'
-      end
-    end
-
-    context 'migrations are disabled' do
-      before do
-        allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
-      end
-
-      it_behaves_like 'no action' do
-        before do
-          expect_log_extra_metadata(migration_enabled: false)
-        end
-      end
-    end
-
-    context 'above capacity' do
-      before do
-        create(:container_repository, :importing)
-        create(:container_repository, :importing)
-        allow(ContainerRegistry::Migration).to receive(:capacity).and_return(1)
-      end
-
-      it_behaves_like 'no action' do
-        before do
-          expect_log_extra_metadata(below_capacity: false, max_capacity_setting: 1)
-        end
-      end
-
-      it 'does not re-enqueue the worker' do
-        expect(ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_async)
-        expect(ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
-
-        subject
-      end
-    end
-
-    context 'too soon before previous completed import step' do
-      where(:state, :timestamp) do
-        :import_done | :migration_import_done_at
-        :pre_import_done | :migration_pre_import_done_at
-        :import_aborted | :migration_aborted_at
-        :import_skipped | :migration_skipped_at
-      end
-
-      with_them do
-        before do
-          allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
-          create(:container_repository, state, timestamp => 1.minute.ago)
-        end
-
-        it_behaves_like 'no action' do
-          before do
-            expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 45.minutes)
-          end
-        end
-      end
-
-      context 'when last completed repository has nil timestamps' do
-        before do
-          allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
-          create(:container_repository, migration_state: 'import_done')
-        end
-
-        it 'continues to try the next import' do
-          expect { subject }.to change { container_repository.reload.migration_state }
-        end
-      end
-    end
-
-    context 'when an aborted import is available' do
-      let_it_be(:aborted_repository) { create(:container_repository, :import_aborted) }
-
-      context 'with a successful registry request' do
-        before do
-          allow_worker(on: :next_aborted_repository) do |repository|
-            allow(repository).to receive(:migration_import).and_return(:ok)
-            allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
-          end
-        end
-
-        it 'retries the import for the aborted repository' do
-          expect_log_extra_metadata(
-            import_type: 'retry',
-            container_repository_id: aborted_repository.id,
-            container_repository_path: aborted_repository.path,
-            container_repository_migration_state: 'importing'
-          )
-
-          subject
-
-          expect(aborted_repository.reload).to be_importing
-          expect(container_repository.reload).to be_default
-        end
-
-        it_behaves_like 're-enqueuing based on capacity'
-      end
-
-      context 'when an error occurs' do
-        it 'does not abort that migration' do
-          allow_worker(on: :next_aborted_repository) do |repository|
-            allow(repository).to receive(:retry_aborted_migration).and_raise(StandardError)
-          end
-
-          expect_log_extra_metadata(
-            import_type: 'retry',
-            container_repository_id: aborted_repository.id,
-            container_repository_path: aborted_repository.path,
-            container_repository_migration_state: 'import_aborted'
-          )
-
-          subject
-
-          expect(aborted_repository.reload).to be_import_aborted
-          expect(container_repository.reload).to be_default
-        end
-      end
-    end
-
-    context 'when no repository qualifies' do
-      include_examples 'an idempotent worker' do
-        before do
-          allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
-        end
-
-        it_behaves_like 'no action'
-      end
-    end
-
-    context 'over max tag count' do
-      before do
-        stub_application_setting(container_registry_import_max_tags_count: 2)
-      end
-
-      it 'skips the repository' do
-        expect_log_extra_metadata(
-          import_type: 'next',
-          container_repository_id: container_repository.id,
-          container_repository_path: container_repository.path,
-          container_repository_migration_state: 'import_skipped',
-          tags_count_too_high: true,
-          max_tags_count_setting: 2
-        )
-
-        subject
-
-        expect(container_repository.reload).to be_import_skipped
-        expect(container_repository.migration_skipped_reason).to eq('too_many_tags')
-        expect(container_repository.migration_skipped_at).not_to be_nil
-      end
-
-      context 're-enqueuing' do
-        before do
-          # skipping will also re-enqueue, so we isolate the capacity behavior here
-          allow_worker(on: :next_repository) do |repository|
-            allow(repository).to receive(:skip_import).and_return(true)
-          end
-        end
-
-        it_behaves_like 're-enqueuing based on capacity', capacity_limit: 3
-      end
-    end
-
-    context 'when an error occurs' do
-      before do
-        allow(ContainerRegistry::Migration).to receive(:max_tags_count).and_raise(StandardError)
-      end
-
-      it 'aborts the import' do
-        expect_log_extra_metadata(
-          import_type: 'next',
-          container_repository_id: container_repository.id,
-          container_repository_path: container_repository.path,
-          container_repository_migration_state: 'import_aborted'
-        )
-
-        expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
-          instance_of(StandardError),
-          next_repository_id: container_repository.id
-        )
-
-        subject
-
-        expect(container_repository.reload).to be_import_aborted
-      end
-    end
-
-    context 'with the exclusive lease taken' do
-      let(:lease_key) { worker.send(:lease_key) }
-
-      before do
-        stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
-      end
-
-      it 'does not perform' do
-        expect(worker).not_to receive(:runnable?)
-        expect(worker).not_to receive(:re_enqueue_if_capacity)
-
-        subject
       end
     end
   end

-    context 'with container_registry_migration_phase2_enqueuer_loop enabled' do
-      context 'migrations are disabled' do
+    context 'with no repository qualifies' do
+      include_examples 'an idempotent worker' do
         before do
-          allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
+          allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
         end

-        it_behaves_like 'no action' do
-          before do
-            expect_log_extra_metadata(migration_enabled: false)
-          end
-        end
-      end
+        it_behaves_like 'no action'
+      end
+    end
+
+    context 'when multiple aborted imports are available' do
+      let_it_be(:aborted_repository1) { create(:container_repository, :import_aborted) }
+      let_it_be(:aborted_repository2) { create(:container_repository, :import_aborted) }

+      before do
+        container_repository.update!(created_at: 30.seconds.ago)
       end

-      context 'with no repository qualifies' do
-        include_examples 'an idempotent worker' do
-          before do
-            allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
-          end
-
-          it_behaves_like 'no action'
-        end
-      end
-
-      context 'when multiple aborted imports are available' do
-        let_it_be(:aborted_repository1) { create(:container_repository, :import_aborted) }
-        let_it_be(:aborted_repository2) { create(:container_repository, :import_aborted) }
-
-        before do
-          container_repository.update!(created_at: 30.seconds.ago)
-        end
-
-        context 'with successful registry requests' do
-          before do
-            allow_worker(on: :next_aborted_repository) do |repository|
-              allow(repository).to receive(:migration_import).and_return(:ok)
-              allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
-            end
-          end
-
-          it 'retries the import for the aborted repository' do
-            expect_log_info(
-              [
-                {
-                  import_type: 'retry',
-                  container_repository_id: aborted_repository1.id,
-                  container_repository_path: aborted_repository1.path,
-                  container_repository_migration_state: 'importing'
-                },
-                {
-                  import_type: 'retry',
-                  container_repository_id: aborted_repository2.id,
-                  container_repository_path: aborted_repository2.path,
-                  container_repository_migration_state: 'importing'
-                }
-              ]
-            )
-
-            expect(worker).to receive(:handle_next_migration).and_call_original
-
-            subject
-
-            expect(aborted_repository1.reload).to be_importing
-            expect(aborted_repository2.reload).to be_importing
-          end
-        end
-
-        context 'when an error occurs' do
-          it 'does abort that migration' do
-            allow_worker(on: :next_aborted_repository) do |repository|
-              allow(repository).to receive(:retry_aborted_migration).and_raise(StandardError)
-            end
-
-            expect_log_info(
-              [
-                {
-                  import_type: 'retry',
-                  container_repository_id: aborted_repository1.id,
-                  container_repository_path: aborted_repository1.path,
-                  container_repository_migration_state: 'import_aborted'
-                }
-              ]
-            )
-
-            subject
-
-            expect(aborted_repository1.reload).to be_import_aborted
-            expect(aborted_repository2.reload).to be_import_aborted
-          end
-        end
-      end
-
-      context 'when multiple qualified repositories are available' do
-        let_it_be(:container_repository2) { create(:container_repository, created_at: 2.days.ago) }
-
-        before do
-          allow_worker(on: :next_repository) do |repository|
-            allow(repository).to receive(:migration_pre_import).and_return(:ok)
-          end
-
-          stub_container_registry_tags(
-            repository: container_repository2.path,
-            tags: %w(tag4 tag5 tag6),
-            with_manifest: true
-          )
-        end
-
-        shared_examples 'starting all the next imports' do
-          it 'starts the pre-import for the next qualified repositories' do
-            expect_log_info(
-              [
-                {
-                  import_type: 'next',
-                  container_repository_id: container_repository.id,
-                  container_repository_path: container_repository.path,
-                  container_repository_migration_state: 'pre_importing'
-                },
-                {
-                  import_type: 'next',
-                  container_repository_id: container_repository2.id,
-                  container_repository_path: container_repository2.path,
-                  container_repository_migration_state: 'pre_importing'
-                }
-              ]
-            )
-
-            expect(worker).to receive(:handle_next_migration).exactly(3).times.and_call_original
-
-            expect { subject }.to make_queries_matching(/LIMIT 2/)
-
-            expect(container_repository.reload).to be_pre_importing
-            expect(container_repository2.reload).to be_pre_importing
-          end
-        end
-
-        it_behaves_like 'starting all the next imports'
-
-        context 'when the new pre-import maxes out the capacity' do
-          before do
-            # set capacity to 10
-            stub_feature_flags(
-              container_registry_migration_phase2_capacity_25: false,
-              container_registry_migration_phase2_capacity_40: false
-            )
-
-            # Plus 2 created above gives 9 importing repositories
-            create_list(:container_repository, 7, :importing)
-          end
-
-          it 'starts the pre-import only for one qualified repository' do
-            expect_log_info(
-              [
-                {
-                  import_type: 'next',
-                  container_repository_id: container_repository.id,
-                  container_repository_path: container_repository.path,
-                  container_repository_migration_state: 'pre_importing'
-                }
-              ]
-            )
-
-            subject
-
-            expect(container_repository.reload).to be_pre_importing
-            expect(container_repository2.reload).to be_default
-          end
-        end
-
-        context 'max tag count is 0' do
-          before do
-            stub_application_setting(container_registry_import_max_tags_count: 0)
-            # Add 8 tags to the next repository
-            stub_container_registry_tags(
-              repository: container_repository.path, tags: %w(a b c d e f g h), with_manifest: true
-            )
-          end
-
-          it_behaves_like 'starting all the next imports'
-        end
-
-        context 'when the deadline is hit' do
-          it 'does not handle the second qualified repository' do
-            expect(worker).to receive(:loop_deadline).and_return(5.seconds.from_now, 2.seconds.ago)
-            expect(worker).to receive(:handle_next_migration).once.and_call_original
-
-            subject
-
-            expect(container_repository.reload).to be_pre_importing
-            expect(container_repository2.reload).to be_default
-          end
-        end
-      end
-
-      context 'when a mix of aborted imports and qualified repositories are available' do
-        let_it_be(:aborted_repository) { create(:container_repository, :import_aborted) }
-
+      context 'with successful registry requests' do
      before do
        allow_worker(on: :next_aborted_repository) do |repository|
          allow(repository).to receive(:migration_import).and_return(:ok)
          allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
        end

        allow_worker(on: :next_repository) do |repository|
          allow(repository).to receive(:migration_pre_import).and_return(:ok)
        end
      end

      it 'retries the aborted repository and start the migration on the qualified repository' do
        expect_log_info(
          [
            {
              import_type: 'retry',
              container_repository_id: aborted_repository.id,
              container_repository_path: aborted_repository.path,
              container_repository_migration_state: 'importing'
            },
            {
              import_type: 'next',
              container_repository_id: container_repository.id,
              container_repository_path: container_repository.path,
              container_repository_migration_state: 'pre_importing'
            }
          ]
        )

        subject

        expect(aborted_repository.reload).to be_importing
        expect(container_repository.reload).to be_pre_importing
      end
    end

    context 'above capacity' do
      before do
        create(:container_repository, :importing)
        create(:container_repository, :importing)
        allow(ContainerRegistry::Migration).to receive(:capacity).and_return(1)
      end

      it_behaves_like 'no action' do
        before do
          expect_log_extra_metadata(below_capacity: false, max_capacity_setting: 1)
        end
      end
    end

    context 'too soon before previous completed import step' do
      where(:state, :timestamp) do
        :import_done | :migration_import_done_at
        :pre_import_done | :migration_pre_import_done_at
        :import_aborted | :migration_aborted_at
        :import_skipped | :migration_skipped_at
      end

      with_them do
        before do
          allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
          create(:container_repository, state, timestamp => 1.minute.ago)
        end

        it_behaves_like 'no action' do
          before do
            expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 45.minutes)
          end
        end
      end

      context 'when last completed repository has nil timestamps' do
        before do
          allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
          create(:container_repository, migration_state: 'import_done')
        end

        it 'continues to try the next import' do
          expect { subject }.to change { container_repository.reload.migration_state }
        end
      end
    end

    context 'over max tag count' do
      before do
        stub_application_setting(container_registry_import_max_tags_count: 2)
      end

      it 'skips the repository' do
        expect_log_info(
          [
            {
              import_type: 'next',
              container_repository_id: container_repository.id,
              container_repository_path: container_repository.path,
              container_repository_migration_state: 'import_skipped',
              container_repository_migration_skipped_reason: 'too_many_tags'
            }
          ]
        )

        expect(worker).to receive(:handle_next_migration).twice.and_call_original
        # skipping the migration will re_enqueue the job
        expect(described_class).to receive(:enqueue_a_job)

        subject

        expect(container_repository.reload).to be_import_skipped
        expect(container_repository.migration_skipped_reason).to eq('too_many_tags')
        expect(container_repository.migration_skipped_at).not_to be_nil
      end
    end

    context 'when an error occurs' do
      before do
        allow(ContainerRegistry::Migration).to receive(:max_tags_count).and_raise(StandardError)
      end

      it 'aborts the import' do
        expect_log_info(
          [
            {
              import_type: 'next',
              container_repository_id: container_repository.id,
              container_repository_path: container_repository.path,
              container_repository_migration_state: 'import_aborted'
            }
          ]
        )

        expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
          instance_of(StandardError),
          next_repository_id: container_repository.id
        )

        # aborting the migration will re_enqueue the job
        expect(described_class).to receive(:enqueue_a_job)

        subject

        expect(container_repository.reload).to be_import_aborted
      end
    end

    context 'with the exclusive lease taken' do
      let(:lease_key) { worker.send(:lease_key) }

      before do
        stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
      end

      it 'does not perform' do
        expect(worker).not_to receive(:handle_aborted_migration)
        expect(worker).not_to receive(:handle_next_migration)

        subject
      end
    end
  end
end