Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-07-05 12:08:33 +00:00
parent cf98b5d69a
commit e8fc7f5650
45 changed files with 1057 additions and 93 deletions

View File

@ -7,6 +7,8 @@ const defaultConfig = {
ADD_TAGS: ['use'],
};
const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method'];
// Only icons urls from `gon` are allowed
const getAllowedIconUrls = (gon = window.gon) =>
[gon.sprite_file_icons, gon.sprite_icons].filter(Boolean);
@ -44,10 +46,19 @@ const sanitizeSvgIcon = (node) => {
removeUnsafeHref(node, 'xlink:href');
};
// Strip Rails UJS data attributes (data-remote, data-url, data-type,
// data-method) that could otherwise trigger remote requests from
// sanitized markup.
const sanitizeHTMLAttributes = (node) => {
  forbiddenDataAttrs
    .filter((attr) => node.hasAttribute(attr))
    .forEach((attr) => node.removeAttribute(attr));
};
addHook('afterSanitizeAttributes', (node) => {
if (node.tagName.toLowerCase() === 'use') {
sanitizeSvgIcon(node);
}
sanitizeHTMLAttributes(node);
});
export const sanitize = (val, config = defaultConfig) => dompurifySanitize(val, config);

View File

@ -112,7 +112,7 @@ export default {
<div v-else class="media-body space-children gl-display-flex gl-align-items-center">
<span v-if="shouldBeRebased" class="bold">
{{
s__(`mrWidget|Fast-forward merge is not possible.
s__(`mrWidget|Merge blocked: fast-forward merge is not possible.
To merge this request, first rebase locally.`)
}}
</span>

View File

@ -87,9 +87,7 @@ export default {
},
fastForwardMergeText() {
return sprintf(
__(
'Fast-forward merge is not possible. Rebase the source branch onto %{targetBranch} to allow this merge request to be merged.',
),
__('Merge blocked: the source branch must be rebased onto the target branch.'),
{
targetBranch: `<span class="label-branch">${escape(this.targetBranch)}</span>`,
},
@ -193,9 +191,7 @@ export default {
data-testid="rebase-message"
data-qa-selector="no_fast_forward_message_content"
>{{
__(
'Fast-forward merge is not possible. Rebase the source branch onto the target branch.',
)
__('Merge blocked: the source branch must be rebased onto the target branch.')
}}</span
>
<span v-else class="gl-font-weight-bold danger" data-testid="rebase-message">{{

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
module BulkImports
  # Finds BulkImports::Entity records visible to a given user, optionally
  # narrowed to a single bulk import and/or a human-readable status name.
  class EntitiesFinder
    # @param user [User] owner whose entities are returned
    # @param bulk_import [BulkImport, nil] restrict to one import when given
    # @param status [String, nil] human status name; unknown values are ignored
    def initialize(user:, bulk_import: nil, status: nil)
      @user = user
      @bulk_import = bulk_import
      @status = status
    end

    # Returns the filtered relation. Failures are preloaded to avoid N+1
    # queries when the API serializes each entity's failure records.
    def execute
      scope = ::BulkImports::Entity
        .preload(:failures) # rubocop: disable CodeReuse/ActiveRecord
        .by_user_id(user.id)
      scope = filter_by_bulk_import(scope)
      filter_by_status(scope)
    end

    private

    attr_reader :user, :bulk_import, :status

    # No-op unless a specific bulk import was supplied.
    def filter_by_bulk_import(entities)
      return entities unless bulk_import

      entities.where(bulk_import_id: bulk_import.id) # rubocop: disable CodeReuse/ActiveRecord
    end

    # Unrecognised status values are silently ignored rather than raising.
    def filter_by_status(entities)
      return entities unless ::BulkImports::Entity.all_human_statuses.include?(status)

      entities.with_status(status)
    end
  end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
module BulkImports
  # Finds a user's BulkImport records, optionally filtered by a
  # human-readable status name.
  class ImportsFinder
    # @param user [User] owner whose imports are returned
    # @param status [String, nil] human status name; unknown values are ignored
    def initialize(user:, status: nil)
      @user = user
      @status = status
    end

    # Returns the user's imports, narrowed by status when one was given.
    def execute
      filter_by_status(user.bulk_imports)
    end

    private

    attr_reader :user, :status

    # Unrecognised status values leave the relation unfiltered.
    def filter_by_status(imports)
      if BulkImport.all_human_statuses.include?(status)
        imports.with_status(status)
      else
        imports
      end
    end
  end
end

View File

@ -32,11 +32,11 @@ module WhatsNewHelper
def whats_new_variants_description(variant)
case variant
when 'all_tiers'
_("What's new presents new features from all tiers to help you keep track of all new features.")
_("Include new features from all tiers.")
when 'current_tier'
_("What's new presents new features for your current subscription tier, while hiding new features not available to your subscription tier.")
_("Only include features new to your current subscription tier.")
when 'disabled'
_("What's new is disabled and can no longer be viewed.")
_("%{italic_start}What's new%{italic_end} is inactive and cannot be viewed.").html_safe % { italic_start: '<i>'.html_safe, italic_end: '</i>'.html_safe }
end
end
end

View File

@ -33,4 +33,8 @@ class BulkImport < ApplicationRecord
transition any => :failed
end
end
def self.all_human_statuses
state_machine.states.map(&:human_name)
end
end

View File

@ -48,6 +48,8 @@ class BulkImports::Entity < ApplicationRecord
enum source_type: { group_entity: 0, project_entity: 1 }
scope :by_user_id, ->(user_id) { joins(:bulk_import).where(bulk_imports: { user_id: user_id }) }
state_machine :status, initial: :created do
state :created, value: 0
state :started, value: 1
@ -68,6 +70,10 @@ class BulkImports::Entity < ApplicationRecord
end
end
def self.all_human_statuses
state_machine.states.map(&:human_name)
end
def encoded_source_full_path
ERB::Util.url_encode(source_full_path)
end

View File

@ -22,7 +22,7 @@ module Projects
def execute
return unless project&.lfs_enabled? && lfs_download_object
return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
return link_existing_lfs_object! if lfs_size > LARGE_FILE_SIZE && lfs_object
return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project, default_enabled: :yaml) && lfs_size > LARGE_FILE_SIZE && lfs_object
wrap_download_errors do
download_lfs_file!

View File

@ -20,7 +20,7 @@
%button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
= _("Configure What's new drawer and content.")
= _("Configure %{italic_start}What's new%{italic_end} drawer and content.").html_safe % { italic_start: '<i>'.html_safe, italic_end: '</i>'.html_safe }
.settings-content
= render 'whats_new'

View File

@ -0,0 +1,7 @@
---
name: lfs_link_existing_object
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41770
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/249246
group: group::source code
type: development
default_enabled: false

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
# Replaces the single-column index on bulk_import_entities(bulk_import_id)
# with a composite (bulk_import_id, status) index; the composite index's
# leading column still serves lookups that filter on bulk_import_id alone.
class AddIndexToBulkImportEntitiesOnBulkImportIdAndStatus < ActiveRecord::Migration[6.1]
  include Gitlab::Database::MigrationHelpers

  # Concurrent index creation cannot run inside a transaction block.
  disable_ddl_transaction!

  NEW_INDEX_NAME = 'index_bulk_import_entities_on_bulk_import_id_and_status'
  OLD_INDEX_NAME = 'index_bulk_import_entities_on_bulk_import_id'

  # Create the new composite index first, then drop the old one, so the
  # table is never left without an index on bulk_import_id.
  def up
    add_concurrent_index :bulk_import_entities, [:bulk_import_id, :status], name: NEW_INDEX_NAME
    remove_concurrent_index_by_name :bulk_import_entities, name: OLD_INDEX_NAME
  end

  # Restores the original single-column index.
  def down
    add_concurrent_index :bulk_import_entities, :bulk_import_id, name: OLD_INDEX_NAME
    remove_concurrent_index_by_name :bulk_import_entities, name: NEW_INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
cba36a2e8bedd70f8ccaca47517314d0a3c75a9b8d90715a29919247aa686835

View File

@ -22822,7 +22822,7 @@ CREATE INDEX index_broadcast_message_on_ends_at_and_broadcast_type_and_id ON bro
CREATE INDEX index_bulk_import_configurations_on_bulk_import_id ON bulk_import_configurations USING btree (bulk_import_id);
CREATE INDEX index_bulk_import_entities_on_bulk_import_id ON bulk_import_entities USING btree (bulk_import_id);
CREATE INDEX index_bulk_import_entities_on_bulk_import_id_and_status ON bulk_import_entities USING btree (bulk_import_id, status);
CREATE INDEX index_bulk_import_entities_on_namespace_id ON bulk_import_entities USING btree (namespace_id);

View File

@ -588,6 +588,75 @@ to start again from scratch, there are a few steps that can help you:
gitlab-ctl start
```
### Design repository failures on mirrored projects and project imports
On the top bar, under **Menu >** **{admin}** **Admin > Geo > Nodes**,
if the Design repositories progress bar shows
`Synced` and `Failed` greater than 100%, and negative `Queued`, then the instance
is likely affected by
[a bug in GitLab 13.2 and 13.3](https://gitlab.com/gitlab-org/gitlab/-/issues/241668).
It was [fixed in 13.4+](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40643).
To determine the actual replication status of design repositories in
a [Rails console](../../operations/rails_console.md):
```ruby
secondary = Gitlab::Geo.current_node
counts = {}
secondary.designs.select("projects.id").find_each do |p|
registry = Geo::DesignRegistry.find_by(project_id: p.id)
state = registry ? "#{registry.state}" : "registry does not exist yet"
# puts "Design ID##{p.id}: #{state}" # uncomment this for granular information
counts[state] ||= 0
counts[state] += 1
end
puts "\nCounts:", counts
```
Example output:
```plaintext
Design ID#5: started
Design ID#6: synced
Design ID#7: failed
Design ID#8: pending
Design ID#9: synced
Counts:
{"started"=>1, "synced"=>2, "failed"=>1, "pending"=>1}
```
Example output if there are actually zero design repository replication failures:
```plaintext
Design ID#5: synced
Design ID#6: synced
Design ID#7: synced
Counts:
{"synced"=>3}
```
#### If you are promoting a Geo secondary site running on a single server
`gitlab-ctl promotion-preflight-checks` will fail due to the existence of
`failed` rows in the `geo_design_registry` table. Use the
[previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports) to
determine the actual replication status of Design repositories.
`gitlab-ctl promote-to-primary-node` will fail since it runs preflight checks.
If the [previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports)
shows that all designs are synced, then you can use the
`--skip-preflight-checks` option or the `--force` option to move forward with
promotion.
#### If you are promoting a Geo secondary site running on multiple servers
`gitlab-ctl promotion-preflight-checks` will fail due to the existence of
`failed` rows in the `geo_design_registry` table. Use the
[previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports) to
determine the actual replication status of Design repositories.
## Fixing errors during a failover or when promoting a secondary to a primary node
The following are possible errors that might be encountered during failover or
@ -726,6 +795,7 @@ sudo gitlab-ctl promotion-preflight-checks
sudo /opt/gitlab/embedded/bin/gitlab-pg-ctl promote
sudo gitlab-ctl reconfigure
sudo gitlab-rake geo:set_secondary_as_primary
```
## Expired artifacts

View File

@ -922,7 +922,7 @@ In installations from source:
In GitLab 14.0 the underlying storage format of GitLab Pages is changing from
files stored directly in disk to a single ZIP archive per project.
These ZIP archives can be stored either locally on disk storage or on the [object storage](#using-object-storage) if it is configured.
These ZIP archives can be stored either locally on disk storage or on [object storage](#using-object-storage) if it is configured.
[Starting from GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/245308) ZIP archives are stored every time pages site is updated.
@ -984,9 +984,8 @@ to using that.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/325285) in GitLab 13.11.
Existing Pages deployments objects (which store [ZIP archives](#zip-storage)) can similarly be
migrated to [object storage](#using-object-storage), if
you've been having them stored locally.
Existing Pages deployment objects (which store [ZIP archives](#zip-storage)) can similarly be
migrated to [object storage](#using-object-storage).
Migrate your existing Pages deployments from local storage to object storage:
@ -996,7 +995,7 @@ sudo gitlab-rake gitlab:pages:deployments:migrate_to_object_storage
### Rolling Pages deployments back to local storage
After the migration to object storage is performed, you can choose to revert your Pages deployments back to local storage:
After the migration to object storage is performed, you can choose to move your Pages deployments back to local storage:
```shell
sudo gitlab-rake gitlab:pages:deployments:migrate_to_local
@ -1006,7 +1005,7 @@ sudo gitlab-rake gitlab:pages:deployments:migrate_to_local
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/301159) in GitLab 13.11.
If you use [object storage](#using-object-storage), disable local storage:
If you use [object storage](#using-object-storage), you can disable local storage:
1. Edit `/etc/gitlab/gitlab.rb`:
@ -1020,22 +1019,22 @@ Starting from GitLab 13.12, this setting also disables the [legacy storage](#mig
## Migrate GitLab Pages to 14.0
In GitLab 14.0 a number of breaking changes are introduced which may require some user intervention.
In GitLab 14.0 a number of breaking changes were introduced which may require some user intervention.
The steps below describe the best way to migrate without causing any downtime for your GitLab instance.
If you run GitLab on a single server, then most likely you will not notice any problem after
upgrading to GitLab 14.0, but it may be safer to follow the steps anyway.
If you run GitLab on a single server, then most likely the upgrade process to 14.0 will go smoothly for you. Regardless, we recommend everyone follow the migration steps to ensure a successful upgrade.
If you run GitLab on a single server, then most likely the upgrade process to 14.0 will go smoothly for you
and you will not notice any problem after upgrading.
Regardless, we recommend everyone follow the migration steps to ensure a successful upgrade.
If at any point you run into issues, consult the [troubleshooting section](#troubleshooting).
To migrate GitLab Pages to GitLab 14.0:
If your current GitLab version is lower than 13.12, then you first need to update to 13.12.
Updating directly to 14.0 is [not supported](../../update/index.md#upgrade-paths)
and may cause downtime for some web-sites hosted on GitLab Pages. Once you update to 13.12,
migrate GitLab Pages to prepare them for GitLab 14.0:
1. If your current GitLab version is lower than 13.12, then you first need to upgrade to 13.12.
Upgrading directly to 14.0 may cause downtime for some web-sites hosted on GitLab Pages
until you finish the following steps.
1. Set [`domain_config_source` to `gitlab`](#domain-source-configuration-before-140), which
is the default starting from GitLab 14.0. Skip this step if you're already running GitLab 14.0 or above.
1. If you want to store your pages content in the [object storage](#using-object-storage), make sure to configure it.
1. If you want to store your pages content in [object storage](#using-object-storage), make sure to configure it.
If you want to store the pages content locally or continue using an NFS server, skip this step.
1. [Migrate legacy storage to ZIP storage.](#migrate-legacy-storage-to-zip-storage)
1. Upgrade GitLab to 14.0.

193
doc/api/bulk_imports.md Normal file
View File

@ -0,0 +1,193 @@
---
stage: Manage
group: Import
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# GitLab Migrations (Bulk Imports) API
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64335) in GitLab 14.1.
With the GitLab Migrations API, you can view the progress of migrations initiated with
[GitLab Group Migration](../user/group/import/index.md).
## List all GitLab migrations
```plaintext
GET /bulk_imports
```
| Attribute | Type | Required | Description |
|:-----------|:--------|:---------|:---------------------------------------|
| `per_page` | integer | no | Number of records to return per page. |
| `page` | integer | no | Page to retrieve. |
| `status` | string | no | Import status. |
The status can be one of the following:
- `created`
- `started`
- `finished`
- `failed`
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports?per_page=2&page=1"
```
```json
[
{
"id": 1,
"status": "finished",
"source_type": "gitlab",
"created_at": "2021-06-18T09:45:55.358Z",
"updated_at": "2021-06-18T09:46:27.003Z"
},
{
"id": 2,
"status": "started",
"source_type": "gitlab",
"created_at": "2021-06-18T09:47:36.581Z",
"updated_at": "2021-06-18T09:47:58.286Z"
}
]
```
## List all GitLab migrations' entities
```plaintext
GET /bulk_imports/entities
```
| Attribute | Type | Required | Description |
|:-----------|:--------|:---------|:---------------------------------------|
| `per_page` | integer | no | Number of records to return per page. |
| `page` | integer | no | Page to retrieve. |
| `status` | string | no | Import status. |
The status can be one of the following:
- `created`
- `started`
- `finished`
- `failed`
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/entities?per_page=2&page=1&status=started"
```
```json
[
{
"id": 1,
"bulk_import_id": 1,
"status": "finished",
"source_full_path": "source_group",
"destination_name": "destination_name",
"destination_namespace": "destination_path",
"parent_id": null,
"namespace_id": 1,
"project_id": null,
"created_at": "2021-06-18T09:47:37.390Z",
"updated_at": "2021-06-18T09:47:51.867Z",
"failures": []
},
{
"id": 2,
"bulk_import_id": 2,
"status": "failed",
"source_full_path": "another_group",
"destination_name": "another_name",
"destination_namespace": "another_namespace",
"parent_id": null,
"namespace_id": null,
"project_id": null,
"created_at": "2021-06-24T10:40:20.110Z",
"updated_at": "2021-06-24T10:40:46.590Z",
"failures": [
{
"pipeline_class": "BulkImports::Groups::Pipelines::GroupPipeline",
"pipeline_step": "extractor",
"exception_class": "Exception",
"correlation_id_value": "dfcf583058ed4508e4c7c617bd7f0edd",
"created_at": "2021-06-24T10:40:46.495Z"
}
]
}
]
```
## Get GitLab migration details
```plaintext
GET /bulk_imports/:id
```
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1"
```
```json
{
"id": 1,
"status": "finished",
"source_type": "gitlab",
"created_at": "2021-06-18T09:45:55.358Z",
"updated_at": "2021-06-18T09:46:27.003Z"
}
```
## List GitLab migration entities
```plaintext
GET /bulk_imports/:id/entities
```
| Attribute | Type | Required | Description |
|:-----------|:--------|:---------|:---------------------------------------|
| `per_page` | integer | no | Number of records to return per page. |
| `page` | integer | no | Page to retrieve. |
| `status` | string | no | Import status. |
The status can be one of the following:
- `created`
- `started`
- `finished`
- `failed`
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1/entities?per_page=2&page=1&status=finished"
```
```json
[
{
"id": 1,
"status": "finished",
"source_type": "gitlab",
"created_at": "2021-06-18T09:45:55.358Z",
"updated_at": "2021-06-18T09:46:27.003Z"
}
]
```
## Get GitLab migration entity details
```plaintext
GET /bulk_imports/:id/entities/:entity_id
```
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports/1/entities/2"
```
```json
{
"id": 1,
"status": "finished",
"source_type": "gitlab",
"created_at": "2021-06-18T09:45:55.358Z",
"updated_at": "2021-06-18T09:46:27.003Z"
}
```

View File

@ -183,11 +183,58 @@ tune the Puma settings:
The recommended number of workers is calculated as the highest of the following:
- `2`
- Number of CPU cores - 1
- A combination of CPU and memory resource availability (see how this is configured automatically for the [Linux package](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/ef9facdc927e7389db6a5e0655414ba8318c7b8a/files/gitlab-cookbooks/gitlab/libraries/puma.rb#L31-46)).
For example a node with 4 cores should be configured with 3 Puma workers.
Take for example the following scenarios:
You can increase the number of Puma workers, providing enough CPU and memory capacity is available.
- A node with 2 cores / 8 GB memory should be configured with **2 Puma workers**.
Calculated as:
```plaintext
The highest number from
2
And
[
the lowest number from
- number of cores: 2
- memory limit: (8 - 1.5) = 6
]
```
So, the highest from 2 and 2 is 2.
- A node with 4 cores / 4 GB memory should be configured with **2 Puma workers**.
```plaintext
The highest number from
2
And
[
the lowest number from
- number of cores: 4
- memory limit: (4 - 1.5) = 2.5
]
```
So, the highest from 2 and 2 is 2.
- A node with 4 cores / 8 GB memory should be configured with **4 Puma workers**.
```plaintext
The highest number from
2
And
[
the lowest number from
- number of cores: 4
- memory limit: (8 - 1.5) = 6.5
]
```
So, the highest from 2 and 4 is 4.
You can increase the number of Puma workers, provided enough CPU and memory capacity is available.
A higher number of Puma workers usually helps to reduce the response time of the application
and increase the ability to handle parallel requests. You must perform testing to verify the
optimal settings for your infrastructure.

View File

@ -1290,6 +1290,9 @@ You may need to reconfigure or restart GitLab for the changes to take effect.
UPDATE namespaces SET runners_token = null, runners_token_encrypted = null;
-- Clear instance tokens
UPDATE application_settings SET runners_registration_token_encrypted = null;
-- Clear key used for JWT authentication
-- This may break the $CI_JWT_TOKEN job variable:
-- https://gitlab.com/gitlab-org/gitlab/-/issues/325965
UPDATE application_settings SET encrypted_ci_jwt_signing_key = null;
-- Clear runner tokens
UPDATE ci_runners SET token = null, token_encrypted = null;

View File

@ -152,6 +152,7 @@ module API
mount ::API::Boards
mount ::API::Branches
mount ::API::BroadcastMessages
mount ::API::BulkImports
mount ::API::Ci::Pipelines
mount ::API::Ci::PipelineSchedules
mount ::API::Ci::Runner

91
lib/api/bulk_imports.rb Normal file
View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
module API
  # REST API for inspecting GitLab Migrations (Bulk Imports) started by the
  # current user. Introduced in GitLab 14.1. All endpoints require
  # authentication and only expose the requesting user's own imports.
  class BulkImports < ::API::Base
    include PaginationParams

    feature_category :importers

    helpers do
      # Memoized: the current user's imports, optionally filtered by
      # params[:status].
      def bulk_imports
        @bulk_imports ||= ::BulkImports::ImportsFinder.new(user: current_user, status: params[:status]).execute
      end

      # Memoized: a single import, looked up within the user's own imports so
      # that other users' imports return 404 instead of leaking.
      def bulk_import
        @bulk_import ||= bulk_imports.find(params[:import_id])
      end

      # Memoized: entities belonging to the requested import, optionally
      # filtered by params[:status].
      def bulk_import_entities
        @bulk_import_entities ||= ::BulkImports::EntitiesFinder.new(user: current_user, bulk_import: bulk_import, status: params[:status]).execute
      end

      # Memoized: a single entity scoped to the requested import.
      def bulk_import_entity
        @bulk_import_entity ||= bulk_import_entities.find(params[:entity_id])
      end
    end

    before { authenticate! }

    resource :bulk_imports do
      # GET /bulk_imports
      desc 'List all GitLab Migrations' do
        detail 'This feature was introduced in GitLab 14.1.'
      end
      params do
        use :pagination
        optional :status, type: String, values: BulkImport.all_human_statuses,
          desc: 'Return GitLab Migrations with specified status'
      end
      get do
        present paginate(bulk_imports), with: Entities::BulkImport
      end

      # GET /bulk_imports/entities — entities across ALL of the user's imports.
      desc "List all GitLab Migrations' entities" do
        detail 'This feature was introduced in GitLab 14.1.'
      end
      params do
        use :pagination
        optional :status, type: String, values: ::BulkImports::Entity.all_human_statuses,
          desc: "Return all GitLab Migrations' entities with specified status"
      end
      get :entities do
        entities = ::BulkImports::EntitiesFinder.new(user: current_user, status: params[:status]).execute

        present paginate(entities), with: Entities::BulkImports::Entity
      end

      # GET /bulk_imports/:import_id
      desc 'Get GitLab Migration details' do
        detail 'This feature was introduced in GitLab 14.1.'
      end
      params do
        requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration"
      end
      get ':import_id' do
        present bulk_import, with: Entities::BulkImport
      end

      # GET /bulk_imports/:import_id/entities
      desc "List GitLab Migration entities" do
        detail 'This feature was introduced in GitLab 14.1.'
      end
      params do
        requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration"
        optional :status, type: String, values: ::BulkImports::Entity.all_human_statuses,
          desc: 'Return import entities with specified status'
        use :pagination
      end
      get ':import_id/entities' do
        present paginate(bulk_import_entities), with: Entities::BulkImports::Entity
      end

      # GET /bulk_imports/:import_id/entities/:entity_id
      desc 'Get GitLab Migration entity details' do
        detail 'This feature was introduced in GitLab 14.1.'
      end
      params do
        requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration"
        requires :entity_id, type: Integer, desc: "The ID of GitLab Migration entity"
      end
      get ':import_id/entities/:entity_id' do
        present bulk_import_entity, with: Entities::BulkImports::Entity
      end
    end
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module API
  module Entities
    # API representation of a top-level GitLab Migration (BulkImport).
    # Exposure order is preserved so the serialized JSON key order is
    # unchanged: id, status, source_type, created_at, updated_at.
    class BulkImport < Grape::Entity
      expose :id
      # Serialize the human-readable state name under the `status` key.
      expose :status_name, as: :status
      expose :source_type, :created_at, :updated_at
    end
  end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module API
  module Entities
    module BulkImports
      # API representation of a single migration entity (one group/project
      # being imported). Exposure order is preserved so the serialized JSON
      # key order is unchanged.
      class Entity < Grape::Entity
        expose :id, :bulk_import_id
        # Serialize the human-readable state name under the `status` key.
        expose :status_name, as: :status
        expose :source_full_path, :destination_name, :destination_namespace
        expose :parent_id, :namespace_id, :project_id
        expose :created_at, :updated_at
        # Nested failure records, rendered with their own entity.
        expose :failures, using: EntityFailure
      end
    end
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module API
  module Entities
    module BulkImports
      # API representation of a pipeline failure recorded against a
      # migration entity.
      class EntityFailure < Grape::Entity
        expose :pipeline_class, :pipeline_step
        expose :exception_class, :correlation_id_value
        expose :created_at
      end
    end
  end
end

View File

@ -612,6 +612,9 @@ msgstr ""
msgid "%{issuesSize} with a limit of %{maxIssueCount}"
msgstr ""
msgid "%{italic_start}What's new%{italic_end} is inactive and cannot be viewed."
msgstr ""
msgid "%{itemsCount} issues with a limit of %{maxIssueCount}"
msgstr ""
@ -8199,6 +8202,9 @@ msgstr ""
msgid "Configure"
msgstr ""
msgid "Configure %{italic_start}What's new%{italic_end} drawer and content."
msgstr ""
msgid "Configure %{link} to track events. %{link_start}Learn more.%{link_end}"
msgstr ""
@ -8232,9 +8238,6 @@ msgstr ""
msgid "Configure Tracing"
msgstr ""
msgid "Configure What's new drawer and content."
msgstr ""
msgid "Configure a %{codeStart}.gitlab-webide.yml%{codeEnd} file in the %{codeStart}.gitlab%{codeEnd} directory to start using the Web Terminal. %{helpStart}Learn more.%{helpEnd}"
msgstr ""
@ -13602,12 +13605,6 @@ msgstr ""
msgid "Fast timeout"
msgstr ""
msgid "Fast-forward merge is not possible. Rebase the source branch onto %{targetBranch} to allow this merge request to be merged."
msgstr ""
msgid "Fast-forward merge is not possible. Rebase the source branch onto the target branch."
msgstr ""
msgid "Fast-forward merge without a merge commit"
msgstr ""
@ -17279,6 +17276,9 @@ msgstr ""
msgid "Include merge request description"
msgstr ""
msgid "Include new features from all tiers."
msgstr ""
msgid "Include the username in the URL if required: %{code_open}https://username@gitlab.company.com/group/project.git%{code_close}."
msgstr ""
@ -20426,6 +20426,9 @@ msgstr ""
msgid "Merge automatically (%{strategy})"
msgstr ""
msgid "Merge blocked: the source branch must be rebased onto the target branch."
msgstr ""
msgid "Merge commit SHA"
msgstr ""
@ -22934,6 +22937,9 @@ msgstr ""
msgid "Only admins can delete project"
msgstr ""
msgid "Only include features new to your current subscription tier."
msgstr ""
msgid "Only policy:"
msgstr ""
@ -36718,15 +36724,6 @@ msgstr ""
msgid "What's new"
msgstr ""
msgid "What's new is disabled and can no longer be viewed."
msgstr ""
msgid "What's new presents new features for your current subscription tier, while hiding new features not available to your subscription tier."
msgstr ""
msgid "What's new presents new features from all tiers to help you keep track of all new features."
msgstr ""
msgid "Whats your experience level?"
msgstr ""
@ -38979,9 +38976,6 @@ msgstr ""
msgid "mrWidget|Failed to load deployment statistics"
msgstr ""
msgid "mrWidget|Fast-forward merge is not possible. To merge this request, first rebase locally."
msgstr ""
msgid "mrWidget|If the %{missingBranchName} branch exists in your local repository, you can merge this merge request manually using the command line"
msgstr ""
@ -39009,6 +39003,9 @@ msgstr ""
msgid "mrWidget|Merge blocked: all threads must be resolved."
msgstr ""
msgid "mrWidget|Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally."
msgstr ""
msgid "mrWidget|Merge blocked: pipeline must succeed. It's waiting for a manual action to continue."
msgstr ""

View File

@ -592,6 +592,7 @@ module QA
autoload :Minikube, 'qa/service/cluster_provider/minikube'
autoload :K3d, 'qa/service/cluster_provider/k3d'
autoload :K3s, 'qa/service/cluster_provider/k3s'
autoload :K3sCilium, 'qa/service/cluster_provider/k3s_cilium'
end
module DockerRun

View File

@ -9,10 +9,6 @@ module QA
element :fork_namespace_button
end
view 'app/assets/javascripts/pages/projects/forks/new/components/fork_groups_list.vue' do
element :fork_groups_list_search_field
end
view 'app/assets/javascripts/pages/projects/forks/new/components/fork_form.vue' do
element :fork_namespace_dropdown
element :fork_project_button
@ -27,8 +23,8 @@ module QA
end
end
def search_for_group(group_name)
find_element(:fork_groups_list_search_field).set(group_name)
def fork_namespace_dropdown_values
find_element(:fork_namespace_dropdown).all(:option).map { |option| option.text.tr("\n", '').strip }
end
end
end

View File

@ -13,8 +13,8 @@ module QA
Resource::Project.fabricate!
end
attribute :ingress_ip do
Page::Project::Infrastructure::Kubernetes::Show.perform(&:ingress_ip)
def ingress_ip
@ingress_ip ||= @cluster.fetch_external_ip_for_ingress
end
def fabricate!
@ -42,19 +42,6 @@ module QA
# We must wait a few seconds for permissions to be set up correctly for new cluster
sleep 25
# TODO: These steps do not work anymore, see https://gitlab.com/gitlab-org/gitlab/-/issues/333818
# Open applications tab
show.open_applications
show.install!(:ingress) if @install_ingress
show.install!(:prometheus) if @install_prometheus
show.install!(:runner) if @install_runner
show.await_installed(:ingress) if @install_ingress
show.await_installed(:prometheus) if @install_prometheus
show.await_installed(:runner) if @install_runner
if @install_ingress
populate(:ingress_ip)

View File

@ -0,0 +1,93 @@
# frozen_string_literal: true
module QA
  module Service
    module ClusterProvider
      # K3s-based test cluster provider that installs the Cilium CNI in place
      # of the default flannel plugin, plus local-path storage and an NGINX
      # ingress controller.
      class K3sCilium < K3s
        # Boots a CNI-enabled K3s container, points kubectl at it, then
        # installs storage, Cilium, CoreDNS (waits for it), and ingress.
        def setup
          @k3s = Service::DockerRun::K3s.new.tap do |k3s|
            k3s.remove!
            k3s.cni_enabled = true
            k3s.register!

            shell "kubectl config set-cluster k3s --server https://#{k3s.host_name}:6443 --insecure-skip-tls-verify"
            shell 'kubectl config set-credentials default --username=node --password=some-secret'
            shell 'kubectl config set-context k3s --cluster=k3s --user=default'
            shell 'kubectl config use-context k3s'

            wait_for_server(k3s.host_name) do
              shell 'kubectl version'
              # install local storage
              shell 'kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/master/deploy/local-path-storage.yaml'

              # patch local storage
              shell %(kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}')
              shell 'kubectl create -f https://raw.githubusercontent.com/cilium/cilium/v1.8/install/kubernetes/quick-install.yaml'

              wait_for_namespaces do
                wait_for_cilium
                wait_for_coredns do
                  shell 'kubectl create -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/controller-0.31.0/deploy/static/provider/cloud/deploy.yaml'
                  wait_for_ingress
                end
              end
            end
          end
        end

        private

        # Shared polling loop for the three pod checks below: runs `command`
        # once per second for up to 60 seconds; when it reports the pod ready,
        # yields the given block (if any) and returns, otherwise returns true.
        # Raises `failure_message` after the attempts are exhausted.
        def await_pod(description, command, failure_message, &block)
          QA::Runtime::Logger.info "Waiting for #{description} to be initialized"

          60.times do
            if service_available?(command)
              return yield if block
              return true
            end

            sleep 1
            QA::Runtime::Logger.info '.'
          end

          raise failure_message
        end

        # Waits for the Cilium agent pod to report 1/1 containers ready.
        def wait_for_cilium(&block)
          await_pod(
            'Cilium pod',
            'kubectl get pods --all-namespaces -l k8s-app=cilium --no-headers=true | grep -o "cilium-.*1/1"',
            'Cilium pod has not initialized correctly',
            &block
          )
        end

        # Waits for the CoreDNS pod to report 1/1 containers ready.
        def wait_for_coredns(&block)
          await_pod(
            'CoreDNS pod',
            'kubectl get pods --all-namespaces --no-headers=true | grep -o "coredns.*1/1"',
            'CoreDNS pod has not been initialized correctly',
            &block
          )
        end

        # Waits for the NGINX ingress controller pod to report 1/1 ready.
        def wait_for_ingress(&block)
          await_pod(
            'Ingress controller pod',
            'kubectl get pods --all-namespaces -l app.kubernetes.io/component=controller | grep -o "ingress-nginx-controller.*1/1"',
            'Ingress pod has not been initialized correctly',
            &block
          )
        end
      end
    end
  end
end

View File

@ -4,15 +4,20 @@ module QA
module Service
module DockerRun
class K3s < Base
attr_accessor :cni_enabled
def initialize
  # Pinned k3s helper image; bump the release tag to change the k3s
  # version under test. (A stale duplicate assignment to the v0.6.1 tag
  # was removed — it was a dead store immediately overwritten.)
  @image = 'registry.gitlab.com/gitlab-org/cluster-integration/test-utils/k3s-gitlab-ci/releases/v0.9.1'
  @name = 'k3s'
  # CNI support (e.g. Cilium) is opt-in; toggled via the cni_enabled accessor.
  @cni_enabled = false
  super
end
# Pulls the k3s image and boots the container; when a container network
# interface is enabled, additionally mounts the berkeley packet filter.
def register!
  pull
  start_k3s

  if @cni_enabled
    mount_bpf
  end
end
def host_name
@ -36,12 +41,20 @@ module QA
#{@image} server
--cluster-secret some-secret
--no-deploy traefik
#{@cni_enabled ? '--no-flannel' : ''}
CMD
command.gsub!("--network #{network} --hostname #{host_name}", '') unless QA::Runtime::Env.running_in_ci?
shell command
end
private
# Mounts the BPF filesystem inside the privileged k3s container and marks
# the mount shared (`--make-shared`) so it propagates to child mount
# namespaces. The stale duplicate command without --make-shared (diff
# residue) has been removed.
def mount_bpf
  shell "docker exec --privileged k3s mount --make-shared bpffs -t bpf /sys/fs/bpf"
end
end
end
end

View File

@ -51,6 +51,30 @@ module QA
shell('kubectl apply -f -', stdin_data: manifest)
end
# Applies a sample CiliumNetworkPolicy (allowing ingress from `frontend`
# to `backend` endpoints) into the project's production namespace.
def add_sample_policy(project, policy_name: 'sample-policy')
  target_namespace = "#{project.name}-#{project.id}-production"

  policy_yaml = <<~YAML
    apiVersion: "cilium.io/v2"
    kind: CiliumNetworkPolicy
    metadata:
      name: #{policy_name}
      namespace: #{target_namespace}
    spec:
      endpointSelector:
        matchLabels:
          role: backend
      ingress:
      - fromEndpoints:
        - matchLabels:
            role: frontend
  YAML

  shell('kubectl apply -f -', stdin_data: policy_yaml)
end
# Shells out to kubectl and returns the raw external IP column for the
# ingress-nginx service, filtering out placeholder '<none>' rows via grep.
def fetch_external_ip_for_ingress
  `kubectl get svc --all-namespaces --no-headers=true -l app.kubernetes.io/name=ingress-nginx -o custom-columns=:'status.loadBalancer.ingress[0].ip' | grep -v 'none'`
end
private
def fetch_api_url

View File

@ -373,7 +373,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
wait_for_requests
page.within('.mr-widget-body') do
  # Matches the mrWidget rebase-required copy (diff residue left a stale
  # expectation plus a duplicated "Merge Merge" prefix; consolidated here).
  expect(page).to have_content('Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.')
end
end
end

View File

@ -0,0 +1,84 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::EntitiesFinder do
  let_it_be(:user) { create(:user) }

  # First import owned by `user`, with one entity in each status.
  let_it_be(:user_import_1) { create(:bulk_import, user: user) }
  let_it_be(:started_entity_1) { create(:bulk_import_entity, :started, bulk_import: user_import_1) }
  let_it_be(:finished_entity_1) { create(:bulk_import_entity, :finished, bulk_import: user_import_1) }
  let_it_be(:failed_entity_1) { create(:bulk_import_entity, :failed, bulk_import: user_import_1) }

  # Second import owned by the same user, again with one entity per status.
  let_it_be(:user_import_2) { create(:bulk_import, user: user) }
  let_it_be(:started_entity_2) { create(:bulk_import_entity, :started, bulk_import: user_import_2) }
  let_it_be(:finished_entity_2) { create(:bulk_import_entity, :finished, bulk_import: user_import_2) }
  let_it_be(:failed_entity_2) { create(:bulk_import_entity, :failed, bulk_import: user_import_2) }

  # Import owned by a different user; its entities must never be returned.
  let_it_be(:not_user_import) { create(:bulk_import) }
  let_it_be(:started_entity_3) { create(:bulk_import_entity, :started, bulk_import: not_user_import) }
  let_it_be(:finished_entity_3) { create(:bulk_import_entity, :finished, bulk_import: not_user_import) }
  let_it_be(:failed_entity_3) { create(:bulk_import_entity, :failed, bulk_import: not_user_import) }

  # Default finder: scoped to `user` only; contexts below override the
  # subject to add bulk_import and/or status filters.
  subject { described_class.new(user: user) }

  describe '#execute' do
    it 'returns a list of import entities associated with user' do
      expect(subject.execute)
        .to contain_exactly(
          started_entity_1, finished_entity_1, failed_entity_1,
          started_entity_2, finished_entity_2, failed_entity_2
        )
    end

    context 'when bulk import is specified' do
      subject { described_class.new(user: user, bulk_import: user_import_1) }

      it 'returns a list of import entities filtered by bulk import' do
        expect(subject.execute)
          .to contain_exactly(
            started_entity_1, finished_entity_1, failed_entity_1
          )
      end

      context 'when specified import is not associated with user' do
        subject { described_class.new(user: user, bulk_import: not_user_import) }

        # The user scope takes precedence over the bulk_import filter.
        it 'does not return entities' do
          expect(subject.execute).to be_empty
        end
      end
    end

    context 'when status is specified' do
      subject { described_class.new(user: user, status: 'failed') }

      it 'returns a list of import entities filtered by status' do
        expect(subject.execute)
          .to contain_exactly(
            failed_entity_1, failed_entity_2
          )
      end

      context 'when invalid status is specified' do
        subject { described_class.new(user: user, status: 'invalid') }

        # An unrecognized status is ignored rather than raising or
        # returning an empty result.
        it 'does not filter entities by status' do
          expect(subject.execute)
            .to contain_exactly(
              started_entity_1, finished_entity_1, failed_entity_1,
              started_entity_2, finished_entity_2, failed_entity_2
            )
        end
      end
    end

    context 'when bulk import and status are specified' do
      subject { described_class.new(user: user, bulk_import: user_import_2, status: 'finished') }

      it 'returns matched import entities' do
        expect(subject.execute).to contain_exactly(finished_entity_2)
      end
    end
  end
end

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::ImportsFinder do
  let_it_be(:user) { create(:user) }
  let_it_be(:started_import) { create(:bulk_import, :started, user: user) }
  let_it_be(:finished_import) { create(:bulk_import, :finished, user: user) }
  let_it_be(:not_user_import) { create(:bulk_import) }

  # Finder scoped to `user`; nested contexts override it to add a status filter.
  subject(:finder) { described_class.new(user: user) }

  describe '#execute' do
    it 'returns a list of imports associated with user' do
      expect(finder.execute).to contain_exactly(started_import, finished_import)
    end

    context 'when status is specified' do
      subject(:finder) { described_class.new(user: user, status: 'started') }

      it 'returns a list of import entities filtered by status' do
        expect(finder.execute).to contain_exactly(started_import)
      end

      context 'when invalid status is specified' do
        subject(:finder) { described_class.new(user: user, status: 'invalid') }

        it 'does not filter entities by status' do
          expect(finder.execute).to contain_exactly(started_import, finished_import)
        end
      end
    end
  end
end

View File

@ -30,6 +30,9 @@ const unsafeUrls = [
`https://evil.url/${absoluteGon.sprite_file_icons}`,
];
// Data attributes the sanitizer is expected to strip from its output.
const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method'];
// Arbitrary data attributes that should pass through sanitization untouched.
const acceptedDataAttrs = ['data-random', 'data-custom'];
describe('~/lib/dompurify', () => {
let originalGon;
@ -95,4 +98,17 @@ describe('~/lib/dompurify', () => {
expect(sanitize(htmlXlink)).toBe(expectedSanitized);
});
});
describe('handles data attributes correctly', () => {
  it.each(forbiddenDataAttrs)('removes %s attributes', (attr) => {
    const dirtyHtml = `<a ${attr}="true">hello</a>`;

    expect(sanitize(dirtyHtml)).toBe('<a>hello</a>');
  });

  it.each(acceptedDataAttrs)('does not remove %s attributes', (attr) => {
    const attribute = `${attr}="true"`;
    const input = `<a ${attribute}>hello</a>`;

    expect(sanitize(input)).toBe(`<a ${attribute}>hello</a>`);
  });
});
});

View File

@ -70,9 +70,9 @@ describe('Merge request widget rebase component', () => {
const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text).toContain('Merge blocked');
expect(text.replace(/\s\s+/g, ' ')).toContain(
'Rebase the source branch onto the target branch.',
'the source branch must be rebased onto the target branch',
);
});
@ -111,12 +111,10 @@ describe('Merge request widget rebase component', () => {
const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text).toContain('Rebase the source branch onto');
expect(text).toContain('foo');
expect(text.replace(/\s\s+/g, ' ')).toContain(
'to allow this merge request to be merged.',
expect(text).toContain(
'Merge blocked: the source branch must be rebased onto the target branch.',
);
expect(text).toContain('the source branch must be rebased');
});
it('should render the correct target branch name', () => {
@ -136,7 +134,7 @@ describe('Merge request widget rebase component', () => {
const elem = findRebaseMessageEl();
expect(elem.text()).toContain(
`Fast-forward merge is not possible. Rebase the source branch onto ${targetBranch} to allow this merge request to be merged.`,
`Merge blocked: the source branch must be rebased onto the target branch.`,
);
});
});

View File

@ -199,7 +199,7 @@ describe('MRWidgetConflicts', () => {
});
expect(removeBreakLine(wrapper.text()).trim()).toContain(
'Fast-forward merge is not possible. To merge this request, first rebase locally.',
'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.',
);
});
});

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Entities::BulkImport do
  let_it_be(:import) { create(:bulk_import) }

  # Serialized hash representation produced by the entity.
  subject(:representation) { described_class.new(import).as_json }

  it 'has the correct attributes' do
    is_expected.to include(
      :id,
      :status,
      :source_type,
      :created_at,
      :updated_at
    )
  end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Entities::BulkImports::EntityFailure do
  let_it_be(:failure) { create(:bulk_import_failure) }

  # Serialized hash representation produced by the entity.
  subject(:representation) { described_class.new(failure).as_json }

  it 'has the correct attributes' do
    is_expected.to include(
      :pipeline_class,
      :pipeline_step,
      :exception_class,
      :correlation_id_value,
      :created_at
    )
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Entities::BulkImports::Entity do
  let_it_be(:entity) { create(:bulk_import_entity) }

  # Serialized hash representation produced by the entity.
  subject(:representation) { described_class.new(entity).as_json }

  it 'has the correct attributes' do
    is_expected.to include(
      :id,
      :bulk_import_id,
      :status,
      :source_full_path,
      :destination_name,
      :destination_namespace,
      :parent_id,
      :namespace_id,
      :project_id,
      :created_at,
      :updated_at,
      :failures
    )
  end
end

View File

@ -15,4 +15,10 @@ RSpec.describe BulkImport, type: :model do
it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
end
describe '.all_human_statuses' do
  it 'returns all human readable entity statuses' do
    expect(described_class.all_human_statuses).to match_array(%w[created started finished failed])
  end
end
end

View File

@ -134,4 +134,24 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.encoded_source_full_path).to eq(expected)
end
end
describe 'scopes' do
  describe '.by_user_id' do
    it 'returns entities associated with specified user' do
      owner = create(:user)
      owner_import = create(:bulk_import, user: owner)
      first_entity = create(:bulk_import_entity, bulk_import: owner_import)
      second_entity = create(:bulk_import_entity, bulk_import: owner_import)
      # Entity belonging to another user's import; must be excluded.
      create(:bulk_import_entity)

      expect(described_class.by_user_id(owner.id)).to contain_exactly(first_entity, second_entity)
    end
  end
end
describe '.all_human_statuses' do
  it 'returns all human readable entity statuses' do
    expect(described_class.all_human_statuses).to match_array(%w[created started finished failed])
  end
end
end

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::BulkImports do
  let_it_be(:user) { create(:user) }
  # Two imports authored by `user`; `first_import` carries two entities,
  # `second_import` carries one entity with a recorded failure.
  let_it_be(:first_import) { create(:bulk_import, user: user) }
  let_it_be(:second_import) { create(:bulk_import, user: user) }
  let_it_be(:first_entity) { create(:bulk_import_entity, bulk_import: first_import) }
  let_it_be(:second_entity) { create(:bulk_import_entity, bulk_import: first_import) }
  let_it_be(:third_entity) { create(:bulk_import_entity, bulk_import: second_import) }
  let_it_be(:entity_failure) { create(:bulk_import_failure, entity: third_entity) }

  describe 'GET /bulk_imports' do
    it 'returns a list of bulk imports authored by the user' do
      get api('/bulk_imports', user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response.map { |import| import['id'] }).to contain_exactly(first_import.id, second_import.id)
    end
  end

  describe 'GET /bulk_imports/entities' do
    it 'returns a list of all import entities authored by the user' do
      get api('/bulk_imports/entities', user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response.map { |entity| entity['id'] }).to contain_exactly(first_entity.id, second_entity.id, third_entity.id)
    end
  end

  describe 'GET /bulk_imports/:id' do
    it 'returns specified bulk import' do
      get api("/bulk_imports/#{first_import.id}", user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['id']).to eq(first_import.id)
    end
  end

  describe 'GET /bulk_imports/:id/entities' do
    it 'returns specified bulk import entities with failures' do
      get api("/bulk_imports/#{second_import.id}/entities", user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response.map { |entity| entity['id'] }).to contain_exactly(third_entity.id)
      expect(json_response.first['failures'].first['exception_class']).to eq(entity_failure.exception_class)
    end
  end

  describe 'GET /bulk_imports/:id/entities/:entity_id' do
    it 'returns specified bulk import entity' do
      get api("/bulk_imports/#{first_import.id}/entities/#{second_entity.id}", user)

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['id']).to eq(second_entity.id)
    end
  end

  context 'when user is unauthenticated' do
    it 'returns 401' do
      get api('/bulk_imports', nil)

      expect(response).to have_gitlab_http_status(:unauthorized)
    end
  end
end

View File

@ -241,6 +241,18 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
context 'and first fragments are the same' do
let(:lfs_content) { existing_lfs_object.file.read }
context 'when lfs_link_existing_object feature flag disabled' do
  before do
    # Disable the flag so the service must not link to an existing LFS object.
    stub_feature_flags(lfs_link_existing_object: false)
  end

  it 'does not call link_existing_lfs_object!' do
    expect(subject).not_to receive(:link_existing_lfs_object!)

    subject.execute
  end
end
it 'returns success' do
  expect(subject.execute).to eq(status: :success)
end

View File

@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Users::DeactivateDormantUsersWorker do
describe '#perform' do
let_it_be(:dormant) { create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) }
let_it_be(:inactive) { create(:user, last_activity_on: nil) }
subject(:worker) { described_class.new }
it 'does not run for GitLab.com' do
create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
create(:user, last_activity_on: nil)
expect(Gitlab).to receive(:com?).and_return(true)
expect(Gitlab::CurrentSettings).not_to receive(:current_application_settings)
@ -29,9 +29,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
create(:user, last_activity_on: nil)
expect(worker).to receive(:sleep).twice
worker.perform
@ -48,9 +45,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
end
it 'does nothing' do
create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
create(:user, last_activity_on: nil)
worker.perform
expect(User.dormant.count).to eq(1)