From e8fc7f565017d915278fd0efbcff2f81b7e94093 Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Mon, 5 Jul 2021 12:08:33 +0000 Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master --- app/assets/javascripts/lib/dompurify.js | 11 + .../components/states/mr_widget_conflicts.vue | 2 +- .../components/states/mr_widget_rebase.vue | 8 +- app/finders/bulk_imports/entities_finder.rb | 35 ++++ app/finders/bulk_imports/imports_finder.rb | 24 +++ app/helpers/whats_new_helper.rb | 6 +- app/models/bulk_import.rb | 4 + app/models/bulk_imports/entity.rb | 6 + .../lfs_pointers/lfs_download_service.rb | 2 +- .../preferences.html.haml | 2 +- .../development/lfs_link_existing_object.yml | 7 + ...t_entities_on_bulk_import_id_and_status.rb | 20 ++ db/schema_migrations/20210629153519 | 1 + db/structure.sql | 2 +- .../geo/replication/troubleshooting.md | 70 +++++++ doc/administration/pages/index.md | 29 ++- doc/api/bulk_imports.md | 193 ++++++++++++++++++ doc/install/requirements.md | 53 ++++- doc/raketasks/backup_restore.md | 3 + lib/api/api.rb | 1 + lib/api/bulk_imports.rb | 91 +++++++++ lib/api/entities/bulk_import.rb | 13 ++ lib/api/entities/bulk_imports/entity.rb | 22 ++ .../entities/bulk_imports/entity_failure.rb | 15 ++ locale/gitlab.pot | 39 ++-- qa/qa.rb | 1 + qa/qa/page/project/fork/new.rb | 8 +- .../kubernetes_cluster/project_cluster.rb | 17 +- qa/qa/service/cluster_provider/k3s_cilium.rb | 93 +++++++++ qa/qa/service/docker_run/k3s.rb | 15 +- qa/qa/service/kubernetes_cluster.rb | 24 +++ .../user_sees_merge_widget_spec.rb | 2 +- .../bulk_imports/entities_finder_spec.rb | 84 ++++++++ .../bulk_imports/imports_finder_spec.rb | 34 +++ spec/frontend/lib/dompurify_spec.js | 16 ++ .../components/mr_widget_rebase_spec.js | 14 +- .../states/mr_widget_conflicts_spec.js | 2 +- spec/lib/api/entities/bulk_import_spec.rb | 19 ++ .../bulk_imports/entity_failure_spec.rb | 19 ++ .../api/entities/bulk_imports/entity_spec.rb | 26 +++ spec/models/bulk_import_spec.rb | 6 + spec/models/bulk_imports/entity_spec.rb | 20 ++ spec/requests/api/bulk_imports_spec.rb | 67 ++++++ .../lfs_pointers/lfs_download_service_spec.rb | 12 ++ .../deactivate_dormant_users_worker_spec.rb | 12 +- 45 files changed, 1057 insertions(+), 93 deletions(-) create mode 100644 app/finders/bulk_imports/entities_finder.rb create mode 100644 app/finders/bulk_imports/imports_finder.rb create mode 100644 config/feature_flags/development/lfs_link_existing_object.yml create mode 100644 db/migrate/20210629153519_add_index_to_bulk_import_entities_on_bulk_import_id_and_status.rb create mode 100644 db/schema_migrations/20210629153519 create mode 100644 doc/api/bulk_imports.md create mode 100644 lib/api/bulk_imports.rb create mode 100644 lib/api/entities/bulk_import.rb create mode 100644 lib/api/entities/bulk_imports/entity.rb create mode 100644 lib/api/entities/bulk_imports/entity_failure.rb create mode 100644 qa/qa/service/cluster_provider/k3s_cilium.rb create mode 100644 spec/finders/bulk_imports/entities_finder_spec.rb create mode 100644 spec/finders/bulk_imports/imports_finder_spec.rb create mode 100644 spec/lib/api/entities/bulk_import_spec.rb create mode 100644 spec/lib/api/entities/bulk_imports/entity_failure_spec.rb create mode 100644 spec/lib/api/entities/bulk_imports/entity_spec.rb create mode 100644 spec/requests/api/bulk_imports_spec.rb diff --git a/app/assets/javascripts/lib/dompurify.js b/app/assets/javascripts/lib/dompurify.js index 76624c81ed5..4357918672d 100644 --- a/app/assets/javascripts/lib/dompurify.js +++ 
b/app/assets/javascripts/lib/dompurify.js @@ -7,6 +7,8 @@ const defaultConfig = { ADD_TAGS: ['use'], }; +const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method']; + // Only icons urls from `gon` are allowed const getAllowedIconUrls = (gon = window.gon) => [gon.sprite_file_icons, gon.sprite_icons].filter(Boolean); @@ -44,10 +46,19 @@ const sanitizeSvgIcon = (node) => { removeUnsafeHref(node, 'xlink:href'); }; +const sanitizeHTMLAttributes = (node) => { + forbiddenDataAttrs.forEach((attr) => { + if (node.hasAttribute(attr)) { + node.removeAttribute(attr); + } + }); +}; + addHook('afterSanitizeAttributes', (node) => { if (node.tagName.toLowerCase() === 'use') { sanitizeSvgIcon(node); } + sanitizeHTMLAttributes(node); }); export const sanitize = (val, config = defaultConfig) => dompurifySanitize(val, config); diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue index ee90d734ecb..5a93021978c 100644 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_conflicts.vue @@ -112,7 +112,7 @@ export default {
{{ - s__(`mrWidget|Fast-forward merge is not possible. + s__(`mrWidget|Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.`) }} diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_rebase.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_rebase.vue index 98171ef69f5..22f41b43095 100644 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_rebase.vue +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_rebase.vue @@ -87,9 +87,7 @@ export default { }, fastForwardMergeText() { return sprintf( - __( - 'Fast-forward merge is not possible. Rebase the source branch onto %{targetBranch} to allow this merge request to be merged.', - ), + __('Merge blocked: the source branch must be rebased onto the target branch.'), { targetBranch: `${escape(this.targetBranch)}`, }, @@ -193,9 +191,7 @@ export default { data-testid="rebase-message" data-qa-selector="no_fast_forward_message_content" >{{ - __( - 'Fast-forward merge is not possible. Rebase the source branch onto the target branch.', - ) + __('Merge blocked: the source branch must be rebased onto the target branch.') }} {{ diff --git a/app/finders/bulk_imports/entities_finder.rb b/app/finders/bulk_imports/entities_finder.rb new file mode 100644 index 00000000000..2947d155668 --- /dev/null +++ b/app/finders/bulk_imports/entities_finder.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module BulkImports + class EntitiesFinder + def initialize(user:, bulk_import: nil, status: nil) + @user = user + @bulk_import = bulk_import + @status = status + end + + def execute + ::BulkImports::Entity + .preload(:failures) # rubocop: disable CodeReuse/ActiveRecord + .by_user_id(user.id) + .then(&method(:filter_by_bulk_import)) + .then(&method(:filter_by_status)) + end + + private + + attr_reader :user, :bulk_import, :status + + def filter_by_bulk_import(entities) + return entities unless bulk_import + + entities.where(bulk_import_id: bulk_import.id) # rubocop: disable CodeReuse/ActiveRecord + end + + def filter_by_status(entities) + return entities unless ::BulkImports::Entity.all_human_statuses.include?(status) + + entities.with_status(status) + end + end +end diff --git a/app/finders/bulk_imports/imports_finder.rb b/app/finders/bulk_imports/imports_finder.rb new file mode 100644 index 00000000000..b554bbfa5e7 --- /dev/null +++ b/app/finders/bulk_imports/imports_finder.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module BulkImports + class ImportsFinder + def initialize(user:, status: nil) + @user = user + @status = status + end + + def execute + filter_by_status(user.bulk_imports) + end + + private + + attr_reader :user, :status + + def filter_by_status(imports) + return imports unless BulkImport.all_human_statuses.include?(status) + + imports.with_status(status) + end + end +end diff --git a/app/helpers/whats_new_helper.rb b/app/helpers/whats_new_helper.rb index 5fca00c5dce..ccccfcb930b 100644 --- a/app/helpers/whats_new_helper.rb +++ b/app/helpers/whats_new_helper.rb @@ -32,11 +32,11 @@ module WhatsNewHelper def whats_new_variants_description(variant) case variant when 'all_tiers' - _("What's new presents new features from all tiers to help you keep track of all new features.") + _("Include new features from all tiers.") when 'current_tier' - _("What's new presents new features for your current subscription tier, while hiding new features not available to your subscription tier.") + 
_("Only include features new to your current subscription tier.") when 'disabled' - _("What's new is disabled and can no longer be viewed.") + _("%{italic_start}What's new%{italic_end} is inactive and cannot be viewed.").html_safe % { italic_start: ''.html_safe, italic_end: ''.html_safe } end end end diff --git a/app/models/bulk_import.rb b/app/models/bulk_import.rb index 04e660b418e..dee55675304 100644 --- a/app/models/bulk_import.rb +++ b/app/models/bulk_import.rb @@ -33,4 +33,8 @@ class BulkImport < ApplicationRecord transition any => :failed end end + + def self.all_human_statuses + state_machine.states.map(&:human_name) + end end diff --git a/app/models/bulk_imports/entity.rb b/app/models/bulk_imports/entity.rb index bb543b39a79..24f86b44841 100644 --- a/app/models/bulk_imports/entity.rb +++ b/app/models/bulk_imports/entity.rb @@ -48,6 +48,8 @@ class BulkImports::Entity < ApplicationRecord enum source_type: { group_entity: 0, project_entity: 1 } + scope :by_user_id, ->(user_id) { joins(:bulk_import).where(bulk_imports: { user_id: user_id }) } + state_machine :status, initial: :created do state :created, value: 0 state :started, value: 1 @@ -68,6 +70,10 @@ class BulkImports::Entity < ApplicationRecord end end + def self.all_human_statuses + state_machine.states.map(&:human_name) + end + def encoded_source_full_path ERB::Util.url_encode(source_full_path) end diff --git a/app/services/projects/lfs_pointers/lfs_download_service.rb b/app/services/projects/lfs_pointers/lfs_download_service.rb index 525f8a25d04..8058e082397 100644 --- a/app/services/projects/lfs_pointers/lfs_download_service.rb +++ b/app/services/projects/lfs_pointers/lfs_download_service.rb @@ -22,7 +22,7 @@ module Projects def execute return unless project&.lfs_enabled? && lfs_download_object return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid? - return link_existing_lfs_object! if lfs_size > LARGE_FILE_SIZE && lfs_object + return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project, default_enabled: :yaml) && lfs_size > LARGE_FILE_SIZE && lfs_object wrap_download_errors do download_lfs_file! diff --git a/app/views/admin/application_settings/preferences.html.haml b/app/views/admin/application_settings/preferences.html.haml index bb584818f25..ae07dd9c22f 100644 --- a/app/views/admin/application_settings/preferences.html.haml +++ b/app/views/admin/application_settings/preferences.html.haml @@ -20,7 +20,7 @@ %button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' } = expanded_by_default? ? 
_('Collapse') : _('Expand') %p - = _("Configure What's new drawer and content.") + = _("Configure %{italic_start}What's new%{italic_end} drawer and content.").html_safe % { italic_start: ''.html_safe, italic_end: ''.html_safe } .settings-content = render 'whats_new' diff --git a/config/feature_flags/development/lfs_link_existing_object.yml b/config/feature_flags/development/lfs_link_existing_object.yml new file mode 100644 index 00000000000..b8a0b810209 --- /dev/null +++ b/config/feature_flags/development/lfs_link_existing_object.yml @@ -0,0 +1,7 @@ +--- +name: lfs_link_existing_object +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41770 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/249246 +group: group::source code +type: development +default_enabled: false diff --git a/db/migrate/20210629153519_add_index_to_bulk_import_entities_on_bulk_import_id_and_status.rb b/db/migrate/20210629153519_add_index_to_bulk_import_entities_on_bulk_import_id_and_status.rb new file mode 100644 index 00000000000..c84a42cbea4 --- /dev/null +++ b/db/migrate/20210629153519_add_index_to_bulk_import_entities_on_bulk_import_id_and_status.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +class AddIndexToBulkImportEntitiesOnBulkImportIdAndStatus < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + NEW_INDEX_NAME = 'index_bulk_import_entities_on_bulk_import_id_and_status' + OLD_INDEX_NAME = 'index_bulk_import_entities_on_bulk_import_id' + + def up + add_concurrent_index :bulk_import_entities, [:bulk_import_id, :status], name: NEW_INDEX_NAME + remove_concurrent_index_by_name :bulk_import_entities, name: OLD_INDEX_NAME + end + + def down + add_concurrent_index :bulk_import_entities, :bulk_import_id, name: OLD_INDEX_NAME + remove_concurrent_index_by_name :bulk_import_entities, name: NEW_INDEX_NAME + end +end diff --git a/db/schema_migrations/20210629153519 b/db/schema_migrations/20210629153519 new file mode 100644 index 00000000000..304ff5c9fa6 --- /dev/null +++ b/db/schema_migrations/20210629153519 @@ -0,0 +1 @@ +cba36a2e8bedd70f8ccaca47517314d0a3c75a9b8d90715a29919247aa686835 \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 12f62635bdf..b31d3e0ee25 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -22822,7 +22822,7 @@ CREATE INDEX index_broadcast_message_on_ends_at_and_broadcast_type_and_id ON bro CREATE INDEX index_bulk_import_configurations_on_bulk_import_id ON bulk_import_configurations USING btree (bulk_import_id); -CREATE INDEX index_bulk_import_entities_on_bulk_import_id ON bulk_import_entities USING btree (bulk_import_id); +CREATE INDEX index_bulk_import_entities_on_bulk_import_id_and_status ON bulk_import_entities USING btree (bulk_import_id, status); CREATE INDEX index_bulk_import_entities_on_namespace_id ON bulk_import_entities USING btree (namespace_id); diff --git a/doc/administration/geo/replication/troubleshooting.md b/doc/administration/geo/replication/troubleshooting.md index c00f523957c..6bf306a625c 100644 --- a/doc/administration/geo/replication/troubleshooting.md +++ b/doc/administration/geo/replication/troubleshooting.md @@ -588,6 +588,75 @@ to start again from scratch, there are a few steps that can help you: gitlab-ctl start ``` +### Design repository failures on mirrored projects and project imports + +On the top bar, under **Menu >** **{admin}** **Admin > Geo > Nodes**, +if the Design repositories progress bar shows +`Synced` and `Failed` greater than 
100%, and negative `Queued`, then the instance +is likely affected by +[a bug in GitLab 13.2 and 13.3](https://gitlab.com/gitlab-org/gitlab/-/issues/241668). +It was [fixed in 13.4+](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40643). + +To determine the actual replication status of design repositories in +a [Rails console](../../operations/rails_console.md): + +```ruby +secondary = Gitlab::Geo.current_node +counts = {} +secondary.designs.select("projects.id").find_each do |p| + registry = Geo::DesignRegistry.find_by(project_id: p.id) + state = registry ? "#{registry.state}" : "registry does not exist yet" + # puts "Design ID##{p.id}: #{state}" # uncomment this for granular information + counts[state] ||= 0 + counts[state] += 1 +end +puts "\nCounts:", counts +``` + +Example output: + +```plaintext +Design ID#5: started +Design ID#6: synced +Design ID#7: failed +Design ID#8: pending +Design ID#9: synced + +Counts: +{"started"=>1, "synced"=>2, "failed"=>1, "pending"=>1} +``` + +Example output if there are actually zero design repository replication failures: + +```plaintext +Design ID#5: synced +Design ID#6: synced +Design ID#7: synced + +Counts: +{"synced"=>3} +``` + +#### If you are promoting a Geo secondary site running on a single server + +`gitlab-ctl promotion-preflight-checks` will fail due to the existence of +`failed` rows in the `geo_design_registry` table. Use the +[previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports) to +determine the actual replication status of Design repositories. + +`gitlab-ctl promote-to-primary-node` will fail since it runs preflight checks. +If the [previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports) +shows that all designs are synced, then you can use the +`--skip-preflight-checks` option or the `--force` option to move forward with +promotion. + +#### If you are promoting a Geo secondary site running on multiple servers + +`gitlab-ctl promotion-preflight-checks` will fail due to the existence of +`failed` rows in the `geo_design_registry` table. Use the +[previous snippet](#design-repository-failures-on-mirrored-projects-and-project-imports) to +determine the actual replication status of Design repositories. + ## Fixing errors during a failover or when promoting a secondary to a primary node The following are possible errors that might be encountered during failover or @@ -726,6 +795,7 @@ sudo gitlab-ctl promotion-preflight-checks sudo /opt/gitlab/embedded/bin/gitlab-pg-ctl promote sudo gitlab-ctl reconfigure sudo gitlab-rake geo:set_secondary_as_primary +``` ## Expired artifacts diff --git a/doc/administration/pages/index.md b/doc/administration/pages/index.md index 2e35ef8fa9a..a787791a33e 100644 --- a/doc/administration/pages/index.md +++ b/doc/administration/pages/index.md @@ -922,7 +922,7 @@ In installations from source: In GitLab 14.0 the underlying storage format of GitLab Pages is changing from files stored directly in disk to a single ZIP archive per project. -These ZIP archives can be stored either locally on disk storage or on the [object storage](#using-object-storage) if it is configured. +These ZIP archives can be stored either locally on disk storage or on [object storage](#using-object-storage) if it is configured. [Starting from GitLab 13.5](https://gitlab.com/gitlab-org/gitlab/-/issues/245308) ZIP archives are stored every time pages site is updated. @@ -984,9 +984,8 @@ to using that. 
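A side note on the Geo design-repository snippet earlier in this patch (the Rails console loop over `secondary.designs`): that loop prints one line per design repository and then tallies the states. Assuming `Geo::DesignRegistry` behaves like a standard ActiveRecord model (the snippet only uses `find_by` and `#state`, so this is an assumption), the same tally can be read with a single grouped query — a minimal sketch, not something this patch adds:

```ruby
# Hedged shortcut for the tally above: group all design registry rows on the
# secondary by state in one query. Assumes Geo::DesignRegistry is a standard
# ActiveRecord model; the hash keys reflect however the state column is stored.
Geo::DesignRegistry.group(:state).count
# e.g. => {"synced"=>2, "failed"=>1, "pending"=>1, "started"=>1}
```

If this shows no `failed` rows, the preflight-check failures described in that section should not apply.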
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/325285) in GitLab 13.11. -Existing Pages deployments objects (which store [ZIP archives](#zip-storage)) can similarly be -migrated to [object storage](#using-object-storage), if -you've been having them stored locally. +Existing Pages deployment objects (which store [ZIP archives](#zip-storage)) can similarly be +migrated to [object storage](#using-object-storage). Migrate your existing Pages deployments from local storage to object storage: @@ -996,7 +995,7 @@ sudo gitlab-rake gitlab:pages:deployments:migrate_to_object_storage ### Rolling Pages deployments back to local storage -After the migration to object storage is performed, you can choose to revert your Pages deployments back to local storage: +After the migration to object storage is performed, you can choose to move your Pages deployments back to local storage: ```shell sudo gitlab-rake gitlab:pages:deployments:migrate_to_local @@ -1006,7 +1005,7 @@ sudo gitlab-rake gitlab:pages:deployments:migrate_to_local > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/301159) in GitLab 13.11. -If you use [object storage](#using-object-storage), disable local storage: +If you use [object storage](#using-object-storage), you can disable local storage: 1. Edit `/etc/gitlab/gitlab.rb`: @@ -1020,22 +1019,22 @@ Starting from GitLab 13.12, this setting also disables the [legacy storage](#mig ## Migrate GitLab Pages to 14.0 -In GitLab 14.0 a number of breaking changes are introduced which may require some user intervention. +In GitLab 14.0 a number of breaking changes were introduced which may require some user intervention. The steps below describe the best way to migrate without causing any downtime for your GitLab instance. -If you run GitLab on a single server, then most likely you will not notice any problem after -upgrading to GitLab 14.0, but it may be safer to follow the steps anyway. -If you run GitLab on a single server, then most likely the upgrade process to 14.0 will go smoothly for you. Regardless, we recommend everyone follow the migration steps to ensure a successful upgrade. +If you run GitLab on a single server, then most likely the upgrade process to 14.0 will go smoothly for you +and you will not notice any problem after upgrading. +Regardless, we recommend everyone follow the migration steps to ensure a successful upgrade. If at any point you run into issues, consult the [troubleshooting section](#troubleshooting). -To migrate GitLab Pages to GitLab 14.0: +If your current GitLab version is lower than 13.12, then you first need to update to 13.12. +Updating directly to 14.0 is [not supported](../../update/index.md#upgrade-paths) +and may cause downtime for some web-sites hosted on GitLab Pages. Once you update to 13.12, +migrate GitLab Pages to prepare them for GitLab 14.0: -1. If your current GitLab version is lower than 13.12, then you first need to upgrade to 13.12. -Upgrading directly to 14.0 may cause downtime for some web-sites hosted on GitLab Pages -until you finish the following steps. 1. Set [`domain_config_source` to `gitlab`](#domain-source-configuration-before-140), which is the default starting from GitLab 14.0. Skip this step if you're already running GitLab 14.0 or above. -1. If you want to store your pages content in the [object storage](#using-object-storage), make sure to configure it. +1. If you want to store your pages content in [object storage](#using-object-storage), make sure to configure it. 
If you want to store the pages content locally or continue using an NFS server, skip this step. 1. [Migrate legacy storage to ZIP storage.](#migrate-legacy-storage-to-zip-storage) 1. Upgrade GitLab to 14.0. diff --git a/doc/api/bulk_imports.md b/doc/api/bulk_imports.md new file mode 100644 index 00000000000..9521c769d49 --- /dev/null +++ b/doc/api/bulk_imports.md @@ -0,0 +1,193 @@ +--- +stage: Manage +group: Import +info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments +--- + +# GitLab Migrations (Bulk Imports) API + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/64335) in GitLab 14.1. + +With the GitLab Migrations API, you can view the progress of migrations initiated with +[GitLab Group Migration](../user/group/import/index.md). + +## List all GitLab migrations + +```plaintext +GET /bulk_imports +``` + +| Attribute | Type | Required | Description | +|:-----------|:--------|:---------|:---------------------------------------| +| `per_page` | integer | no | Number of records to return per page. | +| `page` | integer | no | Page to retrieve. | +| `status` | string | no | Import status. | + +The status can be one of the following: + +- `created` +- `started` +- `finished` +- `failed` + +```shell +curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/bulk_imports?per_page=2&page=1" +``` + +```json +[ + { + "id": 1, + "status": "finished", + "source_type": "gitlab", + "created_at": "2021-06-18T09:45:55.358Z", + "updated_at": "2021-06-18T09:46:27.003Z" + }, + { + "id": 2, + "status": "started", + "source_type": "gitlab", + "created_at": "2021-06-18T09:47:36.581Z", + "updated_at": "2021-06-18T09:47:58.286Z" + } +] +``` + +## List all GitLab migrations' entities + +```plaintext +GET /bulk_imports/entities +``` + +| Attribute | Type | Required | Description | +|:-----------|:--------|:---------|:---------------------------------------| +| `per_page` | integer | no | Number of records to return per page. | +| `page` | integer | no | Page to retrieve. | +| `status` | string | no | Import status. 
| + +The status can be one of the following: + +- `created` +- `started` +- `finished` +- `failed` + +```shell +curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/bulk_imports/entities?per_page=2&page=1&status=started" +``` + +```json +[ + { + "id": 1, + "bulk_import_id": 1, + "status": "finished", + "source_full_path": "source_group", + "destination_name": "destination_name", + "destination_namespace": "destination_path", + "parent_id": null, + "namespace_id": 1, + "project_id": null, + "created_at": "2021-06-18T09:47:37.390Z", + "updated_at": "2021-06-18T09:47:51.867Z", + "failures": [] + }, + { + "id": 2, + "bulk_import_id": 2, + "status": "failed", + "source_full_path": "another_group", + "destination_name": "another_name", + "destination_namespace": "another_namespace", + "parent_id": null, + "namespace_id": null, + "project_id": null, + "created_at": "2021-06-24T10:40:20.110Z", + "updated_at": "2021-06-24T10:40:46.590Z", + "failures": [ + { + "pipeline_class": "BulkImports::Groups::Pipelines::GroupPipeline", + "pipeline_step": "extractor", + "exception_class": "Exception", + "correlation_id_value": "dfcf583058ed4508e4c7c617bd7f0edd", + "created_at": "2021-06-24T10:40:46.495Z" + } + ] + } +] +``` + +## Get GitLab migration details + +```plaintext +GET /bulk_imports/:id +``` + +```shell +curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/bulk_imports/1" +``` + +```json +{ + "id": 1, + "status": "finished", + "source_type": "gitlab", + "created_at": "2021-06-18T09:45:55.358Z", + "updated_at": "2021-06-18T09:46:27.003Z" +} +``` + +## List GitLab migration entities + +```plaintext +GET /bulk_imports/:id/entities +``` + +| Attribute | Type | Required | Description | +|:-----------|:--------|:---------|:---------------------------------------| +| `per_page` | integer | no | Number of records to return per page. | +| `page` | integer | no | Page to retrieve. | +| `status` | string | no | Import status. | + +The status can be one of the following: + +- `created` +- `started` +- `finished` +- `failed` + +```shell +curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/bulk_imports/1/entities?per_page=2&page=1&status=finished" +``` + +```json +[ + { + "id": 1, + "status": "finished", + "source_type": "gitlab", + "created_at": "2021-06-18T09:45:55.358Z", + "updated_at": "2021-06-18T09:46:27.003Z" + } +] +``` + +## Get GitLab migration entity details + +```plaintext +GET /bulk_imports/:id/entities/:entity_id +``` + +```shell +curl --request GET --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/bulk_imports/1/entities/2" +``` + +```json +{ + "id": 1, + "status": "finished", + "source_type": "gitlab", + "created_at": "2021-06-18T09:45:55.358Z", + "updated_at": "2021-06-18T09:46:27.003Z" +} +``` diff --git a/doc/install/requirements.md b/doc/install/requirements.md index a84cd179b30..133c3103c7b 100644 --- a/doc/install/requirements.md +++ b/doc/install/requirements.md @@ -183,11 +183,58 @@ tune the Puma settings: The recommended number of workers is calculated as the highest of the following: - `2` -- Number of CPU cores - 1 +- A combination of CPU and memory resource availability (see how this is configured automatically for the [Linux package](https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/ef9facdc927e7389db6a5e0655414ba8318c7b8a/files/gitlab-cookbooks/gitlab/libraries/puma.rb#L31-46)). -For example a node with 4 cores should be configured with 3 Puma workers. 
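To make the rule above concrete, here is a small sketch of the calculation as this document describes it; the 1.5 GB headroom figure is taken from the worked scenarios that follow, and the authoritative logic lives in the linked Omnibus `puma.rb`, so treat this as an approximation rather than a copy:

```ruby
# Sketch of the recommended Puma worker count as described in this document:
# the highest of 2 and the lower of (CPU cores, total memory in GB minus 1.5).
def recommended_puma_workers(cpu_cores:, total_memory_gb:)
  memory_based_limit = total_memory_gb - 1.5
  [2, [cpu_cores, memory_based_limit].min.floor].max
end

recommended_puma_workers(cpu_cores: 2, total_memory_gb: 8) # => 2
recommended_puma_workers(cpu_cores: 4, total_memory_gb: 4) # => 2
recommended_puma_workers(cpu_cores: 4, total_memory_gb: 8) # => 4
```

The scenarios below walk through the same numbers step by step.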
+Take for example the following scenarios: -You can increase the number of Puma workers, providing enough CPU and memory capacity is available. +- A node with 2 cores / 8 GB memory should be configured with **2 Puma workers**. + + Calculated as: + + ```plaintext + The highest number from + 2 + And + [ + the lowest number from + - number of cores: 2 + - memory limit: (8 - 1.5) = 6 + ] + ``` + + So, the highest from 2 and 2 is 2. + +- A node with 4 cores / 4 GB memory should be configured with **2 Puma workers**. + + ```plaintext + The highest number from + 2 + And + [ + the lowest number from + - number of cores: 4 + - memory limit: (4 - 1.5) = 2.5 + ] + `` + + So, the highest from 2 and 2 is 2. + +- A node with 4 cores / 8 GB memory should be configured with **4 Puma workers**. + + ```plaintext + The highest number from + 2 + And + [ + the lowest number from + - number of cores: 4 + - memory limit: (8 - 1.5) = 6.5 + ] + ``` + + So, the highest from 2 and 4 is 4. + +You can increase the number of Puma workers, provided enough CPU and memory capacity is available. A higher number of Puma workers usually helps to reduce the response time of the application and increase the ability to handle parallel requests. You must perform testing to verify the optimal settings for your infrastructure. diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md index 76b383a3a76..bd37ddd9557 100644 --- a/doc/raketasks/backup_restore.md +++ b/doc/raketasks/backup_restore.md @@ -1290,6 +1290,9 @@ You may need to reconfigure or restart GitLab for the changes to take effect. UPDATE namespaces SET runners_token = null, runners_token_encrypted = null; -- Clear instance tokens UPDATE application_settings SET runners_registration_token_encrypted = null; + -- Clear key used for JWT authentication + -- This may break the $CI_JWT_TOKEN job variable: + -- https://gitlab.com/gitlab-org/gitlab/-/issues/325965 UPDATE application_settings SET encrypted_ci_jwt_signing_key = null; -- Clear runner tokens UPDATE ci_runners SET token = null, token_encrypted = null; diff --git a/lib/api/api.rb b/lib/api/api.rb index 88343384f07..659af98f861 100644 --- a/lib/api/api.rb +++ b/lib/api/api.rb @@ -152,6 +152,7 @@ module API mount ::API::Boards mount ::API::Branches mount ::API::BroadcastMessages + mount ::API::BulkImports mount ::API::Ci::Pipelines mount ::API::Ci::PipelineSchedules mount ::API::Ci::Runner diff --git a/lib/api/bulk_imports.rb b/lib/api/bulk_imports.rb new file mode 100644 index 00000000000..189851cee65 --- /dev/null +++ b/lib/api/bulk_imports.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true + +module API + class BulkImports < ::API::Base + include PaginationParams + + feature_category :importers + + helpers do + def bulk_imports + @bulk_imports ||= ::BulkImports::ImportsFinder.new(user: current_user, status: params[:status]).execute + end + + def bulk_import + @bulk_import ||= bulk_imports.find(params[:import_id]) + end + + def bulk_import_entities + @bulk_import_entities ||= ::BulkImports::EntitiesFinder.new(user: current_user, bulk_import: bulk_import, status: params[:status]).execute + end + + def bulk_import_entity + @bulk_import_entity ||= bulk_import_entities.find(params[:entity_id]) + end + end + + before { authenticate! } + + resource :bulk_imports do + desc 'List all GitLab Migrations' do + detail 'This feature was introduced in GitLab 14.1.' 
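The `status` parameter declared just below is validated against the human-readable state names this patch adds to the models, and filtering is delegated to the new finders. A short sketch of how the pieces fit together, mirroring the helpers in `lib/api/bulk_imports.rb`; `current_user` stands in for an authenticated user:

```ruby
# Valid values for the status filter come from the state machines; the model
# specs in this patch pin them to these four names.
BulkImport.all_human_statuses          # => ["created", "started", "finished", "failed"]
BulkImports::Entity.all_human_statuses # => ["created", "started", "finished", "failed"]

# The endpoints delegate to the new finders; at the finder level an
# unrecognised status is ignored and the unfiltered relation is returned.
imports  = BulkImports::ImportsFinder.new(user: current_user, status: 'started').execute
entities = BulkImports::EntitiesFinder.new(user: current_user,
                                           bulk_import: imports.first,
                                           status: 'failed').execute
```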
+ end + params do + use :pagination + optional :status, type: String, values: BulkImport.all_human_statuses, + desc: 'Return GitLab Migrations with specified status' + end + get do + present paginate(bulk_imports), with: Entities::BulkImport + end + + desc "List all GitLab Migrations' entities" do + detail 'This feature was introduced in GitLab 14.1.' + end + params do + use :pagination + optional :status, type: String, values: ::BulkImports::Entity.all_human_statuses, + desc: "Return all GitLab Migrations' entities with specified status" + end + get :entities do + entities = ::BulkImports::EntitiesFinder.new(user: current_user, status: params[:status]).execute + + present paginate(entities), with: Entities::BulkImports::Entity + end + + desc 'Get GitLab Migration details' do + detail 'This feature was introduced in GitLab 14.1.' + end + params do + requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration" + end + get ':import_id' do + present bulk_import, with: Entities::BulkImport + end + + desc "List GitLab Migration entities" do + detail 'This feature was introduced in GitLab 14.1.' + end + params do + requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration" + optional :status, type: String, values: ::BulkImports::Entity.all_human_statuses, + desc: 'Return import entities with specified status' + use :pagination + end + get ':import_id/entities' do + present paginate(bulk_import_entities), with: Entities::BulkImports::Entity + end + + desc 'Get GitLab Migration entity details' do + detail 'This feature was introduced in GitLab 14.1.' + end + params do + requires :import_id, type: Integer, desc: "The ID of user's GitLab Migration" + requires :entity_id, type: Integer, desc: "The ID of GitLab Migration entity" + end + get ':import_id/entities/:entity_id' do + present bulk_import_entity, with: Entities::BulkImports::Entity + end + end + end +end diff --git a/lib/api/entities/bulk_import.rb b/lib/api/entities/bulk_import.rb new file mode 100644 index 00000000000..373ae486dcf --- /dev/null +++ b/lib/api/entities/bulk_import.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +module API + module Entities + class BulkImport < Grape::Entity + expose :id + expose :status_name, as: :status + expose :source_type + expose :created_at + expose :updated_at + end + end +end diff --git a/lib/api/entities/bulk_imports/entity.rb b/lib/api/entities/bulk_imports/entity.rb new file mode 100644 index 00000000000..e8c31256b17 --- /dev/null +++ b/lib/api/entities/bulk_imports/entity.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module API + module Entities + module BulkImports + class Entity < Grape::Entity + expose :id + expose :bulk_import_id + expose :status_name, as: :status + expose :source_full_path + expose :destination_name + expose :destination_namespace + expose :parent_id + expose :namespace_id + expose :project_id + expose :created_at + expose :updated_at + expose :failures, using: EntityFailure + end + end + end +end diff --git a/lib/api/entities/bulk_imports/entity_failure.rb b/lib/api/entities/bulk_imports/entity_failure.rb new file mode 100644 index 00000000000..a3dbe3280ee --- /dev/null +++ b/lib/api/entities/bulk_imports/entity_failure.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module API + module Entities + module BulkImports + class EntityFailure < Grape::Entity + expose :pipeline_class + expose :pipeline_step + expose :exception_class + expose :correlation_id_value + expose :created_at + end + end + end +end diff --git 
a/locale/gitlab.pot b/locale/gitlab.pot index 1ba925254b6..59b360d83b7 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -612,6 +612,9 @@ msgstr "" msgid "%{issuesSize} with a limit of %{maxIssueCount}" msgstr "" +msgid "%{italic_start}What's new%{italic_end} is inactive and cannot be viewed." +msgstr "" + msgid "%{itemsCount} issues with a limit of %{maxIssueCount}" msgstr "" @@ -8199,6 +8202,9 @@ msgstr "" msgid "Configure" msgstr "" +msgid "Configure %{italic_start}What's new%{italic_end} drawer and content." +msgstr "" + msgid "Configure %{link} to track events. %{link_start}Learn more.%{link_end}" msgstr "" @@ -8232,9 +8238,6 @@ msgstr "" msgid "Configure Tracing" msgstr "" -msgid "Configure What's new drawer and content." -msgstr "" - msgid "Configure a %{codeStart}.gitlab-webide.yml%{codeEnd} file in the %{codeStart}.gitlab%{codeEnd} directory to start using the Web Terminal. %{helpStart}Learn more.%{helpEnd}" msgstr "" @@ -13602,12 +13605,6 @@ msgstr "" msgid "Fast timeout" msgstr "" -msgid "Fast-forward merge is not possible. Rebase the source branch onto %{targetBranch} to allow this merge request to be merged." -msgstr "" - -msgid "Fast-forward merge is not possible. Rebase the source branch onto the target branch." -msgstr "" - msgid "Fast-forward merge without a merge commit" msgstr "" @@ -17279,6 +17276,9 @@ msgstr "" msgid "Include merge request description" msgstr "" +msgid "Include new features from all tiers." +msgstr "" + msgid "Include the username in the URL if required: %{code_open}https://username@gitlab.company.com/group/project.git%{code_close}." msgstr "" @@ -20426,6 +20426,9 @@ msgstr "" msgid "Merge automatically (%{strategy})" msgstr "" +msgid "Merge blocked: the source branch must be rebased onto the target branch." +msgstr "" + msgid "Merge commit SHA" msgstr "" @@ -22934,6 +22937,9 @@ msgstr "" msgid "Only admins can delete project" msgstr "" +msgid "Only include features new to your current subscription tier." +msgstr "" + msgid "Only policy:" msgstr "" @@ -36718,15 +36724,6 @@ msgstr "" msgid "What's new" msgstr "" -msgid "What's new is disabled and can no longer be viewed." -msgstr "" - -msgid "What's new presents new features for your current subscription tier, while hiding new features not available to your subscription tier." -msgstr "" - -msgid "What's new presents new features from all tiers to help you keep track of all new features." -msgstr "" - msgid "What’s your experience level?" msgstr "" @@ -38979,9 +38976,6 @@ msgstr "" msgid "mrWidget|Failed to load deployment statistics" msgstr "" -msgid "mrWidget|Fast-forward merge is not possible. To merge this request, first rebase locally." -msgstr "" - msgid "mrWidget|If the %{missingBranchName} branch exists in your local repository, you can merge this merge request manually using the command line" msgstr "" @@ -39009,6 +39003,9 @@ msgstr "" msgid "mrWidget|Merge blocked: all threads must be resolved." msgstr "" +msgid "mrWidget|Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally." +msgstr "" + msgid "mrWidget|Merge blocked: pipeline must succeed. It's waiting for a manual action to continue." 
msgstr "" diff --git a/qa/qa.rb b/qa/qa.rb index 0b2157da265..b5499eb102a 100644 --- a/qa/qa.rb +++ b/qa/qa.rb @@ -592,6 +592,7 @@ module QA autoload :Minikube, 'qa/service/cluster_provider/minikube' autoload :K3d, 'qa/service/cluster_provider/k3d' autoload :K3s, 'qa/service/cluster_provider/k3s' + autoload :K3sCilium, 'qa/service/cluster_provider/k3s_cilium' end module DockerRun diff --git a/qa/qa/page/project/fork/new.rb b/qa/qa/page/project/fork/new.rb index 5a08f6a3cbd..7062702679a 100644 --- a/qa/qa/page/project/fork/new.rb +++ b/qa/qa/page/project/fork/new.rb @@ -9,10 +9,6 @@ module QA element :fork_namespace_button end - view 'app/assets/javascripts/pages/projects/forks/new/components/fork_groups_list.vue' do - element :fork_groups_list_search_field - end - view 'app/assets/javascripts/pages/projects/forks/new/components/fork_form.vue' do element :fork_namespace_dropdown element :fork_project_button @@ -27,8 +23,8 @@ module QA end end - def search_for_group(group_name) - find_element(:fork_groups_list_search_field).set(group_name) + def fork_namespace_dropdown_values + find_element(:fork_namespace_dropdown).all(:option).map { |option| option.text.tr("\n", '').strip } end end end diff --git a/qa/qa/resource/kubernetes_cluster/project_cluster.rb b/qa/qa/resource/kubernetes_cluster/project_cluster.rb index ab8c8e93ed4..b3eba77fc46 100644 --- a/qa/qa/resource/kubernetes_cluster/project_cluster.rb +++ b/qa/qa/resource/kubernetes_cluster/project_cluster.rb @@ -13,8 +13,8 @@ module QA Resource::Project.fabricate! end - attribute :ingress_ip do - Page::Project::Infrastructure::Kubernetes::Show.perform(&:ingress_ip) + def ingress_ip + @ingress_ip ||= @cluster.fetch_external_ip_for_ingress end def fabricate! @@ -42,19 +42,6 @@ module QA # We must wait a few seconds for permissions to be set up correctly for new cluster sleep 25 - # TODO: These steps do not work anymore, see https://gitlab.com/gitlab-org/gitlab/-/issues/333818 - - # Open applications tab - show.open_applications - - show.install!(:ingress) if @install_ingress - show.install!(:prometheus) if @install_prometheus - show.install!(:runner) if @install_runner - - show.await_installed(:ingress) if @install_ingress - show.await_installed(:prometheus) if @install_prometheus - show.await_installed(:runner) if @install_runner - if @install_ingress populate(:ingress_ip) diff --git a/qa/qa/service/cluster_provider/k3s_cilium.rb b/qa/qa/service/cluster_provider/k3s_cilium.rb new file mode 100644 index 00000000000..5b529caa20b --- /dev/null +++ b/qa/qa/service/cluster_provider/k3s_cilium.rb @@ -0,0 +1,93 @@ +# frozen_string_literal: true + +module QA + module Service + module ClusterProvider + class K3sCilium < K3s + def setup + @k3s = Service::DockerRun::K3s.new.tap do |k3s| + k3s.remove! + k3s.cni_enabled = true + k3s.register! 
+ + shell "kubectl config set-cluster k3s --server https://#{k3s.host_name}:6443 --insecure-skip-tls-verify" + shell 'kubectl config set-credentials default --username=node --password=some-secret' + shell 'kubectl config set-context k3s --cluster=k3s --user=default' + shell 'kubectl config use-context k3s' + + wait_for_server(k3s.host_name) do + shell 'kubectl version' + # install local storage + shell 'kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/master/deploy/local-path-storage.yaml' + + # patch local storage + shell %(kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}') + shell 'kubectl create -f https://raw.githubusercontent.com/cilium/cilium/v1.8/install/kubernetes/quick-install.yaml' + + wait_for_namespaces do + wait_for_cilium + wait_for_coredns do + shell 'kubectl create -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/controller-0.31.0/deploy/static/provider/cloud/deploy.yaml' + wait_for_ingress + end + end + end + end + end + + private + + def wait_for_cilium + QA::Runtime::Logger.info 'Waiting for Cilium pod to be initialized' + + 60.times do + if service_available?('kubectl get pods --all-namespaces -l k8s-app=cilium --no-headers=true | grep -o "cilium-.*1/1"') + return yield if block_given? + + return true + end + + sleep 1 + QA::Runtime::Logger.info '.' + end + + raise 'Cilium pod has not initialized correctly' + end + + def wait_for_coredns + QA::Runtime::Logger.info 'Waiting for CoreDNS pod to be initialized' + + 60.times do + if service_available?('kubectl get pods --all-namespaces --no-headers=true | grep -o "coredns.*1/1"') + return yield if block_given? + + return true + end + + sleep 1 + QA::Runtime::Logger.info '.' + end + + raise 'CoreDNS pod has not been initialized correctly' + end + + def wait_for_ingress + QA::Runtime::Logger.info 'Waiting for Ingress controller pod to be initialized' + + 60.times do + if service_available?('kubectl get pods --all-namespaces -l app.kubernetes.io/component=controller | grep -o "ingress-nginx-controller.*1/1"') + return yield if block_given? + + return true + end + + sleep 1 + QA::Runtime::Logger.info '.' + end + + raise 'Ingress pod has not been initialized correctly' + end + end + end + end +end diff --git a/qa/qa/service/docker_run/k3s.rb b/qa/qa/service/docker_run/k3s.rb index 07211b220f1..a09b62cb613 100644 --- a/qa/qa/service/docker_run/k3s.rb +++ b/qa/qa/service/docker_run/k3s.rb @@ -4,15 +4,20 @@ module QA module Service module DockerRun class K3s < Base + attr_accessor :cni_enabled + def initialize - @image = 'registry.gitlab.com/gitlab-org/cluster-integration/test-utils/k3s-gitlab-ci/releases/v0.6.1' + @image = 'registry.gitlab.com/gitlab-org/cluster-integration/test-utils/k3s-gitlab-ci/releases/v0.9.1' @name = 'k3s' + @cni_enabled = false super end def register! pull start_k3s + # Mount the berkeley packet filter if container network interface is enabled + mount_bpf if @cni_enabled end def host_name @@ -36,12 +41,20 @@ module QA #{@image} server --cluster-secret some-secret --no-deploy traefik + #{@cni_enabled ? '--no-flannel' : ''} CMD command.gsub!("--network #{network} --hostname #{host_name}", '') unless QA::Runtime::Env.running_in_ci? 
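For orientation, a hedged sketch of how the new Cilium-enabled provider might be exercised end to end. The `provider_class:` keyword on `Service::KubernetesCluster.new` and the `create!` call are assumptions drawn from how the existing K3s provider is typically driven in the QA suite (they are not part of this patch); `project` stands in for a fabricated `Resource::Project`, and `add_sample_policy` is added to `Service::KubernetesCluster` later in this patch.

```ruby
# Illustrative sketch only — the constructor call below is an assumption,
# not something introduced by this patch.
cluster = QA::Service::KubernetesCluster
  .new(provider_class: QA::Service::ClusterProvider::K3sCilium)
  .create!

# add_sample_policy (added later in this patch) applies a CiliumNetworkPolicy
# to the project's production namespace.
cluster.add_sample_policy(project, policy_name: 'sample-policy')
```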
shell command end + + private + + def mount_bpf + shell "docker exec --privileged k3s mount bpffs -t bpf /sys/fs/bpf" + shell "docker exec --privileged k3s mount --make-shared bpffs -t bpf /sys/fs/bpf" + end end end end diff --git a/qa/qa/service/kubernetes_cluster.rb b/qa/qa/service/kubernetes_cluster.rb index ddf97046fb0..adef1b46af2 100644 --- a/qa/qa/service/kubernetes_cluster.rb +++ b/qa/qa/service/kubernetes_cluster.rb @@ -51,6 +51,30 @@ module QA shell('kubectl apply -f -', stdin_data: manifest) end + def add_sample_policy(project, policy_name: 'sample-policy') + namespace = "#{project.name}-#{project.id}-production" + network_policy = <<~YAML + apiVersion: "cilium.io/v2" + kind: CiliumNetworkPolicy + metadata: + name: #{policy_name} + namespace: #{namespace} + spec: + endpointSelector: + matchLabels: + role: backend + ingress: + - fromEndpoints: + - matchLabels: + role: frontend + YAML + shell('kubectl apply -f -', stdin_data: network_policy) + end + + def fetch_external_ip_for_ingress + `kubectl get svc --all-namespaces --no-headers=true -l app.kubernetes.io/name=ingress-nginx -o custom-columns=:'status.loadBalancer.ingress[0].ip' | grep -v 'none'` + end + private def fetch_api_url diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb index a85700fc721..2f7758143a1 100644 --- a/spec/features/merge_request/user_sees_merge_widget_spec.rb +++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb @@ -373,7 +373,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do wait_for_requests page.within('.mr-widget-body') do - expect(page).to have_content('Fast-forward merge is not possible') + expect(page).to have_content('Merge Merge blocked: fast-forward merge is not possible. 
To merge this request, first rebase locally.') end end end diff --git a/spec/finders/bulk_imports/entities_finder_spec.rb b/spec/finders/bulk_imports/entities_finder_spec.rb new file mode 100644 index 00000000000..e053011b60d --- /dev/null +++ b/spec/finders/bulk_imports/entities_finder_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::EntitiesFinder do + let_it_be(:user) { create(:user) } + + let_it_be(:user_import_1) { create(:bulk_import, user: user) } + let_it_be(:started_entity_1) { create(:bulk_import_entity, :started, bulk_import: user_import_1) } + let_it_be(:finished_entity_1) { create(:bulk_import_entity, :finished, bulk_import: user_import_1) } + let_it_be(:failed_entity_1) { create(:bulk_import_entity, :failed, bulk_import: user_import_1) } + + let_it_be(:user_import_2) { create(:bulk_import, user: user) } + let_it_be(:started_entity_2) { create(:bulk_import_entity, :started, bulk_import: user_import_2) } + let_it_be(:finished_entity_2) { create(:bulk_import_entity, :finished, bulk_import: user_import_2) } + let_it_be(:failed_entity_2) { create(:bulk_import_entity, :failed, bulk_import: user_import_2) } + + let_it_be(:not_user_import) { create(:bulk_import) } + let_it_be(:started_entity_3) { create(:bulk_import_entity, :started, bulk_import: not_user_import) } + let_it_be(:finished_entity_3) { create(:bulk_import_entity, :finished, bulk_import: not_user_import) } + let_it_be(:failed_entity_3) { create(:bulk_import_entity, :failed, bulk_import: not_user_import) } + + subject { described_class.new(user: user) } + + describe '#execute' do + it 'returns a list of import entities associated with user' do + expect(subject.execute) + .to contain_exactly( + started_entity_1, finished_entity_1, failed_entity_1, + started_entity_2, finished_entity_2, failed_entity_2 + ) + end + + context 'when bulk import is specified' do + subject { described_class.new(user: user, bulk_import: user_import_1) } + + it 'returns a list of import entities filtered by bulk import' do + expect(subject.execute) + .to contain_exactly( + started_entity_1, finished_entity_1, failed_entity_1 + ) + end + + context 'when specified import is not associated with user' do + subject { described_class.new(user: user, bulk_import: not_user_import) } + + it 'does not return entities' do + expect(subject.execute).to be_empty + end + end + end + + context 'when status is specified' do + subject { described_class.new(user: user, status: 'failed') } + + it 'returns a list of import entities filtered by status' do + expect(subject.execute) + .to contain_exactly( + failed_entity_1, failed_entity_2 + ) + end + + context 'when invalid status is specified' do + subject { described_class.new(user: user, status: 'invalid') } + + it 'does not filter entities by status' do + expect(subject.execute) + .to contain_exactly( + started_entity_1, finished_entity_1, failed_entity_1, + started_entity_2, finished_entity_2, failed_entity_2 + ) + end + end + end + + context 'when bulk import and status are specified' do + subject { described_class.new(user: user, bulk_import: user_import_2, status: 'finished') } + + it 'returns matched import entities' do + expect(subject.execute).to contain_exactly(finished_entity_2) + end + end + end +end diff --git a/spec/finders/bulk_imports/imports_finder_spec.rb b/spec/finders/bulk_imports/imports_finder_spec.rb new file mode 100644 index 00000000000..aac83c86c84 --- /dev/null +++ b/spec/finders/bulk_imports/imports_finder_spec.rb @@ -0,0 +1,34 
@@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::ImportsFinder do + let_it_be(:user) { create(:user) } + let_it_be(:started_import) { create(:bulk_import, :started, user: user) } + let_it_be(:finished_import) { create(:bulk_import, :finished, user: user) } + let_it_be(:not_user_import) { create(:bulk_import) } + + subject { described_class.new(user: user) } + + describe '#execute' do + it 'returns a list of imports associated with user' do + expect(subject.execute).to contain_exactly(started_import, finished_import) + end + + context 'when status is specified' do + subject { described_class.new(user: user, status: 'started') } + + it 'returns a list of import entities filtered by status' do + expect(subject.execute).to contain_exactly(started_import) + end + + context 'when invalid status is specified' do + subject { described_class.new(user: user, status: 'invalid') } + + it 'does not filter entities by status' do + expect(subject.execute).to contain_exactly(started_import, finished_import) + end + end + end + end +end diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js index a01f86678e9..fa8dbb12a08 100644 --- a/spec/frontend/lib/dompurify_spec.js +++ b/spec/frontend/lib/dompurify_spec.js @@ -30,6 +30,9 @@ const unsafeUrls = [ `https://evil.url/${absoluteGon.sprite_file_icons}`, ]; +const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method']; +const acceptedDataAttrs = ['data-random', 'data-custom']; + describe('~/lib/dompurify', () => { let originalGon; @@ -95,4 +98,17 @@ describe('~/lib/dompurify', () => { expect(sanitize(htmlXlink)).toBe(expectedSanitized); }); }); + + describe('handles data attributes correctly', () => { + it.each(forbiddenDataAttrs)('removes %s attributes', (attr) => { + const htmlHref = `hello`; + expect(sanitize(htmlHref)).toBe('hello'); + }); + + it.each(acceptedDataAttrs)('does not remove %s attributes', (attr) => { + const attrWithValue = `${attr}="true"`; + const htmlHref = `hello`; + expect(sanitize(htmlHref)).toBe(`hello`); + }); + }); }); diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js index 5081e1e5906..d3221cc2fc7 100644 --- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js +++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js @@ -70,9 +70,9 @@ describe('Merge request widget rebase component', () => { const text = findRebaseMessageElText(); - expect(text).toContain('Fast-forward merge is not possible.'); + expect(text).toContain('Merge blocked'); expect(text.replace(/\s\s+/g, ' ')).toContain( - 'Rebase the source branch onto the target branch.', + 'the source branch must be rebased onto the target branch', ); }); @@ -111,12 +111,10 @@ describe('Merge request widget rebase component', () => { const text = findRebaseMessageElText(); - expect(text).toContain('Fast-forward merge is not possible.'); - expect(text).toContain('Rebase the source branch onto'); - expect(text).toContain('foo'); - expect(text.replace(/\s\s+/g, ' ')).toContain( - 'to allow this merge request to be merged.', + expect(text).toContain( + 'Merge blocked: the source branch must be rebased onto the target branch.', ); + expect(text).toContain('the source branch must be rebased'); }); it('should render the correct target branch name', () => { @@ -136,7 +134,7 @@ describe('Merge request widget rebase component', () => { const elem = findRebaseMessageEl(); 
expect(elem.text()).toContain( - `Fast-forward merge is not possible. Rebase the source branch onto ${targetBranch} to allow this merge request to be merged.`, + `Merge blocked: the source branch must be rebased onto the target branch.`, ); }); }); diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js index fee78d3af94..e1bce7f0474 100644 --- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -199,7 +199,7 @@ describe('MRWidgetConflicts', () => { }); expect(removeBreakLine(wrapper.text()).trim()).toContain( - 'Fast-forward merge is not possible. To merge this request, first rebase locally.', + 'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.', ); }); }); diff --git a/spec/lib/api/entities/bulk_import_spec.rb b/spec/lib/api/entities/bulk_import_spec.rb new file mode 100644 index 00000000000..2db6862b079 --- /dev/null +++ b/spec/lib/api/entities/bulk_import_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::BulkImport do + let_it_be(:import) { create(:bulk_import) } + + subject { described_class.new(import).as_json } + + it 'has the correct attributes' do + expect(subject).to include( + :id, + :status, + :source_type, + :created_at, + :updated_at + ) + end +end diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb new file mode 100644 index 00000000000..adc8fdcdd9c --- /dev/null +++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::BulkImports::EntityFailure do + let_it_be(:failure) { create(:bulk_import_failure) } + + subject { described_class.new(failure).as_json } + + it 'has the correct attributes' do + expect(subject).to include( + :pipeline_class, + :pipeline_step, + :exception_class, + :correlation_id_value, + :created_at + ) + end +end diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb new file mode 100644 index 00000000000..f91ae1fc5a1 --- /dev/null +++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::BulkImports::Entity do + let_it_be(:entity) { create(:bulk_import_entity) } + + subject { described_class.new(entity).as_json } + + it 'has the correct attributes' do + expect(subject).to include( + :id, + :bulk_import_id, + :status, + :source_full_path, + :destination_name, + :destination_namespace, + :parent_id, + :namespace_id, + :project_id, + :created_at, + :updated_at, + :failures + ) + end +end diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb index 1a7e1ed8119..4cfec6b20b7 100644 --- a/spec/models/bulk_import_spec.rb +++ b/spec/models/bulk_import_spec.rb @@ -15,4 +15,10 @@ RSpec.describe BulkImport, type: :model do it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) } end + + describe '.all_human_statuses' do + it 'returns all human readable entity statuses' do + expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed') + end + end end diff --git a/spec/models/bulk_imports/entity_spec.rb 
b/spec/models/bulk_imports/entity_spec.rb index d1b7125a6e6..11a3e53dd16 100644 --- a/spec/models/bulk_imports/entity_spec.rb +++ b/spec/models/bulk_imports/entity_spec.rb @@ -134,4 +134,24 @@ RSpec.describe BulkImports::Entity, type: :model do expect(entity.encoded_source_full_path).to eq(expected) end end + + describe 'scopes' do + describe '.by_user_id' do + it 'returns entities associated with specified user' do + user = create(:user) + import = create(:bulk_import, user: user) + entity_1 = create(:bulk_import_entity, bulk_import: import) + entity_2 = create(:bulk_import_entity, bulk_import: import) + create(:bulk_import_entity) + + expect(described_class.by_user_id(user.id)).to contain_exactly(entity_1, entity_2) + end + end + end + + describe '.all_human_statuses' do + it 'returns all human readable entity statuses' do + expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed') + end + end end diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb new file mode 100644 index 00000000000..f0edfa6f227 --- /dev/null +++ b/spec/requests/api/bulk_imports_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::BulkImports do + let_it_be(:user) { create(:user) } + let_it_be(:import_1) { create(:bulk_import, user: user) } + let_it_be(:import_2) { create(:bulk_import, user: user) } + let_it_be(:entity_1) { create(:bulk_import_entity, bulk_import: import_1) } + let_it_be(:entity_2) { create(:bulk_import_entity, bulk_import: import_1) } + let_it_be(:entity_3) { create(:bulk_import_entity, bulk_import: import_2) } + let_it_be(:failure_3) { create(:bulk_import_failure, entity: entity_3) } + + describe 'GET /bulk_imports' do + it 'returns a list of bulk imports authored by the user' do + get api('/bulk_imports', user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.pluck('id')).to contain_exactly(import_1.id, import_2.id) + end + end + + describe 'GET /bulk_imports/entities' do + it 'returns a list of all import entities authored by the user' do + get api('/bulk_imports/entities', user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.pluck('id')).to contain_exactly(entity_1.id, entity_2.id, entity_3.id) + end + end + + describe 'GET /bulk_imports/:id' do + it 'returns specified bulk import' do + get api("/bulk_imports/#{import_1.id}", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['id']).to eq(import_1.id) + end + end + + describe 'GET /bulk_imports/:id/entities' do + it 'returns specified bulk import entities with failures' do + get api("/bulk_imports/#{import_2.id}/entities", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.pluck('id')).to contain_exactly(entity_3.id) + expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class) + end + end + + describe 'GET /bulk_imports/:id/entities/:entity_id' do + it 'returns specified bulk import entity' do + get api("/bulk_imports/#{import_1.id}/entities/#{entity_2.id}", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['id']).to eq(entity_2.id) + end + end + + context 'when user is unauthenticated' do + it 'returns 401' do + get api('/bulk_imports', nil) + + expect(response).to have_gitlab_http_status(:unauthorized) + end + end +end diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb 
b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb index 1b829df6e6a..cfe8e863223 100644 --- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb +++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb @@ -241,6 +241,18 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do context 'and first fragments are the same' do let(:lfs_content) { existing_lfs_object.file.read } + context 'when lfs_link_existing_object feature flag disabled' do + before do + stub_feature_flags(lfs_link_existing_object: false) + end + + it 'does not call link_existing_lfs_object!' do + expect(subject).not_to receive(:link_existing_lfs_object!) + + subject.execute + end + end + it 'returns success' do expect(subject.execute).to eq({ status: :success }) end diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb index 32291a143ee..934c497c79a 100644 --- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb +++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb @@ -4,12 +4,12 @@ require 'spec_helper' RSpec.describe Users::DeactivateDormantUsersWorker do describe '#perform' do + let_it_be(:dormant) { create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) } + let_it_be(:inactive) { create(:user, last_activity_on: nil) } + subject(:worker) { described_class.new } it 'does not run for GitLab.com' do - create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) - create(:user, last_activity_on: nil) - expect(Gitlab).to receive(:com?).and_return(true) expect(Gitlab::CurrentSettings).not_to receive(:current_application_settings) @@ -29,9 +29,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do stub_const("#{described_class.name}::BATCH_SIZE", 1) stub_const("#{described_class.name}::PAUSE_SECONDS", 0) - create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) - create(:user, last_activity_on: nil) - expect(worker).to receive(:sleep).twice worker.perform @@ -48,9 +45,6 @@ RSpec.describe Users::DeactivateDormantUsersWorker do end it 'does nothing' do - create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) - create(:user, last_activity_on: nil) - worker.perform expect(User.dormant.count).to eq(1)
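A final note on the `lfs_link_existing_object` flag exercised in the spec above: the YAML definition added earlier in this patch defaults it to disabled, so the new early return in `LfsDownloadService` stays off until the flag is turned on. A short sketch of toggling it with the standard GitLab feature-flag helpers from a Rails console, and stubbing it in specs as the patch itself does:

```ruby
# The service checks the flag per project, falling back to the YAML default
# (disabled) — this is the exact call used in LfsDownloadService in this patch.
Feature.enabled?(:lfs_link_existing_object, project, default_enabled: :yaml)

# From a Rails console, enable the behaviour globally or for a single project.
Feature.enable(:lfs_link_existing_object)
Feature.enable(:lfs_link_existing_object, project)
Feature.disable(:lfs_link_existing_object)

# In specs, the patch uses the stub helper:
# stub_feature_flags(lfs_link_existing_object: false)
```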