Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-11-08 00:10:26 +00:00
parent 364e69bafd
commit 9695fcf519
35 changed files with 641 additions and 1358 deletions

View File

@ -49,6 +49,7 @@ Style/Lambda:
- 'lib/gitlab/action_cable/request_store_callbacks.rb'
- 'lib/gitlab/checks/diff_check.rb'
- 'lib/gitlab/database/load_balancing/action_cable_callbacks.rb'
- 'lib/gitlab/memory/watchdog/configurator.rb'
- 'lib/gitlab/middleware/rack_multipart_tempfile_factory.rb'
- 'lib/gitlab/omniauth_initializer.rb'
- 'lib/gitlab/prometheus/queries/query_additional_metrics.rb'

View File

@ -147,7 +147,7 @@ gem 'fog-aws', '~> 3.15'
# Also see config/initializers/fog_core_patch.rb.
gem 'fog-core', '= 2.1.0'
gem 'fog-google', '~> 1.15', require: 'fog/google'
gem 'fog-local', '~> 0.6'
gem 'fog-local', '~> 0.8'
gem 'fog-openstack', '~> 1.0'
gem 'fog-rackspace', '~> 0.1.1'
gem 'fog-aliyun', '~> 0.3'

View File

@ -184,7 +184,7 @@
{"name":"fog-core","version":"2.1.0","platform":"ruby","checksum":"53e5d793554d7080d015ef13cd44b54027e421d924d9dba4ce3d83f95f37eda9"},
{"name":"fog-google","version":"1.15.0","platform":"ruby","checksum":"2f840780fbf2384718e961b05ef2fc522b4213bbda6f25b28c1bbd875ff0b306"},
{"name":"fog-json","version":"1.2.0","platform":"ruby","checksum":"dd4f5ab362dbc72b687240bba9d2dd841d5dfe888a285797533f85c03ea548fe"},
{"name":"fog-local","version":"0.6.0","platform":"ruby","checksum":"417473fe22a839af8f1388218d1843dbd09a5edfc8fcc59a893edb322ca5442d"},
{"name":"fog-local","version":"0.8.0","platform":"ruby","checksum":"263b2d09e54c69d1b87ad7f235a1a1e53c8a674edcedf7512c1715765ad7ef79"},
{"name":"fog-openstack","version":"1.0.8","platform":"ruby","checksum":"8f174ab5e5b1bc107c7da90cc7c47a24930e1566cd88ab4df447026ea8b63d9c"},
{"name":"fog-rackspace","version":"0.1.1","platform":"ruby","checksum":"4a8c7a2432dd32321958c869f3b1b8190cf4eac292024e6ea267bc6040a44b78"},
{"name":"fog-xml","version":"0.1.3","platform":"ruby","checksum":"5604c42649ebb0d8a31bd973aa000c2dd0127f1c1c4c174b69266a2e78e37410"},

View File

@ -510,7 +510,7 @@ GEM
fog-json (1.2.0)
fog-core
multi_json (~> 1.10)
fog-local (0.6.0)
fog-local (0.8.0)
fog-core (>= 1.27, < 3.0)
fog-openstack (1.0.8)
fog-core (~> 2.1)
@ -1616,7 +1616,7 @@ DEPENDENCIES
fog-aws (~> 3.15)
fog-core (= 2.1.0)
fog-google (~> 1.15)
fog-local (~> 0.6)
fog-local (~> 0.8)
fog-openstack (~> 1.0)
fog-rackspace (~> 0.1.1)
fugit (~> 1.2.1)

View File

@ -1,42 +0,0 @@
# frozen_string_literal: true
module Clusters
module Applications
class CheckInstallationProgressService < CheckProgressService
private
def operation_in_progress?
app.installing? || app.updating?
end
def on_success
app.make_installed!
Gitlab::Tracking.event('cluster:applications', "cluster_application_#{app.name}_installed")
ensure
remove_installation_pod
end
def check_timeout
if timed_out?
app.make_errored!("Operation timed out. Check pod logs for #{pod_name} for more details.")
else
ClusterWaitForAppInstallationWorker.perform_in(
ClusterWaitForAppInstallationWorker::INTERVAL, app.name, app.id)
end
end
def pod_name
install_command.pod_name
end
def timed_out?
Time.current.utc - app.updated_at.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
end
def remove_installation_pod
helm_api.delete_pod!(pod_name)
end
end
end
end

View File

@ -10,7 +10,9 @@ module Users
@current_user = current_user
end
# Synchronously destroys +user+
# Asynchronously destroys +user+
# Migrating the associated user records, and post-migration cleanup is
# handled by the Users::MigrateRecordsToGhostUserWorker cron worker.
#
# The operation will fail if the user is the sole owner of any groups. To
# force the groups to be destroyed, pass `delete_solo_owned_groups: true` in
@ -24,10 +26,7 @@ module Users
# a hard deletion without destroying solo-owned groups, pass
# `delete_solo_owned_groups: false, hard_delete: true` in +options+.
#
# To make the service asynchronous, a new behaviour is being introduced
# behind the user_destroy_with_limited_execution_time_worker feature flag.
# Migrating the associated user records, and post-migration cleanup is
# handled by the Users::MigrateRecordsToGhostUserWorker cron worker.
def execute(user, options = {})
delete_solo_owned_groups = options.fetch(:delete_solo_owned_groups, options[:hard_delete])
@ -62,31 +61,9 @@ module Users
yield(user) if block_given?
hard_delete = options.fetch(:hard_delete, false)
if Feature.enabled?(:user_destroy_with_limited_execution_time_worker)
Users::GhostUserMigration.create!(user: user,
initiator_user: current_user,
hard_delete: hard_delete)
else
MigrateToGhostUserService.new(user).execute(hard_delete: options[:hard_delete])
response = Snippets::BulkDestroyService.new(current_user, user.snippets)
.execute(skip_authorization: hard_delete)
raise DestroyError, response.message if response.error?
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
user.destroy_dependent_associations_in_batches(exclude: [:snippets])
user.nullify_dependent_associations_in_batches
# Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
user_data = user.destroy
namespace.destroy
user_data
end
end
end
end

View File

@ -1,5 +1,8 @@
# frozen_string_literal: true
# DEPRECATED
#
# To be removed by https://gitlab.com/gitlab-org/gitlab/-/issues/366573
class ClusterWaitForAppInstallationWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
@ -16,9 +19,5 @@ class ClusterWaitForAppInstallationWorker # rubocop:disable Scalability/Idempote
worker_resource_boundary :cpu
loggable_arguments 0
def perform(app_name, app_id)
find_application(app_name, app_id) do |app|
Clusters::Applications::CheckInstallationProgressService.new(app).execute
end
end
def perform(app_name, app_id); end
end

View File

@ -12,8 +12,6 @@ module Users
idempotent!
def perform
return unless Feature.enabled?(:user_destroy_with_limited_execution_time_worker)
in_lock(self.class.name.underscore, ttl: Gitlab::Utils::ExecutionTracker::MAX_RUNTIME, retries: 0) do
Users::MigrateRecordsToGhostUserInBatchesService.new.execute
end

View File

@ -1,8 +0,0 @@
---
name: user_destroy_with_limited_execution_time_worker
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97141
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373138
milestone: '15.4'
type: development
group: group::authentication and authorization
default_enabled: false

View File

@ -614,6 +614,16 @@ ldap_group.member_dns
ldap_group.member_uids
```
#### LDAP synchronization does not remove group creator from group
[LDAP synchronization](ldap_synchronization.md) should remove an LDAP group's creator
from that group if that user is not a member of the corresponding LDAP group. If running
LDAP synchronization does not remove the creator:
1. Add the user to the LDAP group.
1. Wait until LDAP group synchronization has finished running.
1. Remove the user from the LDAP group.
### User DN and/or email have changed
When an LDAP user is created in GitLab, their LDAP DN is stored for later reference.

View File

@ -364,6 +364,59 @@ Example response:
}
```
## Download a release asset
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358188) in GitLab 15.4.
Download a release asset file by making a request with the following format:
```plaintext
GET /projects/:id/releases/:tag_name/downloads/:filepath
```
| Attribute | Type | Required | Description |
|----------------------------| -------------- | -------- | ----------------------------------------------------------------------------------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](../index.md#namespaced-path-encoding). |
| `tag_name` | string | yes | The Git tag the release is associated with. |
| `filepath` | string | yes | Path to the release asset file as specified when [creating](links.md#create-a-release-link) or [updating](links.md#update-a-release-link) its link. |
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/24/releases/v0.1/downloads/bin/asset.exe"
```
### Get the latest release
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358188) in GitLab 15.4.
Latest release information is accessible through a permanent API URL.
The format of the URL is:
```plaintext
GET /projects/:id/releases/permalink/latest
```
To call any other GET API that requires a release tag, append a suffix to the `permalink/latest` API path.
For example, to get the latest [release evidence](#collect-release-evidence), you can use:
```plaintext
GET /projects/:id/releases/permalink/latest/evidence
```
Another example is [downloading an asset](#download-a-release-asset) of the latest release, for which you can use:
```plaintext
GET /projects/:id/releases/permalink/latest/downloads/bin/asset.exe
```
#### Sorting preferences
By default, GitLab fetches the release using the `released_at` time. The query parameter
`?order_by=released_at` is optional, and support for `?order_by=semver` is tracked [in issue 352945](https://gitlab.com/gitlab-org/gitlab/-/issues/352945).
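A minimal illustration of the optional parameter, using the same permalink path described above:
```plaintext
GET /projects/:id/releases/permalink/latest?order_by=released_at
```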
## Create a release
Creates a release. Developer level access to the project is required to create a release.

View File

@ -84,6 +84,14 @@ To make submodules work correctly in CI/CD jobs:
GIT_SUBMODULE_UPDATE_FLAGS: --jobs 4
```
1. You can set the [`GIT_SUBMODULE_PATHS`](runners/configure_runners.md#sync-or-exclude-specific-submodules-from-ci-jobs) variable to explicitly ignore submodules during cloning:
```yaml
variables:
GIT_SUBMODULE_STRATEGY: recursive
GIT_SUBMODULE_PATHS: ':(exclude)submodule'
```
If you use the [`CI_JOB_TOKEN`](jobs/ci_job_token.md) to clone a submodule in a
pipeline job, the user executing the job must be assigned to a role that has
[permission](../user/permissions.md#gitlab-cicd-permissions) to trigger a pipeline

View File

@ -586,6 +586,10 @@ trigger-job:
The `UPSTREAM_BRANCH` variable, which contains the value of the upstream pipeline's `$CI_COMMIT_REF_NAME`
predefined CI/CD variable, is available in the downstream pipeline.
Do not use this method to pass [masked variables](../variables/index.md#mask-a-cicd-variable)
to a multi-project pipeline. The CI/CD masking configuration is not passed to the
downstream pipeline and the variable could be unmasked in job logs in the downstream project.
You cannot use this method to forward [job-level persisted variables](../variables/where_variables_can_be_used.md#persisted-variables)
to a downstream pipeline, as they are not available in trigger jobs.
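For context, "this method" refers to declaring variables on the trigger job itself, which are then forwarded to the downstream pipeline. A minimal sketch, assuming a hypothetical downstream project path:
```yaml
trigger-job:
  variables:
    # Forwarded to the downstream pipeline. Do not use this for masked
    # or job-level persisted variables, as noted above.
    UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME
  trigger:
    project: my-group/my-deployment-project
```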

View File

@ -306,6 +306,7 @@ globally or for individual jobs:
- [`GIT_STRATEGY`](#git-strategy)
- [`GIT_SUBMODULE_STRATEGY`](#git-submodule-strategy)
- [`GIT_SUBMODULE_PATHS`](#sync-or-exclude-specific-submodules-from-ci-jobs)
- [`GIT_CHECKOUT`](#git-checkout)
- [`GIT_CLEAN_FLAGS`](#git-clean-flags)
- [`GIT_FETCH_EXTRA_FLAGS`](#git-fetch-extra-flags)
@ -564,6 +565,34 @@ You should be aware of the implications for the security, stability, and reprodu
your builds when using the `--remote` flag. In most cases, it is better to explicitly track
submodule commits as designed, and update them using an auto-remediation/dependency bot.
### Sync or exclude specific submodules from CI jobs
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26495) in GitLab Runner 14.0.
Some projects have a large number of submodules, and not all of them need to be
synced or updated in all CI jobs. Use the `GIT_SUBMODULE_PATHS` variable to control this behavior.
The path syntax is the same as [`git submodule`](https://git-scm.com/docs/git-submodule#Documentation/git-submodule.txt-ltpathgt82308203):
- To sync and update specific paths:
```yaml
variables:
GIT_SUBMODULE_PATHS: 'submoduleA'
```
- To exclude specific paths:
```yaml
variables:
GIT_SUBMODULE_PATHS: ':(exclude)submoduleA'
```
WARNING:
Git ignores nested and multiple submodule paths. To ignore a nested submodule, exclude
the parent submodule and then manually clone it in the job's scripts. For example,
`git clone <repo> --recurse-submodules=':(exclude)nested-submodule'`. Make sure
to wrap the string in single quotes so the YAML can be parsed successfully.
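A sketch of that workaround in a job definition, assuming hypothetical submodule names (`parent-submodule`, `nested-submodule`); `<repo>` stays a placeholder for the parent submodule's URL:
```yaml
test-job:
  variables:
    # Exclude the parent submodule from the runner's automatic sync.
    GIT_SUBMODULE_PATHS: ':(exclude)parent-submodule'
  script:
    # Clone the parent submodule manually, skipping its nested submodule.
    - git clone <repo> --recurse-submodules=':(exclude)nested-submodule'
```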
### Shallow cloning
> Introduced in GitLab 8.9 as an experimental feature.

View File

@ -20,8 +20,9 @@ GitLab.com generates an application ID and secret key for you to use.
- Redirect URI:
```plaintext
http://your-gitlab.example.com/import/gitlab/callback
http://your-gitlab.example.com/users/auth/gitlab/callback
# You can also use a non-SSL URL, but you should use SSL URLs.
https://your-gitlab.example.com/import/gitlab/callback
https://your-gitlab.example.com/users/auth/gitlab/callback
```
The first link is required for the importer and the second for authentication.

View File

@ -38,7 +38,7 @@ GitLab supports several ways of adding a new OAuth 2 application to an instance:
- [Instance-wide applications](#instance-wide-applications)
The only difference between these methods is the [permission](../user/permissions.md)
levels. The default callback URL is `http://your-gitlab.example.com/users/auth/gitlab/callback`.
levels. The default callback URL is `https://your-gitlab.example.com/users/auth/gitlab/callback` (you can also use a non-SSL URL, but you should use SSL URLs).
## User owned applications

View File

@ -100,7 +100,7 @@ To configure IaC Scanning for a project you can:
### Configure IaC Scanning manually
To enable IaC Scanning you must [include](../../../ci/yaml/index.md#includetemplate) the
[`SAST-IaC.gitlab-ci.yml template`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml) provided as part of your GitLab installation. Here is an example of how to include it:
[`SAST-IaC.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml) provided as part of your GitLab installation. Here is an example of how to include it:
```yaml
include:
@ -125,6 +125,36 @@ To enable IaC Scanning in a project, you can create a merge request:
Pipelines now include an IaC job.
## Pinning to specific analyzer version
The GitLab-managed CI/CD template specifies a major version and automatically pulls the latest analyzer release within that major version.
In some cases, you may need to use a specific version.
For example, you might need to avoid a regression in a later release.
To override the automatic update behavior, set the `SAST_ANALYZER_IMAGE_TAG` CI/CD variable
in your CI/CD configuration file after you include the [`SAST-IaC.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml).
Only set this variable within a specific job.
If you set it [at the top level](../../../ci/variables/index.md#create-a-custom-cicd-variable-in-the-gitlab-ciyml-file), the version you set will be used for other SAST analyzers.
You can set the tag to:
- A major version, like `3`. Your pipelines will use any minor or patch updates that are released within this major version.
- A minor version, like `3.7`. Your pipelines will use any patch updates that are released within this minor version.
- A patch version, like `3.7.0`. Your pipelines won't receive any updates.
This example uses a specific minor version of the `KICS` analyzer:
```yaml
include:
- template: Security/SAST-IaC.gitlab-ci.yml
kics-iac-sast:
variables:
SAST_ANALYZER_IMAGE_TAG: "3.1"
```
## Reports JSON format
The IaC tool emits a JSON report file in the existing SAST report format. For more information, see the

View File

@ -30665,24 +30665,60 @@ msgstr ""
msgid "PreScanVerification|(optional)"
msgstr ""
msgid "PreScanVerification|Attempts to authenticate with the scan target"
msgstr ""
msgid "PreScanVerification|Attempts to find and connect to the scan target"
msgstr ""
msgid "PreScanVerification|Attempts to follow internal links and crawl 3 pages without errors"
msgstr ""
msgid "PreScanVerification|Authentication"
msgstr ""
msgid "PreScanVerification|Cancel pre-scan verification"
msgstr ""
msgid "PreScanVerification|Connection"
msgstr ""
msgid "PreScanVerification|Download results"
msgstr ""
msgid "PreScanVerification|Last run %{timeAgo} in pipeline"
msgstr ""
msgid "PreScanVerification|Pre-scan verification"
msgstr ""
msgid "PreScanVerification|Save and run verification"
msgstr ""
msgid "PreScanVerification|Started %{timeAgo} in pipeline"
msgstr ""
msgid "PreScanVerification|Target exploration"
msgstr ""
msgid "PreScanVerification|Test your configuration and identify potential errors before running a full scan."
msgstr ""
msgid "PreScanVerification|Verification checks"
msgstr ""
msgid "PreScanVerification|Verification checks are determined by a scans configuration details. Changing configuration details may alter or reset the verification checks and their status."
msgstr ""
msgid "PreScanVerification|Verify configuration"
msgstr ""
msgid "PreScanVerification|View results"
msgstr ""
msgid "PreScanVerification|You must complete the scan configuration form before running pre-scan verification"
msgstr ""
msgid "Preferences"
msgstr ""

View File

@ -27,7 +27,6 @@ RSpec.describe Admin::SpamLogsController do
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal', :sidekiq_inline do
expect do
delete :destroy, params: { id: first_spam.id, remove_user: true }
@ -42,22 +41,6 @@ RSpec.describe Admin::SpamLogsController do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'removes user and their spam logs when removing the user', :sidekiq_inline do
delete :destroy, params: { id: first_spam.id, remove_user: true }
expect(flash[:notice]).to eq "User #{user.username} was successfully removed."
expect(response).to have_gitlab_http_status(:found)
expect(SpamLog.count).to eq(0)
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
describe '#mark_as_ham' do
before do
allow_next_instance_of(Spam::AkismetService) do |instance|

View File

@ -73,7 +73,6 @@ RSpec.describe Admin::UsersController do
project.add_developer(user)
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal' do
delete :destroy, params: { id: user.username }, format: :json
@ -136,71 +135,12 @@ RSpec.describe Admin::UsersController do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'deletes user and ghosts their contributions' do
delete :destroy, params: { id: user.username }, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(User.exists?(user.id)).to be_falsy
expect(issue.reload.author).to be_ghost
end
it 'deletes the user and their contributions when hard delete is specified' do
delete :destroy, params: { id: user.username, hard_delete: true }, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(User.exists?(user.id)).to be_falsy
expect(Issue.exists?(issue.id)).to be_falsy
end
context 'prerequisites for account deletion' do
context 'solo-owned groups' do
let(:group) { create(:group) }
context 'if the user is the sole owner of at least one group' do
before do
create(:group_member, :owner, group: group, user: user)
end
context 'soft-delete' do
it 'fails' do
delete :destroy, params: { id: user.username }
message = s_('AdminUsers|You must transfer ownership or delete the groups owned by this user before you can delete their account')
expect(flash[:alert]).to eq(message)
expect(response).to have_gitlab_http_status(:see_other)
expect(response).to redirect_to admin_user_path(user)
expect(User.exists?(user.id)).to be_truthy
end
end
context 'hard-delete' do
it 'succeeds' do
delete :destroy, params: { id: user.username, hard_delete: true }
expect(response).to redirect_to(admin_users_path)
expect(flash[:notice]).to eq(_('The user is being deleted.'))
expect(User.exists?(user.id)).to be_falsy
end
end
end
end
end
end
end
describe 'DELETE #reject' do
subject { put :reject, params: { id: user.username } }
context 'when rejecting a pending user' do
let(:user) { create(:user, :blocked_pending_approval) }
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal', :sidekiq_inline do
subject
@ -209,19 +149,6 @@ RSpec.describe Admin::UsersController do
initiator_user: admin)
).to be_exists
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'hard deletes the user', :sidekiq_inline do
subject
expect(User.exists?(user.id)).to be_falsy
end
end
it 'displays the rejection message' do
subject

View File

@ -37,7 +37,6 @@ RSpec.describe 'Admin mode for workers', :request_store do
gitlab_enable_admin_mode_sign_in(user)
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'can delete user', :js do
visit admin_user_path(user_to_delete)
@ -60,35 +59,6 @@ RSpec.describe 'Admin mode for workers', :request_store do
expect(find('h1.page-title')).to have_content('(Blocked)')
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'can delete user', :js do
visit admin_user_path(user_to_delete)
click_action_in_user_dropdown(user_to_delete.id, 'Delete user')
page.within '.modal-dialog' do
find("input[name='username']").send_keys(user_to_delete.name)
click_button 'Delete user'
wait_for_requests
end
expect(page).to have_content('The user is being deleted.')
# Perform jobs while logged out so that admin mode is only enabled in job metadata
execute_jobs_signed_out(user)
visit admin_user_path(user_to_delete)
expect(page).to have_title('Not Found')
end
end
end
end
def execute_jobs_signed_out(user)

View File

@ -27,7 +27,6 @@ RSpec.describe 'Profile account page', :js do
expect(User.exists?(user.id)).to be_truthy
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'deletes user', :js, :sidekiq_inline do
click_button 'Delete account'
@ -43,26 +42,6 @@ RSpec.describe 'Profile account page', :js do
initiator_user: user)
).to be_exists
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'deletes user', :js, :sidekiq_inline do
click_button 'Delete account'
fill_in 'password', with: user.password
page.within '.modal' do
click_button 'Delete account'
end
expect(page).to have_content('Account scheduled for removal')
expect(User.exists?(user.id)).to be_falsy
end
end
it 'shows invalid password flash message', :js do
click_button 'Delete account'

View File

@ -34,11 +34,7 @@ RSpec.describe RemoveOrphanGroupTokenUsers, :migration, :sidekiq_inline do
let(:members) { table(:members) }
let(:namespaces) { table(:namespaces) }
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'removes orphan project bot and its tokens', :aggregate_failures do
it 'initiates orphan project bot removal', :aggregate_failures do
expect(DeleteUserWorker)
.to receive(:perform_async)
.with(orphan_bot.id, orphan_bot.id, skip_authorization: true)
@ -46,7 +42,8 @@ RSpec.describe RemoveOrphanGroupTokenUsers, :migration, :sidekiq_inline do
migrate!
expect(users.count).to eq 2
expect(Users::GhostUserMigration.where(user: orphan_bot)).to be_exists
expect(users.count).to eq 3
expect(personal_access_tokens.count).to eq 2
expect(personal_access_tokens.find_by(user_id: orphan_bot.id)).to eq nil
end

View File

@ -21,7 +21,6 @@ RSpec.describe SpamLog do
end
context 'when admin mode is enabled', :enable_admin_mode do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal', :sidekiq_inline do
spam_log = build(:spam_log)
user = spam_log.user
@ -37,24 +36,6 @@ RSpec.describe SpamLog do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'removes the user', :sidekiq_inline do
spam_log = build(:spam_log)
user = spam_log.user
perform_enqueued_jobs do
spam_log.remove_user(deleted_by: admin)
end
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
context 'when admin mode is disabled' do
it 'does not allow to remove the user', :sidekiq_might_not_need_inline do
spam_log = build(:spam_log)

View File

@ -243,7 +243,6 @@ RSpec.describe API::ResourceAccessTokens do
end
context "when the user has valid permissions" do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it "deletes the #{source_type} access token from the #{source_type}" do
delete_token
@ -273,37 +272,6 @@ RSpec.describe API::ResourceAccessTokens do
).to be_exists
end
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it "deletes the #{source_type} access token from the #{source_type}" do
delete_token
expect(response).to have_gitlab_http_status(:no_content)
expect(User.exists?(project_bot.id)).to be_falsy
end
context "when using #{source_type} access token to DELETE other #{source_type} access token" do
let_it_be(:other_project_bot) { create(:user, :project_bot) }
let_it_be(:other_token) { create(:personal_access_token, user: other_project_bot) }
let_it_be(:token_id) { other_token.id }
before do
resource.add_maintainer(other_project_bot)
end
it "deletes the #{source_type} access token from the #{source_type}" do
delete_token
expect(response).to have_gitlab_http_status(:no_content)
expect(User.exists?(other_project_bot.id)).to be_falsy
end
end
end
context "when attempting to delete a non-existent #{source_type} access token" do
let_it_be(:token_id) { non_existing_record_id }

View File

@ -2564,8 +2564,6 @@ RSpec.describe API::Users do
describe "DELETE /users/:id" do
let_it_be(:issue) { create(:issue, author: user) }
context 'user deletion' do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it "deletes user", :sidekiq_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
@ -2573,24 +2571,6 @@ RSpec.describe API::Users do
expect(Users::GhostUserMigration.where(user: user,
initiator_user: admin)).to be_exists
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it "deletes user", :sidekiq_inline do
namespace_id = user.namespace.id
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(:no_content)
expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound
expect { Namespace.find(namespace_id) }.to raise_error ActiveRecord::RecordNotFound
end
end
end
context "sole owner of a group" do
let!(:group) { create(:group).tap { |group| group.add_owner(user) } }
@ -2653,8 +2633,6 @@ RSpec.describe API::Users do
expect(response).to have_gitlab_http_status(:not_found)
end
context 'hard delete' do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
context "hard delete disabled" do
it "moves contributions to the ghost user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
@ -2679,33 +2657,6 @@ RSpec.describe API::Users do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
context "hard delete disabled" do
it "moves contributions to the ghost user", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(:no_content)
expect(issue.reload).to be_persisted
expect(issue.author.ghost?).to be_truthy
end
end
context "hard delete enabled" do
it "removes contributions", :sidekiq_might_not_need_inline do
perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) }
expect(response).to have_gitlab_http_status(:no_content)
expect(Issue.exists?(issue.id)).to be_falsy
end
end
end
end
end
describe "GET /user" do
shared_examples 'get user info' do |version|
context 'with regular user' do

View File

@ -1,204 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::Applications::CheckInstallationProgressService, '#execute' do
RESCHEDULE_PHASES = Gitlab::Kubernetes::Pod::PHASES - [Gitlab::Kubernetes::Pod::SUCCEEDED, Gitlab::Kubernetes::Pod::FAILED].freeze
let(:application) { create(:clusters_applications_helm, :installing) }
let(:service) { described_class.new(application) }
let(:phase) { Gitlab::Kubernetes::Pod::UNKNOWN }
let(:errors) { nil }
shared_examples 'a not yet terminated installation' do |a_phase|
let(:phase) { a_phase }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
context "when phase is #{a_phase}" do
context 'when not timed_out' do
it 'reschedule a new check' do
expect(ClusterWaitForAppInstallationWorker).to receive(:perform_in).once
expect(service).not_to receive(:remove_installation_pod)
expect do
service.execute
application.reload
end.not_to change(application, :status)
expect(application.status_reason).to be_nil
end
end
end
end
shared_examples 'error handling' do
context 'when installation raises a Kubeclient::HttpError' do
let(:cluster) { create(:cluster, :provided_by_user, :project) }
let(:logger) { service.send(:logger) }
let(:error) { Kubeclient::HttpError.new(401, 'Unauthorized', nil) }
before do
application.update!(cluster: cluster)
expect(service).to receive(:pod_phase).and_raise(error)
end
include_examples 'logs kubernetes errors' do
let(:error_name) { 'Kubeclient::HttpError' }
let(:error_message) { 'Unauthorized' }
let(:error_code) { 401 }
end
it 'shows the response code from the error' do
service.execute
expect(application).to be_errored.or(be_update_errored)
expect(application.status_reason).to eq('Kubernetes error: 401')
end
end
end
before do
allow(service).to receive(:installation_errors).and_return(errors)
allow(service).to receive(:remove_installation_pod).and_return(nil)
end
context 'when application is updating' do
let(:application) { create(:clusters_applications_helm, :updating) }
include_examples 'error handling'
RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated installation', phase }
context 'when installation POD succeeded' do
let(:phase) { Gitlab::Kubernetes::Pod::SUCCEEDED }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'removes the installation POD' do
expect(service).to receive(:remove_installation_pod).once
service.execute
end
it 'make the application installed' do
expect(ClusterWaitForAppInstallationWorker).not_to receive(:perform_in)
service.execute
expect(application).to be_updated
expect(application.status_reason).to be_nil
end
end
context 'when installation POD failed' do
let(:phase) { Gitlab::Kubernetes::Pod::FAILED }
let(:errors) { 'test installation failed' }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'make the application errored' do
service.execute
expect(application).to be_update_errored
expect(application.status_reason).to eq('Operation failed. Check pod logs for install-helm for more details.')
end
end
context 'when timed out' do
let(:application) { create(:clusters_applications_helm, :timed_out, :updating) }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'make the application errored' do
expect(ClusterWaitForAppInstallationWorker).not_to receive(:perform_in)
service.execute
expect(application).to be_update_errored
expect(application.status_reason).to eq('Operation timed out. Check pod logs for install-helm for more details.')
end
end
end
context 'when application is installing' do
include_examples 'error handling'
RESCHEDULE_PHASES.each { |phase| it_behaves_like 'a not yet terminated installation', phase }
context 'when installation POD succeeded' do
let(:phase) { Gitlab::Kubernetes::Pod::SUCCEEDED }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'removes the installation POD' do
expect_next_instance_of(Gitlab::Kubernetes::Helm::API) do |instance|
expect(instance).to receive(:delete_pod!).with(kind_of(String)).once
end
expect(service).to receive(:remove_installation_pod).and_call_original
service.execute
end
it 'make the application installed' do
expect(ClusterWaitForAppInstallationWorker).not_to receive(:perform_in)
service.execute
expect(application).to be_installed
expect(application.status_reason).to be_nil
end
it 'tracks application install', :snowplow do
service.execute
expect_snowplow_event(category: 'cluster:applications', action: 'cluster_application_helm_installed')
end
end
context 'when installation POD failed' do
let(:phase) { Gitlab::Kubernetes::Pod::FAILED }
let(:errors) { 'test installation failed' }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'make the application errored' do
service.execute
expect(application).to be_errored
expect(application.status_reason).to eq('Operation failed. Check pod logs for install-helm for more details.')
end
end
context 'when timed out' do
let(:application) { create(:clusters_applications_helm, :timed_out) }
before do
expect(service).to receive(:pod_phase).once.and_return(phase)
end
it 'make the application errored' do
expect(ClusterWaitForAppInstallationWorker).not_to receive(:perform_in)
service.execute
expect(application).to be_errored
expect(application.status_reason).to eq('Operation timed out. Check pod logs for install-helm for more details.')
end
end
end
end

View File

@ -36,7 +36,6 @@ RSpec.describe Groups::DestroyService do
end
context 'bot tokens', :sidekiq_inline do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates group bot removal', :aggregate_failures do
bot = create(:user, :project_bot)
group.add_developer(bot)
@ -51,25 +50,6 @@ RSpec.describe Groups::DestroyService do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'removes group bot', :aggregate_failures do
bot = create(:user, :project_bot)
group.add_developer(bot)
token = create(:personal_access_token, user: bot)
destroy_group(group, user, async)
expect(PersonalAccessToken.find_by(id: token.id)).to be_nil
expect(User.find_by(id: bot.id)).to be_nil
expect(User.find_by(id: user.id)).not_to be_nil
end
end
end
context 'mattermost team', :sidekiq_might_not_need_inline do
let!(:chat_team) { create(:chat_team, namespace: group) }

View File

@ -29,7 +29,6 @@ RSpec.describe ResourceAccessTokens::RevokeService do
expect(resource.reload.users).not_to include(resource_bot)
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal' do
subject
@ -38,27 +37,6 @@ RSpec.describe ResourceAccessTokens::RevokeService do
initiator_user: user)
).to be_exists
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'transfer issuables of bot user to ghost user' do
issue = create(:issue, author: resource_bot)
subject
expect(issue.reload.author.ghost?).to be true
end
it 'deletes project bot user' do
subject
expect(User.exists?(resource_bot.id)).to be_falsy
end
end
it 'logs the event' do
allow(Gitlab::AppLogger).to receive(:info)

View File

@ -10,9 +10,18 @@ RSpec.describe Users::DestroyService do
let(:service) { described_class.new(admin) }
let(:gitlab_shell) { Gitlab::Shell.new }
shared_examples 'pre-migrate clean-up' do
describe "Deletes a user and all their personal projects", :enable_admin_mode do
context 'no options are given' do
it 'creates GhostUserMigration record to handle migration in a worker' do
expect { service.execute(user) }
.to(
change do
Users::GhostUserMigration.where(user: user,
initiator_user: admin)
.exists?
end.from(false).to(true))
end
it 'will delete the personal project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
@ -155,356 +164,6 @@ RSpec.describe Users::DestroyService do
end
end
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
include_examples 'pre-migrate clean-up'
describe "Deletes a user and all their personal projects", :enable_admin_mode do
context 'no options are given' do
it 'deletes the user' do
user_data = service.execute(user)
expect(user_data['email']).to eq(user.email)
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'deletes user associations in batches' do
expect(user).to receive(:destroy_dependent_associations_in_batches)
service.execute(user)
end
it 'does not include snippets when deleting in batches' do
expect(user).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:snippets] })
service.execute(user)
end
it 'calls the bulk snippet destroy service for the user personal snippets' do
repo1 = create(:personal_snippet, :repository, author: user).snippet_repository
repo2 = create(:project_snippet, :repository, project: project, author: user).snippet_repository
aggregate_failures do
expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_truthy
expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_truthy
end
# Call made when destroying user personal projects
expect(Snippets::BulkDestroyService).to receive(:new)
.with(admin, project.snippets).and_call_original
# Call to remove user personal snippets and for
# project snippets where projects are not user personal
# ones
expect(Snippets::BulkDestroyService).to receive(:new)
.with(admin, user.snippets.only_personal_snippets).and_call_original
service.execute(user)
aggregate_failures do
expect(gitlab_shell.repository_exists?(repo1.shard_name, repo1.disk_path + '.git')).to be_falsey
expect(gitlab_shell.repository_exists?(repo2.shard_name, repo2.disk_path + '.git')).to be_falsey
end
end
it 'calls the bulk snippet destroy service with hard delete option if it is present' do
# this avoids getting into Projects::DestroyService as it would
# call Snippets::BulkDestroyService first!
allow(user).to receive(:personal_projects).and_return([])
expect_next_instance_of(Snippets::BulkDestroyService) do |bulk_destroy_service|
expect(bulk_destroy_service).to receive(:execute).with({ skip_authorization: true }).and_call_original
end
service.execute(user, { hard_delete: true })
end
it 'does not delete project snippets that the user is the author of' do
repo = create(:project_snippet, :repository, author: user).snippet_repository
service.execute(user)
expect(gitlab_shell.repository_exists?(repo.shard_name, repo.disk_path + '.git')).to be_truthy
expect(User.ghost.snippets).to include(repo.snippet)
end
context 'when an error is raised deleting snippets' do
it 'does not delete user' do
snippet = create(:personal_snippet, :repository, author: user)
bulk_service = double
allow(Snippets::BulkDestroyService).to receive(:new).and_call_original
allow(Snippets::BulkDestroyService).to receive(:new).with(admin, user.snippets).and_return(bulk_service)
allow(bulk_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
aggregate_failures do
expect { service.execute(user) }
.to raise_error(Users::DestroyService::DestroyError, 'foo')
expect(snippet.reload).not_to be_nil
expect(
gitlab_shell.repository_exists?(snippet.repository_storage,
snippet.disk_path + '.git')
).to be_truthy
end
end
end
end
context 'projects in pending_delete' do
before do
project.pending_delete = true
project.save!
end
it 'destroys a project in pending_delete' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
end
service.execute(user)
expect { Project.find(project.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context "a deleted user's issues" do
let(:project) { create(:project) }
before do
project.add_developer(user)
end
context "for an issue the user was assigned to" do
let!(:issue) { create(:issue, project: project, assignees: [user]) }
before do
service.execute(user)
end
it 'does not delete issues the user is assigned to' do
expect(Issue.find_by_id(issue.id)).to be_present
end
it 'migrates the issue so that it is "Unassigned"' do
migrated_issue = Issue.find_by_id(issue.id)
expect(migrated_issue.assignees).to be_empty
end
end
end
context "a deleted user's merge_requests" do
let(:project) { create(:project, :repository) }
before do
project.add_developer(user)
end
context "for an merge request the user was assigned to" do
let!(:merge_request) { create(:merge_request, source_project: project, assignees: [user]) }
before do
service.execute(user)
end
it 'does not delete merge requests the user is assigned to' do
expect(MergeRequest.find_by_id(merge_request.id)).to be_present
end
it 'migrates the merge request so that it is "Unassigned"' do
migrated_merge_request = MergeRequest.find_by_id(merge_request.id)
expect(migrated_merge_request.assignees).to be_empty
end
end
end
context 'migrating associated records' do
let!(:issue) { create(:issue, author: user) }
it 'delegates to the `MigrateToGhostUser` service to move associated records to the ghost user' do
expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once.and_call_original
service.execute(user)
expect(issue.reload.author).to be_ghost
end
context 'when hard_delete option is given' do
it 'will not ghost certain records' do
expect_any_instance_of(Users::MigrateToGhostUserService).to receive(:execute).once.and_call_original
service.execute(user, hard_delete: true)
expect(Issue.exists?(issue.id)).to be_falsy
end
end
end
end
describe "Deletion permission checks" do
it 'does not delete the user when user is not an admin' do
other_user = create(:user)
expect { described_class.new(other_user).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
expect(User.exists?(user.id)).to be(true)
end
context 'when admin mode is enabled', :enable_admin_mode do
it 'allows admins to delete anyone' do
described_class.new(admin).execute(user)
expect(User.exists?(user.id)).to be(false)
end
end
context 'when admin mode is disabled' do
it 'disallows admins to delete anyone' do
expect { described_class.new(admin).execute(user) }.to raise_error(Gitlab::Access::AccessDeniedError)
expect(User.exists?(user.id)).to be(true)
end
end
it 'allows users to delete their own account' do
described_class.new(user).execute(user)
expect(User.exists?(user.id)).to be(false)
end
it 'allows user to be deleted if skip_authorization: true' do
other_user = create(:user)
described_class.new(user).execute(other_user, skip_authorization: true)
expect(User.exists?(other_user.id)).to be(false)
end
end
context 'batched nullify' do
let(:other_user) { create(:user) }
# rubocop:disable Layout/LineLength
def nullify_in_batches_regexp(table, column, user, batch_size: 100)
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE "#{table}"."id" IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
end
def delete_in_batches_regexps(table, column, user, items, batch_size: 1000)
select_query = %r{^SELECT "#{table}".* FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id}.*ORDER BY "#{table}"."id" ASC LIMIT #{batch_size}}
[select_query] + items.map { |item| %r{^DELETE FROM "#{table}" WHERE "#{table}"."id" = #{item.id}} }
end
# rubocop:enable Layout/LineLength
it 'nullifies related associations in batches' do
expect(other_user).to receive(:nullify_dependent_associations_in_batches).and_call_original
described_class.new(user).execute(other_user, skip_authorization: true)
end
it 'nullifies associations marked as `dependent: :nullify` and'\
'destroys the associations marked as `dependent: :destroy`, in batches', :aggregate_failures do
# associations to be nullified
issue = create(:issue, closed_by: other_user, updated_by: other_user)
resource_label_event = create(:resource_label_event, user: other_user)
resource_state_event = create(:resource_state_event, user: other_user)
created_project = create(:project, creator: other_user)
# associations to be destroyed
todos = create_list(:todo, 2, project: issue.project, user: other_user, author: other_user, target: issue)
event = create(:event, project: issue.project, author: other_user)
query_recorder = ActiveRecord::QueryRecorder.new do
described_class.new(user).execute(other_user, skip_authorization: true)
end
issue.reload
resource_label_event.reload
resource_state_event.reload
created_project.reload
expect(issue.closed_by).to be_nil
expect(issue.updated_by_id).to be_nil
expect(resource_label_event.user_id).to be_nil
expect(resource_state_event.user_id).to be_nil
expect(created_project.creator_id).to be_nil
expect(other_user.authored_todos).to be_empty
expect(other_user.todos).to be_empty
expect(other_user.authored_events).to be_empty
expected_queries = [
nullify_in_batches_regexp(:issues, :updated_by_id, other_user),
nullify_in_batches_regexp(:issues, :closed_by_id, other_user),
nullify_in_batches_regexp(:resource_label_events, :user_id, other_user),
nullify_in_batches_regexp(:resource_state_events, :user_id, other_user),
nullify_in_batches_regexp(:projects, :creator_id, other_user)
]
expected_queries += delete_in_batches_regexps(:todos, :user_id, other_user, todos)
expected_queries += delete_in_batches_regexps(:todos, :author_id, other_user, todos)
expected_queries += delete_in_batches_regexps(:events, :author_id, other_user, [event])
expect(query_recorder.log).to include(*expected_queries)
end
it 'nullifies merge request associations', :aggregate_failures do
merge_request = create(:merge_request, source_project: project, target_project: project,
assignee: other_user, updated_by: other_user, merge_user: other_user)
merge_request.metrics.update!(merged_by: other_user, latest_closed_by: other_user)
merge_request.reviewers = [other_user]
merge_request.assignees = [other_user]
query_recorder = ActiveRecord::QueryRecorder.new do
described_class.new(user).execute(other_user, skip_authorization: true)
end
merge_request.reload
expect(merge_request.updated_by).to be_nil
expect(merge_request.assignee).to be_nil
expect(merge_request.assignee_id).to be_nil
expect(merge_request.metrics.merged_by).to be_nil
expect(merge_request.metrics.latest_closed_by).to be_nil
expect(merge_request.reviewers).to be_empty
expect(merge_request.assignees).to be_empty
expected_queries = [
nullify_in_batches_regexp(:merge_requests, :updated_by_id, other_user),
nullify_in_batches_regexp(:merge_requests, :assignee_id, other_user),
nullify_in_batches_regexp(:merge_request_metrics, :merged_by_id, other_user),
nullify_in_batches_regexp(:merge_request_metrics, :latest_closed_by_id, other_user)
]
expected_queries += delete_in_batches_regexps(:merge_request_assignees, :user_id, other_user,
merge_request.assignees)
expected_queries += delete_in_batches_regexps(:merge_request_reviewers, :user_id, other_user,
merge_request.reviewers)
expect(query_recorder.log).to include(*expected_queries)
end
end
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
include_examples 'pre-migrate clean-up'
describe "Deletes a user and all their personal projects", :enable_admin_mode do
context 'no options are given' do
it 'creates GhostUserMigration record to handle migration in a worker' do
expect { service.execute(user) }
.to(
change do
Users::GhostUserMigration.where(user: user,
initiator_user: admin)
.exists?
end.from(false).to(true))
end
end
end
describe "Deletion permission checks" do
it 'does not delete the user when user is not an admin' do
@ -560,4 +219,3 @@ RSpec.describe Users::DestroyService do
end
end
end
end

View File

@ -146,24 +146,106 @@ RSpec.describe Users::MigrateRecordsToGhostUserService do
end
context 'for batched nullify' do
# rubocop:disable Layout/LineLength
def nullify_in_batches_regexp(table, column, user, batch_size: 100)
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE "#{table}"."id" IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
end
def delete_in_batches_regexps(table, column, user, items, batch_size: 1000)
select_query = %r{^SELECT "#{table}".* FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id}.*ORDER BY "#{table}"."id" ASC LIMIT #{batch_size}}
[select_query] + items.map { |item| %r{^DELETE FROM "#{table}" WHERE "#{table}"."id" = #{item.id}} }
end
# rubocop:enable Layout/LineLength
it 'nullifies related associations in batches' do
expect(user).to receive(:nullify_dependent_associations_in_batches).and_call_original
service.execute
end
it 'nullifies last_updated_issues, closed_issues, resource_label_events' do
it 'nullifies associations marked as `dependent: :nullify` and'\
'destroys the associations marked as `dependent: :destroy`, in batches', :aggregate_failures do
# associations to be nullified
issue = create(:issue, closed_by: user, updated_by: user)
resource_label_event = create(:resource_label_event, user: user)
resource_state_event = create(:resource_state_event, user: user)
created_project = create(:project, creator: user)
# associations to be destroyed
todos = create_list(:todo, 2, project: issue.project, user: user, author: user, target: issue)
event = create(:event, project: issue.project, author: user)
query_recorder = ActiveRecord::QueryRecorder.new do
service.execute
end
issue.reload
resource_label_event.reload
resource_state_event.reload
created_project.reload
expect(issue.closed_by).to be_nil
expect(issue.updated_by).to be_nil
expect(resource_label_event.user).to be_nil
expect(issue.updated_by_id).to be_nil
expect(resource_label_event.user_id).to be_nil
expect(resource_state_event.user_id).to be_nil
expect(created_project.creator_id).to be_nil
expect(user.authored_todos).to be_empty
expect(user.todos).to be_empty
expect(user.authored_events).to be_empty
expected_queries = [
nullify_in_batches_regexp(:issues, :updated_by_id, user),
nullify_in_batches_regexp(:issues, :closed_by_id, user),
nullify_in_batches_regexp(:resource_label_events, :user_id, user),
nullify_in_batches_regexp(:resource_state_events, :user_id, user),
nullify_in_batches_regexp(:projects, :creator_id, user)
]
expected_queries += delete_in_batches_regexps(:todos, :user_id, user, todos)
expected_queries += delete_in_batches_regexps(:todos, :author_id, user, todos)
expected_queries += delete_in_batches_regexps(:events, :author_id, user, [event])
expect(query_recorder.log).to include(*expected_queries)
end
it 'nullifies merge request associations', :aggregate_failures do
merge_request = create(:merge_request, source_project: project,
target_project: project,
assignee: user,
updated_by: user,
merge_user: user)
merge_request.metrics.update!(merged_by: user, latest_closed_by: user)
merge_request.reviewers = [user]
merge_request.assignees = [user]
query_recorder = ActiveRecord::QueryRecorder.new do
service.execute
end
merge_request.reload
expect(merge_request.updated_by).to be_nil
expect(merge_request.assignee).to be_nil
expect(merge_request.assignee_id).to be_nil
expect(merge_request.metrics.merged_by).to be_nil
expect(merge_request.metrics.latest_closed_by).to be_nil
expect(merge_request.reviewers).to be_empty
expect(merge_request.assignees).to be_empty
expected_queries = [
nullify_in_batches_regexp(:merge_requests, :updated_by_id, user),
nullify_in_batches_regexp(:merge_requests, :assignee_id, user),
nullify_in_batches_regexp(:merge_request_metrics, :merged_by_id, user),
nullify_in_batches_regexp(:merge_request_metrics, :latest_closed_by_id, user)
]
expected_queries += delete_in_batches_regexps(:merge_request_assignees, :user_id, user,
merge_request.assignees)
expected_queries += delete_in_batches_regexps(:merge_request_reviewers, :user_id, user,
merge_request.reviewers)
expect(query_recorder.log).to include(*expected_queries)
end
end

View File

@ -35,7 +35,6 @@ RSpec.describe Users::RejectService do
context 'success' do
context 'when the executor user is an admin in admin mode', :enable_admin_mode do
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates user removal', :sidekiq_inline do
subject
@ -45,20 +44,6 @@ RSpec.describe Users::RejectService do
initiator_user: current_user)
).to be_exists
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'deletes the user', :sidekiq_inline do
subject
expect(subject[:status]).to eq(:success)
expect { User.find(user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
it 'emails the user on rejection' do
expect_next_instance_of(NotificationService) do |notification|

View File

@ -164,32 +164,10 @@ RSpec.shared_examples 'PUT resource access tokens available' do
expect(resource.reload.bots).not_to include(bot_user)
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'creates GhostUserMigration records to handle migration in a worker' do
expect { subject }.to(
change { Users::GhostUserMigration.count }.from(0).to(1))
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'converts issuables of the bot user to ghost user' do
issue = create(:issue, author: bot_user)
subject
expect(issue.reload.author.ghost?).to be true
end
it 'deletes project bot user' do
subject
expect(User.exists?(bot_user.id)).to be_falsy
end
end
context 'when unsuccessful' do
before do

View File

@ -56,7 +56,6 @@ RSpec.describe RemoveExpiredMembersWorker do
expect(Member.find_by(user_id: expired_project_bot.id)).to be_nil
end
context 'when user_destroy_with_limited_execution_time_worker is enabled' do
it 'initiates project bot removal' do
worker.perform
@ -67,19 +66,6 @@ RSpec.describe RemoveExpiredMembersWorker do
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'deletes expired project bot' do
worker.perform
expect(User.exists?(expired_project_bot.id)).to be(false)
end
end
end
context 'non-expired project bot' do
let_it_be(:other_project_bot) { create(:user, :project_bot) }

View File

@ -38,16 +38,4 @@ RSpec.describe Users::MigrateRecordsToGhostUserInBatchesWorker do
expect(issue.last_edited_by).to eq(User.ghost)
end
end
context 'when user_destroy_with_limited_execution_time_worker is disabled' do
before do
stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
end
it 'does not execute the service' do
expect(Users::MigrateRecordsToGhostUserInBatchesService).not_to receive(:new)
worker.perform
end
end
end