Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2020-04-24 12:10:16 +00:00
parent b1b7c2f9a7
commit 5c8c561ac6
61 changed files with 1014 additions and 231 deletions

View file

@ -78,6 +78,7 @@
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
- "doc/administration/raketasks/maintenance.md" # Some RSpec test rely on this file
.code-patterns: &code-patterns
- "{package.json,yarn.lock}"
@ -121,6 +122,7 @@
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
- "doc/administration/raketasks/maintenance.md" # Some RSpec test rely on this file
.code-qa-patterns: &code-qa-patterns
- "{package.json,yarn.lock}"
@ -163,6 +165,7 @@
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
- "doc/administration/raketasks/maintenance.md" # Some RSpec test rely on this file
# QA changes
- ".dockerignore"
- "qa/**/*"
@ -500,14 +503,6 @@
- <<: *if-dot-com-gitlab-org-schedule
when: on_success
.review:rules:review-gcp-cleanup:
rules:
- <<: *if-dot-com-gitlab-org-merge-request
changes: *code-qa-patterns
when: manual
- <<: *if-dot-com-gitlab-org-schedule
when: on_success
.review:rules:danger:
rules:
- if: '$DANGER_GITLAB_API_TOKEN && $CI_MERGE_REQUEST_IID'

View file

@ -287,7 +287,7 @@ gem 'addressable', '~> 2.7'
gem 'font-awesome-rails', '~> 4.7'
gem 'gemojione', '~> 3.3'
gem 'gon', '~> 6.2'
gem 'request_store', '~> 1.3'
gem 'request_store', '~> 1.5'
gem 'base32', '~> 0.3.0'
gem "gitlab-license", "~> 1.0"

View file

@ -883,7 +883,8 @@ GEM
declarative (< 0.1.0)
declarative-option (< 0.2.0)
uber (< 0.2.0)
request_store (1.3.1)
request_store (1.5.0)
rack (>= 1.4)
responders (3.0.0)
actionpack (>= 5.0)
railties (>= 5.0)
@ -1350,7 +1351,7 @@ DEPENDENCIES
redis (~> 4.0)
redis-namespace (~> 1.6.0)
redis-rails (~> 5.0.2)
request_store (~> 1.3)
request_store (~> 1.5)
responders (~> 3.0)
retriable (~> 3.1.2)
rouge (~> 3.18.0)

View file

@ -642,5 +642,28 @@ export const setSuggestPopoverDismissed = ({ commit, state }) =>
createFlash(s__('MergeRequest|Error dismissing suggestion popover. Please try again.'));
});
export function changeCurrentCommit({ dispatch, commit, state }, { commitId }) {
/* eslint-disable @gitlab/require-i18n-strings */
if (!commitId) {
return Promise.reject(new Error('`commitId` is a required argument'));
} else if (!state.commit) {
return Promise.reject(new Error('`state` must already contain a valid `commit`'));
}
/* eslint-enable @gitlab/require-i18n-strings */
// this is less than ideal, see: https://gitlab.com/gitlab-org/gitlab/-/issues/215421
const commitRE = new RegExp(state.commit.id, 'g');
commit(types.SET_DIFF_FILES, []);
commit(types.SET_BASE_CONFIG, {
...state,
endpoint: state.endpoint.replace(commitRE, commitId),
endpointBatch: state.endpointBatch.replace(commitRE, commitId),
endpointMetadata: state.endpointMetadata.replace(commitRE, commitId),
});
return dispatch('fetchDiffFilesMeta');
}
// prevent babel-plugin-rewire from generating an invalid default during karma tests
export default () => {};

View file

@ -0,0 +1,18 @@
export const AJAX_USERS_SELECT_OPTIONS_MAP = {
projectId: 'projectId',
groupId: 'groupId',
showCurrentUser: 'currentUser',
authorId: 'authorId',
skipUsers: 'skipUsers',
};
export const AJAX_USERS_SELECT_PARAMS_MAP = {
project_id: 'projectId',
group_id: 'groupId',
skip_ldap: 'skipLdap',
todo_filter: 'todoFilter',
todo_state_filter: 'todoStateFilter',
current_user: 'showCurrentUser',
author_id: 'authorId',
skip_users: 'skipUsers',
};

View file

@ -4,10 +4,15 @@
import $ from 'jquery';
import { escape, template, uniqBy } from 'lodash';
import axios from './lib/utils/axios_utils';
import { s__, __, sprintf } from './locale';
import ModalStore from './boards/stores/modal_store';
import { parseBoolean } from './lib/utils/common_utils';
import axios from '../lib/utils/axios_utils';
import { s__, __, sprintf } from '../locale';
import ModalStore from '../boards/stores/modal_store';
import { parseBoolean } from '../lib/utils/common_utils';
import {
AJAX_USERS_SELECT_OPTIONS_MAP,
AJAX_USERS_SELECT_PARAMS_MAP,
} from 'ee_else_ce/users_select/constants';
import { getAjaxUsersSelectOptions, getAjaxUsersSelectParams } from './utils';
// TODO: remove eventHub hack after code splitting refactor
window.emitSidebarEvent = window.emitSidebarEvent || $.noop;
@ -558,13 +563,8 @@ function UsersSelect(currentUser, els, options = {}) {
import(/* webpackChunkName: 'select2' */ 'select2/select2')
.then(() => {
$('.ajax-users-select').each((i, select) => {
const options = {};
const options = getAjaxUsersSelectOptions($(select), AJAX_USERS_SELECT_OPTIONS_MAP);
options.skipLdap = $(select).hasClass('skip_ldap');
options.projectId = $(select).data('projectId');
options.groupId = $(select).data('groupId');
options.showCurrentUser = $(select).data('currentUser');
options.authorId = $(select).data('authorId');
options.skipUsers = $(select).data('skipUsers');
const showNullUser = $(select).data('nullUser');
const showAnyUser = $(select).data('anyUser');
const showEmailUser = $(select).data('emailUser');
@ -705,14 +705,7 @@ UsersSelect.prototype.users = function(query, options, callback) {
const params = {
search: query,
active: true,
project_id: options.projectId || null,
group_id: options.groupId || null,
skip_ldap: options.skipLdap || null,
todo_filter: options.todoFilter || null,
todo_state_filter: options.todoStateFilter || null,
current_user: options.showCurrentUser || null,
author_id: options.authorId || null,
skip_users: options.skipUsers || null,
...getAjaxUsersSelectParams(options, AJAX_USERS_SELECT_PARAMS_MAP),
};
if (options.issuableType === 'merge_request') {

View file

@ -0,0 +1,27 @@
/**
* Get options from data attributes on passed `$select`.
* @param {jQuery} $select
* @param {Object} optionsMap e.g. { optionKeyName: 'dataAttributeName' }
*/
export const getAjaxUsersSelectOptions = ($select, optionsMap) => {
return Object.keys(optionsMap).reduce((accumulator, optionKey) => {
const dataKey = optionsMap[optionKey];
accumulator[optionKey] = $select.data(dataKey);
return accumulator;
}, {});
};
/**
* Get query parameters used for users request from passed `options` parameter
* @param {Object} options e.g. { currentUserId: 1, fooBar: 'baz' }
* @param {Object} paramsMap e.g. { user_id: 'currentUserId', foo_bar: 'fooBar' }
*/
export const getAjaxUsersSelectParams = (options, paramsMap) => {
return Object.keys(paramsMap).reduce((accumulator, paramKey) => {
const optionKey = paramsMap[paramKey];
accumulator[paramKey] = options[optionKey] || null;
return accumulator;
}, {});
};

View file

@ -21,7 +21,7 @@ export default function deviseState(data) {
return stateKey.unresolvedDiscussions;
} else if (this.isPipelineBlocked) {
return stateKey.pipelineBlocked;
} else if (this.isSHAMismatch) {
} else if (this.canMerge && this.isSHAMismatch) {
return stateKey.shaMismatch;
} else if (this.autoMergeEnabled) {
return this.mergeError ? stateKey.autoMergeFailed : stateKey.autoMergeEnabled;

View file

@ -135,11 +135,6 @@ class Namespace < ApplicationRecord
name = host.delete_suffix(gitlab_host)
Namespace.where(parent_id: nil).by_path(name)
end
# overridden in ee
def reset_ci_minutes!(namespace_id)
false
end
end
def default_branch_protection

View file

@ -21,6 +21,9 @@ class ProjectStatistics < ApplicationRecord
scope :for_project_ids, ->(project_ids) { where(project_id: project_ids) }
scope :for_namespaces, -> (namespaces) { where(namespace: namespaces) }
scope :with_any_ci_minutes_used, -> { where.not(shared_runners_seconds: 0) }
def total_repository_size
repository_size + lfs_objects_size
end

View file

@ -27,18 +27,29 @@ module Groups
private
def import_file
@import_file ||= Gitlab::ImportExport::FileImporter.import(importable: @group,
archive_file: nil,
shared: @shared)
@import_file ||= Gitlab::ImportExport::FileImporter.import(
importable: @group,
archive_file: nil,
shared: @shared
)
end
def restorer
@restorer ||= Gitlab::ImportExport::Group::LegacyTreeRestorer.new(
user: @current_user,
shared: @shared,
group: @group,
group_hash: nil
)
@restorer ||=
if ::Feature.enabled?(:group_import_export_ndjson, @group&.parent)
Gitlab::ImportExport::Group::TreeRestorer.new(
user: @current_user,
shared: @shared,
group: @group
)
else
Gitlab::ImportExport::Group::LegacyTreeRestorer.new(
user: @current_user,
shared: @shared,
group: @group,
group_hash: nil
)
end
end
def remove_import_file

View file

@ -10,8 +10,13 @@ module Projects
@shared = project.import_export_shared
save_all!
execute_after_export_action(after_export_strategy)
measurement_enabled = !!options[:measurement_enabled]
measurement_logger = options[:measurement_logger]
::Gitlab::Utils::Measuring.execute_with(measurement_enabled, measurement_logger, base_log_data) do
save_all!
execute_after_export_action(after_export_strategy)
end
ensure
cleanup
end
@ -20,6 +25,15 @@ module Projects
attr_accessor :shared
def base_log_data
{
class: self.class.name,
current_user: current_user.name,
project_full_path: project.full_path,
file_path: shared.export_path
}
end
def execute_after_export_action(after_export_strategy)
return unless after_export_strategy

View file

@ -0,0 +1,5 @@
---
title: Bump max search depth from 2 to 4 when looking for files SAST analyzers can handle
merge_request: 29732
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Remove the SIDEKIQ_REQUEST_STORE configuration
merge_request: 29955
author:
type: other

View file

@ -0,0 +1,5 @@
---
title: Fixed enabled merge button incorrectly showing to users who can't merge
merge_request:
author:
type: fixed

View file

@ -33,7 +33,6 @@ enable_json_logs = Gitlab.config.sidekiq.log_format == 'json'
enable_sidekiq_memory_killer = ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'].to_i.nonzero?
use_sidekiq_daemon_memory_killer = ENV["SIDEKIQ_DAEMON_MEMORY_KILLER"].to_i.nonzero?
use_sidekiq_legacy_memory_killer = !use_sidekiq_daemon_memory_killer
use_request_store = ENV.fetch('SIDEKIQ_REQUEST_STORE', 1).to_i.nonzero?
Sidekiq.configure_server do |config|
if enable_json_logs
@ -50,8 +49,7 @@ Sidekiq.configure_server do |config|
config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator({
metrics: Settings.monitoring.sidekiq_exporter,
arguments_logger: ENV['SIDEKIQ_LOG_ARGUMENTS'] && !enable_json_logs,
memory_killer: enable_sidekiq_memory_killer && use_sidekiq_legacy_memory_killer,
request_store: use_request_store
memory_killer: enable_sidekiq_memory_killer && use_sidekiq_legacy_memory_killer
}))
config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)

View file

@ -209,7 +209,6 @@ Piwik
PgBouncer
plaintext
PostgreSQL
precompile
preconfigure
preconfigured
preconfigures

View file

@ -1,8 +1,6 @@
# Integrity check Rake task
# Integrity Check Rake Task
GitLab provides Rake tasks to check the integrity of various components.
## Repository integrity
## Repository Integrity
Even though Git is very resilient and tries to prevent data integrity issues,
there are times when things go wrong. The following Rake tasks intend to
@ -45,7 +43,7 @@ sudo gitlab-rake gitlab:git:fsck
sudo -u git -H bundle exec rake gitlab:git:fsck RAILS_ENV=production
```
## Uploaded files integrity
## Uploaded Files Integrity
Various types of files can be uploaded to a GitLab installation by users.
These integrity checks can detect missing files. Additionally, for locally
@ -129,7 +127,7 @@ Checking integrity of Uploads
Done!
```
## LDAP check
## LDAP Check
The LDAP check Rake task will test the bind DN and password credentials
(if configured) and will list a sample of LDAP users. This task is also

View file

@ -1,11 +1,9 @@
# Geo Rake Tasks **(PREMIUM ONLY)**
The following Rake tasks are for [Geo installations](../geo/replication/index.md).
## Git housekeeping
There are few tasks you can run to schedule a Git housekeeping to start at the
next repository sync in a **secondary** node:
next repository sync in a **Secondary node**:
### Incremental Repack

View file

@ -9,7 +9,7 @@ which will become the owner of the project. You can resume an import
with the same command.
Bear in mind that the syntax is very specific. Remove any spaces within the argument block and
before/after the brackets. Also, some shells (for example, `zsh`) can interpret the open/close brackets
before/after the brackets. Also, Some shells (e.g., zsh) can interpret the open/close brackets
(`[]`) separately. You may need to either escape the brackets or use double quotes.
## Importing multiple projects

View file

@ -1,6 +1,4 @@
# LDAP Rake tasks
The following are LDAP-related Rake tasks.
# LDAP Rake Tasks
## Check
@ -28,7 +26,7 @@ limit by passing a number to the check task:
rake gitlab:ldap:check[50]
```
## Run a group sync
## Run a Group Sync
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/14735) in [GitLab Starter](https://about.gitlab.com/pricing/) 12.2.

View file

@ -1,11 +1,8 @@
# Maintenance Rake tasks
# Maintenance Rake Tasks
GitLab provides Rake tasks for general maintenance.
## Gather information about GitLab and the system it runs on
## Gather GitLab and system information
This command gathers information about your GitLab installation and the system it runs on.
These may be useful when asking for help or reporting issues.
This command gathers information about your GitLab installation and the System it runs on. These may be useful when asking for help or reporting issues.
**Omnibus Installation**
@ -53,23 +50,20 @@ Git: /usr/bin/git
## Check GitLab configuration
The `gitlab:check` Rake task runs the following Rake tasks:
Runs the following Rake tasks:
- `gitlab:gitlab_shell:check`
- `gitlab:gitaly:check`
- `gitlab:sidekiq:check`
- `gitlab:app:check`
It will check that each component was set up according to the installation guide and suggest fixes
for issues found. This command must be run from your application server and will not work correctly on
component servers like [Gitaly](../gitaly/index.md#running-gitaly-on-its-own-server).
It will check that each component was set up according to the installation guide and suggest fixes for issues found.
This command must be run from your app server and will not work correctly on component servers like [Gitaly](../gitaly/index.md#running-gitaly-on-its-own-server).
You may also have a look at our troubleshooting guides for:
You may also have a look at our Troubleshooting Guides:
- [GitLab](../index.md#troubleshooting)
- [Omnibus GitLab](https://docs.gitlab.com/omnibus/README.html#troubleshooting)
To run `gitlab:check`, run:
- [Troubleshooting Guide (GitLab)](../index.md#troubleshooting)
- [Troubleshooting Guide (Omnibus GitLab)](https://docs.gitlab.com/omnibus/README.html#troubleshooting)
**Omnibus Installation**
@ -83,8 +77,7 @@ sudo gitlab-rake gitlab:check
bundle exec rake gitlab:check RAILS_ENV=production
```
NOTE: **Note:**
Use `SANITIZE=true` for `gitlab:check` if you want to omit project names from the output.
NOTE: Use `SANITIZE=true` for `gitlab:check` if you want to omit project names from the output.
Example output:
@ -133,7 +126,7 @@ Checking GitLab ... Finished
## Rebuild authorized_keys file
In some case it is necessary to rebuild the `authorized_keys` file. To do this, run:
In some case it is necessary to rebuild the `authorized_keys` file.
**Omnibus Installation**
@ -148,8 +141,6 @@ cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:shell:setup RAILS_ENV=production
```
Example output:
```plaintext
This will rebuild an authorized_keys file.
You will lose any data stored in authorized_keys file.
@ -158,8 +149,8 @@ Do you want to continue (yes/no)? yes
## Clear Redis cache
If for some reason the dashboard displays the wrong information, you might want to
clear Redis' cache. To do this, run:
If for some reason the dashboard shows wrong information you might want to
clear Redis' cache.
**Omnibus Installation**
@ -179,7 +170,7 @@ sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production
Sometimes during version upgrades you might end up with some wrong CSS or
missing some icons. In that case, try to precompile the assets again.
This only applies to source installations and does NOT apply to
Note that this only applies to source installations and does NOT apply to
Omnibus packages.
**Source Installation**
@ -202,8 +193,6 @@ GitLab provides a Rake task that lets you track deployments in GitLab
Performance Monitoring. This Rake task simply stores the current GitLab version
in the GitLab Performance Monitoring database.
To run `gitlab:track_deployment`:
**Omnibus Installation**
```shell

View file

@ -3,13 +3,11 @@
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
> - From GitLab 11.3, import/export can use object storage automatically.
GitLab provides Rake tasks relating to project import and export. For more information, see:
See also:
- [Project import/export documentation](../../user/project/settings/import_export.md).
- [Project import/export API](../../api/project_import_export.md).
## Import/export tasks
The GitLab import/export version can be checked by using the following command:
```shell
@ -30,6 +28,8 @@ sudo gitlab-rake gitlab:import_export:data
bundle exec rake gitlab:import_export:data RAILS_ENV=production
```
## Important notes
Note the following:
- Importing is only possible if the version of the import and export GitLab instances are

View file

@ -1,4 +1,4 @@
# Repository storage Rake tasks
# Repository Storage Rake Tasks
This is a collection of Rake tasks you can use to help you list and migrate
existing projects and attachments associated with it from Legacy storage to
@ -6,7 +6,7 @@ the new Hashed storage type.
You can read more about the storage types [here](../repository_storage_types.md).
## Migrate existing projects to hashed storage
## Migrate existing projects to Hashed storage
Before migrating your existing projects, you should
[enable hashed storage](../repository_storage_types.md#how-to-migrate-to-hashed-storage) for the new projects as well.
@ -34,9 +34,9 @@ export ID_FROM=20
export ID_TO=50
```
You can monitor the progress in the **{admin}** **Admin Area > Monitoring > Background Jobs** page.
There is a specific queue you can watch to see how long it will take to finish:
`hashed_storage:hashed_storage_project_migrate`.
You can monitor the progress in the **Admin Area > Monitoring > Background Jobs** page.
There is a specific Queue you can watch to see how long it will take to finish:
`hashed_storage:hashed_storage_project_migrate`
After it reaches zero, you can confirm every project has been migrated by running the commands bellow.
If you find it necessary, you can run this migration script again to schedule missing projects.
@ -44,18 +44,16 @@ If you find it necessary, you can run this migration script again to schedule mi
Any error or warning will be logged in Sidekiq's log file.
NOTE: **Note:**
If [Geo](../geo/replication/index.md) is enabled, each project that is successfully migrated
generates an event to replicate the changes on any **secondary** nodes.
If Geo is enabled, each project that is successfully migrated generates an event to replicate the changes on any **secondary** nodes.
You only need the `gitlab:storage:migrate_to_hashed` Rake task to migrate your repositories, but we have additional
commands below that helps you inspect projects and attachments in both legacy and hashed storage.
## Rollback from hashed storage to legacy storage
## Rollback from Hashed storage to Legacy storage
If you need to rollback the storage migration for any reason, you can follow the steps described here.
NOTE: **Note:**
Hashed storage will be required in future version of GitLab.
NOTE: **Note:** Hashed Storage will be required in future version of GitLab.
To prevent new projects from being created in the Hashed storage,
you need to undo the [enable hashed storage](../repository_storage_types.md#how-to-migrate-to-hashed-storage) changes.
@ -83,7 +81,7 @@ export ID_FROM=20
export ID_TO=50
```
You can monitor the progress in the **{admin}** **Admin Area > Monitoring > Background Jobs** page.
You can monitor the progress in the **Admin Area > Monitoring > Background Jobs** page.
On the **Queues** tab, you can watch the `hashed_storage:hashed_storage_project_rollback` queue to see how long the process will take to finish.
After it reaches zero, you can confirm every project has been rolled back by running the commands bellow.
@ -91,13 +89,9 @@ If some projects weren't rolled back, you can run this rollback script again to
Any error or warning will be logged in Sidekiq's log file.
## List projects
## List projects on Legacy storage
The following are Rake tasks for listing projects.
### List projects on legacy storage
To have a simple summary of projects using legacy storage:
To have a simple summary of projects using **Legacy** storage:
**Omnibus Installation**
@ -111,7 +105,7 @@ sudo gitlab-rake gitlab:storage:legacy_projects
sudo -u git -H bundle exec rake gitlab:storage:legacy_projects RAILS_ENV=production
```
To list projects using legacy storage:
To list projects using **Legacy** storage:
**Omnibus Installation**
@ -126,9 +120,9 @@ sudo -u git -H bundle exec rake gitlab:storage:list_legacy_projects RAILS_ENV=pr
```
### List projects on hashed storage
## List projects on Hashed storage
To have a simple summary of projects using hashed storage:
To have a simple summary of projects using **Hashed** storage:
**Omnibus Installation**
@ -142,7 +136,7 @@ sudo gitlab-rake gitlab:storage:hashed_projects
sudo -u git -H bundle exec rake gitlab:storage:hashed_projects RAILS_ENV=production
```
To list projects using hashed storage:
To list projects using **Hashed** storage:
**Omnibus Installation**
@ -156,13 +150,9 @@ sudo gitlab-rake gitlab:storage:list_hashed_projects
sudo -u git -H bundle exec rake gitlab:storage:list_hashed_projects RAILS_ENV=production
```
## List attachments
## List attachments on Legacy storage
The following are Rake tasks for listing attachments.
### List attachments on legacy storage
To have a simple summary of project attachments using legacy storage:
To have a simple summary of project attachments using **Legacy** storage:
**Omnibus Installation**
@ -176,7 +166,7 @@ sudo gitlab-rake gitlab:storage:legacy_attachments
sudo -u git -H bundle exec rake gitlab:storage:legacy_attachments RAILS_ENV=production
```
To list project attachments using legacy storage:
To list project attachments using **Legacy** storage:
**Omnibus Installation**
@ -190,9 +180,9 @@ sudo gitlab-rake gitlab:storage:list_legacy_attachments
sudo -u git -H bundle exec rake gitlab:storage:list_legacy_attachments RAILS_ENV=production
```
### List attachments on hashed storage
## List attachments on Hashed storage
To have a simple summary of project attachments using hashed storage:
To have a simple summary of project attachments using **Hashed** storage:
**Omnibus Installation**
@ -206,7 +196,7 @@ sudo gitlab-rake gitlab:storage:hashed_attachments
sudo -u git -H bundle exec rake gitlab:storage:hashed_attachments RAILS_ENV=production
```
To list project attachments using hashed storage:
To list project attachments using **Hashed** storage:
**Omnibus Installation**

View file

@ -2,7 +2,7 @@
type: howto, reference
---
# Command Line basic commands
# Edit files through the command line
When [working with Git from the command line](start-using-git.md), you will need to
use more than just the Git commands. There are several basic commands that you should

View file

@ -1,63 +1,61 @@
# Import bare repositories
# Import bare repositories into your GitLab instance
Rake tasks are available to import bare repositories into a GitLab instance.
## Notes
Note that:
- The owner of the project will be the first admin
- The groups will be created as needed, including subgroups
- The owner of the group will be the first admin
- Existing projects will be skipped
- Projects in hashed storage may be skipped (see [Importing bare repositories from hashed storage](#importing-bare-repositories-from-hashed-storage))
- The existing Git repos will be moved from disk (removed from the original path)
- The owner of the project will be the first administrator.
- The groups will be created as needed, including subgroups.
- The owner of the group will be the first administrator.
- Existing projects will be skipped.
- Projects in hashed storage may be skipped. For more information, see
[Importing bare repositories from hashed storage](#importing-bare-repositories-from-hashed-storage).
- The existing Git repositories will be moved from disk (removed from the original path).
## How to use
To import bare repositories into a GitLab instance:
### Create a new folder to import your Git repositories from
1. Create a new folder to import your Git repositories from. The new folder needs to have Git user
ownership and read/write/execute access for Git user and its group:
The new folder needs to have Git user ownership and read/write/execute access for Git user and its group:
```shell
sudo -u git mkdir -p /var/opt/gitlab/git-data/repository-import-<date>/new_group
```
```shell
sudo -u git mkdir -p /var/opt/gitlab/git-data/repository-import-<date>/new_group
```
1. Copy your bare repositories inside this newly created folder. Note:
### Copy your bare repositories inside this newly created folder
- Any `.git` repositories found on any of the subfolders will be imported as projects.
- Groups will be created as needed, these could be nested folders.
- Any `.git` repositories found on any of the subfolders will be imported as projects
- Groups will be created as needed, these could be nested folders. Example:
For example, if we copy the repositories to `/var/opt/gitlab/git-data/repository-import-<date>`,
and repository `A` needs to be under the groups `G1` and `G2`, it must be created under those folders:
`/var/opt/gitlab/git-data/repository-import-<date>/G1/G2/A.git`.
If we copy the repos to `/var/opt/gitlab/git-data/repository-import-<date>`, and repo A needs to be under the groups G1 and G2, it will
have to be created under those folders: `/var/opt/gitlab/git-data/repository-import-<date>/G1/G2/A.git`.
```shell
sudo cp -r /old/git/foo.git /var/opt/gitlab/git-data/repository-import-<date>/new_group/
```shell
sudo cp -r /old/git/foo.git /var/opt/gitlab/git-data/repository-import-<date>/new_group/
# Do this once when you are done copying git repositories
sudo chown -R git:git /var/opt/gitlab/git-data/repository-import-<date>
```
# Do this once when you are done copying git repositories
sudo chown -R git:git /var/opt/gitlab/git-data/repository-import-<date>
```
`foo.git` needs to be owned by the `git` user and `git` users group.
`foo.git` needs to be owned by the `git` user and `git` users group.
If you are using an installation from source, replace `/var/opt/gitlab/` with `/home/git`.
If you are using an installation from source, replace `/var/opt/gitlab/` with `/home/git`.
1. Run the following command depending on your type of installation:
### Run the command below depending on your type of installation
- Omnibus Installation
#### Omnibus Installation
```shell
sudo gitlab-rake gitlab:import:repos['/var/opt/gitlab/git-data/repository-import-<date>']
```
```shell
sudo gitlab-rake gitlab:import:repos['/var/opt/gitlab/git-data/repository-import-<date>']
```
- Installation from source. Before running this command you need to change to the directory where
your GitLab installation is located:
#### Installation from source
```shell
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:import:repos['/var/opt/gitlab/git-data/repository-import-<date>'] RAILS_ENV=production
```
Before running this command you need to change the directory to where your GitLab installation is located:
## Example output
```shell
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:import:repos['/var/opt/gitlab/git-data/repository-import-<date>'] RAILS_ENV=production
```
#### Example output
```plaintext
Processing /var/opt/gitlab/git-data/repository-import-1/a/b/c/blah.git
@ -75,6 +73,8 @@ Processing /var/opt/gitlab/git-data/repository-import-1/group/xyz.git
## Importing bare repositories from hashed storage
### Background
Projects in legacy storage have a directory structure that mirrors their full
project path in GitLab, including their namespace structure. This information is
leveraged by the bare repository importer to import projects into their proper
@ -86,17 +86,17 @@ improved performance and data integrity. See
[Repository Storage Types](../administration/repository_storage_types.md) for
more details.
The repositories that are importable depends on the version of GitLab.
### Which repositories are importable?
### GitLab 10.3 or earlier
#### GitLab 10.3 or earlier
Importing bare repositories from hashed storage is unsupported.
### GitLab 10.4 and later
#### GitLab 10.4 and later
To support importing bare repositories from hashed storage, GitLab 10.4 and
later stores the full project path with each repository, in a special section of
the Git repository's configuration file. This section is formatted as follows:
the Git repository's config file. This section is formatted as follows:
```ini
[gitlab]

View file

@ -1,8 +1,7 @@
# Listing repository directories
You can print a list of all Git repositories on disk managed by GitLab.
To print a list, run the following command:
You can print a list of all Git repositories on disk managed by
GitLab with the following command:
```shell
# Omnibus
@ -13,13 +12,10 @@ cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:list_repos RAILS_ENV=production
```
NOTE: **Note:**
The results use the default ordering of the GitLab Rails application.
## Limit search results
To list only projects with recent activity, pass a date with the `SINCE` environment variable. The
time you specify is parsed by the Rails [TimeZone#parse function](https://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html#method-i-parse).
If you only want to list projects with recent activity you can pass
a date with the 'SINCE' environment variable. The time you specify
is parsed by the Rails [TimeZone#parse
function](https://api.rubyonrails.org/classes/ActiveSupport/TimeZone.html#method-i-parse).
```shell
# Omnibus
@ -29,3 +25,6 @@ sudo gitlab-rake gitlab:list_repos SINCE='Sep 1 2015'
cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:list_repos RAILS_ENV=production SINCE='Sep 1 2015'
```
Note that the projects listed are NOT sorted by activity; they use
the default ordering of the GitLab Rails application.

View file

@ -236,7 +236,7 @@ To apply labels across multiple epics:
NOTE: **Note:**
To delete an epic, you need to be an [Owner](../../permissions.md#group-members-permissions) of a group/subgroup.
When inside a single epic view, click the **Delete** button to delete the epic.
When editing the description of an epic, click the **Delete** button to delete the epic.
A modal will pop-up to confirm your action.
Deleting an epic releases all existing issues from their associated epic in the

View file

@ -24,6 +24,7 @@ sast:
- $GITLAB_FEATURES =~ /\bsast\b/
image: docker:stable
variables:
SEARCH_MAX_DEPTH: 4
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
services:

View file

@ -157,7 +157,7 @@ module Gitlab
return unless job[:stage]
unless job[:stage].is_a?(String) && job[:stage].in?(@stages)
raise ValidationError, "#{name} job: stage parameter should be #{@stages.join(", ")}"
raise ValidationError, "#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}"
end
end

View file

@ -0,0 +1,71 @@
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Group
      # Restores a single group — its attributes, members and associated
      # relations — from previously extracted import data.
      class GroupRestorer
        # @param user [User] the user performing the import
        # @param shared [Gitlab::ImportExport::Shared] shared import state/errors
        # @param group [Group] the group record being populated
        # @param attributes [Hash] exported group attributes to apply
        # @param importable_path [String] path of this group inside the export tree
        # @param relation_reader [#consume_relation] source of exported relations
        # @param reader [Gitlab::ImportExport::Reader] import configuration reader
        def initialize(
          user:,
          shared:,
          group:,
          attributes:,
          importable_path:,
          relation_reader:,
          reader:
        )
          @user = user
          @shared = shared
          @group = group
          @group_attributes = attributes
          @importable_path = importable_path
          @relation_reader = relation_reader
          @reader = reader
        end

        # Maps the exported members onto local users, then restores the
        # remaining relations. Bails out (returning a falsey value) when
        # member mapping fails.
        def restore
          # Each consumed entry is a [relation, index] pair; only the
          # relation itself is needed here.
          consumed = @relation_reader.consume_relation(@importable_path, 'members')
          @group_members = consumed.map { |relation, _index| relation }

          return unless members_mapper.map

          restorer.restore
        end

        private

        # Lazily built mapper from exported members to local users.
        def members_mapper
          @members_mapper ||= Gitlab::ImportExport::MembersMapper.new(
            exported_members: @group_members,
            user: @user,
            importable: @group
          )
        end

        # Lazily built restorer for the group's relation tree.
        def restorer
          @relation_tree_restorer ||= RelationTreeRestorer.new(
            user: @user,
            shared: @shared,
            relation_reader: @relation_reader,
            members_mapper: members_mapper,
            object_builder: object_builder,
            relation_factory: relation_factory,
            reader: @reader,
            importable: @group,
            importable_attributes: @group_attributes,
            importable_path: @importable_path
          )
        end

        # Group-specific relation factory handed to the tree restorer.
        def relation_factory
          Gitlab::ImportExport::Group::RelationFactory
        end

        # Group-specific object builder handed to the tree restorer.
        def object_builder
          Gitlab::ImportExport::Group::ObjectBuilder
        end
      end
    end
  end
end

View file

@ -0,0 +1,140 @@
# frozen_string_literal: true

module Gitlab
  module ImportExport
    module Group
      # Restores a full group hierarchy (root group plus descendants) from
      # an ndjson export tree produced by the group export.
      class TreeRestorer
        include Gitlab::Utils::StrongMemoize

        attr_reader :user, :shared

        # @param user [User] the user performing the import
        # @param shared [Gitlab::ImportExport::Shared] shared import state/errors
        # @param group [Group] existing group that becomes the imported root
        def initialize(user:, shared:, group:)
          @user = user
          @shared = shared
          @top_level_group = group
          # Maps exported group ids to restored Group records so children
          # can look up their parent during import.
          @groups_mapping = {}
        end

        # Restores the root group first, then each remaining group in export
        # order. Returns true on success; records the exception on +shared+
        # and returns false on failure.
        def restore
          # The first id in the 'groups/_all' relation is the root group.
          group_ids = relation_reader.consume_relation('groups', '_all').map { |value, _idx| Integer(value) }
          root_group_id = group_ids.delete_at(0)

          process_root(root_group_id)

          group_ids.each do |group_id|
            process_child(group_id)
          end

          true
        rescue => e
          shared.error(e)
          false
        end

        # Wraps the exported attributes of a single group, validating that
        # the id recorded in the attributes matches the requested group_id.
        class GroupAttributes
          attr_reader :attributes, :group_id, :id, :path

          def initialize(group_id, relation_reader)
            @group_id = group_id
            @path = "groups/#{group_id}"
            @attributes = relation_reader.consume_attributes(@path)
            @id = @attributes.delete('id')

            unless @id == @group_id
              raise ArgumentError, "Invalid group_id for #{group_id}"
            end
          end

          def delete_attribute(name)
            attributes.delete(name)
          end

          # Deletes several attributes at once, returning the removed values
          # in the order the names were given.
          def delete_attributes(*names)
            names.map(&method(:delete_attribute))
          end
        end
        private_constant :GroupAttributes

        private

        def process_root(group_id)
          group_attributes = GroupAttributes.new(group_id, relation_reader)

          # name and path are not imported on the root group to avoid conflict
          # with existing groups name and/or path.
          group_attributes.delete_attributes('name', 'path')

          restore_group(@top_level_group, group_attributes)
        end

        def process_child(group_id)
          group_attributes = GroupAttributes.new(group_id, relation_reader)

          group = create_group(group_attributes)

          restore_group(group, group_attributes)
        end

        # Creates a child group under its (already restored) parent; raises
        # ArgumentError when the parent was not part of this import.
        def create_group(group_attributes)
          parent_id = group_attributes.delete_attribute('parent_id')
          name = group_attributes.delete_attribute('name')
          path = group_attributes.delete_attribute('path')

          parent_group = @groups_mapping.fetch(parent_id) { raise(ArgumentError, 'Parent group not found') }

          ::Groups::CreateService.new(
            user,
            name: name,
            path: path,
            parent_id: parent_group.id,
            visibility_level: sub_group_visibility_level(group_attributes.attributes, parent_group)
          ).execute
        end

        # Registers the group in the id mapping, then delegates attribute and
        # relation restoration to GroupRestorer.
        def restore_group(group, group_attributes)
          @groups_mapping[group_attributes.id] = group

          Group::GroupRestorer.new(
            user: user,
            shared: shared,
            group: group,
            attributes: group_attributes.attributes,
            importable_path: group_attributes.path,
            relation_reader: relation_reader,
            reader: reader
          ).restore
        end

        # Memoized ndjson reader rooted at the extracted export 'tree' dir.
        def relation_reader
          strong_memoize(:relation_reader) do
            ImportExport::JSON::NdjsonReader.new(
              File.join(shared.export_path, 'tree')
            )
          end
        end

        # Clamps a subgroup's visibility so it is never more permissive than
        # its parent's; defaults to PRIVATE when the export has no value.
        def sub_group_visibility_level(group_hash, parent_group)
          original_visibility_level = group_hash['visibility_level'] || Gitlab::VisibilityLevel::PRIVATE

          if parent_group && parent_group.visibility_level < original_visibility_level
            Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
          else
            original_visibility_level
          end
        end

        # Memoized import configuration reader for groups.
        def reader
          strong_memoize(:reader) do
            Gitlab::ImportExport::Reader.new(
              shared: @shared,
              config: Gitlab::ImportExport::Config.new(
                config: Gitlab::ImportExport.group_config_file
              ).to_h
            )
          end
        end
      end
    end
  end
end

View file

@ -12,16 +12,18 @@ module Gitlab
@namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
@current_user = User.find_by_username(opts.fetch(:username))
@measurement_enabled = opts.fetch(:measurement_enabled)
@measurement = Gitlab::Utils::Measuring.new(logger: logger) if @measurement_enabled
@logger = logger
end
private
attr_reader :measurement, :project, :namespace, :current_user, :file_path, :project_path, :logger
attr_reader :project, :namespace, :current_user, :file_path, :project_path, :logger, :measurement_enabled
def measurement_enabled?
@measurement_enabled
def measurement_options
{
measurement_enabled: measurement_enabled,
measurement_logger: logger
}
end
def success(message)
@ -30,13 +32,6 @@ module Gitlab
true
end
def measurement_options
{
measurement_enabled: measurement_enabled?,
measurement_logger: logger
}
end
def error(message)
logger.error(message)

View file

@ -16,7 +16,7 @@ module Gitlab
with_export do
::Projects::ImportExport::ExportService.new(project, current_user)
.execute(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy.new(archive_path: file_path))
.execute(Gitlab::ImportExport::AfterExportStrategies::MoveFileStrategy.new(archive_path: file_path), measurement_options)
end
success('Done!')
@ -33,7 +33,7 @@ module Gitlab
def with_export
with_request_store do
::Gitlab::GitalyClient.allow_n_plus_1_calls do
measurement_enabled? ? measurement.with_measuring { yield } : yield
yield
end
end
end

View file

@ -7,13 +7,13 @@ module Gitlab
# The result of this method should be passed to
# Sidekiq's `config.server_middleware` method
# eg: `config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator)`
def self.server_configurator(metrics: true, arguments_logger: true, memory_killer: true, request_store: true)
def self.server_configurator(metrics: true, arguments_logger: true, memory_killer: true)
lambda do |chain|
chain.add ::Gitlab::SidekiqMiddleware::Monitor
chain.add ::Gitlab::SidekiqMiddleware::ServerMetrics if metrics
chain.add ::Gitlab::SidekiqMiddleware::ArgumentsLogger if arguments_logger
chain.add ::Gitlab::SidekiqMiddleware::MemoryKiller if memory_killer
chain.add ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware if request_store
chain.add ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware
chain.add ::Gitlab::SidekiqMiddleware::BatchLoader
chain.add ::Labkit::Middleware::Sidekiq::Server
chain.add ::Gitlab::SidekiqMiddleware::InstrumentationLogger

View file

@ -23,10 +23,11 @@ module Gitlab
end
def with_measuring
result = with_gc_stats do
result = nil
with_gc_stats do
with_count_queries do
with_measure_time do
yield
result = yield
end
end
end
@ -56,31 +57,27 @@ module Gitlab
ActiveSupport::Notifications.subscribed(counter_f, "sql.active_record", &block)
end
def log_info(details)
details = base_log_data.merge(details)
details = details.to_yaml if ActiveSupport::Logger.logger_outputs_to?(logger, STDOUT)
logger.info(details)
end
def with_gc_stats
GC.start # perform a full mark-and-sweep
stats_before = GC.stat
result = yield
yield
stats_after = GC.stat
@gc_stats = stats_after.map do |key, after_value|
before_value = stats_before[key]
[key, before: before_value, after: after_value, diff: after_value - before_value]
end.to_h
result
end
def with_measure_time
result = nil
@time_to_finish = Benchmark.realtime do
result = yield
yield
end
end
result
def log_info(details)
details = base_log_data.merge(details)
details = details.to_yaml if ActiveSupport::Logger.logger_outputs_to?(logger, STDOUT)
logger.info(details)
end
def duration_in_numbers(duration_in_seconds)

View file

@ -2,12 +2,24 @@
module Gitlab
module WithRequestStore
def with_request_store
def with_request_store(&block)
# Skip enabling the request store if it was already active. Whatever
# instantiated the request store first is responsible for clearing it
return yield if RequestStore.active?
enabling_request_store(&block)
end
private
def enabling_request_store
RequestStore.begin!
yield
ensure
RequestStore.end!
RequestStore.clear!
end
extend self
end
end

View file

@ -3,6 +3,6 @@
FactoryBot.define do
factory :identity do
provider { 'ldapmain' }
extern_uid { 'my-ldap-id' }
sequence(:extern_uid) { |n| "my-ldap-id-#{n}" }
end
end

Binary file not shown.

BIN
spec/fixtures/legacy_group_export.tar.gz vendored Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -43,6 +43,7 @@ import {
setFileCollapsed,
setExpandedDiffLines,
setSuggestPopoverDismissed,
changeCurrentCommit,
} from '~/diffs/store/actions';
import eventHub from '~/notes/event_hub';
import * as types from '~/diffs/store/mutation_types';
@ -1347,4 +1348,62 @@ describe('DiffsStoreActions', () => {
);
});
});
describe('changeCurrentCommit', () => {
  // Happy path: the action clears the diff file list, rewrites the three
  // endpoints to point at the new commit id, and re-requests diff metadata.
  it('commits the new commit information and re-requests the diff metadata for the commit', () => {
    return testAction(
      changeCurrentCommit,
      { commitId: 'NEW' },
      {
        commit: {
          id: 'OLD',
        },
        endpoint: 'URL/OLD',
        endpointBatch: 'URL/OLD',
        endpointMetadata: 'URL/OLD',
      },
      [
        { type: types.SET_DIFF_FILES, payload: [] },
        {
          type: types.SET_BASE_CONFIG,
          payload: {
            commit: {
              id: 'OLD', // Not a typo: the action fired next will overwrite all of the `commit` in state
            },
            endpoint: 'URL/NEW',
            endpointBatch: 'URL/NEW',
            endpointMetadata: 'URL/NEW',
          },
        },
      ],
      [{ type: 'fetchDiffFilesMeta' }],
    );
  });

  // Error paths: a missing commitId or a state without a current commit
  // must reject with a descriptive message and fire no mutations/actions.
  it.each`
    commitId     | commit           | msg
    ${undefined} | ${{ id: 'OLD' }} | ${'`commitId` is a required argument'}
    ${'NEW'}     | ${null}          | ${'`state` must already contain a valid `commit`'}
    ${undefined} | ${null}          | ${'`commitId` is a required argument'}
  `(
    'returns a rejected promise with the error message $msg given `{ "commitId": $commitId, "state.commit": $commit }`',
    ({ commitId, commit, msg }) => {
      const err = new Error(msg);
      const actionReturn = testAction(
        changeCurrentCommit,
        { commitId },
        {
          endpoint: 'URL/OLD',
          endpointBatch: 'URL/OLD',
          endpointMetadata: 'URL/OLD',
          commit,
        },
        [],
        [],
      );

      return expect(actionReturn).rejects.toStrictEqual(err);
    },
  );
});
});

View file

@ -0,0 +1,33 @@
import $ from 'jquery';
import { getAjaxUsersSelectOptions, getAjaxUsersSelectParams } from '~/users_select/utils';

// Expected camelCase options derived from the select's data attributes.
const expectedOptions = {
  fooBar: 'baz',
  activeUserId: 1,
};

describe('getAjaxUsersSelectOptions', () => {
  it('returns options built from select data attributes', () => {
    const $select = $('<select />', { 'data-foo-bar': 'baz', 'data-user-id': 1 });

    const result = getAjaxUsersSelectOptions($select, {
      fooBar: 'fooBar',
      activeUserId: 'user-id',
    });

    expect(result).toEqual(expectedOptions);
  });
});

describe('getAjaxUsersSelectParams', () => {
  it('returns query parameters built from provided options', () => {
    // Maps snake_case query params to the option keys they come from; a
    // missing option yields null rather than being dropped.
    const paramsMap = {
      foo_bar: 'fooBar',
      active_user_id: 'activeUserId',
      non_existent_key: 'nonExistentKey',
    };

    expect(getAjaxUsersSelectParams(expectedOptions, paramsMap)).toEqual({
      foo_bar: 'baz',
      active_user_id: 1,
      non_existent_key: null,
    });
  });
});

View file

@ -35,10 +35,12 @@ describe('getStateKey', () => {
expect(bound()).toEqual('autoMergeEnabled');
context.canMerge = true;
context.isSHAMismatch = true;
expect(bound()).toEqual('shaMismatch');
context.canMerge = false;
context.isPipelineBlocked = true;
expect(bound()).toEqual('pipelineBlocked');
@ -100,4 +102,26 @@ describe('getStateKey', () => {
expect(bound()).toEqual('rebase');
});
it.each`
canMerge | isSHAMismatch | stateKey
${true} | ${true} | ${'shaMismatch'}
${false} | ${true} | ${'notAllowedToMerge'}
${false} | ${false} | ${'notAllowedToMerge'}
`(
'returns $stateKey when canMerge is $canMerge and isSHAMismatch is $isSHAMismatch',
({ canMerge, isSHAMismatch, stateKey }) => {
const bound = getStateKey.bind(
{
canMerge,
isSHAMismatch,
},
{
commits_count: 2,
},
);
expect(bound()).toEqual(stateKey);
},
);
});

View file

@ -2264,14 +2264,14 @@ module Gitlab
config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be .pre, build, test, deploy, .post")
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post")
end
it "returns errors if job stage is not a defined stage" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be .pre, build, test, .post")
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, .post")
end
it "returns errors if stages is not an array" do

View file

@ -0,0 +1,200 @@
# frozen_string_literal: true
require 'spec_helper'
# Integration-style specs: restore fixture group exports and verify the
# resulting group hierarchy, attributes and members.
describe Gitlab::ImportExport::Group::TreeRestorer do
  include ImportExport::CommonUtil

  describe 'restore group tree' do
    before_all do
      # Using an admin for import, so we can check assignment of existing members
      user = create(:admin, email: 'root@gitlabexample.com')
      create(:user, email: 'adriene.mcclure@gitlabexample.com')
      create(:user, email: 'gwendolyn_robel@gitlabexample.com')

      RSpec::Mocks.with_temporary_scope do
        @group = create(:group, name: 'group', path: 'group')
        @shared = Gitlab::ImportExport::Shared.new(@group)
        setup_import_export_config('group_exports/complex')

        group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group)

        expect(group_tree_restorer.restore).to be_truthy
      end
    end

    after(:context) do
      cleanup_artifacts_from_extract_archive('group_exports/complex')
    end

    it 'has the group description' do
      expect(Group.find_by_path('group').description).to eq('Group Description')
    end

    it 'has group labels' do
      expect(@group.labels.count).to eq(10)
    end

    context 'issue boards' do
      it 'has issue boards' do
        expect(@group.boards.count).to eq(1)
      end

      it 'has board label lists' do
        lists = @group.boards.find_by(name: 'first board').lists

        expect(lists.count).to eq(3)
        expect(lists.first.label.title).to eq('TSL')
        expect(lists.second.label.title).to eq('Sosync')
      end
    end

    it 'has badges' do
      expect(@group.badges.count).to eq(1)
    end

    it 'has milestones' do
      expect(@group.milestones.count).to eq(5)
    end

    it 'has group children' do
      expect(@group.children.count).to eq(2)
    end

    it 'has group members' do
      expect(@group.members.map(&:user).map(&:email)).to contain_exactly(
        'root@gitlabexample.com',
        'adriene.mcclure@gitlabexample.com',
        'gwendolyn_robel@gitlabexample.com'
      )
    end
  end

  context 'child with no parent' do
    let(:user) { create(:user) }
    let(:group) { create(:group) }
    let(:shared) { Gitlab::ImportExport::Shared.new(group) }
    let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }

    before do
      setup_import_export_config('group_exports/child_with_no_parent')

      expect(group_tree_restorer.restore).to be_falsey
    end

    after do
      cleanup_artifacts_from_extract_archive('group_exports/child_with_no_parent')
    end

    it 'fails when a child group does not have a valid parent_id' do
      expect(shared.errors).to include('Parent group not found')
    end
  end

  context 'excluded attributes' do
    let!(:source_user) { create(:user, id: 123) }
    let!(:importer_user) { create(:user) }
    let(:group) { create(:group, name: 'user-inputed-name', path: 'user-inputed-path') }
    let(:shared) { Gitlab::ImportExport::Shared.new(group) }
    let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group) }
    let(:exported_file) { File.join(shared.export_path, 'tree/groups/4352.json') }
    let(:group_json) { ActiveSupport::JSON.decode(IO.read(exported_file)) }

    shared_examples 'excluded attributes' do
      # Attributes that must never be taken from the export file.
      excluded_attributes = %w[
        id
        parent_id
        owner_id
        created_at
        updated_at
        runners_token
        runners_token_encrypted
        saml_discovery_token
      ]

      before do
        group.add_owner(importer_user)

        setup_import_export_config('group_exports/complex')

        expect(File.exist?(exported_file)).to be_truthy

        group_tree_restorer.restore
        group.reload
      end

      after do
        cleanup_artifacts_from_extract_archive('group_exports/complex')
      end

      it 'does not import root group name' do
        expect(group.name).to eq('user-inputed-name')
      end

      it 'does not import root group path' do
        expect(group.path).to eq('user-inputed-path')
      end

      excluded_attributes.each do |excluded_attribute|
        it 'does not allow override of excluded attributes' do
          # Only meaningful when the restored group has a value to compare.
          unless group.public_send(excluded_attribute).nil?
            expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
          end
        end
      end
    end

    include_examples 'excluded attributes'
  end

  context 'group.json file access check' do
    let(:user) { create(:user) }
    let!(:group) { create(:group, name: 'group2', path: 'group2') }
    let(:shared) { Gitlab::ImportExport::Shared.new(group) }
    let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }

    it 'does not read a symlink' do
      Dir.mktmpdir do |tmpdir|
        FileUtils.mkdir_p(File.join(tmpdir, 'tree', 'groups'))
        setup_symlink(tmpdir, 'tree/groups/_all.ndjson')
        allow(shared).to receive(:export_path).and_return(tmpdir)

        expect(group_tree_restorer.restore).to eq(false)
        expect(shared.errors).to include('Incorrect JSON format')
      end
    end
  end

  context 'group visibility levels' do
    let(:user) { create(:user) }
    let(:shared) { Gitlab::ImportExport::Shared.new(group) }
    let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group) }

    before do
      setup_import_export_config(filepath)

      group_tree_restorer.restore
    end

    after do
      cleanup_artifacts_from_extract_archive(filepath)
    end

    shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
      context "when visibility level is #{visibility_level}" do
        let(:group) { create(:group, visibility_level) }
        let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }

        it "imports all subgroups as #{visibility_level}" do
          expect(group.children.map(&:visibility_level)).to eq(expected_visibilities)
        end
      end
    end

    # Subgroup visibility is clamped to the parent's level
    # (20 public, 10 internal, 0 private).
    include_examples 'with visibility level', :public, [20, 10, 0]
    include_examples 'with visibility level', :private, [0, 0, 0]
    include_examples 'with visibility level', :internal, [10, 10, 0]
  end
end

View file

@ -32,8 +32,7 @@ describe Gitlab::SidekiqMiddleware do
described_class.server_configurator(
metrics: metrics,
arguments_logger: arguments_logger,
memory_killer: memory_killer,
request_store: request_store
memory_killer: memory_killer
).call(chain)
example.run
@ -77,13 +76,11 @@ describe Gitlab::SidekiqMiddleware do
let(:metrics) { false }
let(:arguments_logger) { false }
let(:memory_killer) { false }
let(:request_store) { false }
let(:disabled_sidekiq_middlewares) do
[
Gitlab::SidekiqMiddleware::ServerMetrics,
Gitlab::SidekiqMiddleware::ArgumentsLogger,
Gitlab::SidekiqMiddleware::MemoryKiller,
Gitlab::SidekiqMiddleware::RequestStoreMiddleware
Gitlab::SidekiqMiddleware::MemoryKiller
]
end
@ -94,7 +91,6 @@ describe Gitlab::SidekiqMiddleware do
let(:metrics) { true }
let(:arguments_logger) { true }
let(:memory_killer) { true }
let(:request_store) { true }
let(:disabled_sidekiq_middlewares) { [] }
it_behaves_like "a server middleware chain"

View file

@ -3,7 +3,7 @@
require 'fast_spec_helper'
describe Gitlab::Utils::Measuring do
describe '#execute_with' do
describe '.execute_with' do
let(:measurement_logger) { double(:logger) }
let(:base_log_data) do
{

View file

@ -0,0 +1,30 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'request_store'
describe Gitlab::WithRequestStore do
  # Anonymous includer so the concern is exercised without touching any
  # real class that mixes it in.
  let(:fake_class) { Class.new { include Gitlab::WithRequestStore } }

  subject(:object) { fake_class.new }

  describe "#with_request_store" do
    it 'starts a request store and yields control' do
      # begin!/end!/clear! must run in exactly this order around the block.
      expect(RequestStore).to receive(:begin!).ordered
      expect(RequestStore).to receive(:end!).ordered
      expect(RequestStore).to receive(:clear!).ordered

      expect { |b| object.with_request_store(&b) }.to yield_control
    end

    it 'only starts a request store once when nested' do
      # Nested calls must reuse the already-active store rather than
      # starting (and later clearing) a second one.
      expect(RequestStore).to receive(:begin!).ordered.once.and_call_original
      expect(RequestStore).to receive(:end!).ordered.once.and_call_original
      expect(RequestStore).to receive(:clear!).ordered.once.and_call_original

      object.with_request_store do
        expect { |b| object.with_request_store(&b) }.to yield_control
      end
    end
  end
end

View file

@ -3,7 +3,7 @@
require 'spec_helper'
describe Groups::ImportExport::ImportService do
describe '#execute' do
describe '#execute with TreeRestorer' do
let(:user) { create(:admin) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
@ -14,6 +14,8 @@ describe Groups::ImportExport::ImportService do
subject { service.execute }
before do
stub_feature_flags(group_import_export_ndjson: true)
ImportExportUpload.create(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
@ -89,6 +91,108 @@ describe Groups::ImportExport::ImportService do
expect(subject).to be_truthy
end
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
expect(import_logger).to receive(:info).with(
group_id: group.id,
group_name: group.name,
message: 'Group Import/Export: Import succeeded'
)
subject
end
end
end
describe '#execute with LegacyTreeRestorer' do
let(:user) { create(:admin) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject { service.execute }
before do
stub_feature_flags(group_import_export_ndjson: false)
ImportExportUpload.create(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:error)
allow(import_logger).to receive(:info)
end
context 'when user has correct permissions' do
it 'imports group structure successfully' do
expect(subject).to be_truthy
end
it 'removes import file' do
subject
expect(group.import_export_upload.import_file.file).to be_nil
end
it 'logs the import success' do
expect(import_logger).to receive(:info).with(
group_id: group.id,
group_name: group.name,
message: 'Group Import/Export: Import succeeded'
).once
subject
end
end
context 'when user does not have correct permissions' do
let(:user) { create(:user) }
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
group_id: group.id,
group_name: group.name,
message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
end
it 'tracks the error' do
shared = Gitlab::ImportExport::Shared.new(group)
allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
expect(shared).to receive(:error) do |param|
expect(param.message).to include 'does not have required permissions for'
end
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
end
end
context 'when there are errors with the import file' do
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_symlink_export.tar.gz') }
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
group_id: group.id,
group_name: group.name,
message: a_string_including('Errors occurred')
).once
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
end
end
context 'when there are errors with the sub-relations' do
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export_invalid_subrelations.tar.gz') }
it 'successfully imports the group' do
expect(subject).to be_truthy
end
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)

View file

@ -177,5 +177,56 @@ describe Projects::ImportExport::ExportService do
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
end
# Verifies that ExportService forwards measurement options to
# Gitlab::Utils::Measuring.execute_with with the expected log payload.
context 'when measurable params are provided' do
  let(:base_log_data) do
    {
      class: described_class.name,
      current_user: user.name,
      project_full_path: project.full_path,
      file_path: shared.export_path
    }
  end

  subject(:service) { described_class.new(project, user) }

  context 'when measurement is enabled' do
    let(:logger) { double(:logger) }
    let(:measurable_options) do
      {
        measurement_enabled: true,
        measurement_logger: logger
      }
    end

    before do
      allow(logger).to receive(:info)
    end

    it 'measure service execution with Gitlab::Utils::Measuring' do
      expect(Gitlab::Utils::Measuring).to receive(:execute_with).with(true, logger, base_log_data).and_call_original
      expect_next_instance_of(Gitlab::Utils::Measuring) do |measuring|
        expect(measuring).to receive(:with_measuring).and_call_original
      end

      service.execute(after_export_strategy, measurable_options)
    end
  end

  context 'when measurement is disabled' do
    let(:measurable_options) do
      {
        measurement_enabled: false
      }
    end

    it 'does not measure service execution' do
      # execute_with is still invoked (with enabled=false, no logger), but
      # no Measuring instance may be created.
      expect(Gitlab::Utils::Measuring).to receive(:execute_with).with(false, nil, base_log_data).and_call_original
      expect(Gitlab::Utils::Measuring).not_to receive(:new)

      service.execute(after_export_strategy, measurable_options)
    end
  end
end
end
end

View file

@ -286,12 +286,7 @@ RSpec.configure do |config|
end
config.around(:example, :request_store) do |example|
RequestStore.begin!
example.run
RequestStore.end!
RequestStore.clear!
Gitlab::WithRequestStore.with_request_store { example.run }
end
config.around do |example|
@ -305,12 +300,10 @@ RSpec.configure do |config|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: false, # The metrics don't go anywhere in tests
arguments_logger: false, # We're not logging the regular messages for inline jobs
memory_killer: false, # This is not a thing we want to do inline in tests
# Don't enable this if the request store is active in the spec itself
# This needs to run within the `request_store` around block defined above
request_store: !RequestStore.active?
memory_killer: false # This is not a thing we want to do inline in tests
).call(chain)
chain.add DisableQueryLimit
chain.insert_after ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware, IsolatedRequestStore
example.run
end

View file

@ -31,3 +31,16 @@ class DisableQueryLimit
end
end
end
# When running `Sidekiq::Testing.inline!` each job uses a request store.
# This middleware makes sure the values don't leak into each other.
class IsolatedRequestStore
  # Sidekiq server middleware: runs the job against a fresh RequestStore and
  # restores the caller's store afterwards.
  #
  # @param _worker [Object] the worker instance (unused)
  # @param msg [Hash] the Sidekiq job payload (unused)
  # @param queue [String] the queue name (unused)
  def call(_worker, msg, queue)
    old_store = RequestStore.store.dup
    RequestStore.clear!

    yield
  ensure
    # Restore in an ensure block so a job that raises cannot leave the
    # surrounding spec (or another inline job) with a clobbered store.
    RequestStore.store = old_store
  end
end