diff --git a/app/assets/images/vulnerability/kontra-logo.svg b/app/assets/images/vulnerability/kontra-logo.svg
new file mode 100644
index 00000000000..e12e2545e77
--- /dev/null
+++ b/app/assets/images/vulnerability/kontra-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/app/assets/images/vulnerability/scw-logo.svg b/app/assets/images/vulnerability/scw-logo.svg
new file mode 100644
index 00000000000..6d160ddc495
--- /dev/null
+++ b/app/assets/images/vulnerability/scw-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/app/assets/javascripts/security_configuration/components/constants.js b/app/assets/javascripts/security_configuration/components/constants.js
index 3def5e9d6b7..c498c87e72f 100644
--- a/app/assets/javascripts/security_configuration/components/constants.js
+++ b/app/assets/javascripts/security_configuration/components/constants.js
@@ -284,9 +284,9 @@ export const AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY =
// https://gitlab.com/gitlab-org/gitlab/-/issues/346899
export const TEMP_PROVIDER_LOGOS = {
Kontra: {
- svg: '/assets/illustrations/vulnerability/kontra-logo.svg',
+ svg: '/assets/vulnerability/kontra-logo.svg',
},
[__('Secure Code Warrior')]: {
- svg: '/assets/illustrations/vulnerability/scw-logo.svg',
+ svg: '/assets/vulnerability/scw-logo.svg',
},
};
diff --git a/app/assets/javascripts/security_configuration/graphql/security_training_vulnerability.query.graphql b/app/assets/javascripts/security_configuration/graphql/security_training_vulnerability.query.graphql
index 2edaa685a18..f0474614dab 100644
--- a/app/assets/javascripts/security_configuration/graphql/security_training_vulnerability.query.graphql
+++ b/app/assets/javascripts/security_configuration/graphql/security_training_vulnerability.query.graphql
@@ -1,13 +1,10 @@
-query getSecurityTrainingVulnerability($id: ID!) {
- vulnerability(id: $id) @client {
+query getSecurityTrainingUrls($projectFullPath: ID!, $identifierExternalIds: [String!]!) {
+ project(fullPath: $projectFullPath) {
id
- identifiers {
- externalType
- }
- securityTrainingUrls {
+ securityTrainingUrls(identifierExternalIds: $identifierExternalIds) {
name
- url
status
+ url
}
}
}
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index 5f39a8419c9..04f311f58e9 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -42,6 +42,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:refactor_mr_widgets_extensions, project, default_enabled: :yaml)
push_frontend_feature_flag(:rebase_without_ci_ui, project, default_enabled: :yaml)
push_frontend_feature_flag(:markdown_continue_lists, project, default_enabled: :yaml)
+ push_frontend_feature_flag(:secure_vulnerability_training, project, default_enabled: :yaml)
# Usage data feature flags
push_frontend_feature_flag(:users_expanding_widgets_usage_data, project, default_enabled: :yaml)
push_frontend_feature_flag(:diff_settings_usage_data, default_enabled: :yaml)
diff --git a/app/services/users/migrate_to_ghost_user_service.rb b/app/services/users/migrate_to_ghost_user_service.rb
index 575614e8743..604b83f621f 100644
--- a/app/services/users/migrate_to_ghost_user_service.rb
+++ b/app/services/users/migrate_to_ghost_user_service.rb
@@ -66,20 +66,20 @@ module Users
# rubocop: disable CodeReuse/ActiveRecord
def migrate_issues
- user.issues.update_all(author_id: ghost_user.id)
- Issue.where(last_edited_by_id: user.id).update_all(last_edited_by_id: ghost_user.id)
+ batched_migrate(Issue, :author_id)
+ batched_migrate(Issue, :last_edited_by_id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def migrate_merge_requests
- user.merge_requests.update_all(author_id: ghost_user.id)
- MergeRequest.where(merge_user_id: user.id).update_all(merge_user_id: ghost_user.id)
+ batched_migrate(MergeRequest, :author_id)
+ batched_migrate(MergeRequest, :merge_user_id)
end
# rubocop: enable CodeReuse/ActiveRecord
def migrate_notes
- user.notes.update_all(author_id: ghost_user.id)
+ batched_migrate(Note, :author_id)
end
def migrate_abuse_reports
@@ -96,8 +96,17 @@ module Users
end
def migrate_reviews
- user.reviews.update_all(author_id: ghost_user.id)
+ batched_migrate(Review, :author_id)
end
+
+ # rubocop:disable CodeReuse/ActiveRecord
+ def batched_migrate(base_scope, column)
+ loop do
+ update_count = base_scope.where(column => user.id).limit(100).update_all(column => ghost_user.id)
+ break if update_count == 0
+ end
+ end
+ # rubocop:enable CodeReuse/ActiveRecord
end
end
diff --git a/doc/ci/jobs/job_control.md b/doc/ci/jobs/job_control.md
index 3e0091b0baa..3a302cf352b 100644
--- a/doc/ci/jobs/job_control.md
+++ b/doc/ci/jobs/job_control.md
@@ -749,6 +749,38 @@ deploystacks:
- ${PROVIDER}-${STACK}
```
+#### Fetch artifacts from a `parallel:matrix` job
+
+You can fetch artifacts from a job created with [`parallel:matrix`](../yaml/index.md#parallelmatrix)
+by using the [`dependencies`](../yaml/index.md#dependencies) keyword. Use the job name
+as the value for `dependencies` as a string in the form:
+
+```plaintext
+<job_name>: [<matrix_argument_1>, <matrix_argument_2>, ... <matrix_argument_N>]
+```
+
+For example, to fetch the artifacts from the job with a `RUBY_VERSION` of `2.7` and
+a `PROVIDER` of `aws`:
+
+```yaml
+ruby:
+ image: ruby:${RUBY_VERSION}
+ parallel:
+ matrix:
+ - RUBY_VERSION: ["2.5", "2.6", "2.7", "3.0", "3.1"]
+ PROVIDER: [aws, gcp]
+ script: bundle install
+
+deploy:
+ image: ruby:2.7
+ stage: deploy
+ dependencies:
+ - "ruby: [2.7, aws]"
+ script: echo hello
+```
+
+Quotes around the `dependencies` entry are required.
+
## Use predefined CI/CD variables to run jobs only in specific pipeline types
You can use [predefined CI/CD variables](../variables/predefined_variables.md) to choose
diff --git a/doc/user/application_security/dast/index.md b/doc/user/application_security/dast/index.md
index 23e669aa859..fd6c39ffbf1 100644
--- a/doc/user/application_security/dast/index.md
+++ b/doc/user/application_security/dast/index.md
@@ -51,7 +51,7 @@ results. On failure, the analyzer outputs an
## Prerequisites
- [GitLab Runner](../../../ci/runners/index.md) available, with the
-[`docker` executor](https://docs.gitlab.com/runner/executors/docker.html).
+[`docker` executor](https://docs.gitlab.com/runner/executors/docker.html) on Linux/amd64.
- Target application deployed. For more details, read [Deployment options](#deployment-options).
- DAST runs in the `dast` stage, which must be added manually to your `.gitlab-ci.yml`.
diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md
index 056a31c276d..d3a79410eea 100644
--- a/doc/user/application_security/sast/index.md
+++ b/doc/user/application_security/sast/index.md
@@ -57,7 +57,7 @@ To run SAST jobs, by default, you need GitLab Runner with the
If you're using the shared runners on GitLab.com, this is enabled by default.
WARNING:
-Our SAST jobs require a Linux container type. Windows containers are not yet supported.
+Our SAST jobs require a Linux/amd64 container type. Windows containers are not yet supported.
WARNING:
If you use your own runners, make sure the Docker version installed
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 163446998e9..4ef76b0aaf3 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -2,14 +2,11 @@
module Backup
class Artifacts < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('artifacts', JobArtifactUploader.root, excludes: ['tmp'])
+ super(progress, 'artifacts', JobArtifactUploader.root, excludes: ['tmp'])
end
+ override :human_name
def human_name
_('artifacts')
end
diff --git a/lib/backup/builds.rb b/lib/backup/builds.rb
index 51a68ca933d..fbf932e3f6b 100644
--- a/lib/backup/builds.rb
+++ b/lib/backup/builds.rb
@@ -2,14 +2,11 @@
module Backup
class Builds < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('builds', Settings.gitlab_ci.builds_path)
+ super(progress, 'builds', Settings.gitlab_ci.builds_path)
end
+ override :human_name
def human_name
_('builds')
end
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
index de26dbab038..afc84a4b913 100644
--- a/lib/backup/database.rb
+++ b/lib/backup/database.rb
@@ -3,10 +3,10 @@
require 'yaml'
module Backup
- class Database
+ class Database < Task
+ extend ::Gitlab::Utils::Override
include Backup::Helper
- attr_reader :progress
- attr_reader :config, :db_file_name
+ attr_reader :force, :config
IGNORED_ERRORS = [
# Ignore warnings
@@ -18,13 +18,14 @@ module Backup
].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
- def initialize(progress, filename: nil)
- @progress = progress
+ def initialize(progress, force:)
+ super(progress)
@config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash
- @db_file_name = filename || File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz')
+ @force = force
end
- def dump
+ override :dump
+ def dump(db_file_name)
FileUtils.mkdir_p(File.dirname(db_file_name))
FileUtils.rm_f(db_file_name)
compress_rd, compress_wr = IO.pipe
@@ -64,12 +65,24 @@ module Backup
raise DatabaseBackupError.new(config, db_file_name) unless success
end
- def restore
+ override :restore
+ def restore(db_file_name)
+ unless force
+ progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
+ sleep(5)
+ end
+
+ # Drop all tables. Load the schema to ensure we don't have any newer tables
+ # hanging out from a failed upgrade
+ puts_time 'Cleaning the database ... '.color(:blue)
+ Rake::Task['gitlab:db:drop_tables'].invoke
+ puts_time 'done'.color(:green)
+
decompress_rd, decompress_wr = IO.pipe
decompress_pid = spawn(*%w(gzip -cd), out: decompress_wr, in: db_file_name)
decompress_wr.close
- status, errors =
+ status, @errors =
case config[:adapter]
when "postgresql" then
progress.print "Restoring PostgreSQL database #{database} ... "
@@ -81,33 +94,47 @@ module Backup
Process.waitpid(decompress_pid)
success = $?.success? && status.success?
- if errors.present?
+ if @errors.present?
progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
- progress.print errors.join.color(:yellow)
+ progress.print @errors.join.color(:yellow)
progress.print "------ END ERRORS -------\n".color(:yellow)
end
report_success(success)
raise Backup::Error, 'Restore failed' unless success
-
- if errors.present?
- warning = <<~MSG
- There were errors in restoring the schema. This may cause
- issues if this results in missing indexes, constraints, or
- columns. Please record the errors above and contact GitLab
- Support if you have questions:
- https://about.gitlab.com/support/
- MSG
-
- warn warning.color(:red)
- Gitlab::TaskHelpers.ask_to_continue
- end
end
- def enabled
- true
+ override :pre_restore_warning
+ def pre_restore_warning
+ return if force
+
+ <<-MSG.strip_heredoc
+ Be sure to stop Puma, Sidekiq, and any other process that
+ connects to the database before proceeding. For Omnibus
+ installs, see the following link for more information:
+ https://docs.gitlab.com/ee/raketasks/backup_restore.html#restore-for-omnibus-gitlab-installations
+
+ Before restoring the database, we will remove all existing
+ tables to avoid future upgrade problems. Be aware that if you have
+ custom tables in the GitLab database these tables and all data will be
+ removed.
+ MSG
end
+ override :post_restore_warning
+ def post_restore_warning
+ return unless @errors.present?
+
+ <<-MSG.strip_heredoc
+ There were errors in restoring the schema. This may cause
+ issues if this results in missing indexes, constraints, or
+ columns. Please record the errors above and contact GitLab
+ Support if you have questions:
+ https://about.gitlab.com/support/
+ MSG
+ end
+
+ override :human_name
def human_name
_('database')
end
diff --git a/lib/backup/files.rb b/lib/backup/files.rb
index db6278360a3..7fa07e40cee 100644
--- a/lib/backup/files.rb
+++ b/lib/backup/files.rb
@@ -1,25 +1,27 @@
# frozen_string_literal: true
require 'open3'
-require_relative 'helper'
module Backup
- class Files
+ class Files < Task
+ extend ::Gitlab::Utils::Override
include Backup::Helper
DEFAULT_EXCLUDE = 'lost+found'
- attr_reader :name, :backup_tarball, :excludes
+ attr_reader :name, :excludes
+
+ def initialize(progress, name, app_files_dir, excludes: [])
+ super(progress)
- def initialize(name, app_files_dir, excludes: [])
@name = name
@app_files_dir = app_files_dir
- @backup_tarball = File.join(Gitlab.config.backup.path, name + '.tar.gz')
@excludes = [DEFAULT_EXCLUDE].concat(excludes)
end
# Copy files from public/files to backup/files
- def dump
+ override :dump
+ def dump(backup_tarball)
FileUtils.mkdir_p(Gitlab.config.backup.path)
FileUtils.rm_f(backup_tarball)
@@ -35,7 +37,7 @@ module Backup
unless status == 0
puts output
- raise_custom_error
+ raise_custom_error(backup_tarball)
end
tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{backup_files_realpath} -cf - .]].flatten
@@ -47,11 +49,12 @@ module Backup
end
unless pipeline_succeeded?(tar_status: status_list[0], gzip_status: status_list[1], output: output)
- raise_custom_error
+ raise_custom_error(backup_tarball)
end
end
- def restore
+ override :restore
+ def restore(backup_tarball)
backup_existing_files_dir
cmd_list = [%w[gzip -cd], %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
@@ -61,10 +64,6 @@ module Backup
end
end
- def enabled
- true
- end
-
def tar
if system(*%w[gtar --version], out: '/dev/null')
# It looks like we can get GNU tar by running 'gtar'
@@ -146,7 +145,7 @@ module Backup
end
end
- def raise_custom_error
+ def raise_custom_error(backup_tarball)
raise FileBackupError.new(app_files_realpath, backup_tarball)
end
diff --git a/lib/backup/gitaly_backup.rb b/lib/backup/gitaly_backup.rb
index 8ac09e94004..149aa00c2ce 100644
--- a/lib/backup/gitaly_backup.rb
+++ b/lib/backup/gitaly_backup.rb
@@ -15,7 +15,7 @@ module Backup
@storage_parallelism = storage_parallelism
end
- def start(type)
+ def start(type, backup_repos_path)
raise Error, 'already started' if started?
command = case type
@@ -93,10 +93,6 @@ module Backup
@thread.present?
end
- def backup_repos_path
- File.absolute_path(File.join(Gitlab.config.backup.path, 'repositories'))
- end
-
def bin_path
File.absolute_path(Gitlab.config.backup.gitaly_backup_path)
end
diff --git a/lib/backup/gitaly_rpc_backup.rb b/lib/backup/gitaly_rpc_backup.rb
index bbd83cd2157..89ed27cfa13 100644
--- a/lib/backup/gitaly_rpc_backup.rb
+++ b/lib/backup/gitaly_rpc_backup.rb
@@ -7,10 +7,11 @@ module Backup
@progress = progress
end
- def start(type)
+ def start(type, backup_repos_path)
raise Error, 'already started' if @type
@type = type
+ @backup_repos_path = backup_repos_path
case type
when :create
FileUtils.rm_rf(backup_repos_path)
@@ -31,7 +32,7 @@ module Backup
backup_restore = BackupRestore.new(
progress,
repository_type.repository_for(container),
- backup_repos_path
+ @backup_repos_path
)
case @type
@@ -52,10 +53,6 @@ module Backup
attr_reader :progress
- def backup_repos_path
- @backup_repos_path ||= File.join(Gitlab.config.backup.path, 'repositories')
- end
-
class BackupRestore
attr_accessor :progress, :repository, :backup_repos_path
diff --git a/lib/backup/lfs.rb b/lib/backup/lfs.rb
index 17f7b8bf8b0..e92f235a2d7 100644
--- a/lib/backup/lfs.rb
+++ b/lib/backup/lfs.rb
@@ -2,14 +2,11 @@
module Backup
class Lfs < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('lfs', Settings.lfs.storage_path)
+ super(progress, 'lfs', Settings.lfs.storage_path)
end
+ override :human_name
def human_name
_('lfs objects')
end
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index 5b393cf9477..cefa1c34ac3 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -2,37 +2,77 @@
module Backup
class Manager
- ARCHIVES_TO_BACKUP = %w[uploads builds artifacts pages lfs terraform_state registry packages].freeze
- FOLDERS_TO_BACKUP = %w[repositories db].freeze
FILE_NAME_SUFFIX = '_gitlab_backup.tar'
+ MANIFEST_NAME = 'backup_information.yml'
+
+ TaskDefinition = Struct.new(
+ :destination_path, # Where the task should put its backup file/dir.
+ :destination_optional, # `true` if the destination might not exist on a successful backup.
+ :cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
+ :task,
+ keyword_init: true
+ )
attr_reader :progress
- def initialize(progress)
+ def initialize(progress, definitions: nil)
@progress = progress
max_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_CONCURRENCY', 1).to_i
max_storage_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 1).to_i
+ force = ENV['force'] == 'yes'
- @tasks = {
- 'db' => Database.new(progress),
- 'repositories' => Repositories.new(progress,
- strategy: repository_backup_strategy,
- max_concurrency: max_concurrency,
- max_storage_concurrency: max_storage_concurrency),
- 'uploads' => Uploads.new(progress),
- 'builds' => Builds.new(progress),
- 'artifacts' => Artifacts.new(progress),
- 'pages' => Pages.new(progress),
- 'lfs' => Lfs.new(progress),
- 'terraform_state' => TerraformState.new(progress),
- 'registry' => Registry.new(progress),
- 'packages' => Packages.new(progress)
+ @definitions = definitions || {
+ 'db' => TaskDefinition.new(
+ destination_path: 'db/database.sql.gz',
+ cleanup_path: 'db',
+ task: Database.new(progress, force: force)
+ ),
+ 'repositories' => TaskDefinition.new(
+ destination_path: 'repositories',
+ destination_optional: true,
+ task: Repositories.new(progress,
+ strategy: repository_backup_strategy,
+ max_concurrency: max_concurrency,
+ max_storage_concurrency: max_storage_concurrency)
+ ),
+ 'uploads' => TaskDefinition.new(
+ destination_path: 'uploads.tar.gz',
+ task: Uploads.new(progress)
+ ),
+ 'builds' => TaskDefinition.new(
+ destination_path: 'builds.tar.gz',
+ task: Builds.new(progress)
+ ),
+ 'artifacts' => TaskDefinition.new(
+ destination_path: 'artifacts.tar.gz',
+ task: Artifacts.new(progress)
+ ),
+ 'pages' => TaskDefinition.new(
+ destination_path: 'pages.tar.gz',
+ task: Pages.new(progress)
+ ),
+ 'lfs' => TaskDefinition.new(
+ destination_path: 'lfs.tar.gz',
+ task: Lfs.new(progress)
+ ),
+ 'terraform_state' => TaskDefinition.new(
+ destination_path: 'terraform_state.tar.gz',
+ task: TerraformState.new(progress)
+ ),
+ 'registry' => TaskDefinition.new(
+ destination_path: 'registry.tar.gz',
+ task: Registry.new(progress)
+ ),
+ 'packages' => TaskDefinition.new(
+ destination_path: 'packages.tar.gz',
+ task: Packages.new(progress)
+ )
}.freeze
end
def create
- @tasks.keys.each do |task_name|
+ @definitions.keys.each do |task_name|
run_create_task(task_name)
end
@@ -54,11 +94,11 @@ module Backup
end
def run_create_task(task_name)
- task = @tasks[task_name]
+ definition = @definitions[task_name]
- puts_time "Dumping #{task.human_name} ... ".color(:blue)
+ puts_time "Dumping #{definition.task.human_name} ... ".color(:blue)
- unless task.enabled
+ unless definition.task.enabled
puts_time "[DISABLED]".color(:cyan)
return
end
@@ -68,7 +108,8 @@ module Backup
return
end
- task.dump
+ definition.task.dump(File.join(Gitlab.config.backup.path, definition.destination_path))
+
puts_time "done".color(:green)
rescue Backup::DatabaseBackupError, Backup::FileBackupError => e
@@ -79,39 +120,7 @@ module Backup
cleanup_required = unpack
verify_backup_version
- unless skipped?('db')
- begin
- unless ENV['force'] == 'yes'
- warning = <<-MSG.strip_heredoc
- Be sure to stop Puma, Sidekiq, and any other process that
- connects to the database before proceeding. For Omnibus
- installs, see the following link for more information:
- https://docs.gitlab.com/ee/raketasks/backup_restore.html#restore-for-omnibus-gitlab-installations
-
- Before restoring the database, we will remove all existing
- tables to avoid future upgrade problems. Be aware that if you have
- custom tables in the GitLab database these tables and all data will be
- removed.
- MSG
- puts warning.color(:red)
- Gitlab::TaskHelpers.ask_to_continue
- puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
- sleep(5)
- end
-
- # Drop all tables Load the schema to ensure we don't have any newer tables
- # hanging out from a failed upgrade
- puts_time 'Cleaning the database ... '.color(:blue)
- Rake::Task['gitlab:db:drop_tables'].invoke
- puts_time 'done'.color(:green)
- run_restore_task('db')
- rescue Gitlab::TaskAbortedByUserError
- puts "Quitting...".color(:red)
- exit 1
- end
- end
-
- @tasks.except('db').keys.each do |task_name|
+ @definitions.keys.each do |task_name|
run_restore_task(task_name) unless skipped?(task_name)
end
@@ -130,25 +139,44 @@ module Backup
end
def run_restore_task(task_name)
- task = @tasks[task_name]
+ definition = @definitions[task_name]
- puts_time "Restoring #{task.human_name} ... ".color(:blue)
+ puts_time "Restoring #{definition.task.human_name} ... ".color(:blue)
- unless task.enabled
+ unless definition.task.enabled
puts_time "[DISABLED]".color(:cyan)
return
end
- task.restore
+ warning = definition.task.pre_restore_warning
+ if warning.present?
+ puts_time warning.color(:red)
+ Gitlab::TaskHelpers.ask_to_continue
+ end
+
+ definition.task.restore(File.join(Gitlab.config.backup.path, definition.destination_path))
+
puts_time "done".color(:green)
+
+ warning = definition.task.post_restore_warning
+ if warning.present?
+ puts_time warning.color(:red)
+ Gitlab::TaskHelpers.ask_to_continue
+ end
+
+ rescue Gitlab::TaskAbortedByUserError
+ puts_time "Quitting...".color(:red)
+ exit 1
end
+ private
+
def write_info
# Make sure there is a connection
ActiveRecord::Base.connection.reconnect!
Dir.chdir(backup_path) do
- File.open("#{backup_path}/backup_information.yml", "w+") do |file|
+ File.open("#{backup_path}/#{MANIFEST_NAME}", "w+") do |file|
file << backup_information.to_yaml.gsub(/^---\n/, '')
end
end
@@ -182,8 +210,11 @@ module Backup
upload = directory.files.create(create_attributes)
if upload
- progress.puts "done".color(:green)
- upload
+ if upload.respond_to?(:encryption) && upload.encryption
+ progress.puts "done (encrypted with #{upload.encryption})".color(:green)
+ else
+ progress.puts "done".color(:green)
+ end
else
puts "uploading backup to #{remote_directory} failed".color(:red)
raise Backup::Error, 'Backup failed'
@@ -193,18 +224,19 @@ module Backup
def cleanup
progress.print "Deleting tmp directories ... "
- backup_contents.each do |dir|
- next unless File.exist?(File.join(backup_path, dir))
-
- if FileUtils.rm_rf(File.join(backup_path, dir))
- progress.puts "done".color(:green)
- else
- puts "deleting tmp directory '#{dir}' failed".color(:red)
- raise Backup::Error, 'Backup failed'
- end
+ remove_backup_path(MANIFEST_NAME)
+ @definitions.each do |_, definition|
+ remove_backup_path(definition.cleanup_path || definition.destination_path)
end
end
+ def remove_backup_path(path)
+ return unless File.exist?(File.join(backup_path, path))
+
+ FileUtils.rm_rf(File.join(backup_path, path))
+ progress.puts "done".color(:green)
+ end
+
def remove_tmp
# delete tmp inside backups
progress.print "Deleting backups/tmp ... "
@@ -322,10 +354,8 @@ module Backup
settings[:skipped] && settings[:skipped].include?(item) || !enabled_task?(item)
end
- private
-
def enabled_task?(task_name)
- @tasks[task_name].enabled
+ @definitions[task_name].task.enabled
end
def backup_file?(file)
@@ -333,7 +363,7 @@ module Backup
end
def non_tarred_backup?
- File.exist?(File.join(backup_path, 'backup_information.yml'))
+ File.exist?(File.join(backup_path, MANIFEST_NAME))
end
def backup_path
@@ -380,19 +410,14 @@ module Backup
end
def backup_contents
- folders_to_backup + archives_to_backup + ["backup_information.yml"]
- end
-
- def archives_to_backup
- ARCHIVES_TO_BACKUP.map { |name| (name + ".tar.gz") unless skipped?(name) }.compact
- end
-
- def folders_to_backup
- FOLDERS_TO_BACKUP.select { |name| !skipped?(name) && Dir.exist?(File.join(backup_path, name)) }
+ [MANIFEST_NAME] + @definitions.reject do |name, definition|
+ skipped?(name) ||
+ (definition.destination_optional && !File.exist?(File.join(backup_path, definition.destination_path)))
+ end.values.map(&:destination_path)
end
def settings
- @settings ||= YAML.load_file("backup_information.yml")
+ @settings ||= YAML.load_file(MANIFEST_NAME)
end
def tar_file
diff --git a/lib/backup/packages.rb b/lib/backup/packages.rb
index 037ff31fd9b..9384e007162 100644
--- a/lib/backup/packages.rb
+++ b/lib/backup/packages.rb
@@ -2,14 +2,11 @@
module Backup
class Packages < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('packages', Settings.packages.storage_path, excludes: ['tmp'])
+ super(progress, 'packages', Settings.packages.storage_path, excludes: ['tmp'])
end
+ override :human_name
def human_name
_('packages')
end
diff --git a/lib/backup/pages.rb b/lib/backup/pages.rb
index 724972d212d..ebed6820724 100644
--- a/lib/backup/pages.rb
+++ b/lib/backup/pages.rb
@@ -6,14 +6,11 @@ module Backup
# if some of these files are still there, we don't need them in the backup
LEGACY_PAGES_TMP_PATH = '@pages.tmp'
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
+ super(progress, 'pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
end
+ override :human_name
def human_name
_('pages')
end
diff --git a/lib/backup/registry.rb b/lib/backup/registry.rb
index 7ba3a9e9c60..68ea635034d 100644
--- a/lib/backup/registry.rb
+++ b/lib/backup/registry.rb
@@ -2,18 +2,16 @@
module Backup
class Registry < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('registry', Settings.registry.path)
+ super(progress, 'registry', Settings.registry.path)
end
+ override :human_name
def human_name
_('container registry images')
end
+ override :enabled
def enabled
Gitlab.config.registry.enabled
end
diff --git a/lib/backup/repositories.rb b/lib/backup/repositories.rb
index e7c3e869928..3633ebd661e 100644
--- a/lib/backup/repositories.rb
+++ b/lib/backup/repositories.rb
@@ -3,16 +3,20 @@
require 'yaml'
module Backup
- class Repositories
+ class Repositories < Task
+ extend ::Gitlab::Utils::Override
+
def initialize(progress, strategy:, max_concurrency: 1, max_storage_concurrency: 1)
- @progress = progress
+ super(progress)
+
@strategy = strategy
@max_concurrency = max_concurrency
@max_storage_concurrency = max_storage_concurrency
end
- def dump
- strategy.start(:create)
+ override :dump
+ def dump(path)
+ strategy.start(:create, path)
# gitaly-backup is designed to handle concurrency on its own. So we want
# to avoid entering the buggy concurrency code here when gitaly-backup
@@ -50,8 +54,9 @@ module Backup
strategy.finish!
end
- def restore
- strategy.start(:restore)
+ override :restore
+ def restore(path)
+ strategy.start(:restore, path)
enqueue_consecutive
ensure
@@ -61,17 +66,14 @@ module Backup
restore_object_pools
end
- def enabled
- true
- end
-
+ override :human_name
def human_name
_('repositories')
end
private
- attr_reader :progress, :strategy, :max_concurrency, :max_storage_concurrency
+ attr_reader :strategy, :max_concurrency, :max_storage_concurrency
def check_valid_storages!
repository_storage_klasses.each do |klass|
diff --git a/lib/backup/task.rb b/lib/backup/task.rb
new file mode 100644
index 00000000000..15cd2aa64d3
--- /dev/null
+++ b/lib/backup/task.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Backup
+ class Task
+ def initialize(progress)
+ @progress = progress
+ end
+
+ # human readable task name used for logging
+ def human_name
+ raise NotImplementedError
+ end
+
+ # dump task backup to `path`
+ def dump(path)
+ raise NotImplementedError
+ end
+
+ # restore task backup from `path`
+ def restore(path)
+ raise NotImplementedError
+ end
+
+ # a string returned here will be displayed to the user before calling #restore
+ def pre_restore_warning
+ end
+
+ # a string returned here will be displayed to the user after calling #restore
+ def post_restore_warning
+ end
+
+ # returns `true` when the task should be used
+ def enabled
+ true
+ end
+
+ private
+
+ attr_reader :progress
+
+ def puts_time(msg)
+ progress.puts "#{Time.zone.now} -- #{msg}"
+ Gitlab::BackupLogger.info(message: "#{Rainbow.uncolor(msg)}")
+ end
+ end
+end
diff --git a/lib/backup/terraform_state.rb b/lib/backup/terraform_state.rb
index be82793fe03..05f61d248be 100644
--- a/lib/backup/terraform_state.rb
+++ b/lib/backup/terraform_state.rb
@@ -2,14 +2,11 @@
module Backup
class TerraformState < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
+ super(progress, 'terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
end
+ override :human_name
def human_name
_('terraform states')
end
diff --git a/lib/backup/uploads.rb b/lib/backup/uploads.rb
index 7048a9a8ff5..700f2af4415 100644
--- a/lib/backup/uploads.rb
+++ b/lib/backup/uploads.rb
@@ -2,14 +2,11 @@
module Backup
class Uploads < Backup::Files
- attr_reader :progress
-
def initialize(progress)
- @progress = progress
-
- super('uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
+ super(progress, 'uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
end
+ override :human_name
def human_name
_('uploads')
end
diff --git a/qa/bin/contract b/qa/bin/contract
deleted file mode 100755
index f1bd7efc94d..00000000000
--- a/qa/bin/contract
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require 'rake'
-
-host = ARGV.shift
-ENV['CONTRACT_HOST'] ||= host
-
-list = []
-
-loop do
- keyword = ARGV.shift
- case keyword
- when '--mr'
- ENV['CONTRACT_MR'] ||= ARGV.shift
- list.push 'test:merge_request'
- else
- break
- end
-end
-
-app = Rake.application
-
-Dir.chdir('contracts/provider') do
- app.init
- app.add_import 'Rakefile'
- app.load_rakefile
-
- list.each do |element|
- app[element].invoke
- end
-end
diff --git a/qa/contracts/provider/Rakefile b/qa/contracts/provider/Rakefile
deleted file mode 100644
index 00ca2355b11..00000000000
--- a/qa/contracts/provider/Rakefile
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'pact/tasks/verification_task'
-
-Pact::VerificationTask.new(:metadata) do |pact|
- pact.uri '../contracts/merge_request_page-merge_request_metadata_endpoint.json', pact_helper: './spec/metadata_helper.rb'
-end
-
-Pact::VerificationTask.new(:discussions) do |pact|
- pact.uri '../contracts/merge_request_page-merge_request_discussions_endpoint.json', pact_helper: './spec/discussions_helper.rb'
-end
-
-Pact::VerificationTask.new(:diffs) do |pact|
- pact.uri '../contracts/merge_request_page-merge_request_diffs_endpoint.json', pact_helper: './spec/diffs_helper.rb'
-end
-
-task 'test:merge_request' => ['pact:verify:metadata', 'pact:verify:discussions', 'pact:verify:diffs']
diff --git a/qa/contracts/provider/environments/base.rb b/qa/contracts/provider/environments/base.rb
index 36b97357417..695ee6b867d 100644
--- a/qa/contracts/provider/environments/base.rb
+++ b/qa/contracts/provider/environments/base.rb
@@ -1,25 +1,23 @@
# frozen_string_literal: true
-require 'faraday'
+module Provider
+ module Environments
+ class Base
+ attr_writer :base_url, :merge_request
-module Environments
- class Base
- attr_writer :base_url, :merge_request
+ def call(env)
+ @payload
+ end
- def call(env)
- @payload
- end
+ def http(endpoint)
+ Faraday.default_adapter = :net_http
+ response = Faraday.get(@base_url + endpoint)
+ @payload = [response.status, response.headers, [response.body]]
+ self
+ end
- def http(endpoint)
- Faraday.default_adapter = :net_http
- response = Faraday.get(@base_url + endpoint)
- @payload = [response.status, response.headers, [response.body]]
- self
- end
-
- def merge_request(endpoint)
- if endpoint.include? '.json'
- http(@merge_request + endpoint)
+ def merge_request(endpoint)
+ http(@merge_request + endpoint) if endpoint.include? '.json'
end
end
end
diff --git a/qa/contracts/provider/environments/local.rb b/qa/contracts/provider/environments/local.rb
index 7721122a097..0d472bc25e9 100644
--- a/qa/contracts/provider/environments/local.rb
+++ b/qa/contracts/provider/environments/local.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-require_relative './base'
-
-module Environments
- class Local < Base
- def initialize
- @base_url = ENV['CONTRACT_HOST']
- @merge_request = ENV['CONTRACT_MR']
+module Provider
+ module Environments
+ class Local < Base
+ def initialize
+ @base_url = ENV['CONTRACT_HOST']
+ @merge_request = ENV['CONTRACT_MR']
+ end
end
end
end
diff --git a/qa/contracts/provider/spec/diffs_helper.rb b/qa/contracts/provider/spec/diffs_helper.rb
index 4c5583c5011..95dbc4254e6 100644
--- a/qa/contracts/provider/spec/diffs_helper.rb
+++ b/qa/contracts/provider/spec/diffs_helper.rb
@@ -1,15 +1,17 @@
# frozen_string_literal: true
-require_relative '../environments/local'
+require_relative '../spec_helper'
-module DiffsHelper
- local = Environments::Local.new
+module Provider
+ module DiffsHelper
+ local = Environments::Local.new
- Pact.service_provider "Merge Request Diffs Endpoint" do
- app { local.merge_request('/diffs_batch.json?page=0') }
+ Pact.service_provider "Merge Request Diffs Endpoint" do
+ app { local.merge_request('/diffs_batch.json?page=0') }
- honours_pact_with 'Merge Request Page' do
- pact_uri '../contracts/merge_request_page-merge_request_diffs_endpoint.json'
+ honours_pact_with 'Merge Request Page' do
+ pact_uri '../contracts/merge_request_page-merge_request_diffs_endpoint.json'
+ end
end
end
end
diff --git a/qa/contracts/provider/spec/discussions_helper.rb b/qa/contracts/provider/spec/discussions_helper.rb
index 44f9803989f..642dde79e1d 100644
--- a/qa/contracts/provider/spec/discussions_helper.rb
+++ b/qa/contracts/provider/spec/discussions_helper.rb
@@ -1,15 +1,17 @@
# frozen_string_literal: true
-require_relative '../environments/local'
+require_relative '../spec_helper'
-module DiscussionsHelper
- local = Environments::Local.new
+module Provider
+ module DiscussionsHelper
+ local = Environments::Local.new
- Pact.service_provider "Merge Request Discussions Endpoint" do
- app { local.merge_request('/discussions.json') }
+ Pact.service_provider "Merge Request Discussions Endpoint" do
+ app { local.merge_request('/discussions.json') }
- honours_pact_with 'Merge Request Page' do
- pact_uri '../contracts/merge_request_page-merge_request_discussions_endpoint.json'
+ honours_pact_with 'Merge Request Page' do
+ pact_uri '../contracts/merge_request_page-merge_request_discussions_endpoint.json'
+ end
end
end
end
diff --git a/qa/contracts/provider/spec/metadata_helper.rb b/qa/contracts/provider/spec/metadata_helper.rb
index ac2910b1158..a3eb4978641 100644
--- a/qa/contracts/provider/spec/metadata_helper.rb
+++ b/qa/contracts/provider/spec/metadata_helper.rb
@@ -1,15 +1,17 @@
# frozen_string_literal: true
-require_relative '../environments/local'
+require_relative '../spec_helper'
-module MetadataHelper
- local = Environments::Local.new
+module Provider
+ module MetadataHelper
+ local = Environments::Local.new
- Pact.service_provider "Merge Request Metadata Endpoint" do
- app { local.merge_request('/diffs_metadata.json') }
+ Pact.service_provider "Merge Request Metadata Endpoint" do
+ app { local.merge_request('/diffs_metadata.json') }
- honours_pact_with 'Merge Request Page' do
- pact_uri '../contracts/merge_request_page-merge_request_metadata_endpoint.json'
+ honours_pact_with 'Merge Request Page' do
+ pact_uri '../contracts/merge_request_page-merge_request_metadata_endpoint.json'
+ end
end
end
end
diff --git a/qa/contracts/provider/spec_helper.rb b/qa/contracts/provider/spec_helper.rb
new file mode 100644
index 00000000000..1869c039910
--- /dev/null
+++ b/qa/contracts/provider/spec_helper.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module SpecHelper
+ unless ENV['CONTRACT_HOST']
+ raise(ArgumentError, 'Contract tests require CONTRACT_HOST environment variable to be set!')
+ end
+
+ require_relative '../../../config/bundler_setup'
+ Bundler.require(:default)
+
+ root = File.expand_path('../', __dir__)
+
+ loader = Zeitwerk::Loader.new
+ loader.push_dir(root)
+
+ loader.ignore("#{root}/consumer")
+ loader.ignore("#{root}/contracts")
+
+ loader.collapse("#{root}/provider/spec")
+
+ loader.setup
+end
diff --git a/qa/tasks/contracts.rake b/qa/tasks/contracts.rake
new file mode 100644
index 00000000000..682ec0e2e21
--- /dev/null
+++ b/qa/tasks/contracts.rake
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'pact/tasks/verification_task'
+
+contracts = File.expand_path('../contracts', __dir__)
+provider = File.expand_path('provider', contracts)
+
+# rubocop:disable Rails/RakeEnvironment
+namespace :contracts do
+ namespace :mr do
+ Pact::VerificationTask.new(:metadata) do |pact|
+ pact.uri(
+ "#{contracts}/contracts/merge_request_page-merge_request_metadata_endpoint.json",
+ pact_helper: "#{provider}/spec/metadata_helper.rb"
+ )
+ end
+
+ Pact::VerificationTask.new(:discussions) do |pact|
+ pact.uri(
+ "#{contracts}/contracts/merge_request_page-merge_request_discussions_endpoint.json",
+ pact_helper: "#{provider}/spec/discussions_helper.rb"
+ )
+ end
+
+ Pact::VerificationTask.new(:diffs) do |pact|
+ pact.uri(
+ "#{contracts}/contracts/merge_request_page-merge_request_diffs_endpoint.json",
+ pact_helper: "#{provider}/spec/diffs_helper.rb"
+ )
+ end
+
+ desc 'Run all merge request contract tests'
+ task 'test:merge_request', :contract_mr do |_t, arg|
+ raise(ArgumentError, 'Merge request contract tests require contract_mr to be set') unless arg[:contract_mr]
+
+ ENV['CONTRACT_MR'] = arg[:contract_mr]
+ errors = %w[metadata discussions diffs].each_with_object([]) do |task, err|
+ Rake::Task["contracts:mr:pact:verify:#{task}"].execute
+ rescue StandardError, SystemExit
+ err << "contracts:mr:pact:verify:#{task}"
+ end
+
+ raise StandardError, "Errors in tasks #{errors.join(', ')}" unless errors.empty?
+ end
+ end
+end
+# rubocop:enable Rails/RakeEnvironment
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index b124a9e3de9..21d7140bf89 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -41,7 +41,8 @@ export const getSecurityTrainingProvidersData = (providerOverrides = {}) => {
const response = {
data: {
project: {
- id: 1,
+ id: 'gid://gitlab/Project/1',
+ __typename: 'Project',
securityTrainingProviders,
},
},
diff --git a/spec/lib/backup/artifacts_spec.rb b/spec/lib/backup/artifacts_spec.rb
index e65dc79b65b..d830692d96b 100644
--- a/spec/lib/backup/artifacts_spec.rb
+++ b/spec/lib/backup/artifacts_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Backup::Artifacts do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('artifacts.tar.gz')
end
end
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 4345778ba92..53db7f0f149 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -6,25 +6,49 @@ RSpec.describe Backup::Database do
let(:progress) { StringIO.new }
let(:output) { progress.string }
- before do
- allow(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ before(:all) do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/gitlab/backup'
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/gitlab/db'
+ Rake.application.rake_require 'tasks/cache'
end
describe '#restore' do
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] }
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
+ let(:force) { true }
- subject { described_class.new(progress, filename: data) }
+ subject { described_class.new(progress, force: force) }
before do
allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
end
+ context 'when not forced' do
+ let(:force) { false }
+
+ it 'warns the user and waits' do
+ expect(subject).to receive(:sleep)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
+
+ expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
+ end
+
+ it 'has a pre restore warning' do
+ expect(subject.pre_restore_warning).not_to be_nil
+ end
+ end
+
context 'with an empty .gz file' do
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
it 'returns successfully' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]")
@@ -36,7 +60,9 @@ RSpec.describe Backup::Database do
let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s }
it 'raises a backup error' do
- expect { subject.restore }.to raise_error(Backup::Error)
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ expect { subject.restore(data) }.to raise_error(Backup::Error)
end
end
@@ -45,12 +71,15 @@ RSpec.describe Backup::Database do
let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
- it 'filters out noise from errors' do
- subject.restore
+ it 'filters out noise from errors and has a post restore warning' do
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include("ERRORS")
expect(output).not_to include(noise)
expect(output).to include(visible_error)
+ expect(subject.post_restore_warning).not_to be_nil
end
end
@@ -66,7 +95,9 @@ RSpec.describe Backup::Database do
end
it 'overrides default config values' do
- subject.restore
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+
+ subject.restore(data)
expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange"))
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 6bff0919293..bbc465a26c9 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
end
describe '#restore' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
let(:timestamp) { Time.utc(2017, 3, 22) }
@@ -58,11 +58,11 @@ RSpec.describe Backup::Files do
it 'moves all necessary files' do
allow(subject).to receive(:backup_existing_files).and_call_original
expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises no errors' do
- expect { subject.restore }.not_to raise_error
+ expect { subject.restore('registry.tar.gz') }.not_to raise_error
end
it 'calls tar command with unlink' do
@@ -70,13 +70,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.restore
+ subject.restore('registry.tar.gz')
end
it 'raises an error on failure' do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
- expect { subject.restore }.to raise_error(/Restore operation failed:/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/Restore operation failed:/)
end
end
@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do
it 'shows error message' do
expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
- subject.restore
+ subject.restore('registry.tar.gz')
end
end
@@ -104,13 +104,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
.and_call_original
- expect { subject.restore }.to raise_error(/is a mountpoint/)
+ expect { subject.restore('registry.tar.gz') }.to raise_error(/is a mountpoint/)
end
end
end
describe '#dump' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
before do
allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
@@ -118,14 +118,14 @@ RSpec.describe Backup::Files do
end
it 'raises no errors' do
- expect { subject.dump }.not_to raise_error
+ expect { subject.dump('registry.tar.gz') }.not_to raise_error
end
it 'excludes tmp dirs from archive' do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args)
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'raises an error on failure' do
@@ -133,7 +133,7 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:pipeline_succeeded?).and_return(false)
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to raise_error(/Failed to create compressed file/)
end
@@ -149,7 +149,7 @@ RSpec.describe Backup::Files do
.with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['', 0])
- subject.dump
+ subject.dump('registry.tar.gz')
end
it 'retries if rsync fails due to vanishing files' do
@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 24], ['', 0])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/files vanished during rsync, retrying/).to_stdout
end
@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 1])
expect do
- subject.dump
+ subject.dump('registry.tar.gz')
end.to output(/rsync failed/).to_stdout
.and raise_error(/Failed to create compressed file/)
end
@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
end
describe '#exclude_dirs' do
- subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
+ subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
it 'prepends a leading dot slash to tar excludes' do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
end
describe '#run_pipeline!' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'executes an Open3.pipeline for cmd_list' do
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
end
describe '#pipeline_succeeded?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if both tar and gzip succeeeded' do
expect(
@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
end
describe '#tar_ignore_non_success?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do
@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
end
describe '#noncritical_warning?' do
- subject { described_class.new('registry', '/var/gitlab-registry') }
+ subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if given text matches noncritical warnings list' do
expect(
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 6bf4f833c1f..84ee75e27ac 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { nil }
let(:storage_parallelism) { nil }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
let(:progress) do
Tempfile.new('progress').tap do |progress|
@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyBackup do
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -42,7 +43,7 @@ RSpec.describe Backup::GitalyBackup do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -50,11 +51,11 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end
context 'parallel option set' do
@@ -63,7 +64,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -74,7 +75,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -82,7 +83,7 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:create)
+ subject.start(:create, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
@@ -114,7 +115,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes through SSL envs' do
expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
- subject.start(:create)
+ subject.start(:create, destination)
subject.finish!
end
end
@@ -139,7 +140,7 @@ RSpec.describe Backup::GitalyBackup do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -162,7 +163,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
@@ -173,7 +174,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.finish!
end
end
@@ -181,7 +182,7 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
- subject.start(:restore)
+ subject.start(:restore, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb
index 4829d51ac9d..6cba8c5c9b1 100644
--- a/spec/lib/backup/gitaly_rpc_backup_spec.rb
+++ b/spec/lib/backup/gitaly_rpc_backup_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyRpcBackup do
let(:progress) { spy(:stdout) }
+ let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
subject { described_class.new(progress) }
@@ -14,7 +15,7 @@ RSpec.describe Backup::GitalyRpcBackup do
context 'unknown' do
it 'fails to start unknown' do
- expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
+ expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end
end
@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyRpcBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -35,11 +36,11 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
+ expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
+ expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end
context 'failure' do
@@ -50,7 +51,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:create)
+ subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
@@ -90,7 +91,7 @@ RSpec.describe Backup::GitalyRpcBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -123,7 +124,7 @@ RSpec.describe Backup::GitalyRpcBackup do
repository
end
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
@@ -141,7 +142,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end
it 'logs an appropriate message', :aggregate_failures do
- subject.start(:restore)
+ subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish!
diff --git a/spec/lib/backup/lfs_spec.rb b/spec/lib/backup/lfs_spec.rb
index 6525019d9ac..a27f60f20d0 100644
--- a/spec/lib/backup/lfs_spec.rb
+++ b/spec/lib/backup/lfs_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Backup::Lfs do
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('lfs.tar.gz')
end
end
end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 9c186205067..bf1a039f354 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -6,16 +6,149 @@ RSpec.describe Backup::Manager do
include StubENV
let(:progress) { StringIO.new }
+ let(:definitions) { nil }
- subject { described_class.new(progress) }
+ subject { described_class.new(progress, definitions: definitions) }
before do
+ # Rspec fails with `uninitialized constant RSpec::Support::Differ` when it
+ # is trying to display a diff and `File.exist?` is stubbed. Adding a
+ # default stub fixes this.
+ allow(File).to receive(:exist?).and_call_original
+
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
end
- describe '#pack' do
- let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml) }
+ describe '#run_create_task' do
+ let(:enabled) { true }
+ let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+
+ it 'calls the named task' do
+ expect(task).to receive(:dump)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done')
+
+ subject.run_create_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+
+ describe 'skipped' do
+ it 'informs the user' do
+ stub_env('SKIP', 'my_task')
+
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[SKIPPED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
+ end
+
+ describe '#run_restore_task' do
+ let(:enabled) { true }
+ let(:pre_restore_warning) { nil }
+ let(:post_restore_warning) { nil }
+ let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
+ let(:backup_information) { {} }
+ let(:task) do
+ instance_double(Backup::Task,
+ human_name: 'my task',
+ enabled: enabled,
+ pre_restore_warning: pre_restore_warning,
+ post_restore_warning: post_restore_warning)
+ end
+
+ before do
+ allow(YAML).to receive(:load_file).with('backup_information.yml')
+ .and_return(backup_information)
+ end
+
+ it 'calls the named task' do
+ expect(task).to receive(:restore)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+
+ subject.run_restore_task('my_task')
+ end
+
+ describe 'disabled' do
+ let(:enabled) { false }
+
+ it 'informs the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]').ordered
+
+ subject.run_restore_task('my_task')
+ end
+ end
+
+ describe 'pre_restore_warning' do
+ let(:pre_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+
+ describe 'post_restore_warning' do
+ let(:post_restore_warning) { 'Watch out!' }
+
+ it 'displays and waits for the user' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
+ expect(task).to receive(:restore)
+
+ subject.run_restore_task('my_task')
+ end
+
+ it 'does not continue when the user quits' do
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
+ expect(task).to receive(:restore)
+ expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
+
+ expect do
+ subject.run_restore_task('my_task')
+ end.to raise_error(SystemExit)
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} }
let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
@@ -26,21 +159,27 @@ RSpec.describe Backup::Manager do
}
end
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
+ end
+
before do
allow(ActiveRecord::Base.connection).to receive(:reconnect!)
allow(Kernel).to receive(:system).and_return(true)
allow(YAML).to receive(:load_file).and_return(backup_information)
- ::Backup::Manager::FOLDERS_TO_BACKUP.each do |folder|
- allow(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, folder)).and_return(true)
- end
-
allow(subject).to receive(:backup_information).and_return(backup_information)
- allow(subject).to receive(:upload)
+ allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
@@ -50,247 +189,400 @@ RSpec.describe Backup::Manager do
it 'uses the given value as tar file name' do
stub_env('BACKUP', '/ignored/path/custom')
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
context 'when skipped is set in backup_information.yml' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: '12.3',
- skipped: ['repositories']
+ skipped: ['task2']
}
end
it 'executes tar' do
- subject.pack
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- context 'when a directory does not exist' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
-
- before do
- expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false)
+ context 'when the destination is optional' do
+ let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz', destination_optional: true)
+ }
end
it 'executes tar' do
- subject.pack
+ expect(File).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')).and_return(false)
+
+ subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline)
end
end
- end
- describe '#remove_tmp' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
-
- before do
- allow(FileUtils).to receive(:rm_rf).and_return(true)
- end
-
- it 'removes backups/tmp dir' do
- subject.remove_tmp
-
- expect(FileUtils).to have_received(:rm_rf).with(path)
- end
-
- it 'prints running task with a done confirmation' do
- subject.remove_tmp
-
- expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
- expect(progress).to have_received(:puts).with('done')
- end
- end
-
- describe '#remove_old' do
- let(:files) do
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
- '1451510000_2015_12_30_gitlab_backup.tar',
- '1450742400_2015_12_22_gitlab_backup.tar',
- '1449878400_gitlab_backup.tar',
- '1449014400_gitlab_backup.tar',
- 'manual_gitlab_backup.tar'
- ]
- end
-
- before do
- allow(Dir).to receive(:chdir).and_yield
- allow(Dir).to receive(:glob).and_return(files)
- allow(FileUtils).to receive(:rm)
- allow(Time).to receive(:now).and_return(Time.utc(2016))
- end
-
- context 'when keep_time is zero' do
- before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
-
- subject.remove_old
- end
-
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
-
- it 'prints a skipped message' do
- expect(progress).to have_received(:puts).with('skipping')
- end
- end
-
- context 'when no valid file is found' do
+ context 'many backup files' do
let(:files) do
[
- '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
- 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
+ '1451510000_2015_12_30_gitlab_backup.tar',
+ '1450742400_2015_12_22_gitlab_backup.tar',
+ '1449878400_gitlab_backup.tar',
+ '1449014400_gitlab_backup.tar',
+ 'manual_gitlab_backup.tar'
]
end
before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
-
- subject.remove_old
+ allow(Dir).to receive(:chdir).and_yield
+ allow(Dir).to receive(:glob).and_return(files)
+ allow(FileUtils).to receive(:rm)
+ allow(Time).to receive(:now).and_return(Time.utc(2016))
end
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
+ context 'when keep_time is zero' do
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
+
+ it 'prints a skipped message' do
+ expect(progress).to have_received(:puts).with('skipping')
+ end
end
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
+ context 'when no valid file is found' do
+ let(:files) do
+ [
+ '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
+ 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
+ '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ ]
+ end
+
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
+
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
+ end
+
+ context 'when there are no files older than keep_time' do
+ before do
+ # Set to 30 days
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'removes no files' do
+ expect(FileUtils).not_to have_received(:rm)
+ end
+
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (0 removed)')
+ end
+ end
+
+ context 'when keep_time is set to remove files' do
+ before do
+ # Set to 1 second
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'removes matching files with a human-readable versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[1])
+ expect(FileUtils).to have_received(:rm).with(files[2])
+ expect(FileUtils).to have_received(:rm).with(files[3])
+ end
+
+ it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ end
+
+ it 'removes matching files with a human-readable non-versioned timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ end
+
+ it 'removes matching files without a human-readable timestamp' do
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
+
+ it 'does not remove files that are not old enough' do
+ expect(FileUtils).not_to have_received(:rm).with(files[0])
+ end
+
+ it 'does not remove non-matching files' do
+ expect(FileUtils).not_to have_received(:rm).with(files[9])
+ end
+
+ it 'prints a done message' do
+ expect(progress).to have_received(:puts).with('done. (8 removed)')
+ end
+ end
+
+ context 'when removing a file fails' do
+ let(:file) { files[1] }
+ let(:message) { "Permission denied @ unlink_internal - #{file}" }
+
+ before do
+ allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
+ allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'removes the remaining expected files' do
+ expect(FileUtils).to have_received(:rm).with(files[4])
+ expect(FileUtils).to have_received(:rm).with(files[5])
+ expect(FileUtils).to have_received(:rm).with(files[6])
+ expect(FileUtils).to have_received(:rm).with(files[7])
+ expect(FileUtils).to have_received(:rm).with(files[8])
+ end
+
+ it 'sets the correct removed count' do
+ expect(progress).to have_received(:puts).with('done. (7 removed)')
+ end
+
+ it 'prints the error from file that could not be removed' do
+ expect(progress).to have_received(:puts).with(a_string_matching(message))
+ end
end
end
- context 'when there are no files older than keep_time' do
- before do
- # Set to 30 days
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
-
- subject.remove_old
- end
-
- it 'removes no files' do
- expect(FileUtils).not_to have_received(:rm)
- end
-
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (0 removed)')
- end
- end
-
- context 'when keep_time is set to remove files' do
- before do
- # Set to 1 second
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
-
- subject.remove_old
- end
-
- it 'removes matching files with a human-readable versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[1])
- expect(FileUtils).to have_received(:rm).with(files[2])
- expect(FileUtils).to have_received(:rm).with(files[3])
- end
-
- it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- end
-
- it 'removes matching files with a human-readable non-versioned timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- end
-
- it 'removes matching files without a human-readable timestamp' do
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
- end
-
- it 'does not remove files that are not old enough' do
- expect(FileUtils).not_to have_received(:rm).with(files[0])
- end
-
- it 'does not remove non-matching files' do
- expect(FileUtils).not_to have_received(:rm).with(files[9])
- end
-
- it 'prints a done message' do
- expect(progress).to have_received(:puts).with('done. (8 removed)')
- end
- end
-
- context 'when removing a file fails' do
- let(:file) { files[1] }
- let(:message) { "Permission denied @ unlink_internal - #{file}" }
+ describe 'cloud storage' do
+ let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
+ let(:backup_filename) { File.basename(backup_file.path) }
before do
- allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
- allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
+ allow(subject).to receive(:tar_file).and_return(backup_filename)
- subject.remove_old
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'id',
+ aws_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: 104857600,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+
+ Fog.mock!
+
+ # the Fog mock only knows about directories we create explicitly
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
- it 'removes the remaining expected files' do
- expect(FileUtils).to have_received(:rm).with(files[4])
- expect(FileUtils).to have_received(:rm).with(files[5])
- expect(FileUtils).to have_received(:rm).with(files[6])
- expect(FileUtils).to have_received(:rm).with(files[7])
- expect(FileUtils).to have_received(:rm).with(files[8])
+ context 'target path' do
+ it 'uses the tar filename by default' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: backup_filename, public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'adds the DIRECTORY environment variable if present' do
+ stub_env('DIRECTORY', 'daily')
+
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_including(key: "daily/#{backup_filename}", public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
end
- it 'sets the correct removed count' do
- expect(progress).to have_received(:puts).with('done. (7 removed)')
+ context 'with AWS with server side encryption' do
+ let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
+ let(:encryption_key) { nil }
+ let(:encryption) { nil }
+ let(:storage_options) { nil }
+
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: encryption,
+ encryption_key: encryption_key,
+ storage_options: storage_options,
+ storage_class: nil
+ }
+ )
+
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
+
+ context 'with SSE-S3 without using storage_options' do
+ let(:encryption) { 'AES256' }
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-C (customer-provided keys) options' do
+ let(:encryption) { 'AES256' }
+ let(:encryption_key) { SecureRandom.hex }
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
+ end
+ end
+
+ context 'with SSE-KMS options' do
+ let(:storage_options) do
+ {
+ server_side_encryption: 'aws:kms',
+ server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ }
+ end
+
+ it 'sets encryption attributes' do
+ subject.create # rubocop:disable Rails/SaveBang
+
+ expect(progress).to have_received(:puts).with("done (encrypted with aws:kms)")
+ end
+ end
end
- it 'prints the error from file that could not be removed' do
- expect(progress).to have_received(:puts).with(a_string_matching(message))
+ context 'with Google provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'Google',
+ google_storage_access_key_id: 'test-access-id',
+ google_storage_secret_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+
+ connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
+ end
+
+ it 'does not attempt to set ACL' do
+ expect_any_instance_of(Fog::Collection).to receive(:create)
+ .with(hash_excluding(public: false))
+ .and_call_original
+
+ subject.create # rubocop:disable Rails/SaveBang
+ end
+ end
+
+ context 'with AzureRM provider' do
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'test-access-id',
+ azure_storage_access_key: 'secret'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: nil,
+ encryption: nil,
+ encryption_key: nil,
+ storage_class: nil
+ }
+ )
+ end
+
+ it 'loads the provider' do
+ expect { subject.create }.not_to raise_error # rubocop:disable Rails/SaveBang
+ end
end
end
end
- describe 'verify_backup_version' do
- context 'on version mismatch' do
- let(:gitlab_version) { Gitlab::VERSION }
-
- it 'stops the process' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "not #{gitlab_version}" })
-
- expect { subject.verify_backup_version }.to raise_error SystemExit
- end
+ describe '#restore' do
+ let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:definitions) do
+ {
+ 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
+ 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
+ }
end
- context 'on version match' do
- let(:gitlab_version) { Gitlab::VERSION }
-
- it 'does nothing' do
- allow(YAML).to receive(:load_file)
- .and_return({ gitlab_version: "#{gitlab_version}" })
-
- expect { subject.verify_backup_version }.not_to raise_error
- end
+ let(:gitlab_version) { Gitlab::VERSION }
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: gitlab_version
+ }
+ end
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/shell'
+ Rake.application.rake_require 'tasks/cache'
+
+ allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
+ allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
+ allow(YAML).to receive(:load_file).and_return(backup_information)
+ allow(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
+ allow(Rake::Task['cache:clear']).to receive(:invoke)
end
- end
- describe '#unpack' do
context 'when there are no backup files in the directory' do
before do
allow(Dir).to receive(:glob).and_return([])
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('No backups found'))
end
@@ -307,13 +599,13 @@ RSpec.describe Backup::Manager do
end
it 'prints the list of available backups' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31'))
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('Found more than one backup'))
end
@@ -332,7 +624,7 @@ RSpec.describe Backup::Manager do
end
it 'fails the operation and prints an error' do
- expect { subject.unpack }.to raise_error SystemExit
+ expect { subject.restore }.to raise_error SystemExit
expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar')
expect(progress).to have_received(:puts)
.with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist'))
@@ -348,17 +640,46 @@ RSpec.describe Backup::Manager do
)
allow(File).to receive(:exist?).and_return(true)
allow(Kernel).to receive(:system).and_return(true)
- allow(YAML).to receive(:load_file).and_return(gitlab_version: Gitlab::VERSION)
stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3')
end
it 'unpacks the file' do
- subject.unpack
+ subject.restore
expect(Kernel).to have_received(:system)
.with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar")
- expect(progress).to have_received(:puts).with(a_string_matching('done'))
+ end
+
+ context 'on version mismatch' do
+ let(:backup_information) do
+ {
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
+ }
+ end
+
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
+ end
+ end
+
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
+
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
+
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
+
+ subject.restore
+
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
@@ -375,184 +696,41 @@ RSpec.describe Backup::Manager do
it 'selects the non-tarred backup to restore from' do
expect(Kernel).not_to receive(:system)
- subject.unpack
+ subject.restore
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
end
- end
- end
- describe '#upload' do
- let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
- let(:backup_filename) { File.basename(backup_file.path) }
-
- before do
- allow(subject).to receive(:tar_file).and_return(backup_filename)
-
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'id',
- aws_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: 104857600,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
-
- Fog.mock!
-
- # the Fog mock only knows about directories we create explicitly
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
-
- context 'target path' do
- it 'uses the tar filename by default' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: backup_filename, public: false))
- .and_return(true)
-
- subject.upload
- end
-
- it 'adds the DIRECTORY environment variable if present' do
- stub_env('DIRECTORY', 'daily')
-
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_including(key: "daily/#{backup_filename}", public: false))
- .and_return(true)
-
- subject.upload
- end
- end
-
- context 'with AWS with server side encryption' do
- let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
- let(:encryption_key) { nil }
- let(:encryption) { nil }
- let(:storage_options) { nil }
-
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AWS',
- aws_access_key_id: 'AWS_ACCESS_KEY_ID',
- aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: encryption,
- encryption_key: encryption_key,
- storage_options: storage_options,
- storage_class: nil
- }
- )
-
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
-
- context 'with SSE-S3 without using storage_options' do
- let(:encryption) { 'AES256' }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('AES256')
- expect(result.encryption_key).to be_nil
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-C (customer-provided keys) options' do
- let(:encryption) { 'AES256' }
- let(:encryption_key) { SecureRandom.hex }
-
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq(encryption)
- expect(result.encryption_key).to eq(encryption_key)
- expect(result.kms_key_id).to be_nil
- end
- end
-
- context 'with SSE-KMS options' do
- let(:storage_options) do
+ context 'on version mismatch' do
+ let(:backup_information) do
{
- server_side_encryption: 'aws:kms',
- server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ backup_created_at: Time.zone.parse('2019-01-01'),
+ gitlab_version: "not #{gitlab_version}"
}
end
- it 'sets encryption attributes' do
- result = subject.upload
-
- expect(result.key).to be_present
- expect(result.encryption).to eq('aws:kms')
- expect(result.kms_key_id).to eq('arn:aws:kms:12345')
+ it 'stops the process' do
+ expect { subject.restore }.to raise_error SystemExit
+ expect(progress).to have_received(:puts)
+ .with(a_string_matching('GitLab version mismatch'))
end
end
- end
- context 'with Google provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'Google',
- google_storage_access_key_id: 'test-access-id',
- google_storage_secret_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
+ describe 'tmp files' do
+ let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
- connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
- connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
- end
+ before do
+ allow(FileUtils).to receive(:rm_rf).and_call_original
+ end
- it 'does not attempt to set ACL' do
- expect_any_instance_of(Fog::Collection).to receive(:create)
- .with(hash_excluding(public: false))
- .and_return(true)
+ it 'removes backups/tmp dir' do
+ expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
- subject.upload
- end
- end
+ subject.restore
- context 'with AzureRM provider' do
- before do
- stub_backup_setting(
- upload: {
- connection: {
- provider: 'AzureRM',
- azure_storage_account_name: 'test-access-id',
- azure_storage_access_key: 'secret'
- },
- remote_directory: 'directory',
- multipart_chunk_size: nil,
- encryption: nil,
- encryption_key: nil,
- storage_class: nil
- }
- )
- end
-
- it 'loads the provider' do
- expect { subject.upload }.not_to raise_error
+ expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
+ end
end
end
end
diff --git a/spec/lib/backup/object_backup_spec.rb b/spec/lib/backup/object_backup_spec.rb
index 4d34dc0ade7..85658173b0e 100644
--- a/spec/lib/backup/object_backup_spec.rb
+++ b/spec/lib/backup/object_backup_spec.rb
@@ -21,7 +21,7 @@ RSpec.shared_examples 'backup object' do |setting|
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('backup_object.tar.gz')
end
end
end
diff --git a/spec/lib/backup/pages_spec.rb b/spec/lib/backup/pages_spec.rb
index f9ee4bbdc41..095dda61cf4 100644
--- a/spec/lib/backup/pages_spec.rb
+++ b/spec/lib/backup/pages_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Pages do
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.dump
+ subject.dump('pages.tar.gz')
end
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 0b29a25360d..db3e507596f 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Backup::Repositories do
let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
let(:max_concurrency) { 1 }
let(:max_storage_concurrency) { 1 }
+ let(:destination) { 'repositories' }
subject do
described_class.new(
@@ -26,9 +27,9 @@ RSpec.describe Backup::Repositories do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.dump
+ subject.dump(destination)
- expect(strategy).to have_received(:start).with(:create)
+ expect(strategy).to have_received(:start).with(:create, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -54,38 +55,38 @@ RSpec.describe Backup::Repositories do
it 'creates the expected number of threads' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
describe 'command failure' do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -98,13 +99,13 @@ RSpec.describe Backup::Repositories do
it 'enqueues all projects sequentially' do
expect(Thread).not_to receive(:new)
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -122,13 +123,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
context 'with extra max concurrency' do
@@ -139,13 +140,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original
- expect(strategy).to receive(:start).with(:create)
+ expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
expect(strategy).to receive(:finish!)
- subject.dump
+ subject.dump(destination)
end
end
@@ -153,33 +154,33 @@ RSpec.describe Backup::Repositories do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).and_raise(IOError)
- expect { subject.dump }.to raise_error(IOError)
+ expect { subject.dump(destination) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end
context 'misconfigured storages' do
let(:storage_keys) { %w[test_second_storage] }
it 'raises an error' do
- expect { subject.dump }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
+ expect { subject.dump(destination) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
end
end
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
- subject.dump
+ subject.dump(destination)
end.count
create_list(:project, 2, :repository)
expect do
- subject.dump
+ subject.dump(destination)
end.not_to exceed_query_limit(control_count)
end
end
@@ -192,9 +193,9 @@ RSpec.describe Backup::Repositories do
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do
- subject.restore
+ subject.restore(destination)
- expect(strategy).to have_received(:start).with(:restore)
+ expect(strategy).to have_received(:start).with(:restore, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
@@ -208,7 +209,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).not_to be_failed
@@ -219,7 +220,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
- subject.restore
+ subject.restore(destination)
pool_repository.reload
expect(pool_repository).to be_obsolete
@@ -236,14 +237,14 @@ RSpec.describe Backup::Repositories do
end
it 'shows the appropriate error' do
- subject.restore
+ subject.restore(destination)
expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
end
it 'removes the snippets from the DB' do
- expect { subject.restore }.to change(PersonalSnippet, :count).by(-1)
+ expect { subject.restore(destination) }.to change(PersonalSnippet, :count).by(-1)
.and change(ProjectSnippet, :count).by(-1)
.and change(SnippetRepository, :count).by(-2)
end
@@ -253,7 +254,7 @@ RSpec.describe Backup::Repositories do
shard_name = personal_snippet.repository.shard
path = personal_snippet.disk_path + '.git'
- subject.restore
+ subject.restore(destination)
expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
end
diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb
new file mode 100644
index 00000000000..b0eb885d3f4
--- /dev/null
+++ b/spec/lib/backup/task_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Task do
+ let(:progress) { StringIO.new }
+
+ subject { described_class.new(progress) }
+
+ describe '#human_name' do
+ it 'must be implemented by the subclass' do
+ expect { subject.human_name }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#dump' do
+ it 'must be implemented by the subclass' do
+ expect { subject.dump('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#restore' do
+ it 'must be implemented by the subclass' do
+ expect { subject.restore('some/path') }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb
index 25ad0c0d3f7..0cfc80a9cb9 100644
--- a/spec/lib/backup/uploads_spec.rb
+++ b/spec/lib/backup/uploads_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Uploads do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
- backup.dump
+ backup.dump('uploads.tar.gz')
end
end
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index e9aa8cbb991..3b64034fc2d 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -72,7 +72,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
before do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: gitlab_version })
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -85,10 +84,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
it 'invokes restoration on match' do
expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout_from_any_process
end
-
- it 'prints timestamps on messages' do
- expect { run_rake_task('gitlab:backup:restore') }.to output(/.*\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\s[-+]\d{4}\s--\s.*/).to_stdout_from_any_process
- end
end
end
@@ -131,8 +126,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: Gitlab::VERSION })
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
-
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -486,7 +479,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true)
- expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance|
(backup_types - %w{repositories uploads}).each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
@@ -531,7 +523,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true)
- expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered