Add latest changes from gitlab-org/gitlab@master

parent 8ac91ecfd1
commit d46287cc16

27 changed files with 372 additions and 287 deletions
@@ -245,7 +245,7 @@
.select2-highlighted {
  .group-result {
    .group-path {
      color: $white-light;
      color: $gray-800;
    }
  }
}
@@ -118,8 +118,6 @@ module Ci

    scope :eager_load_job_artifacts, -> { includes(:job_artifacts) }

    scope :with_artifacts_stored_locally, -> { with_existing_job_artifacts(Ci::JobArtifact.archive.with_files_stored_locally) }
    scope :with_archived_trace_stored_locally, -> { with_existing_job_artifacts(Ci::JobArtifact.trace.with_files_stored_locally) }
    scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
    scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
    scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
@@ -65,6 +65,7 @@ module Ci
    after_save :update_file_store, if: :saved_change_to_file?

    scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
    scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }

    scope :with_file_types, -> (file_types) do
      types = self.file_types.select { |file_type| file_types.include?(file_type) }.values
@@ -39,7 +39,7 @@ module Clusters

    def self.has_one_cluster_application(name) # rubocop:disable Naming/PredicateName
      application = APPLICATIONS[name.to_s]
      has_one application.association_name, class_name: application.to_s # rubocop:disable Rails/ReflectionClassName
      has_one application.association_name, class_name: application.to_s, inverse_of: :cluster # rubocop:disable Rails/ReflectionClassName
    end

    has_one_cluster_application :helm
@@ -9,6 +9,7 @@ class LfsObject < ApplicationRecord
  has_many :projects, -> { distinct }, through: :lfs_objects_projects

  scope :with_files_stored_locally, -> { where(file_store: LfsObjectUploader::Store::LOCAL) }
  scope :with_files_stored_remotely, -> { where(file_store: LfsObjectUploader::Store::REMOTE) }

  validates :oid, presence: true, uniqueness: true
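The `with_files_stored_remotely` scopes added above give a quick way to watch migration progress from a console. A minimal sketch, assuming an Omnibus install where the `gitlab-rails` wrapper is available:

```bash
# Count CI job artifacts and LFS objects whose files still live in object storage.
# Uses the scopes introduced in this commit; the gitlab-rails wrapper is an Omnibus assumption.
sudo gitlab-rails runner 'puts Ci::JobArtifact.with_files_stored_remotely.count'
sudo gitlab-rails runner 'puts LfsObject.with_files_stored_remotely.count'
```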
@@ -0,0 +1,5 @@
---
title: Increase color contrast of select option path
merge_request:
author:
type: other
changelogs/unreleased/59036-object-to-local-storage.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: "[ObjectStorage] Allow migrating back to local storage"
merge_request: 16868
author:
type: added
changelogs/unreleased/github-release-importer-fixup.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Set name and updated_at properly in GitHub ReleaseImporter
merge_request: 17020
author:
type: fixed
@@ -92,7 +92,7 @@ Use an object storage option like AWS S3 to store job artifacts.

DANGER: **Danger:**
If you're enabling S3 in [GitLab HA](high_availability/README.md), you will need to have an [NFS mount set up for CI traces and artifacts](high_availability/nfs.md#a-single-nfs-mount) or enable [live tracing](job_traces.md#new-live-trace-architecture). If these settings are not set, you will risk job traces disappearing or not being saved.

### Object Storage Settings
#### Object Storage Settings

For source installations the following settings are nested under `artifacts:` and then `object_store:`. On Omnibus GitLab installs they are prefixed by `artifacts_object_store_`.
@@ -105,7 +105,7 @@ For source installations the following settings are nested under `artifacts:` and
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |

#### S3 compatible connection settings
##### S3 compatible connection settings

The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
@@ -188,6 +188,14 @@ _The artifacts are stored by default in
   sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
   ```

### Migrating from object storage to local storage

In order to migrate back to local storage:

1. Set both `direct_upload` and `background_upload` to false under the artifacts object storage settings. Don't forget to restart GitLab.
1. Run `rake gitlab:artifacts:migrate_to_local` on your console.
1. Disable `object_storage` for artifacts in `gitlab.rb`. Remember to restart GitLab afterwards.

## Expiring artifacts

If an expiry date is used for the artifacts, they are marked for deletion
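The new `gitlab:artifacts:migrate_to_local` task referenced in step 2 above can be run the same way as the `gitlab:artifacts:migrate` task shown earlier in this file; the invocations below mirror that pattern and are illustrative rather than copied from the docs:

```bash
# Omnibus GitLab (assumed to follow the same pattern as gitlab:artifacts:migrate)
gitlab-rake gitlab:artifacts:migrate_to_local

# Installations from source (assumed pattern, matching the migrate example above)
sudo -u git -H bundle exec rake gitlab:artifacts:migrate_to_local RAILS_ENV=production
```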
@@ -59,46 +59,6 @@ job traces are automatically migrated to it along with the other job artifacts.

See "Phase 4: uploading" in [Data flow](#data-flow) to learn about the process.

## How to archive legacy job trace files

Legacy job traces, which were created before GitLab 10.5, were not archived regularly.
It's the same state with the "2: overwriting" in the above [Data flow](#data-flow).
To archive those legacy job traces, please follow the instruction below.

1. Execute the following command

   ```bash
   gitlab-rake gitlab:traces:archive
   ```

After you executed this task, GitLab instance queues up Sidekiq jobs (asynchronous processes)
for migrating job trace files from local storage to object storage.
It could take time to complete the all migration jobs. You can check the progress by the following command

```bash
sudo gitlab-rails console
```

```bash
[1] pry(main)> Sidekiq::Stats.new.queues['pipeline_background:archive_trace']
=> 100
```

If the count becomes zero, the archiving processes are done

## How to migrate archived job traces to object storage

> [Introduced][ce-21193] in GitLab 11.3.

If job traces have already been archived into local storage, and you want to migrate those traces to object storage, please follow the instruction below.

1. Ensure [Object storage integration for Job Artifacts](job_artifacts.md#object-storage-settings) is enabled
1. Execute the following command

   ```bash
   gitlab-rake gitlab:traces:migrate
   ```

## How to remove job traces

There isn't a way to automatically expire old job logs, but it's safe to remove
@@ -61,6 +61,9 @@ To enable external storage of merge request diffs, follow the instructions below.

## Using object storage

CAUTION: **WARNING:**
Currently migrating to object storage is **non-reversible**

Instead of storing the external diffs on disk, we recommended the use of an object
store like AWS S3 instead. This configuration relies on valid AWS credentials to
be configured already.
@@ -113,3 +113,39 @@ To migrate all uploads created by legacy uploaders, run:

```shell
bundle exec rake gitlab:uploads:legacy:migrate
```

## Migrate from object storage to local storage

If you need to disable Object Storage for any reason, first you need to migrate
your data out of Object Storage and back into your local storage.

**Before proceeding, it is important to disable both `direct_upload` and `background_upload` under `uploads` settings in `gitlab.rb`**

CAUTION: **Warning:**
**Extended downtime is required** so no new files are created in object storage during
the migration. A configuration setting will be added soon to allow migrating
from object storage to local files with only a brief moment of downtime for configuration changes.
See issue [gitlab-org/gitlab-ce#66144](https://gitlab.com/gitlab-org/gitlab-ce/issues/66144)

### All-in-one rake task

GitLab provides a wrapper rake task that migrates all uploaded files - avatars,
logos, attachments, favicon, etc. - to local storage in one go. Under the hood,
it invokes individual rake tasks to migrate files falling under each of these
categories one by one. For details on these rake tasks please [refer to the section above](#individual-rake-tasks),
keeping in mind the task name in this case is `gitlab:uploads:migrate_to_local`.

**Omnibus Installation**

```bash
gitlab-rake "gitlab:uploads:migrate_to_local:all"
```

**Source Installation**

```bash
sudo RAILS_ENV=production -u git -H bundle exec rake gitlab:uploads:migrate_to_local:all
```

After this is done, you may disable Object Storage by undoing the changes described
in the instructions to [configure object storage](../../uploads.md#using-object-storage-core-only)
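A simple way to confirm the migration back to local storage finished is to count uploads still marked as remote. `Upload` and `ObjectStorage::Store::REMOTE` are the names used elsewhere in this commit; the `gitlab-rails` wrapper is an Omnibus assumption:

```bash
# Should print 0 once all uploads have been moved back to local storage.
sudo gitlab-rails runner 'puts Upload.where(store: ObjectStorage::Store::REMOTE).count'
```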
@@ -218,6 +218,14 @@ For source installations the settings are nested under `lfs:` and then
will be forwarded to object storage unless `background_upload` is set to
false.

### Migrating back to local storage

In order to migrate back to local storage:

1. Set both `direct_upload` and `background_upload` to false under the LFS object storage settings. Don't forget to restart GitLab.
1. Run `rake gitlab:lfs:migrate_to_local` on your console.
1. Disable `object_storage` for LFS objects in `gitlab.rb`. Remember to restart GitLab afterwards.

## Storage statistics

You can see the total storage used for LFS objects on groups and projects
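For reference, step 2 of the list above can be run as follows. The Omnibus wrapper and the source-install pattern mirror the other migrate tasks in this commit and are assumptions rather than text from the docs:

```bash
# Omnibus GitLab (assumed invocation)
gitlab-rake gitlab:lfs:migrate_to_local

# Installations from source (assumed pattern)
sudo -u git -H bundle exec rake gitlab:lfs:migrate_to_local RAILS_ENV=production
```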
lib/gitlab/artifacts/migration_helper.rb (new file, 33 lines)

@@ -0,0 +1,33 @@
# frozen_string_literal: true

module Gitlab
  module Artifacts
    class MigrationHelper
      def migrate_to_remote_storage(&block)
        artifacts = ::Ci::JobArtifact.with_files_stored_locally
        migrate(artifacts, ObjectStorage::Store::REMOTE, &block)
      end

      def migrate_to_local_storage(&block)
        artifacts = ::Ci::JobArtifact.with_files_stored_remotely
        migrate(artifacts, ObjectStorage::Store::LOCAL, &block)
      end

      private

      def batch_size
        ENV.fetch('MIGRATION_BATCH_SIZE', 10).to_i
      end

      def migrate(artifacts, store, &block)
        artifacts.find_each(batch_size: batch_size) do |artifact| # rubocop:disable CodeReuse/ActiveRecord
          artifact.file.migrate!(store)

          yield artifact if block
        rescue => e
          raise StandardError.new("Failed to transfer artifact of type #{artifact.file_type} and ID #{artifact.id} with error: #{e.message}")
        end
      end
    end
  end
end
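The rake tasks further down wrap this helper, but it can also be driven directly for a one-off run. A sketch assuming an Omnibus install; it prints each migrated artifact ID via the block yielded by `migrate_to_local_storage`, much like the new task does:

```bash
# Migrate artifacts back to local storage and print each transferred ID.
# Equivalent in spirit to the gitlab:artifacts:migrate_to_local task added below.
sudo gitlab-rails runner \
  'Gitlab::Artifacts::MigrationHelper.new.migrate_to_local_storage { |artifact| puts artifact.id }'
```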
@@ -32,10 +32,11 @@ module Gitlab

      def build(release)
        {
          name: release.name,
          tag: release.tag_name,
          description: description_for(release),
          created_at: release.created_at,
          updated_at: release.updated_at,
          updated_at: release.created_at,
          released_at: release.published_at,
          project_id: project.id
        }
lib/gitlab/uploads/migration_helper.rb (new file, 72 lines)

@@ -0,0 +1,72 @@
# frozen_string_literal: true

module Gitlab
  module Uploads
    class MigrationHelper
      attr_reader :logger

      CATEGORIES = [%w(AvatarUploader Project :avatar),
                    %w(AvatarUploader Group :avatar),
                    %w(AvatarUploader User :avatar),
                    %w(AttachmentUploader Note :attachment),
                    %w(AttachmentUploader Appearance :logo),
                    %w(AttachmentUploader Appearance :header_logo),
                    %w(FaviconUploader Appearance :favicon),
                    %w(FileUploader Project),
                    %w(PersonalFileUploader Snippet),
                    %w(NamespaceFileUploader Snippet),
                    %w(FileUploader MergeRequest)].freeze

      def initialize(args, logger)
        prepare_variables(args, logger)
      end

      def migrate_to_remote_storage
        @to_store = ObjectStorage::Store::REMOTE

        uploads.each_batch(of: batch_size, &method(:enqueue_batch))
      end

      def migrate_to_local_storage
        @to_store = ObjectStorage::Store::LOCAL

        uploads(ObjectStorage::Store::REMOTE).each_batch(of: batch_size, &method(:enqueue_batch))
      end

      private

      def batch_size
        ENV.fetch('MIGRATION_BATCH_SIZE', 200).to_i
      end

      def prepare_variables(args, logger)
        @mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
        @uploader_class = args.uploader_class.constantize
        @model_class = args.model_class.constantize
        @logger = logger
      end

      def enqueue_batch(batch, index)
        job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
                                                           @model_class,
                                                           @mounted_as,
                                                           @to_store)
        logger.info(message: "[Uploads migration] Enqueued upload migration job", index: index, job_id: job)
      rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
        # continue for the next batch
        logger.warn(message: "[Uploads migration] Could not enqueue batch", ids: batch.ids, reason: e.message) # rubocop:disable CodeReuse/ActiveRecord
      end

      # rubocop:disable CodeReuse/ActiveRecord
      def uploads(store_type = [nil, ObjectStorage::Store::LOCAL])
        Upload.class_eval { include EachBatch } unless Upload < EachBatch

        Upload
          .where(store: store_type,
                 uploader: @uploader_class.to_s,
                 model_type: @model_class.base_class.sti_name)
      end
      # rubocop:enable CodeReuse/ActiveRecord
    end
  end
end
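`batch_size` reads `MIGRATION_BATCH_SIZE` (defaulting to 200 here, 10 in the artifacts helper), so the number of uploads enqueued per worker job can be tuned at invocation time without code changes. The value below is only an example:

```bash
# Enqueue larger batches per MigrateUploadsWorker job (500 is an arbitrary example value).
sudo MIGRATION_BATCH_SIZE=500 gitlab-rake "gitlab:uploads:migrate_to_local:all"
```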
@@ -6,18 +6,31 @@ namespace :gitlab do
  namespace :artifacts do
    task migrate: :environment do
      logger = Logger.new(STDOUT)
      logger.info('Starting transfer of artifacts')
      logger.info('Starting transfer of artifacts to remote storage')

      Ci::Build.joins(:project)
        .with_artifacts_stored_locally
        .find_each(batch_size: 10) do |build|
      helper = Gitlab::Artifacts::MigrationHelper.new

        build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
        build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)

        logger.info("Transferred artifact ID #{build.id} with size #{build.artifacts_size} to object storage")
      begin
        helper.migrate_to_remote_storage do |artifact|
          logger.info("Transferred artifact ID #{artifact.id} of type #{artifact.file_type} with size #{artifact.size} to object storage")
        end
      rescue => e
        logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
        logger.error(e.message)
      end
    end

    task migrate_to_local: :environment do
      logger = Logger.new(STDOUT)
      logger.info('Starting transfer of artifacts to local storage')

      helper = Gitlab::Artifacts::MigrationHelper.new

      begin
        helper.migrate_to_local_storage do |artifact|
          logger.info("Transferred artifact ID #{artifact.id} of type #{artifact.file_type} with size #{artifact.size} to local storage")
        end
      rescue => e
        logger.error(e.message)
      end
    end
  end
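Both artifact tasks now delegate to `Gitlab::Artifacts::MigrationHelper`, which processes records in batches of `MIGRATION_BATCH_SIZE` (default 10), so a larger batch can be requested when invoking the task. The value below is illustrative:

```bash
# Run the new local-storage migration with a larger batch size (50 is an example value).
sudo MIGRATION_BATCH_SIZE=50 gitlab-rake gitlab:artifacts:migrate_to_local
```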
@@ -17,5 +17,20 @@ namespace :gitlab do
        logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
      end
    end

    task migrate_to_local: :environment do
      logger = Logger.new(STDOUT)
      logger.info('Starting transfer of LFS files to local storage')

      LfsObject.with_files_stored_remotely
        .find_each(batch_size: 10) do |lfs_object|

        lfs_object.file.migrate!(LfsObjectUploader::Store::LOCAL)

        logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to local storage")
      rescue => e
        logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
      end
    end
  end
end
@@ -1,38 +0,0 @@
require 'logger'
require 'resolv-replace'

desc "GitLab | Archive legacy traces to trace artifacts"
namespace :gitlab do
  namespace :traces do
    task archive: :environment do
      logger = Logger.new(STDOUT)
      logger.info('Archiving legacy traces')

      Ci::Build.finished.without_archived_trace
        .order(id: :asc)
        .find_in_batches(batch_size: 1000) do |jobs|
        job_ids = jobs.map { |job| [job.id] }

        ArchiveTraceWorker.bulk_perform_async(job_ids)

        logger.info("Scheduled #{job_ids.count} jobs. From #{job_ids.min} to #{job_ids.max}")
      end
    end

    task migrate: :environment do
      logger = Logger.new(STDOUT)
      logger.info('Starting transfer of job traces')

      Ci::Build.joins(:project)
        .with_archived_trace_stored_locally
        .find_each(batch_size: 10) do |build|

        build.job_artifacts_trace.file.migrate!(ObjectStorage::Store::REMOTE)

        logger.info("Transferred job trace of #{build.id} to object storage")
      rescue => e
        logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
      end
    end
  end
end
@@ -3,19 +3,7 @@ namespace :gitlab do
  namespace :migrate do
    desc "GitLab | Uploads | Migrate all uploaded files to object storage"
    task all: :environment do
      categories = [%w(AvatarUploader Project :avatar),
                    %w(AvatarUploader Group :avatar),
                    %w(AvatarUploader User :avatar),
                    %w(AttachmentUploader Note :attachment),
                    %w(AttachmentUploader Appearance :logo),
                    %w(AttachmentUploader Appearance :header_logo),
                    %w(FaviconUploader Appearance :favicon),
                    %w(FileUploader Project),
                    %w(PersonalFileUploader Snippet),
                    %w(NamespaceFileUploader Snippet),
                    %w(FileUploader MergeRequest)]

      categories.each do |args|
      Gitlab::Uploads::MigrationHelper::CATEGORIES.each do |args|
        Rake::Task["gitlab:uploads:migrate"].invoke(*args)
        Rake::Task["gitlab:uploads:migrate"].reenable
      end
@@ -25,34 +13,23 @@ namespace :gitlab do
    # The following is the actual rake task that migrates uploads of specified
    # category to object storage
    desc 'GitLab | Uploads | Migrate the uploaded files of specified type to object storage'
    task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
      batch_size = ENV.fetch('BATCH', 200).to_i
      @to_store = ObjectStorage::Store::REMOTE
      @mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
      @uploader_class = args.uploader_class.constantize
      @model_class = args.model_class.constantize

      uploads.each_batch(of: batch_size, &method(:enqueue_batch))
    task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |_t, args|
      Gitlab::Uploads::MigrationHelper.new(args, Logger.new(STDOUT)).migrate_to_remote_storage
    end

    def enqueue_batch(batch, index)
      job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
                                                         @model_class,
                                                         @mounted_as,
                                                         @to_store)
      puts "Enqueued job ##{index}: #{job}"
    rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
      # continue for the next batch
      puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
    namespace :migrate_to_local do
      desc "GitLab | Uploads | Migrate all uploaded files to local storage"
      task all: :environment do
        Gitlab::Uploads::MigrationHelper::CATEGORIES.each do |args|
          Rake::Task["gitlab:uploads:migrate_to_local"].invoke(*args)
          Rake::Task["gitlab:uploads:migrate_to_local"].reenable
        end
      end
    end

    def uploads
      Upload.class_eval { include EachBatch } unless Upload < EachBatch

      Upload
        .where(store: [nil, ObjectStorage::Store::LOCAL],
               uploader: @uploader_class.to_s,
               model_type: @model_class.base_class.sti_name)
    desc 'GitLab | Uploads | Migrate the uploaded files of specified type to local storage'
    task :migrate_to_local, [:uploader_class, :model_class, :mounted_as] => :environment do |_t, args|
      Gitlab::Uploads::MigrationHelper.new(args, Logger.new(STDOUT)).migrate_to_local_storage
    end
  end
end
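Because `gitlab:uploads:migrate_to_local` takes the same `[uploader_class, model_class, mounted_as]` arguments as `gitlab:uploads:migrate`, individual entries from `CATEGORIES` can be migrated back one at a time instead of running the `:all` wrapper. The commands below use standard rake argument syntax and are illustrative:

```bash
# Migrate a single upload category back to local storage.
gitlab-rake "gitlab:uploads:migrate_to_local[AvatarUploader,Project,:avatar]"
gitlab-rake "gitlab:uploads:migrate_to_local[FileUploader,Project]"
```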
@@ -4,17 +4,17 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
  let(:project) { create(:project) }
  let(:client) { double(:client) }
  let(:importer) { described_class.new(project, client) }
  let(:github_release_name) { 'Initial Release' }
  let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
  let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
  let(:released_at) { Time.new(2017, 1, 1, 12, 00) }

  let(:release) do
  let(:github_release) do
    double(
      :release,
      :github_release,
      tag_name: '1.0',
      name: github_release_name,
      body: 'This is my release',
      created_at: created_at,
      updated_at: updated_at,
      published_at: released_at
    )
  end

@@ -25,7 +25,7 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
      tag_name: '1.0',
      description: 'This is my release',
      created_at: created_at,
      updated_at: updated_at,
      updated_at: created_at,
      released_at: released_at
    }
@@ -37,8 +37,8 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
  end

  describe '#build_releases' do
    it 'returns an Array containnig release rows' do
      expect(importer).to receive(:each_release).and_return([release])
    it 'returns an Array containing release rows' do
      expect(importer).to receive(:each_release).and_return([github_release])

      rows = importer.build_releases

@@ -49,13 +49,13 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
    it 'does not create releases that already exist' do
      create(:release, project: project, tag: '1.0', description: '1.0')

      expect(importer).to receive(:each_release).and_return([release])
      expect(importer).to receive(:each_release).and_return([github_release])
      expect(importer.build_releases).to be_empty
    end

    it 'uses a default release description if none is provided' do
      expect(release).to receive(:body).and_return('')
      expect(importer).to receive(:each_release).and_return([release])
      expect(github_release).to receive(:body).and_return('')
      expect(importer).to receive(:each_release).and_return([github_release])

      release = importer.build_releases.first
@@ -64,7 +64,7 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
  end

  describe '#build' do
    let(:release_hash) { importer.build(release) }
    let(:release_hash) { importer.build(github_release) }

    it 'returns the attributes of the release as a Hash' do
      expect(release_hash).to be_an_instance_of(Hash)
|
|||
end
|
||||
|
||||
it 'includes the updated timestamp' do
|
||||
expect(release_hash[:updated_at]).to eq(updated_at)
|
||||
expect(release_hash[:updated_at]).to eq(created_at)
|
||||
end
|
||||
|
||||
it 'includes the release name' do
|
||||
expect(release_hash[:name]).to eq(github_release_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#each_release' do
|
||||
let(:release) { double(:release) }
|
||||
let(:github_release) { double(:github_release) }
|
||||
|
||||
before do
|
||||
allow(project).to receive(:import_source).and_return('foo/bar')
|
||||
|
@ -102,7 +106,7 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
|
|||
allow(client)
|
||||
.to receive(:releases)
|
||||
.with('foo/bar')
|
||||
.and_return([release].to_enum)
|
||||
.and_return([github_release].to_enum)
|
||||
end
|
||||
|
||||
it 'returns an Enumerator' do
|
||||
|
@ -110,19 +114,19 @@ describe Gitlab::GithubImport::Importer::ReleasesImporter do
|
|||
end
|
||||
|
||||
it 'yields every release to the Enumerator' do
|
||||
expect(importer.each_release.next).to eq(release)
|
||||
expect(importer.each_release.next).to eq(github_release)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#description_for' do
|
||||
it 'returns the description when present' do
|
||||
expect(importer.description_for(release)).to eq(release.body)
|
||||
expect(importer.description_for(github_release)).to eq(github_release.body)
|
||||
end
|
||||
|
||||
it 'returns a generated description when one is not present' do
|
||||
allow(release).to receive(:body).and_return('')
|
||||
allow(github_release).to receive(:body).and_return('')
|
||||
|
||||
expect(importer.description_for(release)).to eq('Release for tag 1.0')
|
||||
expect(importer.description_for(github_release)).to eq('Release for tag 1.0')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -38,6 +38,15 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do

  it { is_expected.to respond_to :project }

  describe 'applications have inverse_of: :cluster option' do
    let(:cluster) { create(:cluster) }
    let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }

    it 'does not do a third query when referencing cluster again' do
      expect { cluster.application_helm.cluster }.not_to exceed_query_limit(2)
    end
  end

  describe '.enabled' do
    subject { described_class.enabled }
|
|||
stub_artifacts_object_storage(enabled: object_storage_enabled)
|
||||
end
|
||||
|
||||
subject { run_rake_task('gitlab:artifacts:migrate') }
|
||||
describe 'gitlab:artifacts:migrate' do
|
||||
subject { run_rake_task('gitlab:artifacts:migrate') }
|
||||
|
||||
context 'job artifacts' do
|
||||
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
|
||||
let!(:job_trace) { create(:ci_job_artifact, :trace, file_store: store) }
|
||||
|
||||
context 'when local storage is used' do
|
||||
let(:store) { ObjectStorage::Store::LOCAL }
|
||||
|
@ -27,6 +28,7 @@ describe 'gitlab:artifacts namespace rake task' do
|
|||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -37,6 +39,7 @@ describe 'gitlab:artifacts namespace rake task' do
|
|||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -45,6 +48,7 @@ describe 'gitlab:artifacts namespace rake task' do
|
|||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -57,6 +61,40 @@ describe 'gitlab:artifacts namespace rake task' do
|
|||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'gitlab:artifacts:migrate_to_local' do
|
||||
let(:object_storage_enabled) { true }
|
||||
|
||||
subject { run_rake_task('gitlab:artifacts:migrate_to_local') }
|
||||
|
||||
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
|
||||
let!(:job_trace) { create(:ci_job_artifact, :trace, file_store: store) }
|
||||
|
||||
context 'when remote storage is used' do
|
||||
let(:store) { ObjectStorage::Store::REMOTE }
|
||||
|
||||
context 'and job has remote file store defined' do
|
||||
it "migrates file to local storage" do
|
||||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when local storage is used' do
|
||||
let(:store) { ObjectStorage::Store::LOCAL }
|
||||
|
||||
it 'file stays on local storage' do
|
||||
subject
|
||||
|
||||
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -5,32 +5,49 @@ describe 'gitlab:lfs namespace rake task' do
    Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
  end

  describe 'migrate' do
  context 'migration tasks' do
    let(:local) { ObjectStorage::Store::LOCAL }
    let(:remote) { ObjectStorage::Store::REMOTE }
    let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }

    def lfs_migrate
      run_rake_task('gitlab:lfs:migrate')
    before do
      stub_lfs_object_storage(background_upload: false, direct_upload: false)
    end

    context 'object storage disabled' do
      before do
        stub_lfs_object_storage(enabled: false)
    describe 'migrate' do
      subject { run_rake_task('gitlab:lfs:migrate') }

      let!(:lfs_object) { create(:lfs_object, :with_file) }

      context 'object storage disabled' do
        before do
          stub_lfs_object_storage(enabled: false)
        end

        it "doesn't migrate files" do
          expect { subject }.not_to change { lfs_object.reload.file_store }
        end
      end

      it "doesn't migrate files" do
        expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
      context 'object storage enabled' do
        it 'migrates local file to object storage' do
          expect { subject }.to change { lfs_object.reload.file_store }.from(local).to(remote)
        end
      end
    end

    context 'object storage enabled' do
    describe 'migrate_to_local' do
      subject { run_rake_task('gitlab:lfs:migrate_to_local') }

      let(:lfs_object) { create(:lfs_object, :with_file, :object_storage) }

      before do
        stub_lfs_object_storage
        stub_lfs_object_storage(background_upload: false, direct_upload: true)
      end

      it 'migrates local file to object storage' do
        expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
      context 'object storage enabled' do
        it 'migrates remote files to local storage' do
          expect { subject }.to change { lfs_object.reload.file_store }.from(remote).to(local)
        end
      end
    end
  end
end
@@ -1,113 +0,0 @@
require 'rake_helper'

describe 'gitlab:traces rake tasks' do
  before do
    Rake.application.rake_require 'tasks/gitlab/traces'
  end

  describe 'gitlab:traces:archive' do
    shared_examples 'passes the job id to worker' do
      it do
        expect(ArchiveTraceWorker).to receive(:bulk_perform_async).with([[job.id]])

        run_rake_task('gitlab:traces:archive')
      end
    end

    shared_examples 'does not pass the job id to worker' do
      it do
        expect(ArchiveTraceWorker).not_to receive(:bulk_perform_async)

        run_rake_task('gitlab:traces:archive')
      end
    end

    context 'when trace file stored in default path' do
      let!(:job) { create(:ci_build, :success, :trace_live) }

      it_behaves_like 'passes the job id to worker'
    end

    context 'when trace is stored in database' do
      let!(:job) { create(:ci_build, :success) }

      before do
        job.update_column(:trace, 'trace in db')
      end

      it_behaves_like 'passes the job id to worker'
    end

    context 'when job has trace artifact' do
      let!(:job) { create(:ci_build, :success) }

      before do
        create(:ci_job_artifact, :trace, job: job)
      end

      it_behaves_like 'does not pass the job id to worker'
    end

    context 'when job is not finished yet' do
      let!(:build) { create(:ci_build, :running, :trace_live) }

      it_behaves_like 'does not pass the job id to worker'
    end
  end

  describe 'gitlab:traces:migrate' do
    let(:object_storage_enabled) { false }

    before do
      stub_artifacts_object_storage(enabled: object_storage_enabled)
    end

    subject { run_rake_task('gitlab:traces:migrate') }

    let!(:job_trace) { create(:ci_job_artifact, :trace, file_store: store) }

    context 'when local storage is used' do
      let(:store) { ObjectStorage::Store::LOCAL }

      context 'and job does not have file store defined' do
        let(:object_storage_enabled) { true }
        let(:store) { nil }

        it "migrates file to remote storage" do
          subject

          expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
        end
      end

      context 'and remote storage is defined' do
        let(:object_storage_enabled) { true }

        it "migrates file to remote storage" do
          subject

          expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
        end
      end

      context 'and remote storage is not defined' do
        it "fails to migrate to remote storage" do
          subject

          expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
        end
      end
    end

    context 'when remote storage is used' do
      let(:object_storage_enabled) { true }
      let(:store) { ObjectStorage::Store::REMOTE }

      it "file stays on remote storage" do
        subject

        expect(job_trace.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
      end
    end
  end
end
@@ -1,31 +1,42 @@
require 'rake_helper'

describe 'gitlab:uploads:migrate rake tasks' do
describe 'gitlab:uploads:migrate and migrate_to_local rake tasks' do
  let(:model_class) { nil }
  let(:uploader_class) { nil }
  let(:mounted_as) { nil }
  let(:batch_size) { 3 }

  before do
    stub_env('BATCH', batch_size.to_s)
    stub_env('MIGRATION_BATCH_SIZE', batch_size.to_s)
    stub_uploads_object_storage(uploader_class)
    Rake.application.rake_require 'tasks/gitlab/uploads/migrate'

    allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
  end

  def run
  def run(task)
    args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
    run_rake_task("gitlab:uploads:migrate", *args)
    run_rake_task(task, *args)
  end

  shared_examples 'enqueue jobs in batch' do |batch:|
    it do
    it 'migrates local storage to remote object storage' do
      expect(ObjectStorage::MigrateUploadsWorker)
        .to receive(:perform_async).exactly(batch).times
        .and_return("A fake job.")
        .and_return("A fake job.")

      run
      run('gitlab:uploads:migrate')
    end

    it 'migrates remote object storage to local storage' do
      expect(Upload).to receive(:where).exactly(batch + 1).times { Upload.all }
      expect(ObjectStorage::MigrateUploadsWorker)
        .to receive(:perform_async)
        .with(anything, model_class.name, mounted_as, ObjectStorage::Store::LOCAL)
        .exactly(batch).times
        .and_return("A fake job.")

      run('gitlab:uploads:migrate_to_local')
    end
  end
@@ -11,8 +11,8 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
  let(:uploads) { Upload.all }
  let(:to_store) { ObjectStorage::Store::REMOTE }

  def perform(uploads)
    described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, to_store)
  def perform(uploads, store = nil)
    described_class.new.perform(uploads.ids, model_class.to_s, mounted_as, store || to_store)
  rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
    # swallow
  end

@@ -97,12 +97,28 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do

      it_behaves_like 'outputs correctly', success: 10

      it 'migrates files' do
      it 'migrates files to remote storage' do
        perform(uploads)

        expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(0)
      end

      context 'reversed' do
        let(:to_store) { ObjectStorage::Store::LOCAL }

        before do
          perform(uploads, ObjectStorage::Store::REMOTE)
        end

        it 'migrates files to local storage' do
          expect(Upload.where(store: ObjectStorage::Store::REMOTE).count).to eq(10)

          perform(uploads)

          expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(10)
        end
      end

      context 'migration is unsuccessful' do
        before do
          allow_any_instance_of(ObjectStorage::Concern)