Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2019-11-29 15:06:43 +00:00
parent b860c6ba26
commit eac0da9a47
38 changed files with 681 additions and 173 deletions

View file

@@ -5,10 +5,11 @@ stages:
- prepare
- quick-test
- test
- post-test
- review-prepare
- review
- qa
- post-test
- post-qa
- notification
- pages

View file

@@ -275,7 +275,7 @@ parallel-spec-reports:
- .only-review
- .only:changes-code-qa
image: ruby:2.6-alpine
stage: post-test
stage: post-qa
dependencies: ["review-qa-all"]
variables:
NEW_PARALLEL_SPECS_REPORT: qa/report-new.html

View file

@@ -218,8 +218,13 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
end
def ci_environments_status
environments = if ci_environments_status_on_merge_result?
environments =
if ci_environments_status_on_merge_result?
if Feature.enabled?(:deployment_merge_requests_widget, @project)
EnvironmentStatus.for_deployed_merge_request(@merge_request, current_user)
else
EnvironmentStatus.after_merge_request(@merge_request, current_user)
end
else
EnvironmentStatus.for_merge_request(@merge_request, current_user)
end

View file

@@ -161,6 +161,18 @@ module DiffHelper
end
end
def render_overflow_warning?(diffs_collection)
diff_files = diffs_collection.diff_files
if diff_files.any?(&:too_large?)
Gitlab::Metrics.add_event(:diffs_overflow_single_file_limits)
end
diff_files.overflow?.tap do |overflown|
Gitlab::Metrics.add_event(:diffs_overflow_collection_limits) if overflown
end
end
private
def diff_btn(title, name, selected)
@@ -203,12 +215,6 @@ module DiffHelper
link_to "#{hide_whitespace? ? 'Show' : 'Hide'} whitespace changes", url, class: options[:class]
end
def render_overflow_warning?(diffs_collection)
diffs = @merge_request_diff.presence || diffs_collection.diff_files
diffs.overflow?
end
def diff_file_path_text(diff_file, max: 60)
path = diff_file.new_path

View file

@@ -20,6 +20,28 @@ class EnvironmentStatus
build_environments_status(mr, user, mr.merge_pipeline)
end
def self.for_deployed_merge_request(mr, user)
statuses = []
mr.recent_visible_deployments.each do |deploy|
env = deploy.environment
next unless Ability.allowed?(user, :read_environment, env)
statuses <<
EnvironmentStatus.new(deploy.project, env, mr, deploy.sha)
end
# Existing projects that used deployments prior to the introduction of
# explicitly linked merge requests won't have any data using this new
# approach, so we fall back to retrieving deployments based on CI pipelines.
if statuses.any?
statuses
else
after_merge_request(mr, user)
end
end
def initialize(project, environment, merge_request, sha)
@project = project
@environment = environment

View file

@@ -73,6 +73,14 @@ class MergeRequest < ApplicationRecord
has_many :merge_request_assignees
has_many :assignees, class_name: "User", through: :merge_request_assignees
has_many :deployment_merge_requests
# These are deployments created after the merge request has been merged, and
# the merge request was tracked explicitly (instead of implicitly using a CI
# build).
has_many :deployments,
through: :deployment_merge_requests
KNOWN_MERGE_PARAMS = [
:auto_merge_strategy,
:should_remove_source_branch,
@@ -1475,6 +1483,10 @@ class MergeRequest < ApplicationRecord
true
end
def recent_visible_deployments
deployments.visible.includes(:environment).order(id: :desc).limit(10)
end
private
def with_rebase_lock

View file

@@ -925,8 +925,23 @@ class Repository
def ancestor?(ancestor_id, descendant_id)
return false if ancestor_id.nil? || descendant_id.nil?
counter = Gitlab::Metrics.counter(
:repository_ancestor_calls_total,
'The number of times we call Repository#ancestor with valid arguments')
cache_hit = true
cache_key = "ancestor:#{ancestor_id}:#{descendant_id}"
result = request_store_cache.fetch(cache_key) do
cache.fetch(cache_key) do
cache_hit = false
raw_repository.ancestor?(ancestor_id, descendant_id)
end
end
counter.increment(cache_hit: cache_hit.to_s)
result
end
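The caching added to `ancestor?` above layers a per-request store over the shared Redis-backed cache and counts hits and misses in a Prometheus counter. As a side note, here is a self-contained sketch of that two-layer fetch pattern; the Hash-backed stores are hypothetical stand-ins for `request_store_cache` and `cache`, and the point is that `cache_hit` only flips to `false` when the innermost, expensive block actually runs:

```ruby
# Plain-Ruby sketch of the two-layer fetch used by Repository#ancestor?.
# The Hashes stand in for the per-request store and the Redis-backed cache.
class TwoLayerCache
  def initialize
    @request_store = {} # cleared on every request in the real code
    @shared_cache  = {} # Redis-backed and long-lived in the real code
  end

  # Returns [result, cache_hit]; cache_hit is false only when the
  # expensive block was actually executed.
  def fetch(key)
    cache_hit = true

    result = @request_store.fetch(key) do
      @request_store[key] = @shared_cache.fetch(key) do
        cache_hit = false
        @shared_cache[key] = yield
      end
    end

    [result, cache_hit]
  end
end

calls = 0
cache = TwoLayerCache.new
3.times { cache.fetch("ancestor:a:b") { calls += 1; true } }
puts calls # => 1, the expensive (Gitaly-equivalent) work ran only once
```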
def fetch_as_mirror(url, forced: false, refmap: :all_refs, remote_name: nil, prune: true)
unless remote_name

View file

@@ -0,0 +1,5 @@
---
title: Cache the ancestor? Gitaly call to speed up polling for the merge request widget
merge_request: 20958
author:
type: performance

View file

@@ -0,0 +1,5 @@
---
title: Run housekeeping after moving a repository between shards
merge_request: 20863
author:
type: performance

View file

@@ -0,0 +1,5 @@
---
title: Import large gitlab_project exports via rake task
merge_request: 20724
author:
type: added

View file

@@ -69,6 +69,7 @@ description: 'Learn how to contribute to GitLab.'
- [Developing against interacting components or features](interacting_components.md)
- [File uploads](uploads.md)
- [Auto DevOps development guide](auto_devops.md)
- [Mass Inserting Models](mass_insert.md)
- [Cycle Analytics development guide](cycle_analytics.md)
## Performance guides

View file

@@ -0,0 +1,13 @@
# Mass Inserting Rails Models
Setting the environment variable [`MASS_INSERT=1`](rake_tasks.md#env-variables)
when running `rake setup` will create millions of records, but these records
aren't visible to the `root` user by default.
To make any number of the mass-inserted projects visible to the `root` user, run
the following snippet in the rails console.
```ruby
u = User.find(1)
Project.last(100).each { |p| p.set_create_timestamps && p.add_maintainer(u, current_user: u) } # Change 100 to whatever number of projects you need access to
```

View file

@@ -23,15 +23,17 @@ The current stages are:
pipeline early (currently used to run Geo tests when the branch name starts
with `geo-`, `geo/`, or ends with `-geo`).
- `test`: This stage includes most of the tests, DB/migration jobs, and static analysis jobs.
- `post-test`: This stage includes jobs that build reports or gather data from
the `test` stage's jobs (e.g. coverage, Knapsack metadata etc.).
- `review-prepare`: This stage includes a job that builds the CNG images that are
later used by the (Helm) Review App deployment (see
[Review Apps](testing_guide/review_apps.md) for details).
- `review`: This stage includes jobs that deploy the GitLab and Docs Review Apps.
- `qa`: This stage includes jobs that perform QA tasks against the Review App
that is deployed in the previous stage.
- `post-qa`: This stage includes jobs that build reports or gather data from
the `qa` stage's jobs (e.g. Review App performance report).
- `notification`: This stage includes jobs that send notifications about pipeline status.
- `post-test`: This stage includes jobs that build reports or gather data from
the previous stages' jobs (e.g. coverage, Knapsack metadata etc.).
- `pages`: This stage includes a job that deploys the various reports as
GitLab Pages (e.g. <https://gitlab-org.gitlab.io/gitlab/coverage-ruby/>,
<https://gitlab-org.gitlab.io/gitlab/coverage-javascript/>,

View file

@@ -5,7 +5,7 @@ description: 'Understand and explore the user permission levels in GitLab, and w
# Permissions
Users have different abilities depending on the access level they have in a
particular group or project. If a user is both in a group's project and the
particular group or project. If a user is both in a project's group and the
project itself, the highest permission level is used.
On public and internal projects the Guest role is not enforced. All users will

View file

@@ -15,15 +15,15 @@ performance:
fi
- export CI_ENVIRONMENT_URL=$(cat environment_url.txt)
- mkdir gitlab-exporter
- wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/10-5/index.js
- wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/1.0.0/index.js
- mkdir sitespeed-results
- |
if [ -f .gitlab-urls.txt ]
then
sed -i -e 's@^@'"$CI_ENVIRONMENT_URL"'@' .gitlab-urls.txt
docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt
docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt
else
docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL"
docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL"
fi
- mv sitespeed-results/data/performance.json performance.json
artifacts:

View file

@@ -11,7 +11,7 @@ performance:
image: docker:git
variables:
URL: https://example.com
SITESPEED_VERSION: 6.3.1
SITESPEED_VERSION: 11.2.0
SITESPEED_OPTIONS: ''
services:
- docker:stable-dind

View file

@@ -4,13 +4,17 @@ module Gitlab
module Diff
module FileCollection
class MergeRequestDiff < MergeRequestDiffBase
include Gitlab::Utils::StrongMemoize
def diff_files
strong_memoize(:diff_files) do
diff_files = super
diff_files.each { |diff_file| cache.decorate(diff_file) }
diff_files
end
end
override :write_cache
def write_cache

View file

@@ -34,8 +34,8 @@ module Gitlab
def authorize
message =
if @resource
":wave: Hi there! Before I do anything for you, please [connect your GitLab account](#{@resource})."
if resource
":wave: Hi there! Before I do anything for you, please [connect your GitLab account](#{resource})."
else
":sweat_smile: Couldn't identify you, nor can I authorize you!"
end

View file

@@ -18,6 +18,8 @@ module Gitlab
private
attr_reader :resource
def header_with_list(header, items)
message = [header]
@@ -67,12 +69,51 @@ module Gitlab
def resource_url
url_for(
[
@resource.project.namespace.becomes(Namespace),
@resource.project,
@resource
resource.project.namespace.becomes(Namespace),
resource.project,
resource
]
)
end
def project_link
"[#{project.full_name}](#{project.web_url})"
end
def author_profile_link
"[#{author.to_reference}](#{url_for(author)})"
end
def response_message(custom_pretext: pretext)
{
attachments: [
{
title: "#{issue.title} · #{issue.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: fallback_message,
pretext: custom_pretext,
text: text,
color: color(resource),
fields: fields,
mrkdwn_in: fields_with_markdown
}
]
}
end
def pretext
''
end
def text
''
end
def fields_with_markdown
%i(title pretext fields)
end
end
end
end
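Taken together, the presenter changes in this and the following files replace the per-command attachment hashes with a shared `response_message` template method: the base module builds the Slack-style payload once, and each command presenter only overrides the hooks (`fallback_message`, `pretext`, `text`, `fields_with_markdown`) that actually differ. A minimal sketch of that shape, with hypothetical class names and a trimmed attachment, assuming the defaults shown above:

```ruby
# Sketch of the template-method refactor: the base class assembles the
# payload, subclasses supply only the varying pieces.
class BasePresenter
  def response_message(custom_pretext: pretext)
    {
      attachments: [
        {
          fallback: fallback_message,
          pretext: custom_pretext,
          text: text
        }
      ]
    }
  end

  # Hooks with safe defaults, overridden per command.
  def pretext
    ''
  end

  def text
    ''
  end

  def fallback_message
    ''
  end
end

class ClosedIssuePresenter < BasePresenter
  def fallback_message
    'Closed issue #1: Fix the build'
  end

  def pretext
    "I closed an issue on the author's behalf"
  end
end

puts ClosedIssuePresenter.new.response_message
```

Each concrete presenter below then shrinks to a handful of such one-line overrides instead of rebuilding the whole attachment hash itself.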

View file

@@ -42,17 +42,11 @@ module Gitlab
]
end
def project_link
"[#{project.full_name}](#{project.web_url})"
end
def author_profile_link
"[#{author.to_reference}](#{url_for(author)})"
end
private
attr_reader :resource
alias_method :issue, :resource
end
end
end

View file

@@ -7,43 +7,25 @@ module Gitlab
include Presenters::IssueBase
def present
if @resource.confidential?
ephemeral_response(close_issue)
if resource.confidential?
ephemeral_response(response_message)
else
in_channel_response(close_issue)
in_channel_response(response_message)
end
end
def already_closed
ephemeral_response(text: "Issue #{@resource.to_reference} is already closed.")
ephemeral_response(text: "Issue #{resource.to_reference} is already closed.")
end
private
def close_issue
{
attachments: [
{
title: "#{@resource.title} · #{@resource.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "Closed issue #{@resource.to_reference}: #{@resource.title}",
pretext: pretext,
color: color(@resource),
fields: fields,
mrkdwn_in: [
:title,
:pretext,
:fields
]
}
]
}
def fallback_message
"Closed issue #{issue.to_reference}: #{issue.title}"
end
def pretext
"I closed an issue on #{author_profile_link}'s behalf: *#{@resource.to_reference}* in #{project_link}"
"I closed an issue on #{author_profile_link}'s behalf: *#{issue.to_reference}* in #{project_link}"
end
end
end

View file

@@ -7,31 +7,13 @@ module Gitlab
include Presenters::NoteBase
def present
ephemeral_response(new_note)
ephemeral_response(response_message)
end
private
def new_note
{
attachments: [
{
title: "#{issue.title} · #{issue.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "New comment on #{issue.to_reference}: #{issue.title}",
pretext: pretext,
color: color,
fields: fields,
mrkdwn_in: [
:title,
:pretext,
:fields
]
}
]
}
def fallback_message
"New comment on #{issue.to_reference}: #{issue.title}"
end
def pretext

View file

@@ -19,30 +19,15 @@ module Gitlab
private
def moved_issue(old_issue)
{
attachments: [
{
title: "#{@resource.title} · #{@resource.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "Issue #{@resource.to_reference}: #{@resource.title}",
pretext: pretext(old_issue),
color: color(@resource),
fields: fields,
mrkdwn_in: [
:title,
:pretext,
:text,
:fields
]
}
]
}
response_message(custom_pretext: custom_pretext(old_issue))
end
def pretext(old_issue)
"Moved issue *#{issue_link(old_issue)}* to *#{issue_link(@resource)}*"
def fallback_message
"Issue #{issue.to_reference}: #{issue.title}"
end
def custom_pretext(old_issue)
"Moved issue *#{issue_link(old_issue)}* to *#{issue_link(issue)}*"
end
def issue_link(issue)

View file

@@ -7,36 +7,21 @@ module Gitlab
include Presenters::IssueBase
def present
in_channel_response(new_issue)
in_channel_response(response_message)
end
private
def new_issue
{
attachments: [
{
title: "#{@resource.title} · #{@resource.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "New issue #{@resource.to_reference}: #{@resource.title}",
pretext: pretext,
color: color(@resource),
fields: fields,
mrkdwn_in: [
:title,
:pretext,
:text,
:fields
]
}
]
}
def fallback_message
"New issue #{issue.to_reference}: #{issue.title}"
end
def fields_with_markdown
%i(title pretext text fields)
end
def pretext
"I created an issue on #{author_profile_link}'s behalf: *#{@resource.to_reference}* in #{project_link}"
"I created an issue on #{author_profile_link}'s behalf: *#{issue.to_reference}* in #{project_link}"
end
end
end

View file

@@ -7,12 +7,12 @@ module Gitlab
include Presenters::IssueBase
def present
text = if @resource.count >= 5
text = if resource.count >= 5
"Here are the first 5 issues I found:"
elsif @resource.one?
elsif resource.one?
"Here is the only issue I found:"
else
"Here are the #{@resource.count} issues I found:"
"Here are the #{resource.count} issues I found:"
end
ephemeral_response(text: text, attachments: attachments)
@@ -21,7 +21,7 @@ module Gitlab
private
def attachments
@resource.map do |issue|
resource.map do |issue|
url = "[#{issue.to_reference}](#{url_for([namespace, project, issue])})"
{
@@ -37,7 +37,7 @@ module Gitlab
end
def project
@project ||= @resource.first.project
@project ||= resource.first.project
end
def namespace

View file

@@ -7,55 +7,36 @@ module Gitlab
include Presenters::IssueBase
def present
if @resource.confidential?
ephemeral_response(show_issue)
if resource.confidential?
ephemeral_response(response_message)
else
in_channel_response(show_issue)
in_channel_response(response_message)
end
end
private
def show_issue
{
attachments: [
{
title: "#{@resource.title} · #{@resource.to_reference}",
title_link: resource_url,
author_name: author.name,
author_icon: author.avatar_url,
fallback: "Issue #{@resource.to_reference}: #{@resource.title}",
pretext: pretext,
text: text,
color: color(@resource),
fields: fields,
mrkdwn_in: [
:pretext,
:text,
:fields
]
}
]
}
def fallback_message
"Issue #{resource.to_reference}: #{resource.title}"
end
def text
message = ["**#{status_text(@resource)}**"]
message = ["**#{status_text(resource)}**"]
if @resource.upvotes.zero? && @resource.downvotes.zero? && @resource.user_notes_count.zero?
if resource.upvotes.zero? && resource.downvotes.zero? && resource.user_notes_count.zero?
return message.join
end
message << " · "
message << ":+1: #{@resource.upvotes} " unless @resource.upvotes.zero?
message << ":-1: #{@resource.downvotes} " unless @resource.downvotes.zero?
message << ":speech_balloon: #{@resource.user_notes_count}" unless @resource.user_notes_count.zero?
message << ":+1: #{resource.upvotes} " unless resource.upvotes.zero?
message << ":-1: #{resource.downvotes} " unless resource.downvotes.zero?
message << ":speech_balloon: #{resource.user_notes_count}" unless resource.user_notes_count.zero?
message.join
end
def pretext
"Issue *#{@resource.to_reference}* from #{project.full_name}"
"Issue *#{resource.to_reference}* from #{project.full_name}"
end
end
end

View file

@@ -6,7 +6,7 @@ module Gitlab
module NoteBase
GREEN = '#38ae67'
def color
def color(_)
GREEN
end
@@ -18,18 +18,10 @@ module Gitlab
issue.project
end
def project_link
"[#{project.full_name}](#{project.web_url})"
end
def author
resource.author
end
def author_profile_link
"[#{author.to_reference}](#{url_for(author)})"
end
def fields
[
{

View file

@@ -0,0 +1,132 @@
# frozen_string_literal: true
# Import large project archives
#
# This task:
# 1. Disables ObjectStorage for archive upload
# 2. Performs Sidekiq job synchronously
#
# @example
# bundle exec rake "gitlab:import_export:import[root, root, imported_project, /path/to/file.tar.gz]"
#
require 'sidekiq/testing'
namespace :gitlab do
namespace :import_export do
desc 'EXPERIMENTAL | Import large project archives'
task :import, [:username, :namespace_path, :project_path, :archive_path] => :gitlab_environment do |_t, args|
warn_user_is_not_gitlab
GitlabProjectImport.new(
namespace_path: args.namespace_path,
project_path: args.project_path,
username: args.username,
file_path: args.archive_path
).import
end
end
end
class GitlabProjectImport
def initialize(opts)
@project_path = opts.fetch(:project_path)
@file_path = opts.fetch(:file_path)
@namespace = Namespace.find_by_full_path(opts.fetch(:namespace_path))
@current_user = User.find_by_username(opts.fetch(:username))
end
def import
show_import_start_message
run_isolated_sidekiq_job
show_import_failures_count
if @project&.import_state&.last_error
puts "ERROR: #{@project.import_state.last_error}"
exit 1
elsif @project.errors.any?
puts "ERROR: #{@project.errors.full_messages.join(', ')}"
exit 1
else
puts 'Done!'
end
rescue StandardError => e
puts "Exception: #{e.message}"
puts e.backtrace
exit 1
end
private
# We want to ensure that all Sidekiq jobs are executed
# synchronously as part of that process.
# This ensures that all expensive operations do not escape
# to general Sidekiq clusters/nodes.
def run_isolated_sidekiq_job
Sidekiq::Testing.fake! do
@project = create_project
execute_sidekiq_job
true
end
end
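The comment above is the key constraint: the rake task must not hand work to a live Sidekiq cluster. `Sidekiq::Testing.fake!` queues jobs in memory and `Sidekiq::Worker.drain_all` then runs them inline in the current process. A small sketch of that behaviour, with a hypothetical worker class:

```ruby
require 'sidekiq'
require 'sidekiq/testing'

# Hypothetical worker used only to illustrate fake mode + drain_all.
class ArchiveWorker
  include Sidekiq::Worker

  def perform(name)
    puts "processing #{name}"
  end
end

Sidekiq::Testing.fake! do
  ArchiveWorker.perform_async('project-export') # queued in memory, not in Redis
  puts ArchiveWorker.jobs.size                  # => 1

  Sidekiq::Worker.drain_all                     # executes every queued job inline
  puts ArchiveWorker.jobs.size                  # => 0
end
```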
def create_project
# We are disabling ObjectStorage for `import`
# as it is too slow to handle big archives:
# 1. DB transaction timeouts on upload
# 2. Download of archive before unpacking
disable_upload_object_storage do
service = Projects::GitlabProjectsImportService.new(
@current_user,
{
namespace_id: @namespace.id,
path: @project_path,
file: File.open(@file_path)
}
)
service.execute
end
end
def execute_sidekiq_job
Sidekiq::Worker.drain_all
end
def disable_upload_object_storage
overwrite_uploads_setting('background_upload', false) do
overwrite_uploads_setting('direct_upload', false) do
yield
end
end
end
def overwrite_uploads_setting(key, value)
old_value = Settings.uploads.object_store[key]
Settings.uploads.object_store[key] = value
yield
ensure
Settings.uploads.object_store[key] = old_value
end
def full_path
"#{@namespace.full_path}/#{@project_path}"
end
def show_import_start_message
puts "Importing GitLab export: #{@file_path} into GitLab" \
" #{full_path}" \
" as #{@current_user.name}"
end
def show_import_failures_count
return unless @project.import_failures.exists?
puts "Total number of not imported relations: #{@project.import_failures.count}"
end
end
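One more detail worth calling out: `overwrite_uploads_setting` restores the original value in an `ensure` block, so even a failing import leaves `Settings.uploads.object_store` untouched, and the nested calls unwind in reverse order. A standalone sketch of that pattern, using a plain Hash and a hypothetical `with_setting` helper in place of the real settings object:

```ruby
# Stand-in for Settings.uploads.object_store
settings = { 'direct_upload' => true, 'background_upload' => true }

# Temporarily override one key, restoring the old value even on errors.
def with_setting(settings, key, value)
  old_value = settings[key]
  settings[key] = value
  yield
ensure
  settings[key] = old_value
end

begin
  with_setting(settings, 'direct_upload', false) do
    with_setting(settings, 'background_upload', false) do
      # Both keys are false only while the "import" runs here.
      raise 'import failed'
    end
  end
rescue RuntimeError
  # Both ensure clauses already ran, restoring the original values.
end

puts settings # => {"direct_upload"=>true, "background_upload"=>true}
```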

View file

@@ -1280,6 +1280,28 @@ describe Projects::MergeRequestsController do
end
end
it 'uses the explicitly linked deployments' do
expect(EnvironmentStatus)
.to receive(:for_deployed_merge_request)
.with(merge_request, user)
.and_call_original
get_ci_environments_status(environment_target: 'merge_commit')
end
context 'when the deployment_merge_requests_widget feature flag is disabled' do
it 'uses the deployments retrieved using CI builds' do
stub_feature_flags(deployment_merge_requests_widget: false)
expect(EnvironmentStatus)
.to receive(:after_merge_request)
.with(merge_request, user)
.and_call_original
get_ci_environments_status(environment_target: 'merge_commit')
end
end
def get_ci_environments_status(extra_params = {})
params = {
namespace_id: merge_request.project.namespace.to_param,

Binary file not shown.

Binary file not shown.

View file

@@ -258,6 +258,51 @@ describe DiffHelper do
end
end
context '#render_overflow_warning?' do
let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::MergeRequestDiff, diff_files: diff_files) }
let(:diff_files) { Gitlab::Git::DiffCollection.new(files) }
let(:safe_file) { { too_large: false, diff: '' } }
let(:large_file) { { too_large: true, diff: '' } }
let(:files) { [safe_file, safe_file] }
before do
allow(diff_files).to receive(:overflow?).and_return(false)
end
context 'when neither collection nor individual file hit the limit' do
it 'returns false and does not log any overflow events' do
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collection_limits)
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_single_file_limits)
expect(render_overflow_warning?(diffs_collection)).to be false
end
end
context 'when the file collection has an overflow' do
before do
allow(diff_files).to receive(:overflow?).and_return(true)
end
it 'returns true and only logs collection overflow event' do
expect(Gitlab::Metrics).to receive(:add_event).with(:diffs_overflow_collection_limits).exactly(:once)
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_single_file_limits)
expect(render_overflow_warning?(diffs_collection)).to be true
end
end
context 'when two individual files are too big' do
let(:files) { [safe_file, large_file, large_file] }
it 'returns false and only logs single file overflow events' do
expect(Gitlab::Metrics).to receive(:add_event).with(:diffs_overflow_single_file_limits).exactly(:once)
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collection_limits)
expect(render_overflow_warning?(diffs_collection)).to be false
end
end
end
context '#diff_file_path_text' do
it 'returns full path by default' do
expect(diff_file_path_text(diff_file)).to eq(diff_file.new_path)

View file

@@ -139,6 +139,8 @@ merge_requests:
- blocking_merge_requests
- blocked_merge_requests
- description_versions
- deployment_merge_requests
- deployments
external_pull_requests:
- project
merge_request_diff:

View file

@@ -92,6 +92,84 @@ describe EnvironmentStatus do
end
end
describe '.for_deployed_merge_request' do
context 'when a merge request has no explicitly linked deployments' do
it 'returns the statuses based on the CI pipelines' do
mr = create(:merge_request, :merged)
expect(described_class)
.to receive(:after_merge_request)
.with(mr, mr.author)
.and_return([])
statuses = described_class.for_deployed_merge_request(mr, mr.author)
expect(statuses).to eq([])
end
end
context 'when a merge request has explicitly linked deployments' do
let(:merge_request) { create(:merge_request, :merged) }
let(:environment) do
create(:environment, project: merge_request.target_project)
end
it 'returns the statuses based on the linked deployments' do
deploy = create(
:deployment,
:success,
project: merge_request.target_project,
environment: environment,
deployable: nil
)
deploy.link_merge_requests(merge_request.target_project.merge_requests)
statuses = described_class
.for_deployed_merge_request(merge_request, merge_request.author)
expect(statuses.length).to eq(1)
expect(statuses[0].environment).to eq(environment)
expect(statuses[0].merge_request).to eq(merge_request)
end
it 'excludes environments the user can not see' do
deploy = create(
:deployment,
:success,
project: merge_request.target_project,
environment: environment,
deployable: nil
)
deploy.link_merge_requests(merge_request.target_project.merge_requests)
statuses = described_class
.for_deployed_merge_request(merge_request, create(:user))
expect(statuses).to be_empty
end
it 'excludes deployments that have the status "created"' do
deploy = create(
:deployment,
:created,
project: merge_request.target_project,
environment: environment,
deployable: nil
)
deploy.link_merge_requests(merge_request.target_project.merge_requests)
statuses = described_class
.for_deployed_merge_request(merge_request, merge_request.author)
expect(statuses).to be_empty
end
end
end
describe '.build_environments_status' do
subject { described_class.send(:build_environments_status, merge_request, user, pipeline) }

View file

@@ -3391,4 +3391,56 @@ describe MergeRequest do
])
end
end
describe '#recent_visible_deployments' do
let(:merge_request) { create(:merge_request) }
let(:environment) do
create(:environment, project: merge_request.target_project)
end
it 'returns visible deployments' do
created = create(
:deployment,
:created,
project: merge_request.target_project,
environment: environment
)
success = create(
:deployment,
:success,
project: merge_request.target_project,
environment: environment
)
failed = create(
:deployment,
:failed,
project: merge_request.target_project,
environment: environment
)
merge_request.deployment_merge_requests.create!(deployment: created)
merge_request.deployment_merge_requests.create!(deployment: success)
merge_request.deployment_merge_requests.create!(deployment: failed)
expect(merge_request.recent_visible_deployments).to eq([failed, success])
end
it 'only returns a limited number of deployments' do
20.times do
deploy = create(
:deployment,
:success,
project: merge_request.target_project,
environment: environment
)
merge_request.deployment_merge_requests.create!(deployment: deploy)
end
expect(merge_request.recent_visible_deployments.count).to eq(10)
end
end
end

View file

@@ -2399,7 +2399,40 @@ describe Repository do
end
describe '#ancestor? with Gitaly enabled' do
it_behaves_like "#ancestor?"
let(:commit) { repository.commit }
let(:ancestor) { commit.parents.first }
let(:cache_key) { "ancestor:#{ancestor.id}:#{commit.id}" }
it_behaves_like '#ancestor?'
context 'caching', :request_store, :clean_gitlab_redis_cache do
it 'only calls out to Gitaly once' do
expect(repository.raw_repository).to receive(:ancestor?).once
2.times { repository.ancestor?(commit.id, ancestor.id) }
end
it 'increments a counter with cache hits' do
counter = Gitlab::Metrics.counter(:repository_ancestor_calls_total, 'Repository ancestor calls')
expect do
2.times { repository.ancestor?(commit.id, ancestor.id) }
end.to change { counter.get(cache_hit: 'true') }.by(1)
.and change { counter.get(cache_hit: 'false') }.by(1)
end
it 'returns the value from the request store' do
repository.__send__(:request_store_cache).write(cache_key, "it's apparent")
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
end
it 'returns the value from the redis cache' do
expect(repository.__send__(:cache)).to receive(:fetch).with(cache_key).and_return("it's apparent")
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
end
end
end
describe '#ancestor? with Rugged enabled', :enable_rugged do

View file

@@ -0,0 +1,106 @@
# frozen_string_literal: true
require 'rake_helper'
describe 'gitlab:import_export:import rake task' do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
let(:task_params) { [username, namespace_path, project_name, archive_path] }
let(:project) { Project.find_by_full_path("#{namespace_path}/#{project_name}") }
before do
Rake.application.rake_require('tasks/gitlab/import_export/import')
allow(Settings.uploads.object_store).to receive(:[]=).and_call_original
end
around do |example|
old_direct_upload_setting = Settings.uploads.object_store['direct_upload']
old_background_upload_setting = Settings.uploads.object_store['background_upload']
Settings.uploads.object_store['direct_upload'] = true
Settings.uploads.object_store['background_upload'] = true
example.run
Settings.uploads.object_store['direct_upload'] = old_direct_upload_setting
Settings.uploads.object_store['background_upload'] = old_background_upload_setting
end
subject { run_rake_task('gitlab:import_export:import', task_params) }
context 'when project import is valid' do
let(:project_name) { 'import_rake_test_project' }
let(:archive_path) { 'spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz' }
it 'performs project import successfully' do
expect { subject }.to output(/Done!/).to_stdout
expect { subject }.not_to raise_error
expect(project.merge_requests.count).to be > 0
expect(project.issues.count).to be > 0
expect(project.milestones.count).to be > 0
expect(project.import_state.status).to eq('finished')
end
it 'disables direct & background upload only during project creation' do
expect_next_instance_of(Projects::GitlabProjectsImportService) do |service|
expect(service).to receive(:execute).and_wrap_original do |m|
expect(Settings.uploads.object_store['background_upload']).to eq(false)
expect(Settings.uploads.object_store['direct_upload']).to eq(false)
m.call
end
end
expect_next_instance_of(GitlabProjectImport) do |importer|
expect(importer).to receive(:execute_sidekiq_job).and_wrap_original do |m|
expect(Settings.uploads.object_store['background_upload']).to eq(true)
expect(Settings.uploads.object_store['direct_upload']).to eq(true)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('background_upload', false)
expect(Settings.uploads.object_store).not_to receive(:[]=).with('direct_upload', false)
m.call
end
end
subject
end
end
context 'when project import is invalid' do
let(:project_name) { 'import_rake_invalid_test_project' }
let(:archive_path) { 'spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz' }
let(:not_imported_message) { /Total number of not imported relations: 1/ }
let(:error) { /Validation failed: Notes is invalid/ }
context 'when import_graceful_failures feature flag is enabled' do
before do
stub_feature_flags(import_graceful_failures: true)
end
it 'performs project import successfully' do
expect { subject }.to output(not_imported_message).to_stdout
expect { subject }.not_to raise_error
expect(project.merge_requests).to be_empty
expect(project.import_state.last_error).to be_nil
expect(project.import_state.status).to eq('finished')
end
end
context 'when import_graceful_failures feature flag is disabled' do
before do
stub_feature_flags(import_graceful_failures: false)
end
it 'fails project import with an error' do
expect { subject }.to raise_error(SystemExit).and output(error).to_stdout
expect(project.merge_requests).to be_empty
expect(project.import_state.last_error).to match(error)
expect(project.import_state.status).to eq('failed')
end
end
end
end