Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-06-04 12:08:21 +00:00
parent b3ce1ce452
commit 63546c0b11
72 changed files with 834 additions and 381 deletions

View File

@ -1,6 +1,6 @@
source 'https://rubygems.org'
gem 'rails', '~> 6.0.3'
gem 'rails', '~> 6.0.3.1'
gem 'bootsnap', '~> 1.4.6'

View File

@ -6,59 +6,59 @@ GEM
ace-rails-ap (4.1.2)
acme-client (2.0.5)
faraday (~> 0.9, >= 0.9.1)
actioncable (6.0.3)
actionpack (= 6.0.3)
actioncable (6.0.3.1)
actionpack (= 6.0.3.1)
nio4r (~> 2.0)
websocket-driver (>= 0.6.1)
actionmailbox (6.0.3)
actionpack (= 6.0.3)
activejob (= 6.0.3)
activerecord (= 6.0.3)
activestorage (= 6.0.3)
activesupport (= 6.0.3)
actionmailbox (6.0.3.1)
actionpack (= 6.0.3.1)
activejob (= 6.0.3.1)
activerecord (= 6.0.3.1)
activestorage (= 6.0.3.1)
activesupport (= 6.0.3.1)
mail (>= 2.7.1)
actionmailer (6.0.3)
actionpack (= 6.0.3)
actionview (= 6.0.3)
activejob (= 6.0.3)
actionmailer (6.0.3.1)
actionpack (= 6.0.3.1)
actionview (= 6.0.3.1)
activejob (= 6.0.3.1)
mail (~> 2.5, >= 2.5.4)
rails-dom-testing (~> 2.0)
actionpack (6.0.3)
actionview (= 6.0.3)
activesupport (= 6.0.3)
actionpack (6.0.3.1)
actionview (= 6.0.3.1)
activesupport (= 6.0.3.1)
rack (~> 2.0, >= 2.0.8)
rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0)
actiontext (6.0.3)
actionpack (= 6.0.3)
activerecord (= 6.0.3)
activestorage (= 6.0.3)
activesupport (= 6.0.3)
actiontext (6.0.3.1)
actionpack (= 6.0.3.1)
activerecord (= 6.0.3.1)
activestorage (= 6.0.3.1)
activesupport (= 6.0.3.1)
nokogiri (>= 1.8.5)
actionview (6.0.3)
activesupport (= 6.0.3)
actionview (6.0.3.1)
activesupport (= 6.0.3.1)
builder (~> 3.1)
erubi (~> 1.4)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.1, >= 1.2.0)
activejob (6.0.3)
activesupport (= 6.0.3)
activejob (6.0.3.1)
activesupport (= 6.0.3.1)
globalid (>= 0.3.6)
activemodel (6.0.3)
activesupport (= 6.0.3)
activerecord (6.0.3)
activemodel (= 6.0.3)
activesupport (= 6.0.3)
activemodel (6.0.3.1)
activesupport (= 6.0.3.1)
activerecord (6.0.3.1)
activemodel (= 6.0.3.1)
activesupport (= 6.0.3.1)
activerecord-explain-analyze (0.1.0)
activerecord (>= 4)
pg
activestorage (6.0.3)
actionpack (= 6.0.3)
activejob (= 6.0.3)
activerecord (= 6.0.3)
activestorage (6.0.3.1)
actionpack (= 6.0.3.1)
activejob (= 6.0.3.1)
activerecord (= 6.0.3.1)
marcel (~> 0.3.1)
activesupport (6.0.3)
activesupport (6.0.3.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
@ -801,20 +801,20 @@ GEM
rack-test (1.1.0)
rack (>= 1.0, < 3)
rack-timeout (0.5.1)
rails (6.0.3)
actioncable (= 6.0.3)
actionmailbox (= 6.0.3)
actionmailer (= 6.0.3)
actionpack (= 6.0.3)
actiontext (= 6.0.3)
actionview (= 6.0.3)
activejob (= 6.0.3)
activemodel (= 6.0.3)
activerecord (= 6.0.3)
activestorage (= 6.0.3)
activesupport (= 6.0.3)
rails (6.0.3.1)
actioncable (= 6.0.3.1)
actionmailbox (= 6.0.3.1)
actionmailer (= 6.0.3.1)
actionpack (= 6.0.3.1)
actiontext (= 6.0.3.1)
actionview (= 6.0.3.1)
activejob (= 6.0.3.1)
activemodel (= 6.0.3.1)
activerecord (= 6.0.3.1)
activestorage (= 6.0.3.1)
activesupport (= 6.0.3.1)
bundler (>= 1.3.0)
railties (= 6.0.3)
railties (= 6.0.3.1)
sprockets-rails (>= 2.0.0)
rails-controller-testing (1.0.4)
actionpack (>= 5.0.1.x)
@ -828,9 +828,9 @@ GEM
rails-i18n (6.0.0)
i18n (>= 0.7, < 2)
railties (>= 6.0.0, < 7)
railties (6.0.3)
actionpack (= 6.0.3)
activesupport (= 6.0.3)
railties (6.0.3.1)
actionpack (= 6.0.3.1)
activesupport (= 6.0.3.1)
method_source
rake (>= 0.8.7)
thor (>= 0.20.3, < 2.0)
@ -1335,7 +1335,7 @@ DEPENDENCIES
rack-oauth2 (~> 1.9.3)
rack-proxy (~> 0.6.0)
rack-timeout
rails (~> 6.0.3)
rails (~> 6.0.3.1)
rails-controller-testing
rails-i18n (~> 6.0)
rainbow (~> 3.0)

View File

@ -244,22 +244,28 @@ export const contentTop = () => {
);
};
export const scrollToElement = element => {
export const scrollToElement = (element, options = {}) => {
let $el = element;
if (!(element instanceof $)) {
$el = $(element);
}
const { top } = $el.offset();
const { offset = 0 } = options;
// eslint-disable-next-line no-jquery/no-animate
return $('body, html').animate(
{
scrollTop: top - contentTop(),
scrollTop: top - contentTop() + offset,
},
200,
);
};
export const scrollToElementWithContext = element => {
const offsetMultiplier = -0.1;
return scrollToElement(element, { offset: window.innerHeight * offsetMultiplier });
};
/**
* Returns a function that can only be invoked once between
* each browser screen repaint.

View File

@ -1,5 +1,5 @@
import { mapGetters, mapActions, mapState } from 'vuex';
import { scrollToElement } from '~/lib/utils/common_utils';
import { scrollToElementWithContext } from '~/lib/utils/common_utils';
import eventHub from '../event_hub';
/**
@ -10,7 +10,7 @@ function scrollTo(selector) {
const el = document.querySelector(selector);
if (el) {
scrollToElement(el);
scrollToElementWithContext(el);
return true;
}

View File

@ -35,7 +35,7 @@ export default {
<ul class="dropdown-menu dropdown-menu-right">
<li v-for="(artifact, i) in artifacts" :key="i">
<gl-link :href="artifact.path" rel="nofollow" download
>Download {{ artifact.name }} artifacts</gl-link
>Download {{ artifact.name }} artifact</gl-link
>
</li>
</ul>

View File

@ -11,7 +11,7 @@ import {
COPY_BUILD_TITLE,
PUSH_COMMAND_LABEL,
COPY_PUSH_TITLE,
} from '../constants/index';
} from '../../constants/index';
export default {
components: {

View File

@ -8,7 +8,7 @@ import {
LIST_DELETE_BUTTON_DISABLED,
REMOVE_REPOSITORY_LABEL,
ROW_SCHEDULED_FOR_DELETION,
} from '../constants/index';
} from '../../constants/index';
export default {
name: 'ImageListrow',

View File

@ -8,7 +8,7 @@ import {
COPY_BUILD_TITLE,
COPY_PUSH_TITLE,
QUICK_START,
} from '../constants/index';
} from '../../constants/index';
export default {
name: 'ProjectEmptyState',

View File

@ -9,7 +9,7 @@ import {
EXPIRATION_POLICY_WILL_RUN_IN,
EXPIRATION_POLICY_DISABLED_TEXT,
EXPIRATION_POLICY_DISABLED_MESSAGE,
} from '../constants/index';
} from '../../constants/index';
export default {
components: {

View File

@ -12,11 +12,11 @@ import {
} from '@gitlab/ui';
import Tracking from '~/tracking';
import ProjectEmptyState from '../components/project_empty_state.vue';
import GroupEmptyState from '../components/group_empty_state.vue';
import RegistryHeader from '../components/registry_header.vue';
import ImageList from '../components/image_list.vue';
import CliCommands from '../components/cli_commands.vue';
import ProjectEmptyState from '../components/list_page/project_empty_state.vue';
import GroupEmptyState from '../components/list_page/group_empty_state.vue';
import RegistryHeader from '../components/list_page/registry_header.vue';
import ImageList from '../components/list_page/image_list.vue';
import CliCommands from '../components/list_page/cli_commands.vue';
import {
DELETE_IMAGE_SUCCESS_MESSAGE,

View File

@ -21,15 +21,16 @@ export default class UserCallout {
dismissCallout(e) {
const $currentTarget = $(e.currentTarget);
const cookieOptions = {};
if (this.options.setCalloutPerProject) {
Cookies.set(this.cookieName, 'true', {
expires: 365,
path: this.userCalloutBody.data('projectPath'),
});
} else {
Cookies.set(this.cookieName, 'true', { expires: 365 });
if (!$currentTarget.hasClass('js-close-session')) {
cookieOptions.expires = 365;
}
if (this.options.setCalloutPerProject) {
cookieOptions.path = this.userCalloutBody.data('projectPath');
}
Cookies.set(this.cookieName, 'true', cookieOptions);
if ($currentTarget.hasClass('close') || $currentTarget.hasClass('js-close')) {
this.userCalloutBody.remove();

View File

@ -162,8 +162,8 @@ module GitlabRoutingHelper
# against the arguments. We can speed this up 10x by generating the strings directly.
# /*namespace_id/:project_id/-/jobs/:job_id/artifacts/download(.:format)
def fast_download_project_job_artifacts_path(project, job)
expose_fast_artifacts_path(project, job, :download)
def fast_download_project_job_artifacts_path(project, job, params = {})
expose_fast_artifacts_path(project, job, :download, params)
end
# /*namespace_id/:project_id/-/jobs/:job_id/artifacts/keep(.:format)
@ -176,8 +176,13 @@ module GitlabRoutingHelper
expose_fast_artifacts_path(project, job, :browse)
end
def expose_fast_artifacts_path(project, job, action)
def expose_fast_artifacts_path(project, job, action, params = {})
path = "#{project.full_path}/-/jobs/#{job.id}/artifacts/#{action}"
unless params.empty?
path += "?#{params.to_query}"
end
Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, path)
end
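A minimal usage sketch (not part of the diff) of the new `params` argument; the path shape follows the route comment above, with the query string appended by `expose_fast_artifacts_path` (the `file_type: :dast` value is borrowed from the routing helper spec further down):

  fast_download_project_job_artifacts_path(project, job, file_type: :dast)
  # => roughly "/<namespace>/<project>/-/jobs/<job_id>/artifacts/download?file_type=dast"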

View File

@ -112,6 +112,7 @@ module Ci
after_save :update_file_store, if: :saved_change_to_file?
scope :not_expired, -> { where('expire_at IS NULL OR expire_at > ?', Time.current) }
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :for_sha, ->(sha, project_id) { joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id }) }
@ -151,6 +152,7 @@ module Ci
end
scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
scope :locked, -> { where(locked: true) }
scope :unlocked, -> { where(locked: [false, nil]) }
@ -246,6 +248,14 @@ module Ci
super || self.file_location.nil?
end
def expired?
expire_at.present? && expire_at < Time.current
end
def expiring?
expire_at.present? && expire_at > Time.current
end
def expire_in
expire_at - Time.current if expire_at
end
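A hedged sketch (not from the diff) of how the new predicates and scopes relate to `expire_at`:

  artifact.expired?   # true only when expire_at is set and already in the past
  artifact.expiring?  # true only when expire_at is set and still in the future
  Ci::JobArtifact.downloadable.not_expired  # the scope chain the pipeline association below relies on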

View File

@ -41,10 +41,13 @@ module Ci
has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline
has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
has_many :job_artifacts, through: :builds
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
has_many :variables, class_name: 'Ci::PipelineVariable'
has_many :deployments, through: :builds
has_many :environments, -> { distinct }, through: :deployments
has_many :latest_builds, -> { latest }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
has_many :downloadable_artifacts, -> { not_expired.downloadable }, through: :latest_builds, source: :job_artifacts
# Merge requests for which the current pipeline is running against
# the merge request's latest commit.
@ -56,7 +59,6 @@ module Ci
has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :artifacts, -> { latest.with_artifacts_not_expired.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
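For orientation (a sketch, not part of the diff): the new association replaces the removed `artifacts` relation for consumers such as the serializer further down.

  pipeline.downloadable_artifacts  # Ci::JobArtifact records of the latest builds, not expired, downloadable file types
  # previously, pipeline.artifacts returned Ci::Build records (latest builds with non-expired archive artifacts)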

View File

@ -4,13 +4,16 @@ module Integration
extend ActiveSupport::Concern
class_methods do
def with_custom_integration_compared_to(integration)
custom_integrations = Service
.select('1')
.where(type: integration.type, inherit_from_id: nil)
.where('services.project_id = projects.id')
def with_custom_integration_for(integration, page = nil, per = nil)
custom_integration_project_ids = Service
.where(type: integration.type)
.where(inherit_from_id: nil)
.distinct # Required until https://gitlab.com/gitlab-org/gitlab/-/issues/207385
.page(page)
.per(per)
.pluck(:project_id)
Project.where('EXISTS (?)', custom_integrations)
Project.where(id: custom_integration_project_ids)
end
end
end
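A hedged usage sketch, assuming the concern is included in Project as the removed controller spec further down suggests; `params[:page]` and the per-page value are illustrative:

  Project.with_custom_integration_for(integration, params[:page], 20)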

View File

@ -4,30 +4,28 @@ class BuildArtifactEntity < Grape::Entity
include RequestAwareEntity
include GitlabRoutingHelper
expose :name do |job|
job.name
alias_method :artifact, :object
expose :name do |artifact|
"#{artifact.job.name}:#{artifact.file_type}"
end
expose :artifacts_expired?, as: :expired
expose :artifacts_expire_at, as: :expire_at
expose :expire_at
expose :expired?, as: :expired
expose :path do |job|
fast_download_project_job_artifacts_path(project, job)
expose :path do |artifact|
fast_download_project_job_artifacts_path(
artifact.project,
artifact.job,
file_type: artifact.file_type
)
end
expose :keep_path, if: -> (*) { job.has_expiring_archive_artifacts? } do |job|
fast_keep_project_job_artifacts_path(project, job)
expose :keep_path, if: -> (*) { artifact.expiring? } do |artifact|
fast_keep_project_job_artifacts_path(artifact.project, artifact.job)
end
expose :browse_path do |job|
fast_browse_project_job_artifacts_path(project, job)
end
private
alias_method :job, :object
def project
job.project
expose :browse_path do |artifact|
fast_browse_project_job_artifacts_path(artifact.project, artifact.job)
end
end
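An illustrative, non-authoritative sketch of the serialized shape after this change, with the name and file type borrowed from the pipelines feature spec further down:

  # BuildArtifactEntity.represent(artifact).as_json ≈
  #   { name: "rspec tests:codequality", expire_at: ..., expired: false,
  #     path: ".../-/jobs/<id>/artifacts/download?file_type=codequality",
  #     keep_path: ... (only while expiring?), browse_path: ... }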

View File

@ -9,8 +9,7 @@ class PipelineDetailsEntity < PipelineEntity
expose :details do
expose :artifacts do |pipeline, options|
rel = pipeline.artifacts
rel = rel.eager_load_job_artifacts_archive if options.fetch(:preload_job_artifacts_archive, true)
rel = pipeline.downloadable_artifacts
BuildArtifactEntity.represent(rel, options)
end

View File

@ -7,10 +7,6 @@ class PipelineSerializer < BaseSerializer
# rubocop: disable CodeReuse/ActiveRecord
def represent(resource, opts = {})
if resource.is_a?(ActiveRecord::Relation)
# We don't want PipelineDetailsEntity to preload the job_artifacts_archive
# because we do it with preloaded_relations in a more optimal way
# if the given resource is a collection of multiple pipelines.
opts[:preload_job_artifacts_archive] = false
resource = resource.preload(preloaded_relations)
end
@ -44,35 +40,29 @@ class PipelineSerializer < BaseSerializer
def preloaded_relations
[
:latest_statuses_ordered_by_stage,
:project,
:stages,
{
failed_builds: %i(project metadata)
},
:retryable_builds,
:cancelable_statuses,
:trigger_requests,
:latest_statuses_ordered_by_stage,
:manual_actions,
:retryable_builds,
:scheduled_actions,
:artifacts,
:stages,
:trigger_requests,
:user,
{
downloadable_artifacts: {
project: [:route, { namespace: :route }],
job: []
},
failed_builds: %i(project metadata),
merge_request: {
source_project: [:route, { namespace: :route }],
target_project: [:route, { namespace: :route }]
}
},
{
},
pending_builds: :project,
project: [:route, { namespace: :route }],
artifacts: {
project: [:route, { namespace: :route }],
job_artifacts_archive: []
}
},
{ triggered_by_pipeline: [:project, :user] },
{ triggered_pipelines: [:project, :user] }
triggered_by_pipeline: [:project, :user],
triggered_pipelines: [:project, :user]
}
]
end
end

View File

@ -0,0 +1,5 @@
---
title: Show more context in unresolved jump button
merge_request: 32737
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: Fix 404 when downloading a non-archive artifact
merge_request: 32811
author:
type: fixed

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
require 'action_cable/subscription_adapter/redis'
Rails.application.configure do
# We only mount the ActionCable engine in tests where we run it in-app
# For other environments, we run it on a standalone Puma server
@ -7,3 +9,11 @@ Rails.application.configure do
config.action_cable.url = Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/-/cable')
config.action_cable.worker_pool_size = Gitlab.config.action_cable.worker_pool_size
end
# https://github.com/rails/rails/blob/bb5ac1623e8de08c1b7b62b1368758f0d3bb6379/actioncable/lib/action_cable/subscription_adapter/redis.rb#L18
ActionCable::SubscriptionAdapter::Redis.redis_connector = lambda do |config|
args = config.except(:adapter, :channel_prefix)
.merge(instrumentation_class: ::Gitlab::Instrumentation::Redis::ActionCable)
::Redis.new(args)
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module Gitlab
module RequestForgeryProtectionPatch
private
# Patch to generate 6.0.3 tokens so that we do not have CSRF errors while
# rolling out 6.0.3.1. This enables GitLab to have a mix of 6.0.3 and
# 6.0.3.1 Rails servers
#
# 1. Deploy this patch with :global_csrf_token FF disabled.
# 2. Once all Rails servers are on 6.0.3.1, enable :global_csrf_token FF.
# 3. On GitLab 13.2, remove this patch
def masked_authenticity_token(session, form_options: {})
action, method = form_options.values_at(:action, :method)
raw_token = if per_form_csrf_tokens && action && method
action_path = normalize_action_path(action)
per_form_csrf_token(session, action_path, method)
else
if Feature.enabled?(:global_csrf_token)
global_csrf_token(session)
else
real_csrf_token(session)
end
end
mask_token(raw_token)
end
end
end
ActionController::Base.include Gitlab::RequestForgeryProtectionPatch
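A minimal sketch of step 2 from the rollout comment above, assuming the usual GitLab Feature flag API:

  # once every Rails server runs 6.0.3.1
  Feature.enable(:global_csrf_token)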

View File

@ -118,7 +118,6 @@ namespace :admin do
resources :services, only: [:index, :edit, :update]
resources :integrations, only: [:edit, :update] do
member do
get :custom_integration_projects
put :test
end
end

View File

@ -3,7 +3,7 @@
# For a list of all options, see https://errata-ai.github.io/vale/styles/
extends: conditional
message: "'%s' has no definition."
link: https://about.gitlab.com/handbook/marketing/corporate-marketing/content/editorial-team/#acronyms
link: https://about.gitlab.com/handbook/marketing/growth-marketing/content/editorial-team/#acronyms
level: warning
ignorecase: false
# Ensures that the existence of 'first' implies the existence of 'second'.

View File

@ -542,7 +542,7 @@ or `gitlab-ctl promote-to-primary-node`, either:
```ruby
Rails.application.load_tasks; nil
Gitlab::Geo.expire_cache_keys!([:primary_node, :current_node])
Gitlab::Geo.expire_cache!
Rake::Task['geo:set_secondary_as_primary'].invoke
```

Binary image file changed (42 KiB → 141 KiB).

View File

@ -12,9 +12,7 @@ For organizations with 300 users or less, the recommended AWS installation metho
## Introduction
GitLab on AWS can leverage many of the services that are already
configurable. These services offer a great deal of
flexibility and can be adapted to the needs of most companies.
For the most part, we'll make use of Omnibus GitLab in our setup, but we'll also leverage native AWS services. Instead of using the Omnibus bundled PostgreSQL and Redis, we will use AWS RDS and ElastiCache.
In this guide, we'll go through a multi-node setup where we'll start by
configuring our Virtual Private Cloud and subnets to later integrate
@ -225,16 +223,18 @@ We also need to create two private route tables so that instances in each privat
## Load Balancer
We'll create a load balancer to evenly distribute inbound traffic on ports `80` and `443` across our GitLab application servers. Based on the [scaling policies](#create-an-auto-scaling-group) we'll create later, instances will be added to or removed from our load balancer as needed. Additionally, the load balancer will perform health checks on our instances.
On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. Click the **Create Load Balancer** button.
1. Choose the **Classic Load Balancer**.
1. Give it a name (we'll use `gitlab-loadbalancer`) and for the **Create LB Inside** option, select `gitlab-vpc` from the dropdown menu.
1. In the **Listeners** section, set HTTP port 80, HTTPS port 443, and TCP port 22 for both load balancer and instance protocols and ports.
1. In the **Select Subnets** section, select both public subnets from the list.
1. Click **Assign Security Groups** and select **Create a new security group**, give it a name
1. In the **Select Subnets** section, select both public subnets from the list so that the load balancer can route traffic to both availability zones.
1. We'll add a security group for our load balancer to act as a firewall to control what traffic is allowed through. Click **Assign Security Groups** and select **Create a new security group**, give it a name
(we'll use `gitlab-loadbalancer-sec-group`) and description, and allow both HTTP and HTTPS traffic
from anywhere (`0.0.0.0/0, ::/0`). Also allow SSH traffic from a single IP address or an IP address range in CIDR notation.
from anywhere (`0.0.0.0/0, ::/0`). Also allow SSH traffic, select a custom source, and add a single trusted IP address or an IP address range in CIDR notation. This will allow users to perform Git actions over SSH.
1. Click **Configure Security Settings** and set the following:
1. Select an SSL/TLS certificate from ACM or upload a certificate to IAM.
1. Under **Select a Cipher**, pick a predefined security policy from the dropdown. You can see a breakdown of [Predefined SSL Security Policies for Classic Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-security-policy-table.html) in the AWS docs. Check the GitLab codebase for a list of [supported SSL ciphers and protocols](https://gitlab.com/gitlab-org/gitlab/-/blob/9ee7ad433269b37251e0dd5b5e00a0f00d8126b4/lib/support/nginx/gitlab-ssl#L97-99).
@ -261,11 +261,16 @@ On the Route 53 dashboard, click **Hosted zones** in the left navigation bar:
1. Click **Create Record Set** and provide the following values:
1. **Name:** Use the domain name (the default value) or enter a subdomain.
1. **Type:** Select **A - IPv4 address**.
1. **Alias:** Defaults to **No**. Select **Yes**.
1. **Alias Target:** Find the **ELB Classic Load Balancers** section and select the classic load balancer we created earlier.
1. **Routing Policy:** We'll use **Simple** but you can choose a different policy based on your use case.
1. **Evaluate Target Health:** We'll set this to **No** but you can choose to have the load balancer route traffic based on target health.
1. Click **Create**.
1. Update your DNS records with your domain registrar. The steps for doing this vary depending on which registrar you use and is beyond the scope of this guide.
1. If you registered your domain through Route 53, you're done. If you used a different domain registrar, you need to update your DNS records with your domain registrar. You'll need to:
1. Click on **Hosted zones** and select the domain you added above.
1. You'll see a list of `NS` records. From your domain registrar's admin panel, add each of these as `NS` records to your domain's DNS records. These steps may vary between domain registrars. If you're stuck, Google **"name of your registrar" add dns records** and you should find a help article specific to your domain registrar.
The steps for doing this vary depending on which registrar you use and are beyond the scope of this guide.
## PostgreSQL with RDS

Binary image file removed (was 37 KiB).

Binary image file changed (49 KiB → 76 KiB).

View File

@ -33,13 +33,13 @@ To deploy GitLab on GCP you first need to create a virtual machine:
![Search for GitLab](img/launch_vm.png)
1. On the next page, you can select the type of VM as well as the
estimated costs. Provide the name of the instance, desired datacenter, and machine type. Note that GitLab recommends at least 2 vCPU's and 4GB of RAM.
estimated costs. Provide the name of the instance, desired datacenter, and machine type.
Note our [hardware requirements for different user base sizes](../requirements.md#hardware-requirements).
![Launch on Compute Engine](img/vm_details.png)
1. Click **Change** under Boot disk to select the size, type, and desired operating system. GitLab supports a [variety of linux operating systems](../requirements.md), including Ubuntu and Debian. Click **Select** when finished.
![Deploy in progress](img/boot_disk.png)
1. To select the size, type, and desired [operating system](../requirements.md#supported-linux-distributions),
click **Change** under `Boot disk`. Click **Select** when finished.
1. As a last step allow HTTP and HTTPS traffic, then click **Create**. The process will finish in a few seconds.

View File

@ -45,7 +45,7 @@ Please consider using a virtual machine to run GitLab.
GitLab requires Ruby (MRI) 2.6. Beginning in GitLab 12.2, we no longer support Ruby 2.5 and lower.
You must use the standard MRI implementation of Ruby.
We love [JRuby](https://www.jruby.org/) and [Rubinius](https://rubinius.com), but GitLab
We love [JRuby](https://www.jruby.org/) and [Rubinius](https://github.com/rubinius/rubinius#the-rubinius-language-platform), but GitLab
needs several Gems that have native extensions.
### Go versions
@ -62,12 +62,12 @@ GitLab 11.11 and higher only supports Git 2.24.x and newer, and
Beginning in GitLab 12.9, we only support node.js 10.13.0 or higher, and we have dropped
support for node.js 8. (node.js 6 support was dropped in GitLab 11.8)
We recommend Node 12.x, as it is faster.
We recommend Node 12.x, as it's faster.
GitLab uses [webpack](https://webpack.js.org/) to compile frontend assets, which requires a minimum
version of Node.js 10.13.0.
You can check which version you are running with `node -v`. If you are running
You can check which version you're running with `node -v`. If you're running
a version older than `v10.13.0`, you need to update it to a newer version. You
can find instructions to install from community maintained packages or compile
from source at the [Node.js website](https://nodejs.org/en/download/).
@ -80,30 +80,30 @@ GitLab requires Redis 5.0+. Beginning in GitLab 13.0, lower versions are not sup
### Storage
The necessary hard drive space largely depends on the size of the repos you want to store in GitLab but as a *rule of thumb* you should have at least as much free space as all your repos combined take up.
The necessary hard drive space largely depends on the size of the repositories you want to store in GitLab but as a *rule of thumb* you should have at least as much free space as all your repositories combined take up.
If you want to be flexible about growing your hard drive space in the future consider mounting it using LVM so you can add more hard drives when you need them.
If you want to be flexible about growing your hard drive space in the future consider mounting it using [logical volume management (LVM)](https://en.wikipedia.org/wiki/Logical_volume_management) so you can add more hard drives when you need them.
Apart from a local hard drive you can also mount a volume that supports the network file system (NFS) protocol. This volume might be located on a file server, a network attached storage (NAS) device, a storage area network (SAN) or on an Amazon Web Services (AWS) Elastic Block Store (EBS) volume.
If you have enough RAM memory and a recent CPU the speed of GitLab is mainly limited by hard drive seek times. Having a fast drive (7200 RPM and up) or a solid state drive (SSD) will improve the responsiveness of GitLab.
If you have enough RAM and a recent CPU the speed of GitLab is mainly limited by hard drive seek times. Having a fast drive (7200 RPM and up) or a solid state drive (SSD) will improve the responsiveness of GitLab.
NOTE: **Note:** Since file system performance may affect GitLab's overall performance, we do not recommend using EFS for storage. See the [relevant documentation](../administration/high_availability/nfs.md#avoid-using-awss-elastic-file-system-efs) for more details.
NOTE: **Note:** Since file system performance may affect GitLab's overall performance, [we don't recommend using AWS EFS for storage](../administration/high_availability/nfs.md#avoid-using-awss-elastic-file-system-efs).
### CPU
This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be more, depending on your workload. Your workload is influenced by factors such as - but not limited to - how active your users are, how much automation you use, mirroring, and repo/change size.
This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be more, depending on your workload. Your workload is influenced by factors such as - but not limited to - how active your users are, how much automation you use, mirroring, and repository/change size.
- 1 core supports up to 100 users but the application can be a bit slower due to having all workers and background jobs running on the same core
- **2 cores** is the **recommended** minimum number of cores and supports up to 100 users
- 4 cores supports up to 500 users
- 8 cores supports up to 1,000 users
- 32 cores supports up to 5,000 users
- 4 cores support up to 500 users
- 8 cores support up to 1,000 users
- 32 cores support up to 5,000 users
- More users? Consult the [reference architectures page](../administration/reference_architectures/index.md)
### Memory
This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be more, depending on your workload. Your workload is influenced by factors such as - but not limited to - how active your users are, how much automation you use, mirroring, and repo/change size.
This is the recommended minimum hardware for a handful of example GitLab user base sizes. Your exact needs may be more, depending on your workload. Your workload is influenced by factors such as - but not limited to - how active your users are, how much automation you use, mirroring, and the size of repositories as well as changes/commits.
You need at least 8GB of addressable memory (RAM + swap) to install and use GitLab!
The operating system and any other running applications will also be using memory
@ -130,56 +130,43 @@ NOTE: **Note:** The 25 workers of Sidekiq will show up as separate processes in
## Database
The server running the database should have _at least_ 5-10 GB of storage
available, though the exact requirements depend on the size of the GitLab
installation (e.g. the number of users, projects, etc).
We currently support the following databases:
- PostgreSQL
PostgreSQL is the only supported database, which is bundled with the Omnibus GitLab package.
You can also use an [external PostgreSQL database](https://docs.gitlab.com/omnibus/settings/database.html#using-a-non-packaged-postgresql-database-management-server).
Support for MySQL was removed in GitLab 12.1. Existing users using GitLab with
MySQL/MariaDB are advised to [migrate to PostgreSQL](../update/mysql_to_postgresql.md) before upgrading.
### PostgreSQL Requirements
The server running PostgreSQL should have _at least_ 5-10 GB of storage
available, though the exact requirements [depend on the number of users](../administration/reference_architectures/index.md).
We highly recommend users to use the minimum PostgreSQL versions specified below as these are the versions used for development and testing.
GitLab version | Minimum PostgreSQL version
-|-
10.0 | 9.6
12.10 | 11
13.0 | 11
Users using PostgreSQL must ensure the `pg_trgm` extension is loaded into every
GitLab database. This extension can be enabled (using a PostgreSQL super user)
by running the following query for every database:
You must also ensure the `pg_trgm` extension is loaded into every
GitLab database. This extension [can be enabled](https://www.postgresql.org/docs/11/sql-createextension.html) using a PostgreSQL super user.
```sql
CREATE EXTENSION pg_trgm;
```
On some systems you may need to install an additional package (e.g.
On some systems you may need to install an additional package (for example,
`postgresql-contrib`) for this extension to become available.
NOTE: **Note:** Support for PostgreSQL 9.6 and 10 will be removed in GitLab 13.0 so that GitLab can benefit from PostgreSQL 11 improvements, such as partitioning. For the schedule on adding support for PostgreSQL 11 and 12, see [the related epic](https://gitlab.com/groups/gitlab-org/-/epics/2184). For the release schedule for GitLab 13.0, see [GitLab's release and maintenance policy](../policy/maintenance.md).
NOTE: **Note:** Support for [PostgreSQL 9.6 and 10 has been removed in GitLab 13.0](https://about.gitlab.com/releases/2020/05/22/gitlab-13-0-released/#postgresql-11-is-now-the-minimum-required-version-to-install-gitlab) so that GitLab can benefit from PostgreSQL 11 improvements, such as partitioning. For the schedule of transitioning to PostgreSQL 12, see [the related epic](https://gitlab.com/groups/gitlab-org/-/epics/2184).
#### Additional requirements for GitLab Geo
If you are using [GitLab Geo](../development/geo.md):
If you're using [GitLab Geo](../administration/geo/replication/index.md):
- We strongly recommend running Omnibus-managed instances as they are actively
developed and tested. We aim to be compatible with most external (not managed
by Omnibus) databases (for example, AWS RDS) but we do not guarantee
compatibility.
- The
[tracking database](../development/geo.md#using-the-tracking-database)
requires the
[postgres_fdw](https://www.postgresql.org/docs/11/postgres-fdw.html)
extension.
```sql
CREATE EXTENSION postgres_fdw;
```
by Omnibus) databases (for example, [AWS Relational Database Service (RDS)](https://aws.amazon.com/rds/)) but we don't guarantee compatibility.
- You must also ensure the `postgres_fdw` extension is loaded into every
GitLab database. This extension
[can be enabled](https://www.postgresql.org/docs/11/sql-createextension.html)
using a PostgreSQL super user.
## Unicorn Workers
@ -204,7 +191,7 @@ Omnibus GitLab defaults to the recommended Puma settings. Regardless of installa
tune the Puma settings.
If you're using Omnibus GitLab, see [Puma settings](https://docs.gitlab.com/omnibus/settings/puma.html)
for instructions on changing the Puma settings. If you are using the GitLab Helm chart, see the [Webservice chart](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html).
for instructions on changing the Puma settings. If you're using the GitLab Helm chart, see the [Webservice chart](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html).
### Puma workers
@ -228,7 +215,7 @@ of [legacy Rugged code](../development/gitaly.md#legacy-rugged-code).
- If the operating system has a maximum 2 GB of memory, the recommended number of threads is `1`.
A higher value will result in excess swapping, and decrease performance.
- If legacy Rugged code is in use, the recommended number of threads is `1`.
- In all other cases, the recommended number of threads is `4`. We do not recommend setting this
- In all other cases, the recommended number of threads is `4`. We don't recommend setting this
higher, due to how [Ruby MRI multi-threading](https://en.wikipedia.org/wiki/Global_interpreter_lock)
works.
@ -257,11 +244,11 @@ to install GitLab on. Depending on how you decide to configure GitLab Runner and
what tools you use to exercise your application in the CI environment, GitLab
Runner can consume significant amount of available memory.
Memory consumption calculations, that are available above, will not be valid if
Memory consumption calculations, that are available above, won't be valid if
you decide to run GitLab Runner and the GitLab Rails application on the same
machine.
It is also not safe to install everything on a single machine, because of the
It's also not safe to install everything on a single machine, because of the
[security reasons](https://docs.gitlab.com/runner/security/), especially when you plan to use shell executor with GitLab
Runner.
@ -282,7 +269,7 @@ For reference, GitLab.com's [auto-scaling shared runner](../user/gitlab_com/inde
## Supported web browsers
CAUTION: **Caution:** With GitLab 13.0 (May 2020) we are removing official support for Internet Explorer 11.
CAUTION: **Caution:** With GitLab 13.0 (May 2020) we have removed official support for Internet Explorer 11.
With the release of GitLab 13.4 (September 2020) we will remove all code that supports Internet Explorer 11.
You can provide feedback [on this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/197987) or via your usual support channels.
@ -293,14 +280,13 @@ GitLab supports the following web browsers:
- [Chromium](https://www.chromium.org/getting-involved/dev-channel)
- [Apple Safari](https://www.apple.com/safari/)
- [Microsoft Edge](https://www.microsoft.com/edge)
- Internet Explorer 11 (until May 2020)
For the listed web browsers, GitLab supports:
- The current and previous major versions of browsers except Internet Explorer.
- The current minor version of a supported major version.
NOTE: **Note:** We do not support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
NOTE: **Note:** We don't support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
in the future because we have features such as Issue Boards which require JavaScript extensively.
<!-- ## Troubleshooting

View File

@ -1,90 +1,30 @@
# frozen_string_literal: true
require 'redis'
module Gitlab
module Instrumentation
module RedisInterceptor
def call(*args, &block)
start = Time.now
super(*args, &block)
ensure
duration = (Time.now - start)
if ::RequestStore.active?
::Gitlab::Instrumentation::Redis.increment_request_count
::Gitlab::Instrumentation::Redis.add_duration(duration)
::Gitlab::Instrumentation::Redis.add_call_details(duration, args)
end
end
end
# Aggregates Redis measurements from different request storage sources.
class Redis
REDIS_REQUEST_COUNT = :redis_request_count
REDIS_CALL_DURATION = :redis_call_duration
REDIS_CALL_DETAILS = :redis_call_details
REDIS_READ_BYTES = :redis_read_bytes
REDIS_WRITE_BYTES = :redis_write_bytes
ActionCable = Class.new(RedisBase)
Cache = Class.new(RedisBase)
Queues = Class.new(RedisBase)
SharedState = Class.new(RedisBase)
STORAGES = [ActionCable, Cache, Queues, SharedState].freeze
# Milliseconds represented in seconds (from 1 to 500 milliseconds).
QUERY_TIME_BUCKETS = [0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5].freeze
def self.get_request_count
::RequestStore[REDIS_REQUEST_COUNT] || 0
end
class << self
def detail_store
STORAGES.flat_map(&:detail_store)
end
def self.increment_request_count
::RequestStore[REDIS_REQUEST_COUNT] ||= 0
::RequestStore[REDIS_REQUEST_COUNT] += 1
end
def self.increment_read_bytes(num_bytes)
::RequestStore[REDIS_READ_BYTES] ||= 0
::RequestStore[REDIS_READ_BYTES] += num_bytes
end
def self.increment_write_bytes(num_bytes)
::RequestStore[REDIS_WRITE_BYTES] ||= 0
::RequestStore[REDIS_WRITE_BYTES] += num_bytes
end
def self.read_bytes
::RequestStore[REDIS_READ_BYTES] || 0
end
def self.write_bytes
::RequestStore[REDIS_WRITE_BYTES] || 0
end
def self.detail_store
::RequestStore[REDIS_CALL_DETAILS] ||= []
end
def self.query_time
query_time = ::RequestStore[REDIS_CALL_DURATION] || 0
query_time.round(::Gitlab::InstrumentationHelper::DURATION_PRECISION)
end
def self.add_duration(duration)
::RequestStore[REDIS_CALL_DURATION] ||= 0
::RequestStore[REDIS_CALL_DURATION] += duration
end
def self.add_call_details(duration, args)
return unless Gitlab::PerformanceBar.enabled_for_request?
# redis-rb passes an array (e.g. [:get, key])
return unless args.length == 1
detail_store << {
cmd: args.first,
duration: duration,
backtrace: ::Gitlab::BacktraceCleaner.clean_backtrace(caller)
}
%i[get_request_count query_time read_bytes write_bytes].each do |method|
define_method method do
STORAGES.sum(&method) # rubocop:disable CodeReuse/ActiveRecord
end
end
end
end
end
end
class ::Redis::Client
prepend ::Gitlab::Instrumentation::RedisInterceptor
end

View File

@ -0,0 +1,101 @@
# frozen_string_literal: true
require 'redis'
module Gitlab
module Instrumentation
class RedisBase
class << self
include ::Gitlab::Utils::StrongMemoize
# TODO: To be used by https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/395
# as a 'label' alias.
def storage_key
self.name.underscore
end
def add_duration(duration)
::RequestStore[call_duration_key] ||= 0
::RequestStore[call_duration_key] += duration
end
def add_call_details(duration, args)
return unless Gitlab::PerformanceBar.enabled_for_request?
# redis-rb passes an array (e.g. [[:get, key]])
return unless args.length == 1
# TODO: Add information about current Redis client
# being instrumented.
# https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/316.
detail_store << {
cmd: args.first,
duration: duration,
backtrace: ::Gitlab::BacktraceCleaner.clean_backtrace(caller)
}
end
def increment_request_count
::RequestStore[request_count_key] ||= 0
::RequestStore[request_count_key] += 1
end
def increment_read_bytes(num_bytes)
::RequestStore[read_bytes_key] ||= 0
::RequestStore[read_bytes_key] += num_bytes
end
def increment_write_bytes(num_bytes)
::RequestStore[write_bytes_key] ||= 0
::RequestStore[write_bytes_key] += num_bytes
end
def get_request_count
::RequestStore[request_count_key] || 0
end
def read_bytes
::RequestStore[read_bytes_key] || 0
end
def write_bytes
::RequestStore[write_bytes_key] || 0
end
def detail_store
::RequestStore[call_details_key] ||= []
end
def query_time
query_time = ::RequestStore[call_duration_key] || 0
query_time.round(::Gitlab::InstrumentationHelper::DURATION_PRECISION)
end
private
def request_count_key
strong_memoize(:request_count_key) { build_key(:redis_request_count) }
end
def read_bytes_key
strong_memoize(:read_bytes_key) { build_key(:redis_read_bytes) }
end
def write_bytes_key
strong_memoize(:write_bytes_key) { build_key(:redis_write_bytes) }
end
def call_duration_key
strong_memoize(:call_duration_key) { build_key(:redis_call_duration) }
end
def call_details_key
strong_memoize(:call_details_key) { build_key(:redis_call_details) }
end
def build_key(namespace)
"#{storage_key}_#{namespace}"
end
end
end
end
end
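A hedged sketch of the per-storage accounting this enables, inside a request (RequestStore active); the class names come from the aggregator above, and the spec further down exercises the same behaviour with stubbed subclasses:

  Gitlab::Instrumentation::Redis::Cache.increment_request_count
  Gitlab::Instrumentation::Redis::Cache.get_request_count        # => 1
  Gitlab::Instrumentation::Redis::SharedState.get_request_count  # => 0, counters do not overlap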

View File

@ -4,7 +4,20 @@ require 'redis'
module Gitlab
module Instrumentation
class RedisDriver < ::Redis::Connection::Ruby
module RedisInterceptor
def call(*args, &block)
start = Time.now
super(*args, &block)
ensure
duration = (Time.now - start)
if ::RequestStore.active?
instrumentation_class.increment_request_count
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, args)
end
end
def write(command)
measure_write_size(command) if ::RequestStore.active?
super
@ -35,27 +48,40 @@ module Gitlab
end
end
::Gitlab::Instrumentation::Redis.increment_write_bytes(size)
instrumentation_class.increment_write_bytes(size)
end
def measure_read_size(result)
# The superclass can return one of four types of results from read:
# The Connection::Ruby#read class can return one of four types of results from read:
# https://github.com/redis/redis-rb/blob/f597f21a6b954b685cf939febbc638f6c803e3a7/lib/redis/connection/ruby.rb#L406
#
# 1. Error (exception, will not reach this line)
# 2. Status (string)
# 3. Integer (will be converted to string by to_s.bytesize and thrown away)
# 4. "Binary" string (i.e. may contain zero byte)
# 5. Array of binary string (recurses back into read)
# 5. Array of binary string
# Avoid double-counting array responses: the array elements themselves
# are retrieved with 'read'.
unless result.is_a? Array
# This count is an approximation that omits the Redis protocol overhead
# of type prefixes, length prefixes and line endings.
::Gitlab::Instrumentation::Redis.increment_read_bytes(result.to_s.bytesize)
end
size = if result.is_a? Array
# This count is an approximation that omits the Redis protocol overhead
# of type prefixes, length prefixes and line endings.
result.inject(0) { |sum, y| sum + y.to_s.bytesize }
else
result.to_s.bytesize
end
instrumentation_class.increment_read_bytes(size)
end
# That's required so it knows which GitLab Redis instance
# it's interacting with in order to categorize accordingly.
#
def instrumentation_class
@options[:instrumentation_class] # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
end
end
class ::Redis::Client
prepend ::Gitlab::Instrumentation::RedisInterceptor
end
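A worked illustration (not from the diff) of the byte accounting in `measure_read_size` above: an array reply is summed element by element, a bulk string is counted directly, and Redis protocol prefixes and line endings are ignored in both cases.

  ["abc", "de"].inject(0) { |sum, y| sum + y.to_s.bytesize }  # => 5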

View File

@ -27,6 +27,10 @@ module Gitlab
# this will force use of DEFAULT_REDIS_QUEUES_URL when config file is absent
super
end
def instrumentation_class
::Gitlab::Instrumentation::Redis::Cache
end
end
end
end

View File

@ -28,6 +28,10 @@ module Gitlab
# this will force use of DEFAULT_REDIS_QUEUES_URL when config file is absent
super
end
def instrumentation_class
::Gitlab::Instrumentation::Redis::Queues
end
end
end
end

View File

@ -30,6 +30,10 @@ module Gitlab
# this will force use of DEFAULT_REDIS_SHARED_STATE_URL when config file is absent
super
end
def instrumentation_class
::Gitlab::Instrumentation::Redis::SharedState
end
end
end
end

View File

@ -71,6 +71,10 @@ module Gitlab
# nil will force use of DEFAULT_REDIS_URL when config file is absent
nil
end
def instrumentation_class
raise NotImplementedError
end
end
def initialize(rails_env = nil)
@ -100,7 +104,7 @@ module Gitlab
redis_url = config.delete(:url)
redis_uri = URI.parse(redis_url)
config[:driver] ||= ::Gitlab::Instrumentation::RedisDriver
config[:instrumentation_class] ||= self.class.instrumentation_class
if redis_uri.scheme == 'unix'
# Redis::Store does not handle Unix sockets well, so let's do it for them

View File

@ -9,6 +9,10 @@ module Peek
'redis'
end
def detail_store
::Gitlab::Instrumentation::Redis.detail_store
end
private
def format_call_details(call)

View File

@ -17609,6 +17609,9 @@ msgstr ""
msgid "Promotions|Learn more"
msgstr ""
msgid "Promotions|Not now, thanks!"
msgstr ""
msgid "Promotions|See the other features in the %{subscription_link_start}bronze plan%{subscription_link_end}"
msgstr ""
@ -17621,6 +17624,9 @@ msgstr ""
msgid "Promotions|Track activity with Contribution Analytics."
msgstr ""
msgid "Promotions|Try it for free"
msgstr ""
msgid "Promotions|Upgrade plan"
msgstr ""

View File

@ -43,7 +43,7 @@
"@gitlab/svgs": "1.137.0",
"@gitlab/ui": "16.2.0",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3",
"@rails/actioncable": "^6.0.3-1",
"@sentry/browser": "^5.10.2",
"@sourcegraph/code-host-integration": "0.0.48",
"@toast-ui/editor": "^2.0.1",

View File

@ -1,7 +1,7 @@
source 'https://rubygems.org'
gem 'gitlab-qa'
gem 'activesupport', '~> 6.0.3' # This should stay in sync with the root's Gemfile
gem 'activesupport', '~> 6.0.3.1' # This should stay in sync with the root's Gemfile
gem 'capybara', '~> 3.29.0'
gem 'capybara-screenshot', '~> 1.0.23'
gem 'rake', '~> 12.3.0'

View File

@ -1,7 +1,7 @@
GEM
remote: https://rubygems.org/
specs:
activesupport (6.0.3)
activesupport (6.0.3.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
@ -54,7 +54,7 @@ GEM
mime-types-data (3.2020.0425)
mini_mime (1.0.2)
mini_portile2 (2.4.0)
minitest (5.14.0)
minitest (5.14.1)
netrc (0.11.0)
nokogiri (1.10.9)
mini_portile2 (~> 2.4.0)
@ -116,7 +116,7 @@ PLATFORMS
ruby
DEPENDENCIES
activesupport (~> 6.0.3)
activesupport (~> 6.0.3.1)
airborne (~> 0.3.4)
capybara (~> 3.29.0)
capybara-screenshot (~> 1.0.23)

View File

@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Admin::IntegrationsController do
let(:admin) { create(:admin) }
let(:integration) { create(:jira_service, :instance) }
before do
sign_in(admin)
@ -34,6 +33,8 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#update' do
let(:integration) { create(:jira_service, :instance) }
before do
allow(PropagateIntegrationWorker).to receive(:perform_async)
@ -67,14 +68,4 @@ RSpec.describe Admin::IntegrationsController do
end
end
end
describe '#custom_integration_projects' do
it 'calls to get the custom integration projects' do
allow(Project).to receive_message_chain(:with_custom_integration_compared_to, :page, :per)
get :custom_integration_projects, params: { id: integration.class.to_param }
expect(Project).to have_received(:with_custom_integration_compared_to).with(integration)
end
end
end

View File

@ -453,10 +453,12 @@ RSpec.describe 'Pipelines', :js do
context 'downloadable pipelines' do
context 'with artifacts' do
let!(:with_artifacts) do
create(:ci_build, :artifacts, :success,
build = create(:ci_build, :success,
pipeline: pipeline,
name: 'rspec tests',
stage: 'test')
create(:ci_job_artifact, :codequality, job: build)
end
before do
@ -470,7 +472,7 @@ RSpec.describe 'Pipelines', :js do
it 'has artifacts download dropdown' do
find('.js-pipeline-dropdown-download').click
expect(page).to have_link(with_artifacts.name)
expect(page).to have_link(with_artifacts.file_type)
end
it 'has download attribute on download links' do

View File

@ -1,4 +1,5 @@
import * as commonUtils from '~/lib/utils/common_utils';
import $ from 'jquery';
describe('common_utils', () => {
describe('parseUrl', () => {
@ -211,6 +212,59 @@ describe('common_utils', () => {
});
});
describe('scrollToElement*', () => {
let elem;
const windowHeight = 1000;
const elemTop = 100;
beforeEach(() => {
elem = document.createElement('div');
window.innerHeight = windowHeight;
jest.spyOn($.fn, 'animate');
jest.spyOn($.fn, 'offset').mockReturnValue({ top: elemTop });
});
afterEach(() => {
$.fn.animate.mockRestore();
$.fn.offset.mockRestore();
});
describe('scrollToElement', () => {
it('scrolls to element', () => {
commonUtils.scrollToElement(elem);
expect($.fn.animate).toHaveBeenCalledWith(
{
scrollTop: elemTop,
},
expect.any(Number),
);
});
it('scrolls to element with offset', () => {
const offset = 50;
commonUtils.scrollToElement(elem, { offset });
expect($.fn.animate).toHaveBeenCalledWith(
{
scrollTop: elemTop + offset,
},
expect.any(Number),
);
});
});
describe('scrollToElementWithContext', () => {
it('scrolls with context', () => {
commonUtils.scrollToElementWithContext();
expect($.fn.animate).toHaveBeenCalledWith(
{
scrollTop: elemTop - windowHeight * 0.1,
},
expect.any(Number),
);
});
});
});
describe('debounceByAnimationFrame', () => {
it('debounces a function to allow a maximum of one call per animation frame', done => {
const spy = jest.fn();

View File

@ -41,7 +41,7 @@ describe('Discussion navigation mixin', () => {
.join(''),
);
jest.spyOn(utils, 'scrollToElement');
jest.spyOn(utils, 'scrollToElementWithContext');
expandDiscussion = jest.fn();
const { actions, ...notesRest } = notesModule();
@ -102,7 +102,7 @@ describe('Discussion navigation mixin', () => {
});
it('scrolls to element', () => {
expect(utils.scrollToElement).toHaveBeenCalledWith(
expect(utils.scrollToElementWithContext).toHaveBeenCalledWith(
findDiscussion('div.discussion', expected),
);
});
@ -123,11 +123,13 @@ describe('Discussion navigation mixin', () => {
});
it('scrolls when scrollToDiscussion is emitted', () => {
expect(utils.scrollToElement).not.toHaveBeenCalled();
expect(utils.scrollToElementWithContext).not.toHaveBeenCalled();
eventHub.$emit('scrollToDiscussion');
expect(utils.scrollToElement).toHaveBeenCalledWith(findDiscussion('ul.notes', expected));
expect(utils.scrollToElementWithContext).toHaveBeenCalledWith(
findDiscussion('ul.notes', expected),
);
});
});
@ -167,7 +169,7 @@ describe('Discussion navigation mixin', () => {
});
it('scrolls to discussion', () => {
expect(utils.scrollToElement).toHaveBeenCalledWith(
expect(utils.scrollToElementWithContext).toHaveBeenCalledWith(
findDiscussion('div.discussion', expected),
);
});

View File

@ -3,7 +3,7 @@ import { mount, createLocalVue } from '@vue/test-utils';
import { GlDropdown, GlFormGroup, GlFormInputGroup } from '@gitlab/ui';
import Tracking from '~/tracking';
import * as getters from '~/registry/explorer/stores/getters';
import QuickstartDropdown from '~/registry/explorer/components/cli_commands.vue';
import QuickstartDropdown from '~/registry/explorer/components/list_page/cli_commands.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import {

View File

@ -1,8 +1,8 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
import { GlEmptyState } from '../stubs';
import groupEmptyState from '~/registry/explorer/components/group_empty_state.vue';
import { GlEmptyState } from '../../stubs';
import groupEmptyState from '~/registry/explorer/components/list_page/group_empty_state.vue';
const localVue = createLocalVue();
localVue.use(Vuex);

View File

@ -1,14 +1,14 @@
import { shallowMount } from '@vue/test-utils';
import { GlIcon, GlSprintf } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import Component from '~/registry/explorer/components/image_list_row.vue';
import Component from '~/registry/explorer/components/list_page/image_list_row.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import {
ROW_SCHEDULED_FOR_DELETION,
LIST_DELETE_BUTTON_DISABLED,
} from '~/registry/explorer/constants';
import { RouterLink } from '../stubs';
import { imagesListResponse } from '../mock_data';
import { RouterLink } from '../../stubs';
import { imagesListResponse } from '../../mock_data';
describe('Image List Row', () => {
let wrapper;

View File

@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import { GlPagination } from '@gitlab/ui';
import Component from '~/registry/explorer/components/image_list.vue';
import ImageListRow from '~/registry/explorer/components/image_list_row.vue';
import Component from '~/registry/explorer/components/list_page/image_list.vue';
import ImageListRow from '~/registry/explorer/components/list_page/image_list_row.vue';
import { imagesListResponse, imagePagination } from '../mock_data';
import { imagesListResponse, imagePagination } from '../../mock_data';
describe('Image List', () => {
let wrapper;

View File

@ -1,8 +1,8 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
import { GlEmptyState } from '../stubs';
import projectEmptyState from '~/registry/explorer/components/project_empty_state.vue';
import { GlEmptyState } from '../../stubs';
import projectEmptyState from '~/registry/explorer/components/list_page/project_empty_state.vue';
import * as getters from '~/registry/explorer/stores/getters';
const localVue = createLocalVue();

View File

@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlSprintf, GlLink } from '@gitlab/ui';
import Component from '~/registry/explorer/components/registry_header.vue';
import Component from '~/registry/explorer/components/list_page/registry_header.vue';
import {
CONTAINER_REGISTRY_TITLE,
LIST_INTRO_TEXT,

View File

@ -3,11 +3,11 @@ import { GlSkeletonLoader, GlSprintf, GlAlert, GlSearchBoxByClick } from '@gitla
import Tracking from '~/tracking';
import waitForPromises from 'helpers/wait_for_promises';
import component from '~/registry/explorer/pages/list.vue';
import CliCommands from '~/registry/explorer/components/cli_commands.vue';
import GroupEmptyState from '~/registry/explorer/components/group_empty_state.vue';
import ProjectEmptyState from '~/registry/explorer/components/project_empty_state.vue';
import RegistryHeader from '~/registry/explorer/components/registry_header.vue';
import ImageList from '~/registry/explorer/components/image_list.vue';
import CliCommands from '~/registry/explorer/components/list_page/cli_commands.vue';
import GroupEmptyState from '~/registry/explorer/components/list_page/group_empty_state.vue';
import ProjectEmptyState from '~/registry/explorer/components/list_page/project_empty_state.vue';
import RegistryHeader from '~/registry/explorer/components/list_page/registry_header.vue';
import ImageList from '~/registry/explorer/components/list_page/image_list.vue';
import { createStore } from '~/registry/explorer/stores/';
import {
SET_MAIN_LOADING,

View File

@ -121,6 +121,16 @@ describe GitlabRoutingHelper do
it 'matches the Rails download path' do
expect(fast_download_project_job_artifacts_path(project, job)).to eq(download_project_job_artifacts_path(project, job))
end
context 'when given parameters' do
it 'adds them to the path' do
expect(
fast_download_project_job_artifacts_path(project, job, file_type: :dast)
).to eq(
download_project_job_artifacts_path(project, job, file_type: :dast)
)
end
end
end
describe '#fast_keep_project_job_artifacts_path' do

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
require 'spec_helper'
describe ActionController::Base, 'CSRF token generation patch', type: :controller do # rubocop:disable RSpec/FilePath
let(:fixed_seed) { SecureRandom.random_bytes(described_class::AUTHENTICITY_TOKEN_LENGTH) }
context 'global_csrf_token feature flag is enabled' do
it 'generates 6.0.3.1 style CSRF token', :aggregate_failures do
generated_token = controller.send(:form_authenticity_token)
expect(valid_authenticity_token?(generated_token)).to be_truthy
expect(compare_with_real_token(generated_token)).to be_falsey
expect(compare_with_global_token(generated_token)).to be_truthy
end
end
context 'global_csrf_token feature flag is disabled' do
before do
stub_feature_flags(global_csrf_token: false)
end
it 'generates 6.0.3 style CSRF token', :aggregate_failures do
generated_token = controller.send(:form_authenticity_token)
expect(valid_authenticity_token?(generated_token)).to be_truthy
expect(compare_with_real_token(generated_token)).to be_truthy
expect(compare_with_global_token(generated_token)).to be_falsey
end
end
def compare_with_global_token(token)
unmasked_token = controller.send :unmask_token, Base64.strict_decode64(token)
controller.send(:compare_with_global_token, unmasked_token, session)
end
def compare_with_real_token(token)
unmasked_token = controller.send :unmask_token, Base64.strict_decode64(token)
controller.send(:compare_with_real_token, unmasked_token, session)
end
def valid_authenticity_token?(token)
controller.send(:valid_authenticity_token?, session, token)
end
end
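The flag here acts as a switch over which Rails token source feeds the masked CSRF token: the 6.0.3.1-style global token when global_csrf_token is enabled, the older per-session token otherwise. A rough sketch of that shape, assuming Rails' private helpers (real_csrf_token, global_csrf_token, mask_token) and GitLab's Feature API; it is not the literal patch the spec exercises:

module IllustrativeCsrfPatch
  # Sketch only: pick the raw token source based on the feature flag,
  # then mask it the way Rails does before handing it to forms.
  def masked_authenticity_token(session, form_options: {})
    raw_token =
      if Feature.enabled?(:global_csrf_token)
        global_csrf_token(session) # 6.0.3.1 behaviour
      else
        real_csrf_token(session)   # 6.0.3 behaviour
      end

    mask_token(raw_token)
  end
end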


@ -195,7 +195,7 @@ ci_pipelines:
- cancelable_statuses
- manual_actions
- scheduled_actions
- artifacts
- downloadable_artifacts
- pipeline_schedule
- merge_requests_as_head_pipeline
- merge_request
@ -220,6 +220,7 @@ ci_pipelines:
- pipeline_config
- security_scans
- daily_build_group_report_results
- latest_builds
pipeline_variables:
- pipeline
stages:


@ -0,0 +1,109 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Instrumentation::RedisBase, :request_store do
let(:instrumentation_class_a) do
stub_const('RedisInstanceA', Class.new(described_class))
end
let(:instrumentation_class_b) do
stub_const('RedisInstanceB', Class.new(described_class))
end
describe '.storage_key' do
it 'returns the class name with underscore' do
expect(instrumentation_class_a.storage_key).to eq('redis_instance_a')
expect(instrumentation_class_b.storage_key).to eq('redis_instance_b')
end
end
describe '.add_duration' do
it 'does not lose precision while adding' do
precision = 1.0 / (10**::Gitlab::InstrumentationHelper::DURATION_PRECISION)
2.times { instrumentation_class_a.add_duration(0.4 * precision) }
# 2 * 0.4 should be 0.8 and get rounded to 1
expect(instrumentation_class_a.query_time).to eq(1 * precision)
end
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
instrumentation_class_a.add_duration(0.4)
instrumentation_class_b.add_duration(0.5)
expect(instrumentation_class_a.query_time).to eq(0.4)
expect(instrumentation_class_b.query_time).to eq(0.5)
end
end
end
describe '.increment_request_count' do
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
3.times { instrumentation_class_a.increment_request_count }
2.times { instrumentation_class_b.increment_request_count }
expect(instrumentation_class_a.get_request_count).to eq(3)
expect(instrumentation_class_b.get_request_count).to eq(2)
end
end
end
describe '.increment_write_bytes' do
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
2.times do
instrumentation_class_a.increment_write_bytes(42)
instrumentation_class_b.increment_write_bytes(77)
end
expect(instrumentation_class_a.write_bytes).to eq(42 * 2)
expect(instrumentation_class_b.write_bytes).to eq(77 * 2)
end
end
end
describe '.increment_read_bytes' do
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
2.times do
instrumentation_class_a.increment_read_bytes(42)
instrumentation_class_b.increment_read_bytes(77)
end
expect(instrumentation_class_a.read_bytes).to eq(42 * 2)
expect(instrumentation_class_b.read_bytes).to eq(77 * 2)
end
end
end
describe '.add_call_details' do
before do
allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?) { true }
end
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
2.times do
instrumentation_class_a.add_call_details(0.3, [:set])
instrumentation_class_b.add_call_details(0.4, [:set])
end
expect(instrumentation_class_a.detail_store).to match(
[
a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array)),
a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array))
]
)
expect(instrumentation_class_b.detail_store).to match(
[
a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array)),
a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array))
]
)
end
end
end
end
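Read together, these examples describe a pattern where every Redis instance gets its own instrumentation subclass and all counters are namespaced by storage_key, so one instance's metrics never bleed into another's. A minimal usage sketch, using only class methods the spec itself calls (the subclass name is hypothetical):

# Hypothetical per-instance subclass; its counters live under the
# "redis_cache" prefix derived from the class name.
class RedisCache < Gitlab::Instrumentation::RedisBase; end

RedisCache.increment_request_count
RedisCache.add_duration(0.25)
RedisCache.increment_write_bytes(128)

RedisCache.storage_key       # => "redis_cache"
RedisCache.get_request_count # => 1
RedisCache.query_time        # => 0.25
RedisCache.write_bytes       # => 128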


@ -3,7 +3,7 @@
require 'spec_helper'
require 'rspec-parameterized'
describe Gitlab::Instrumentation::RedisDriver, :clean_gitlab_redis_shared_state, :request_store do
describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store do
using RSpec::Parameterized::TableSyntax
describe 'read and write' do


@ -3,13 +3,26 @@
require 'spec_helper'
describe Gitlab::Instrumentation::Redis do
describe '.add_duration', :request_store do
it 'does not lose precision while adding' do
precision = 1.0 / (10**::Gitlab::InstrumentationHelper::DURATION_PRECISION)
2.times { described_class.add_duration(0.4 * precision) }
# 2 * 0.4 should be 0.8 and get rounded to 1
expect(described_class.query_time).to eq(1 * precision)
def stub_storages(method, value)
described_class::STORAGES.each do |storage|
allow(storage).to receive(method) { value }
end
end
shared_examples 'aggregation of redis storage data' do |method|
describe "#{method} sum" do
it "sums data from all Redis storages" do
amount = 0.3
stub_storages(method, amount)
expect(described_class.public_send(method)).to eq(described_class::STORAGES.size * amount)
end
end
end
it_behaves_like 'aggregation of redis storage data', :get_request_count
it_behaves_like 'aggregation of redis storage data', :query_time
it_behaves_like 'aggregation of redis storage data', :read_bytes
it_behaves_like 'aggregation of redis storage data', :write_bytes
end
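The shared example pins down the aggregation contract: each class-level reader on Gitlab::Instrumentation::Redis returns the sum of that reader across every entry in STORAGES. A one-method sketch of what such a reader could look like (an assumption, not the actual implementation):

def self.query_time
  # Sum the per-instance counters kept by each storage class.
  STORAGES.sum(&:query_time)
end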


@ -18,7 +18,21 @@ describe Gitlab::Redis::Wrapper do
let(:config_env_variable_url) {"TEST_GITLAB_REDIS_URL"}
let(:class_redis_url) { Gitlab::Redis::Wrapper::DEFAULT_REDIS_URL }
include_examples "redis_shared_examples"
include_examples "redis_shared_examples" do
before do
allow(described_class).to receive(:instrumentation_class) do
::Gitlab::Instrumentation::Redis::Cache
end
end
end
describe '.instrumentation_class' do
it 'raises a NotImplementedError' do
expect(described_class).to receive(:instrumentation_class).and_call_original
expect { described_class.instrumentation_class }.to raise_error(NotImplementedError)
end
end
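Put differently, the abstract wrapper deliberately refuses to choose an instrumentation class, and each concrete Redis wrapper is expected to supply its own. A sketch of such an override for a cache wrapper, with names assumed from what the spec stubs; the real subclass may differ:

module Gitlab
  module Redis
    class Cache < ::Gitlab::Redis::Wrapper
      # Each concrete wrapper points at its own instrumentation storage so
      # metrics stay separated per Redis instance.
      def self.instrumentation_class
        ::Gitlab::Instrumentation::Redis::Cache
      end
    end
  end
end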
describe '.config_file_path' do
it 'returns the absolute path to the configuration file' do


@ -17,7 +17,7 @@ describe Peek::Views::RedisDetailed, :request_store do
with_them do
it 'scrubs Redis commands' do
subject.detail_store << { cmd: cmd, duration: 1.second }
Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: cmd, duration: 1.second }
expect(subject.results[:details].count).to eq(1)
expect(subject.results[:details].first)
@ -29,11 +29,12 @@ describe Peek::Views::RedisDetailed, :request_store do
end
it 'returns aggregated results' do
subject.detail_store << { cmd: [:get, 'test'], duration: 0.001 }
subject.detail_store << { cmd: [:get, 'test'], duration: 1.second }
Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 0.001 }
Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 1.second }
Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: [:get, 'test'], duration: 1.second }
expect(subject.results[:calls]).to eq(2)
expect(subject.results[:duration]).to eq('1001.00ms')
expect(subject.results[:details].count).to eq(2)
expect(subject.results[:calls]).to eq(3)
expect(subject.results[:duration]).to eq('2001.00ms')
expect(subject.results[:details].count).to eq(3)
end
end


@ -23,6 +23,14 @@ describe Ci::JobArtifact do
subject { build(:ci_job_artifact, :archive, size: 107464) }
end
describe '.not_expired' do
it 'returns artifacts that have not expired' do
_expired_artifact = create(:ci_job_artifact, :expired)
expect(described_class.not_expired).to contain_exactly(artifact)
end
end
describe '.with_reports' do
let!(:artifact) { create(:ci_job_artifact, :archive) }
@ -118,6 +126,17 @@ describe Ci::JobArtifact do
end
end
describe '.downloadable' do
subject { described_class.downloadable }
it 'filters for downloadable artifacts' do
downloadable_artifact = create(:ci_job_artifact, :codequality)
_not_downloadable_artifact = create(:ci_job_artifact, :trace)
expect(subject).to contain_exactly(downloadable_artifact)
end
end
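A compact reading of these two scopes: not_expired keeps artifacts whose expire_at is unset or still in the future, while downloadable keeps only file types a user may fetch (codequality in, trace out). A sketch consistent with the examples; the scope bodies and the DOWNLOADABLE_TYPES constant are assumptions:

scope :not_expired, -> { where('expire_at IS NULL OR expire_at > ?', Time.current) }
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) } # excludes e.g. :trace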
describe '.archived_trace_exists_for?' do
subject { described_class.archived_trace_exists_for?(job_id) }
@ -357,6 +376,62 @@ describe Ci::JobArtifact do
end
end
describe 'expired?' do
subject { artifact.expired? }
context 'when expire_at is nil' do
let(:artifact) { build(:ci_job_artifact, expire_at: nil) }
it 'returns false' do
is_expected.to be_falsy
end
end
context 'when expire_at is in the past' do
let(:artifact) { build(:ci_job_artifact, expire_at: Date.yesterday) }
it 'returns true' do
is_expected.to be_truthy
end
end
context 'when expire_at is in the future' do
let(:artifact) { build(:ci_job_artifact, expire_at: Date.tomorrow) }
it 'returns false' do
is_expected.to be_falsey
end
end
end
describe '#expiring?' do
subject { artifact.expiring? }
context 'when expire_at is nil' do
let(:artifact) { build(:ci_job_artifact, expire_at: nil) }
it 'returns false' do
is_expected.to be_falsy
end
end
context 'when expire_at is in the past' do
let(:artifact) { build(:ci_job_artifact, expire_at: Date.yesterday) }
it 'returns false' do
is_expected.to be_falsy
end
end
context 'when expire_at is in the future' do
let(:artifact) { build(:ci_job_artifact, expire_at: Date.tomorrow) }
it 'returns true' do
is_expected.to be_truthy
end
end
end
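These predicates pin down the boundary behaviour around expire_at: expired? is true only once the timestamp has passed, and expiring? is true only while an expiry is set but not yet reached. A minimal implementation consistent with the examples (an assumption; the real model may add further conditions):

def expired?
  expire_at.present? && expire_at < Time.current
end

def expiring?
  expire_at.present? && !expired?
end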
describe '#expire_in' do
subject { artifact.expire_in }


@ -26,6 +26,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to have_many(:trigger_requests) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:job_artifacts).through(:builds) }
it { is_expected.to have_many(:auto_canceled_pipelines) }
it { is_expected.to have_many(:auto_canceled_jobs) }
it { is_expected.to have_many(:sourced_pipelines) }
@ -51,6 +52,27 @@ describe Ci::Pipeline, :mailer do
expect(Project.reflect_on_association(:all_pipelines).has_inverse?).to eq(:project)
expect(Project.reflect_on_association(:ci_pipelines).has_inverse?).to eq(:project)
end
describe '#latest_builds' do
it 'has a one to many relationship with its latest builds' do
_old_build = create(:ci_build, :retried, pipeline: pipeline)
latest_build = create(:ci_build, :expired, pipeline: pipeline)
expect(pipeline.latest_builds).to contain_exactly(latest_build)
end
end
describe '#downloadable_artifacts' do
let(:build) { create(:ci_build, pipeline: pipeline) }
it 'returns downloadable artifacts that have not expired' do
downloadable_artifact = create(:ci_job_artifact, :codequality, job: build)
_expired_artifact = create(:ci_job_artifact, :junit, :expired, job: build)
_undownloadable_artifact = create(:ci_job_artifact, :trace, job: build)
expect(pipeline.downloadable_artifacts).to contain_exactly(downloadable_artifact)
end
end
end
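Together with the downloadable_artifacts and latest_builds entries added to the association list earlier in this diff, these examples suggest associations along the following lines. A sketch only, not the literal model code:

# Builds not superseded by a retry, and the unexpired, downloadable
# artifacts reached through them.
has_many :latest_builds, -> { latest }, class_name: 'Ci::Build', inverse_of: :pipeline
has_many :downloadable_artifacts, -> { not_expired.downloadable },
         through: :latest_builds, source: :job_artifacts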
describe '#set_status' do


@ -13,9 +13,9 @@ RSpec.describe Integration do
create(:slack_service, project: project_1, inherit_from_id: nil)
end
describe '#with_custom_integration_compared_to' do
describe '#with_custom_integration_for' do
it 'returns projects with custom integrations' do
expect(Project.with_custom_integration_compared_to(instance_integration)).to contain_exactly(project_2)
expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2)
end
end
end


@ -3,17 +3,18 @@
require 'spec_helper'
describe BuildArtifactEntity do
let(:job) { create(:ci_build, :artifacts, name: 'test:job', artifacts_expire_at: 1.hour.from_now) }
let(:job) { create(:ci_build) }
let(:artifact) { create(:ci_job_artifact, :codequality, expire_at: 1.hour.from_now, job: job) }
let(:entity) do
described_class.new(job, request: double)
described_class.new(artifact, request: double)
end
describe '#as_json' do
subject { entity.as_json }
it 'contains job name' do
expect(subject[:name]).to eq 'test:job'
expect(subject[:name]).to eq "test:codequality"
end
it 'exposes information about expiration of artifacts' do
@ -22,7 +23,7 @@ describe BuildArtifactEntity do
it 'contains paths to the artifacts' do
expect(subject[:path])
.to include "jobs/#{job.id}/artifacts/download"
.to include "jobs/#{job.id}/artifacts/download?file_type=codequality"
expect(subject[:keep_path])
.to include "jobs/#{job.id}/artifacts/keep"

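The updated expectations imply the entity now wraps a single Ci::JobArtifact instead of a whole job: the exposed name combines the job name with the artifact's file type, and the download path carries a file_type query parameter, matching the routing-helper example earlier in this diff. A hedged sketch of such an entity; the mixin and helper names are assumptions:

class BuildArtifactEntity < Grape::Entity
  include RequestAwareEntity # assumed mixin providing the fast_* path helpers

  expose :name do |artifact|
    "#{artifact.job.name}:#{artifact.file_type}"
  end

  expose :path do |artifact|
    fast_download_project_job_artifacts_path(
      artifact.project, artifact.job, file_type: artifact.file_type
    )
  end
end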

@ -173,44 +173,5 @@ describe PipelineDetailsEntity do
expect(subject[:triggered].first[:project]).not_to be_nil
end
end
context 'when pipeline has expiring archive artifacts' do
let(:pipeline) { create(:ci_empty_pipeline) }
let!(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_1') }
let!(:build_2) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_2') }
let!(:build_3) { create(:ci_build, :artifacts, pipeline: pipeline, artifacts_expire_at: 2.days.from_now, name: 'build_3') }
let(:names) { subject[:details][:artifacts].map { |a| a[:name] } }
context 'and preload_job_artifacts_archive is not defined in the options' do
it 'defaults to true and eager loads the job_artifacts_archive' do
recorder = ActiveRecord::QueryRecorder.new do
expect(names).to match_array(%w[build_1 build_2 build_3])
end
expected_queries = Gitlab.ee? ? 42 : 29
# This makes only one query to fetch all job artifacts
expect(recorder.count).to eq(expected_queries)
end
end
context 'and preload_job_artifacts_archive is set to false' do
let(:entity) do
described_class.represent(pipeline, request: request, preload_job_artifacts_archive: false)
end
it 'does not eager load the job_artifacts_archive' do
recorder = ActiveRecord::QueryRecorder.new do
expect(names).to match_array(%w[build_1 build_2 build_3])
end
expected_queries = Gitlab.ee? ? 44 : 31
# This makes one query for each job artifact
expect(recorder.count).to eq(expected_queries)
end
end
end
end
end


@ -983,10 +983,10 @@
consola "^2.10.1"
node-fetch "^2.6.0"
"@rails/actioncable@^6.0.3":
version "6.0.3"
resolved "https://registry.yarnpkg.com/@rails/actioncable/-/actioncable-6.0.3.tgz#722b4b639936129307ddbab3a390f6bcacf3e7bc"
integrity sha512-I01hgqxxnOgOtJTGlq0ZsGJYiTEEiSGVEGQn3vimZSqEP1HqzyFNbzGTq14Xdyeow2yGJjygjoFF1pmtE+SQaw==
"@rails/actioncable@^6.0.3-1":
version "6.0.3-1"
resolved "https://registry.yarnpkg.com/@rails/actioncable/-/actioncable-6.0.3-1.tgz#9b9eb8858a6507162911007d355d9a206e1c5caa"
integrity sha512-szFhWD+V5TAxVNVIG16klgq+ypqA5k5AecLarTTrXgOG8cawVbQdOAwLbCmzkwiQ60rGSxAFoC1u2LrzxSK2Aw==
"@sentry/browser@^5.10.2":
version "5.10.2"