Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-09-23 12:11:29 +00:00
parent 66e4d1bf78
commit 8c4e384860
57 changed files with 569 additions and 354 deletions

View file

@ -1 +1 @@
13.21.0
13.21.1

View file

@ -129,7 +129,7 @@ gem 'fog-local', '~> 0.6'
gem 'fog-openstack', '~> 1.0'
gem 'fog-rackspace', '~> 0.1.1'
gem 'fog-aliyun', '~> 0.3'
gem 'gitlab-fog-azure-rm', '~> 1.1.1', require: false
gem 'gitlab-fog-azure-rm', '~> 1.2.0', require: false
# for Google storage
gem 'google-api-client', '~> 0.33'

View file

@ -467,7 +467,7 @@ GEM
activesupport (>= 3.0)
request_store (>= 1.0)
scientist (~> 1.6, >= 1.6.0)
gitlab-fog-azure-rm (1.1.1)
gitlab-fog-azure-rm (1.2.0)
azure-storage-blob (~> 2.0)
azure-storage-common (~> 2.0)
fog-core (= 2.1.0)
@ -1466,7 +1466,7 @@ DEPENDENCIES
gitlab-chronic (~> 0.10.5)
gitlab-dangerfiles (~> 2.3.0)
gitlab-experiment (~> 0.6.4)
gitlab-fog-azure-rm (~> 1.1.1)
gitlab-fog-azure-rm (~> 1.2.0)
gitlab-labkit (~> 0.21.1)
gitlab-license (~> 2.0)
gitlab-mail_room (~> 0.0.9)

View file

@ -44,18 +44,18 @@ export default {
methods: {
...mapActions('pipelines', ['fetchJobLogs', 'setDetailJob']),
scrollDown() {
if (this.$refs.buildTrace) {
this.$refs.buildTrace.scrollTo(0, this.$refs.buildTrace.scrollHeight);
if (this.$refs.buildJobLog) {
this.$refs.buildJobLog.scrollTo(0, this.$refs.buildJobLog.scrollHeight);
}
},
scrollUp() {
if (this.$refs.buildTrace) {
this.$refs.buildTrace.scrollTo(0, 0);
if (this.$refs.buildJobLog) {
this.$refs.buildJobLog.scrollTo(0, 0);
}
},
scrollBuildLog: throttle(function buildLogScrollDebounce() {
const { scrollTop } = this.$refs.buildTrace;
const { offsetHeight, scrollHeight } = this.$refs.buildTrace;
const { scrollTop } = this.$refs.buildJobLog;
const { offsetHeight, scrollHeight } = this.$refs.buildJobLog;
if (scrollTop + offsetHeight === scrollHeight) {
this.scrollPos = scrollPositions.bottom;
@ -97,7 +97,7 @@ export default {
<scroll-button :disabled="isScrolledToBottom" direction="down" @click="scrollDown" />
</div>
</div>
<pre ref="buildTrace" class="build-trace mb-0 h-100 mr-3" @scroll="scrollBuildLog">
<pre ref="buildJobLog" class="build-trace mb-0 h-100 mr-3" @scroll="scrollBuildLog">
<code
v-show="!detailJob.isLoading"
class="bash"

View file

@ -1,15 +1,36 @@
import { s__, sprintf } from '~/locale';
const HANDSHAKE = String.fromCodePoint(0x1f91d);
const MAG = String.fromCodePoint(0x1f50e);
const ROCKET = String.fromCodePoint(0x1f680);
export const logHello = () => {
// eslint-disable-next-line no-console
console.log(
`%cWelcome to GitLab!%c
`%c${s__('HelloMessage|Welcome to GitLab!')}%c
Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!
${s__(
'HelloMessage|Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!',
)}
${HANDSHAKE} Contribute to GitLab: https://about.gitlab.com/community/contribute/
${MAG} Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new`,
${sprintf(s__('HelloMessage|%{handshake_emoji} Contribute to GitLab: %{contribute_link}'), {
handshake_emoji: `${HANDSHAKE}`,
contribute_link: 'https://about.gitlab.com/community/contribute/',
})}
${sprintf(s__('HelloMessage|%{magnifier_emoji} Create a new GitLab issue: %{new_issue_link}'), {
magnifier_emoji: `${MAG}`,
new_issue_link: 'https://gitlab.com/gitlab-org/gitlab/-/issues/new',
})}
${
window.gon?.dot_com
? `${sprintf(
s__(
'HelloMessage|%{rocket_emoji} We like your curiosity! Help us improve GitLab by joining the team: %{jobs_page_link}',
),
{ rocket_emoji: `${ROCKET}`, jobs_page_link: 'https://about.gitlab.com/jobs/' },
)}`
: ''
}`,
`padding-top: 0.5em; font-size: 2em;`,
'padding-bottom: 0.5em;',
);

View file

@ -7,7 +7,7 @@ module Ci
"endpoint" => project_job_path(@project, @build, format: :json),
"project_path" => @project.full_path,
"artifact_help_url" => help_page_path('user/gitlab_com/index.html', anchor: 'gitlab-cicd'),
"deployment_help_url" => help_page_path('user/project/clusters/index.html', anchor: 'troubleshooting'),
"deployment_help_url" => help_page_path('user/project/clusters/deploy_to_cluster.html', anchor: 'troubleshooting'),
"runner_settings_url" => project_runners_path(@build.project, anchor: 'js-runners-settings'),
"page_path" => project_job_path(@project, @build),
"build_status" => @build.status,

View file

@ -31,9 +31,13 @@ module Ci
next unless bridge.triggers_downstream_pipeline?
bridge.run_after_commit do
if ::Feature.enabled?(:create_cross_project_pipeline_worker_rename, default_enabled: :yaml)
::Ci::CreateDownstreamPipelineWorker.perform_async(bridge.id)
else
::Ci::CreateCrossProjectPipelineWorker.perform_async(bridge.id)
end
end
end
event :pending do
transition all => :pending

View file

@ -101,7 +101,9 @@ class Namespace < ApplicationRecord
saved_change_to_name? || saved_change_to_path? || saved_change_to_parent_id?
}
scope :for_user, -> { where(type: nil) }
# TODO: change to `type: Namespaces::UserNamespace.sti_name` when
# working on issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
scope :user_namespaces, -> { where(type: [nil, Namespaces::UserNamespace.sti_name]) }
scope :sort_by_type, -> { order(Gitlab::Database.nulls_first_order(:type)) }
scope :include_route, -> { includes(:route) }
scope :by_parent, -> (parent) { where(parent_id: parent) }
@ -143,9 +145,7 @@ class Namespace < ApplicationRecord
when Namespaces::ProjectNamespace.sti_name
Namespaces::ProjectNamespace
when Namespaces::UserNamespace.sti_name
# TODO: We create a normal Namespace until
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68894 is ready
Namespace
Namespaces::UserNamespace
else
Namespace
end

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true
# TODO: currently not created/mapped in the database, will be done in another issue
# https://gitlab.com/gitlab-org/gitlab/-/issues/337102
# https://gitlab.com/gitlab-org/gitlab/-/issues/341070
module Namespaces
class UserNamespace < Namespace
def self.sti_name

View file

@ -19,13 +19,10 @@ module Operations
default_value_for :active, true
default_value_for :version, :new_version_flag
# scopes exists only for the first version
has_many :scopes, class_name: 'Operations::FeatureFlagScope'
# strategies exists only for the second version
has_many :strategies, class_name: 'Operations::FeatureFlags::Strategy'
has_many :feature_flag_issues
has_many :issues, through: :feature_flag_issues
has_one :default_scope, -> { where(environment_scope: '*') }, class_name: 'Operations::FeatureFlagScope'
validates :project, presence: true
validates :name,
@ -37,10 +34,7 @@ module Operations
}
validates :name, uniqueness: { scope: :project_id }
validates :description, allow_blank: true, length: 0..255
validate :first_default_scope, on: :create, if: :has_scopes?
validate :version_associations
accepts_nested_attributes_for :scopes, allow_destroy: true
accepts_nested_attributes_for :strategies, allow_destroy: true
scope :ordered, -> { order(:name) }
@ -56,7 +50,7 @@ module Operations
class << self
def preload_relations
preload(:scopes, strategies: :scopes)
preload(strategies: :scopes)
end
def for_unleash_client(project, environment)
@ -119,27 +113,5 @@ module Operations
active: active
}
end
private
def version_associations
if new_version_flag? && scopes.any?
errors.add(:version_associations, 'version 2 feature flags may not have scopes')
end
end
def first_default_scope
unless scopes.first.environment_scope == '*'
errors.add(:default_scope, 'has to be the first element')
end
end
def build_default_scope
scopes.build(environment_scope: '*', active: self.active)
end
def has_scopes?
scopes.any?
end
end
end

View file

@ -1,66 +0,0 @@
# frozen_string_literal: true
# All of the legacy flags have been removed in 14.1, including all of the
# `operations_feature_flag_scopes` rows. Therefore, this model and the database
# table are unused and should be removed.
module Operations
class FeatureFlagScope < ApplicationRecord
prepend HasEnvironmentScope
include Gitlab::Utils::StrongMemoize
self.table_name = 'operations_feature_flag_scopes'
belongs_to :feature_flag
validates :environment_scope, uniqueness: {
scope: :feature_flag,
message: "(%{value}) has already been taken"
}
validates :environment_scope,
if: :default_scope?, on: :update,
inclusion: { in: %w(*), message: 'cannot be changed from default scope' }
validates :strategies, feature_flag_strategies: true
before_destroy :prevent_destroy_default_scope, if: :default_scope?
scope :ordered, -> { order(:id) }
scope :enabled, -> { where(active: true) }
scope :disabled, -> { where(active: false) }
def self.with_name_and_description
joins(:feature_flag)
.select(FeatureFlag.arel_table[:name], FeatureFlag.arel_table[:description])
end
def self.for_unleash_client(project, environment)
select_columns = [
'DISTINCT ON (operations_feature_flag_scopes.feature_flag_id) operations_feature_flag_scopes.id',
'(operations_feature_flags.active AND operations_feature_flag_scopes.active) AS active',
'operations_feature_flag_scopes.strategies',
'operations_feature_flag_scopes.environment_scope',
'operations_feature_flag_scopes.created_at',
'operations_feature_flag_scopes.updated_at'
]
select(select_columns)
.with_name_and_description
.where(feature_flag_id: project.operations_feature_flags.select(:id))
.order(:feature_flag_id)
.on_environment(environment)
.reverse_order
end
private
def default_scope?
environment_scope_was == '*'
end
def prevent_destroy_default_scope
raise ActiveRecord::ReadOnlyRecord, "default scope cannot be destroyed"
end
end
end

View file

@ -112,7 +112,9 @@ class User < ApplicationRecord
#
# Namespace for personal projects
has_one :namespace, -> { where(type: nil) }, dependent: :destroy, foreign_key: :owner_id, inverse_of: :owner, autosave: true # rubocop:disable Cop/ActiveRecordDependent
# TODO: change to `type: Namespaces::UserNamespace.sti_name` when
# working on issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
has_one :namespace, -> { where(type: [nil, Namespaces::UserNamespace.sti_name]) }, dependent: :destroy, foreign_key: :owner_id, inverse_of: :owner, autosave: true # rubocop:disable Cop/ActiveRecordDependent
# Profile
has_many :keys, -> { regular_keys }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@ -728,7 +730,7 @@ class User < ApplicationRecord
end
def find_by_full_path(path, follow_redirects: false)
namespace = Namespace.for_user.find_by_full_path(path, follow_redirects: follow_redirects)
namespace = Namespace.user_namespaces.find_by_full_path(path, follow_redirects: follow_redirects)
namespace&.owner
end

View file

@ -24,8 +24,8 @@ class FeatureFlagEntity < Grape::Entity
project_feature_flag_path(feature_flag.project, feature_flag)
end
expose :scopes, with: FeatureFlagScopeEntity do |feature_flag|
feature_flag.scopes.sort_by(&:id)
expose :scopes do |_ff|
[]
end
expose :strategies, with: FeatureFlags::StrategyEntity do |feature_flag|

View file

@ -1,12 +0,0 @@
# frozen_string_literal: true
class FeatureFlagScopeEntity < Grape::Entity
include RequestAwareEntity
expose :id
expose :active
expose :environment_scope
expose :created_at
expose :updated_at
expose :strategies
end

View file

@ -1474,6 +1474,15 @@
:weight: 3
:idempotent:
:tags: []
- :name: pipeline_default:ci_create_downstream_pipeline
:worker_name: Ci::CreateDownstreamPipelineWorker
:feature_category: :continuous_integration
:has_external_dependencies:
:urgency: :low
:resource_boundary: :cpu
:weight: 3
:idempotent:
:tags: []
- :name: pipeline_default:ci_drop_pipeline
:worker_name: Ci::DropPipelineWorker
:feature_category: :continuous_integration

View file

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Ci
class CreateDownstreamPipelineWorker # rubocop:disable Scalability/IdempotentWorker
include ::ApplicationWorker
include ::PipelineQueue
sidekiq_options retry: 3
worker_resource_boundary :cpu
def perform(bridge_id)
::Ci::Bridge.find_by_id(bridge_id).try do |bridge|
::Ci::CreateDownstreamPipelineService
.new(bridge.project, bridge.user)
.execute(bridge)
end
end
end
end

View file

@ -0,0 +1,8 @@
---
name: create_cross_project_pipeline_worker_rename
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70816
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341410
milestone: '14.4'
type: development
group: group::pipeline authoring
default_enabled: false

View file

@ -49,14 +49,8 @@ module CarrierWave
local_file = local_directory.files.new(key: path)
expire_at = options[:expire_at] || ::Fog::Time.now + @uploader.fog_authenticated_url_expiration
case @uploader.fog_credentials[:provider]
when 'AWS', 'Google'
# Older versions of fog-google do not support options as a parameter
if url_options_supported?(local_file)
when 'AWS', 'Google', 'AzureRM'
local_file.url(expire_at, options)
else
warn "Options hash not supported in #{local_file.class}. You may need to upgrade your Fog provider."
local_file.url(expire_at)
end
when 'Rackspace'
connection.get_object_https_url(@uploader.fog_directory, path, expire_at, options)
when 'OpenStack'

View file

@ -215,6 +215,7 @@ the [reviewer values](https://about.gitlab.com/handbook/engineering/workflow/rev
- [How to dump production data to staging](db_dump.md)
- [Geo development](geo.md)
- [Redis guidelines](redis.md)
- [Adding a new Redis instance](redis/new_redis_instance.md)
- [Sidekiq guidelines](sidekiq_style_guide.md) for working with Sidekiq workers
- [Working with Gitaly](gitaly.md)
- [Elasticsearch integration docs](elasticsearch.md)

View file

@ -6,11 +6,14 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Redis guidelines
## Redis instances
GitLab uses [Redis](https://redis.io) for the following distinct purposes:
- Caching (mostly via `Rails.cache`).
- As a job processing queue with [Sidekiq](sidekiq_style_guide.md).
- To manage the shared application state.
- To store CI trace chunks.
- As a Pub/Sub queue backend for ActionCable.
In most environments (including the GDK), all of these point to the same
@ -29,6 +32,8 @@ more often than it is read.
If [Geo](geo.md) is enabled, each Geo node gets its own, independent Redis
database.
We have [development documentation on adding a new Redis instance](redis/new_redis_instance.md).
## Key naming
Redis is a flat namespace with no hierarchy, which means we must pay attention

View file

@ -0,0 +1,131 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Add a new Redis instance
GitLab can make use of multiple [Redis instances](../redis.md#redis-instances).
These instances are functionally partitioned so that, for example, we
can store [CI trace chunks](../../administration/job_logs.md#incremental-logging-architecture)
in one Redis instance while storing sessions in another.
From time to time we might want to add a new Redis instance. Typically this will
be a functional partition split from one of the existing instances such as the
cache or shared state. This document describes an approach
for adding a new Redis instance that handles existing data, based on
prior examples:
- [Dedicated Redis instance for Trace Chunk storage](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/462).
This document does not cover the operational side of preparing and configuring
the new Redis instance in detail, but the example epics do contain information
on previous approaches to this.
## Step 1: Support configuring the new instance
Before we can switch any features to using the new instance, we have to support
configuring it and referring to it in the codebase. We must support the
main installation types:
- Source installs (including development environments) - [example MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62767)
- Omnibus - [example MR](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5316)
- Helm charts - [example MR](https://gitlab.com/gitlab-org/charts/gitlab/-/merge_requests/2031)
### Fallback instance
In the application code, we need to define a fallback instance in case the new
instance is not configured. For example, if a GitLab instance has already
configured a separate shared state Redis, and we are partitioning data from the
shared state Redis, our new instance's configuration should default to that of
the shared state Redis when it's not present. Otherwise we could break instances
that don't configure the new Redis instance as soon as it's available.
You can [define a `.config_fallback` method](https://gitlab.com/gitlab-org/gitlab/-/blob/a75471dd744678f1a59eeb99f71fca577b155acd/lib/gitlab/redis/wrapper.rb#L69-87)
in `Gitlab::Redis::Wrapper` (the base class for all Redis instances)
that defines the instance to be used if this one is not configured. If we were
adding a `Foo` instance that should fall back to `SharedState`, we can do that
like this:
```ruby
module Gitlab
module Redis
class Foo < ::Gitlab::Redis::Wrapper
# The data we store on Foo used to be stored on SharedState.
def self.config_fallback
SharedState
end
end
end
end
```
We should also add specs like those in
[`trace_chunks_spec.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/lib/gitlab/redis/trace_chunks_spec.rb)
to ensure that this fallback works correctly.
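A minimal spec for the hypothetical `Foo` instance above might look like the following. The shared example name and its arguments here are assumptions that mirror the pattern `trace_chunks_spec.rb` follows, not an exact quote of that file:

```ruby
# spec/lib/gitlab/redis/foo_spec.rb - sketch only
require 'spec_helper'

RSpec.describe Gitlab::Redis::Foo do
  # 'foo' is the assumed configuration key; SharedState is the instance we
  # fall back to when redis.foo is not configured.
  include_examples "redis_new_instance_shared_examples", 'foo', Gitlab::Redis::SharedState
end
```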
## Step 2: Support writing to and reading from the new instance
When migrating to the new instance, we must account for cases where data is
either on:
- The 'old' (original) instance.
- The new one that we have just added support for.
As a result we may need to support reading from and writing to both
instances, depending on some condition.
The exact condition to use varies depending on the data to be migrated. For
the trace chunks case above, there was already a database column indicating where the
data was stored (as there are other storage options than Redis).
This step may not apply if the data has a very short lifetime (a few minutes at most)
and is not critical. In that case, we
may decide that it is OK to incur a small amount of data loss and switch
over through configuration only.
If there is not a more natural way to mark where the data is stored, using a
[feature flag](../feature_flags/index.md) may be convenient:
- It does not require an application restart to take effect.
- It applies to all application instances (Sidekiq, API, web, etc.) at
the same time.
- It supports incremental rollout - ideally by actor (project, group,
user, etc.) - so that we can monitor for errors and roll back easily.
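As a rough sketch of what such a flag-guarded switch could look like (continuing the hypothetical `Foo` instance from Step 1; the `:use_foo_instance` flag and the helper name are illustrative, not actual code in the repository):

```ruby
# Sketch only: pick the Redis instance for a piece of data based on a
# feature flag that supports actor-based, incremental rollout.
def redis_for_foo_data(project)
  if Feature.enabled?(:use_foo_instance, project)
    Gitlab::Redis::Foo          # the new, functionally partitioned instance
  else
    Gitlab::Redis::SharedState  # the instance the data lives on today
  end
end

# Readers and writers go through the same helper, so enabling the flag for a
# project switches both sides at once and can be rolled back instantly:
redis_for_foo_data(project).with { |redis| redis.get("foo:#{project.id}") }
```

Whatever condition is chosen, it should be cheap to evaluate, because it runs on every read and write of the data.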
## Step 3: Migrate the data
We then need to configure the new instance for GitLab.com's production and
staging environments. Hopefully it will be possible to test this change
effectively on staging, to at least make sure that basic usage continues to
work.
After that is done, we can roll out the change to production. Ideally this would
be in an incremental fashion, following the
[standard incremental rollout](../feature_flags/controls.md#rolling-out-changes)
documentation for feature flags.
When we have been using the new instance 100% of the time in production for a
while and there are no issues, we can proceed.
## Step 4: Clean up after the migration
<!-- markdownlint-disable MD044 -->
We may choose to keep the migration paths or remove them, depending on whether
or not we expect self-managed instances to perform this migration.
[gitlab-com/gl-infra/scalability#1131](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1131#note_603354746)
contains a discussion on this topic for the trace chunks feature flag. It may
be - as in that case - that we decide that the maintenance costs of supporting
the migration code are higher than the benefits of allowing self-managed
instances to perform this migration seamlessly, if we expect self-managed
instances to cope without this functional partition.
<!-- markdownlint-enable MD044 -->
If we decide to keep the migration code:
- We should document the migration steps.
- If we used a feature flag, we should ensure it's an [ops type feature
flag](../feature_flags/index.md#ops-type), as these are long-lived flags.
Otherwise, we can remove the flags and conclude the project.

View file

@ -26,6 +26,7 @@ This is a partial list of the [RSpec metadata](https://relishapp.com/rspec/rspec
| `:ldap_no_tls` | The test requires a GitLab instance to be configured to use an external LDAP server with TLS not enabled. |
| `:ldap_tls` | The test requires a GitLab instance to be configured to use an external LDAP server with TLS enabled. |
| `:mattermost` | The test requires a GitLab Mattermost service on the GitLab instance. |
| `:mixed_env` | The test should only be executed in environments that have a paired canary version available through traffic routing based on the existence of the `gitlab_canary=true` cookie. Tests in this category switch the cookie mid-test to validate mixed deployment environments. |
| `:object_storage` | The test requires a GitLab instance to be configured to use multiple [object storage types](../../../administration/object_storage.md). Uses MinIO as the object storage server. |
| `:only` | The test is only to be run in specific execution contexts. See [test execution context selection](execution_context_selection.md) for more information. |
| `:orchestrated` | The GitLab instance under test may be [configured by `gitlab-qa`](https://gitlab.com/gitlab-org/gitlab-qa/-/blob/master/docs/what_tests_can_be_run.md#orchestrated-tests) to be different to the default GitLab configuration, or `gitlab-qa` may launch additional services in separate Docker containers, or both. Tests tagged with `:orchestrated` are excluded when testing environments where we can't dynamically modify the GitLab configuration (for example, Staging). |

View file

@ -24,7 +24,7 @@ The Admin Area is made up of the following sections:
| Section | Description |
|:-----------------------------------------------|:------------|
| **{overview}** [Overview](#overview-section) | View your GitLab [Dashboard](#admin-dashboard), and administer [projects](#administering-projects), [users](#administering-users), [groups](#administering-groups), [jobs](#administering-jobs), [runners](#administering-runners), and [Gitaly servers](#administering-gitaly-servers). |
| **{overview}** [Overview](#overview-section) | View your GitLab [Dashboard](#admin-area-dashboard), and administer [projects](#administering-projects), [users](#administering-users), [groups](#administering-groups), [jobs](#administering-jobs), [runners](#administering-runners), and [Gitaly servers](#administering-gitaly-servers). |
| **{monitor}** Monitoring | View GitLab [system information](#system-information), and information on [background jobs](#background-jobs), [logs](#logs), [health checks](monitoring/health_check.md), [requests profiles](#requests-profiles), and [audit events](#audit-events). |
| **{messages}** Messages | Send and manage [broadcast messages](broadcast_messages.md) for your users. |
| **{hook}** System Hooks | Configure [system hooks](../../system_hooks/system_hooks.md) for many events. |
@ -41,7 +41,7 @@ The Admin Area is made up of the following sections:
| **{appearance}** Appearance | Customize [GitLab appearance](appearance.md). |
| **{settings}** Settings | Modify the [settings](settings/index.md) for your GitLab instance. |
## Admin Dashboard
## Admin Area dashboard
The Dashboard provides statistics and system information about the GitLab instance.
@ -151,7 +151,7 @@ you must provide the complete email address.
#### User impersonation
An administrator can "impersonate" any other user, including other administrator users.
An administrator can "impersonate" any other user, including other administrators.
This allows the administrator to "see what the user sees," and take actions on behalf of the user.
You can impersonate a user in the following ways:
@ -369,7 +369,7 @@ The Sidekiq dashboard consists of the following elements:
### Logs
Since GitLab 13.0, **Log** view has been removed from the admin dashboard since the logging does not work in multi-node setups and could cause confusion for administrators by displaying partial information.
Since GitLab 13.0, **Log** view has been removed from the Admin Area dashboard since the logging does not work in multi-node setups and could cause confusion for administrators by displaying partial information.
For multi-node systems we recommend ingesting the logs into services like Elasticsearch and Splunk.

View file

@ -9,7 +9,9 @@ module API
expose :version
expose :created_at
expose :updated_at
expose :scopes, using: FeatureFlag::LegacyScope
expose :scopes do |_ff|
[]
end
expose :strategies, using: FeatureFlag::Strategy
end
end

View file

@ -1,11 +0,0 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class DetailedLegacyScope < LegacyScope
expose :name
end
end
end
end

View file

@ -1,16 +0,0 @@
# frozen_string_literal: true
module API
module Entities
class FeatureFlag < Grape::Entity
class LegacyScope < Grape::Entity
expose :id
expose :active
expose :environment_scope
expose :strategies
expose :created_at
expose :updated_at
end
end
end
end

View file

@ -9,7 +9,7 @@ module Banzai
html.sub(Gitlab::FrontMatter::PATTERN) do |_match|
lang = $~[:lang].presence || lang_mapping[$~[:delim]]
["```#{lang}", $~[:front_matter], "```", "\n"].join("\n")
["```#{lang}:frontmatter", $~[:front_matter], "```", "\n"].join("\n")
end
end
end

View file

@ -28,6 +28,7 @@ module Banzai
def highlight_node(node)
css_classes = +'code highlight js-syntax-highlight'
lang, lang_params = parse_lang_params(node.attr('lang'))
sourcepos = node.parent.attr('data-sourcepos')
retried = false
if use_rouge?(lang)
@ -55,7 +56,9 @@ module Banzai
retry
end
highlighted = %(<pre class="#{css_classes}"
sourcepos_attr = sourcepos ? "data-sourcepos=\"#{sourcepos}\"" : ""
highlighted = %(<pre #{sourcepos_attr} class="#{css_classes}"
lang="#{language}"
#{lang_params}
v-pre="true"><code>#{code}</code></pre>)

View file

@ -4276,6 +4276,9 @@ msgstr ""
msgid "ApprovalSettings|This setting is configured at the instance level and can only be changed by an administrator."
msgstr ""
msgid "ApprovalSettings|This setting is configured in %{groupName} and can only be changed by an administrator or group owner."
msgstr ""
msgid "ApprovalStatusTooltip|Adheres to separation of duties"
msgstr ""
@ -6546,6 +6549,9 @@ msgstr ""
msgid "Checkout"
msgstr ""
msgid "Checkout|$%{selectedPlanPrice} per 10 GB storage per pack"
msgstr ""
msgid "Checkout|$%{selectedPlanPrice} per pack of 1,000 minutes"
msgstr ""
@ -6566,6 +6572,9 @@ msgstr ""
msgid "Checkout|%{name}'s GitLab subscription"
msgstr ""
msgid "Checkout|%{name}'s storage subscription"
msgstr ""
msgid "Checkout|%{quantity} GB of storage"
msgstr ""
@ -6730,6 +6739,9 @@ msgstr ""
msgid "Checkout|Your organization"
msgstr ""
msgid "Checkout|Your storage subscription has the same term as your main subscription, and the price is prorated accordingly."
msgstr ""
msgid "Checkout|Your subscription will be applied to this group"
msgstr ""
@ -16672,6 +16684,21 @@ msgstr ""
msgid "Hello, %{username}!"
msgstr ""
msgid "HelloMessage|%{handshake_emoji} Contribute to GitLab: %{contribute_link}"
msgstr ""
msgid "HelloMessage|%{magnifier_emoji} Create a new GitLab issue: %{new_issue_link}"
msgstr ""
msgid "HelloMessage|%{rocket_emoji} We like your curiosity! Help us improve GitLab by joining the team: %{jobs_page_link}"
msgstr ""
msgid "HelloMessage|Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!"
msgstr ""
msgid "HelloMessage|Welcome to GitLab!"
msgstr ""
msgid "Help"
msgstr ""

View file

@ -38,11 +38,7 @@ module QA
end
end
context 'when using attachments in comments', :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
context 'when using attachments in comments', :object_storage do
let(:png_file_name) { 'testfile.png' }
let(:file_to_attach) do
File.absolute_path(File.join('qa', 'fixtures', 'designs', png_file_name))

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'Composer Repository' do
include Runtime::Fixtures

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'Generic Repository' do
let(:project) do
Resource::Project.fabricate_via_api! do |project|

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'Helm Registry' do
include Runtime::Fixtures
include_context 'packages registry qa scenario'

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'Maven Repository with Gradle' do
using RSpec::Parameterized::TableSyntax
include Runtime::Fixtures

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :reliable, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :reliable, :object_storage do
describe 'Maven Repository' do
include Runtime::Fixtures

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :reliable, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :reliable, :object_storage do
describe 'npm registry' do
using RSpec::Parameterized::TableSyntax
include Runtime::Fixtures

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'NuGet Repository' do
using RSpec::Parameterized::TableSyntax
include Runtime::Fixtures

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage do
describe 'PyPI Repository' do
include Runtime::Fixtures
let(:project) do

View file

@ -1,11 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :requires_admin, :packages, :object_storage, quarantine: {
only: { job: 'object_storage' },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341209#note_681513082',
type: :investigating
} do
RSpec.describe 'Package', :orchestrated, :requires_admin, :packages, :object_storage do
describe 'RubyGems Repository' do
include Runtime::Fixtures

View file

@ -0,0 +1,8 @@
# frozen_string_literal: true
FactoryBot.define do
factory :user_namespace, class: 'Namespaces::UserNamespace', parent: :namespace do
sequence(:name) { |n| "user_namespace#{n}" }
type { Namespaces::UserNamespace.sti_name }
end
end

View file

@ -41,7 +41,7 @@ describe('IDE jobs detail view', () => {
});
it('scrolls to bottom', () => {
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalled();
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalled();
});
it('renders job output', () => {
@ -125,15 +125,15 @@ describe('IDE jobs detail view', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
});
it('scrolls build trace to bottom', () => {
jest.spyOn(vm.$refs.buildTrace, 'scrollHeight', 'get').mockReturnValue(1000);
jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(1000);
vm.scrollDown();
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 1000);
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 1000);
});
});
@ -141,26 +141,26 @@ describe('IDE jobs detail view', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
});
it('scrolls build trace to top', () => {
vm.scrollUp();
expect(vm.$refs.buildTrace.scrollTo).toHaveBeenCalledWith(0, 0);
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 0);
});
});
describe('scrollBuildLog', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildTrace, 'scrollTo').mockImplementation();
jest.spyOn(vm.$refs.buildTrace, 'offsetHeight', 'get').mockReturnValue(100);
jest.spyOn(vm.$refs.buildTrace, 'scrollHeight', 'get').mockReturnValue(200);
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
jest.spyOn(vm.$refs.buildJobLog, 'offsetHeight', 'get').mockReturnValue(100);
jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(200);
});
it('sets scrollPos to bottom when at the bottom', () => {
jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(100);
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(100);
vm.scrollBuildLog();
@ -168,7 +168,7 @@ describe('IDE jobs detail view', () => {
});
it('sets scrollPos to top when at the top', () => {
jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(0);
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(0);
vm.scrollPos = 1;
vm.scrollBuildLog();
@ -177,7 +177,7 @@ describe('IDE jobs detail view', () => {
});
it('resets scrollPos when not at top or bottom', () => {
jest.spyOn(vm.$refs.buildTrace, 'scrollTop', 'get').mockReturnValue(10);
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(10);
vm.scrollBuildLog();

View file

@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`~/lib/logger/hello logHello console logs a friendly hello message 1`] = `
exports[`~/lib/logger/hello logHello when on dot_com console logs a friendly hello message including the careers page 1`] = `
Array [
Array [
"%cWelcome to GitLab!%c
@ -8,7 +8,24 @@ Array [
Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!
🤝 Contribute to GitLab: https://about.gitlab.com/community/contribute/
🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new",
🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new
🚀 We like your curiosity! Help us improve GitLab by joining the team: https://about.gitlab.com/jobs/",
"padding-top: 0.5em; font-size: 2em;",
"padding-bottom: 0.5em;",
],
]
`;
exports[`~/lib/logger/hello logHello when on self managed console logs a friendly hello message without including the careers page 1`] = `
Array [
Array [
"%cWelcome to GitLab!%c
Does this page need fixes or improvements? Open an issue or contribute a merge request to help make GitLab more lovable. At GitLab, everyone can contribute!
🤝 Contribute to GitLab: https://about.gitlab.com/community/contribute/
🔎 Create a new GitLab issue: https://gitlab.com/gitlab-org/gitlab/-/issues/new
",
"padding-top: 0.5em; font-size: 2em;",
"padding-bottom: 0.5em;",
],

View file

@ -9,7 +9,26 @@ describe('~/lib/logger/hello', () => {
});
describe('logHello', () => {
it('console logs a friendly hello message', () => {
describe('when on dot_com', () => {
beforeEach(() => {
gon.dot_com = true;
});
it('console logs a friendly hello message including the careers page', () => {
expect(consoleLogSpy).not.toHaveBeenCalled();
logHello();
expect(consoleLogSpy.mock.calls).toMatchSnapshot();
});
});
describe('when on self managed', () => {
beforeEach(() => {
gon.dot_com = false;
});
it('console logs a friendly hello message without including the careers page', () => {
expect(consoleLogSpy).not.toHaveBeenCalled();
logHello();
@ -18,3 +37,4 @@ describe('~/lib/logger/hello', () => {
});
});
});
});

View file

@ -81,19 +81,32 @@ RSpec.describe 'CarrierWave::Storage::Fog::File' do
end
describe '#authenticated_url' do
let(:expire_at) { 24.hours.from_now }
let(:options) { { expire_at: expire_at } }
it 'has an authenticated URL' do
expect(subject.authenticated_url).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
context 'with custom expire_at' do
it 'properly sets expires param' do
expire_at = 24.hours.from_now
expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
expect(file).to receive(:url).with(expire_at).and_call_original
expect(file).to receive(:url).with(expire_at, options).and_call_original
end
expect(subject.authenticated_url(expire_at: expire_at)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
end
context 'with content_disposition option' do
let(:options) { { expire_at: expire_at, content_disposition: 'attachment' } }
it 'passes options' do
expect_next_instance_of(Fog::Storage::AzureRM::File) do |file|
expect(file).to receive(:url).with(expire_at, options).and_call_original
end
expect(subject.authenticated_url(options)).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
end
end
end

View file

@ -39,7 +39,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '---'
expect(output).to include "```yaml\nfoo: :foo_symbol\n"
expect(output).to include "```yaml:frontmatter\nfoo: :foo_symbol\n"
end
end
@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '+++'
expect(output).to include "```toml\nfoo = :foo_symbol\n"
expect(output).to include "```toml:frontmatter\nfoo = :foo_symbol\n"
end
end
@ -81,7 +81,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include ';;;'
expect(output).to include "```json\n{\n \"foo\": \":foo_symbol\",\n"
expect(output).to include "```json:frontmatter\n{\n \"foo\": \":foo_symbol\",\n"
end
end
@ -101,7 +101,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).not_to include '---arbitrary'
expect(output).to include "```arbitrary\nfoo = :foo_symbol\n"
expect(output).to include "```arbitrary:frontmatter\nfoo = :foo_symbol\n"
end
end
@ -130,7 +130,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do
aggregate_failures do
expect(output).to eq <<~MD
```yaml
```yaml:frontmatter
foo: :foo_symbol
bar: :bar_symbol
```

View file

@ -98,6 +98,14 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
end
end
context "when sourcepos metadata is available" do
it "includes it in the highlighted code block" do
result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
expect(result.to_html).to eq('<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
end
end
context "when Rouge lexing fails" do
before do
allow_next_instance_of(Rouge::Lexers::Ruby) do |instance|

View file

@ -20,7 +20,7 @@ RSpec.describe Banzai::Pipeline::PreProcessPipeline do
aggregate_failures do
expect(result[:output]).not_to include "\xEF\xBB\xBF"
expect(result[:output]).not_to include '---'
expect(result[:output]).to include "```yaml\nfoo: :foo_symbol\n"
expect(result[:output]).to include "```yaml:frontmatter\nfoo: :foo_symbol\n"
expect(result[:output]).to include "> blockquote\n"
end
end

View file

@ -73,6 +73,25 @@ RSpec.describe Ci::Bridge do
describe 'state machine transitions' do
context 'when bridge points towards downstream' do
%i[created manual].each do |status|
context 'when the create_cross_project_pipeline_worker_rename feature is enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: true)
end
it "schedules downstream pipeline creation when the status is #{status}" do
bridge.status = status
bridge.enqueue!
expect(::Ci::CreateDownstreamPipelineWorker.jobs.last['args']).to eq([bridge.id])
end
end
context 'when the create_cross_project_pipeline_worker_rename feature is not enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: false)
end
it "schedules downstream pipeline creation when the status is #{status}" do
bridge.status = status
@ -81,6 +100,26 @@ RSpec.describe Ci::Bridge do
expect(::Ci::CreateCrossProjectPipelineWorker.jobs.last['args']).to eq([bridge.id])
end
end
end
context 'when the create_cross_project_pipeline_worker_rename feature is enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: true)
end
it "schedules downstream pipeline creation when the status is waiting for resource" do
bridge.status = :waiting_for_resource
bridge.enqueue_waiting_for_resource!
expect(::Ci::CreateDownstreamPipelineWorker.jobs.last['args']).to eq([bridge.id])
end
end
context 'when the create_cross_project_pipeline_worker_rename feature is not enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: false)
end
it "schedules downstream pipeline creation when the status is waiting for resource" do
bridge.status = :waiting_for_resource
@ -89,6 +128,7 @@ RSpec.describe Ci::Bridge do
expect(::Ci::CreateCrossProjectPipelineWorker.jobs.last['args']).to eq([bridge.id])
end
end
it 'raises error when the status is failed' do
bridge.status = :failed

View file

@ -178,7 +178,7 @@ RSpec.describe Namespace do
context 'creating a Group' do
let(:namespace_type) { group_sti_name }
it 'is valid' do
it 'is the correct type of namespace' do
expect(namespace).to be_a(Group)
expect(namespace.kind).to eq('group')
expect(namespace.group_namespace?).to be_truthy
@ -189,7 +189,7 @@ RSpec.describe Namespace do
let(:namespace_type) { project_sti_name }
let(:parent) { create(:group) }
it 'is valid' do
it 'is the correct type of namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespaces::ProjectNamespace)
expect(namespace.kind).to eq('project')
expect(namespace.project_namespace?).to be_truthy
@ -199,10 +199,8 @@ RSpec.describe Namespace do
context 'creating a UserNamespace' do
let(:namespace_type) { user_sti_name }
it 'is valid' do
# TODO: We create a normal Namespace until
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68894 is ready
expect(Namespace.find(namespace.id)).to be_a(Namespace)
it 'is the correct type of namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespaces::UserNamespace)
expect(namespace.kind).to eq('user')
expect(namespace.user_namespace?).to be_truthy
end
@ -211,7 +209,7 @@ RSpec.describe Namespace do
context 'creating a default Namespace' do
let(:namespace_type) { nil }
it 'is valid' do
it 'is the correct type of namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespace)
expect(namespace.kind).to eq('user')
expect(namespace.user_namespace?).to be_truthy
@ -221,7 +219,7 @@ RSpec.describe Namespace do
context 'creating an unknown Namespace type' do
let(:namespace_type) { 'One' }
it 'defaults to a Namespace' do
it 'creates a default Namespace' do
expect(Namespace.find(namespace.id)).to be_a(Namespace)
expect(namespace.kind).to eq('user')
expect(namespace.user_namespace?).to be_truthy

View file

@ -0,0 +1,12 @@
# frozen_string_literal: true
require 'spec_helper'
# Main user namespace functionality is still in `Namespace`, so most
# of the specs are in `namespace_spec.rb`.
# UserNamespace specific specs will end up being migrated here.
RSpec.describe Namespaces::UserNamespace, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:owner) }
end
end

View file

@ -13,7 +13,7 @@ RSpec.describe Operations::FeatureFlag do
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:scopes) }
it { is_expected.to have_many(:strategies) }
end
describe '.reference_pattern' do
@ -52,17 +52,6 @@ RSpec.describe Operations::FeatureFlag do
it { is_expected.to define_enum_for(:version).with_values(new_version_flag: 2) }
context 'a version 2 feature flag' do
it 'is invalid if associated with Operations::FeatureFlagScope models' do
project = create(:project)
feature_flag = described_class.new({ name: 'test', project: project, version: 2,
scopes_attributes: [{ environment_scope: '*', active: false }] })
expect(feature_flag.valid?).to eq(false)
expect(feature_flag.errors.messages).to eq({
version_associations: ["version 2 feature flags may not have scopes"]
})
end
it 'is valid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
feature_flag = described_class.create!({ name: 'test', project: project, version: 2,
@ -81,18 +70,6 @@ RSpec.describe Operations::FeatureFlag do
end
end
describe 'the default scope' do
let_it_be(:project) { create(:project) }
context 'with a version 2 feature flag' do
it 'does not create a default scope' do
feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 2 })
expect(feature_flag.scopes).to eq([])
end
end
end
describe '.enabled' do
subject { described_class.enabled }

View file

@ -2542,7 +2542,14 @@ RSpec.describe User do
end
describe '.find_by_full_path' do
let!(:user) { create(:user) }
using RSpec::Parameterized::TableSyntax
# TODO: this `where/when` can be removed in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
# At that point we only need to check `user_namespace`
where(namespace_type: [:namespace, :user_namespace])
with_them do
let!(:user) { create(:user, namespace: create(namespace_type)) }
context 'with a route matching the given path' do
let!(:route) { user.namespace.route }
@ -2611,6 +2618,7 @@ RSpec.describe User do
end
end
end
end
describe 'all_ssh_keys' do
it { is_expected.to have_many(:keys).dependent(:destroy) }

View file

@ -23,16 +23,34 @@ RSpec.describe Ci::PlayBridgeService, '#execute' do
expect(bridge.reload).to be_pending
end
it "updates bridge's user" do
execute_service
expect(bridge.reload.user).to eq(user)
end
context 'when the create_cross_project_pipeline_worker_rename feature is enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: true)
end
it 'enqueues Ci::CreateDownstreamPipelineWorker' do
expect(::Ci::CreateDownstreamPipelineWorker).to receive(:perform_async).with(bridge.id)
execute_service
end
end
context 'when the create_cross_project_pipeline_worker_rename feature is not enabled' do
before do
stub_feature_flags(create_cross_project_pipeline_worker_rename: false)
end
it 'enqueues Ci::CreateCrossProjectPipelineWorker' do
expect(::Ci::CreateCrossProjectPipelineWorker).to receive(:perform_async).with(bridge.id)
execute_service
end
it "updates bridge's user" do
execute_service
expect(bridge.reload.user).to eq(user)
end
context 'when a subsequent job is skipped' do

View file

@ -93,6 +93,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/spec/foo' | [:backend]
'ee/spec/foo/bar' | [:backend]
'spec/migrations/foo' | [:database]
'ee/spec/migrations/foo' | [:database]
'spec/features/foo' | [:test]
'ee/spec/features/foo' | [:test]
'spec/support/shared_examples/features/foo' | [:test]

View file

@ -0,0 +1,37 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::CreateDownstreamPipelineWorker do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
let(:service) { double('pipeline creation service') }
describe '#perform' do
context 'when bridge exists' do
it 'calls downstream pipeline creation service' do
expect(Ci::CreateDownstreamPipelineService)
.to receive(:new)
.with(project, user)
.and_return(service)
expect(service).to receive(:execute).with(bridge)
described_class.new.perform(bridge.id)
end
end
context 'when bridge does not exist' do
it 'does nothing' do
expect(Ci::CreateDownstreamPipelineService)
.not_to receive(:new)
described_class.new.perform(non_existing_record_id)
end
end
end
end

View file

@ -155,6 +155,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Ci::BuildScheduleWorker' => 3,
'Ci::BuildTraceChunkFlushWorker' => 3,
'Ci::CreateCrossProjectPipelineWorker' => 3,
'Ci::CreateDownstreamPipelineWorker' => 3,
'Ci::DailyBuildGroupReportResultsWorker' => 3,
'Ci::DeleteObjectsWorker' => 0,
'Ci::DropPipelineWorker' => 3,

View file

@ -113,6 +113,7 @@ module Tooling
generator_templates/usage_metric_definition/metric_definition\.yml)\z}x => [:backend, :product_intelligence],
%r{\A((ee|jh)/)?app/(?!assets|views)[^/]+} => :backend,
%r{\A((ee|jh)/)?(bin|config|generator_templates|lib|rubocop)/} => :backend,
%r{\A((ee|jh)/)?spec/migrations} => :database,
%r{\A((ee|jh)/)?spec/} => :backend,
%r{\A((ee|jh)/)?vendor/} => :backend,
%r{\A(Gemfile|Gemfile.lock|Rakefile)\z} => :backend,