Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-05-06 03:10:25 +00:00
parent bfb0d93c76
commit 806b829e76
17 changed files with 823 additions and 47 deletions

View file

@ -0,0 +1,142 @@
# frozen_string_literal: true
module Nav
module TopNavHelper
PROJECTS_VIEW = :projects
def top_nav_view_model(project:)
builder = ::Gitlab::Nav::TopNavViewModelBuilder.new
if current_user
build_view_model(builder: builder, project: project)
else
build_anonymous_view_model(builder: builder)
end
builder.build
end
private
def build_anonymous_view_model(builder:)
# These come from `app/views/layouts/nav/_explore.html.haml`
# TODO: We will move the rest of them shortly
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587
if explore_nav_link?(:projects)
builder.add_primary_menu_item(
**projects_menu_item_attrs.merge({
active: active_nav_link?(path: ['dashboard#show', 'root#show', 'projects#trending', 'projects#starred', 'projects#index']),
href: explore_root_path
})
)
end
end
def build_view_model(builder:, project:)
# These come from `app/views/layouts/nav/_dashboard.html.haml`
if dashboard_nav_link?(:projects)
current_item = project ? current_project(project: project) : {}
builder.add_primary_menu_item(
**projects_menu_item_attrs.merge({
active: active_nav_link?(path: ['root#index', 'projects#trending', 'projects#starred', 'dashboard/projects#index']),
css_class: 'qa-projects-dropdown',
data: { track_label: "projects_dropdown", track_event: "click_dropdown", track_experiment: "new_repo" },
view: PROJECTS_VIEW
})
)
builder.add_view(PROJECTS_VIEW, container_view_props(current_item: current_item, submenu: projects_submenu))
end
if dashboard_nav_link?(:milestones)
builder.add_primary_menu_item(
id: 'milestones',
title: 'Milestones',
active: active_nav_link?(controller: 'dashboard/milestones'),
icon: 'clock',
data: { qa_selector: 'milestones_link' },
href: dashboard_milestones_path
)
end
# Using admin? is generally discouraged because it does not check for
# "admin_mode". In this case we are migrating code and check both, so
# we should be good.
# rubocop: disable Cop/UserAdmin
if current_user&.admin?
builder.add_secondary_menu_item(
id: 'admin',
title: _('Admin'),
active: active_nav_link?(controller: 'admin/dashboard'),
icon: 'admin',
css_class: 'qa-admin-area-link',
href: admin_root_path
)
end
if Gitlab::CurrentSettings.admin_mode
if header_link?(:admin_mode)
builder.add_secondary_menu_item(
id: 'leave_admin_mode',
title: _('Leave Admin Mode'),
active: active_nav_link?(controller: 'admin/sessions'),
icon: 'lock-open',
href: destroy_admin_session_path,
method: :post
)
elsif current_user.admin?
builder.add_secondary_menu_item(
id: 'enter_admin_mode',
title: _('Enter Admin Mode'),
active: active_nav_link?(controller: 'admin/sessions'),
icon: 'lock',
href: new_admin_session_path
)
end
end
# rubocop: enable Cop/UserAdmin
end
def projects_menu_item_attrs
{
id: 'project',
title: _('Projects'),
icon: 'project'
}
end
def container_view_props(current_item:, submenu:)
{
namespace: 'projects',
currentUserName: current_user&.username,
currentItem: current_item,
linksPrimary: submenu[:primary],
linksSecondary: submenu[:secondary]
}
end
def current_project(project:)
return {} unless project.persisted?
{
id: project.id,
name: project.name,
namespace: project.full_name,
webUrl: project_path(project),
avatarUrl: project.avatar_url
}
end
def projects_submenu
# These project links come from `app/views/layouts/nav/projects_dropdown/_show.html.haml`
builder = ::Gitlab::Nav::TopNavMenuBuilder.new
builder.add_primary_menu_item(id: 'your', title: _('Your projects'), href: dashboard_projects_path)
builder.add_primary_menu_item(id: 'starred', title: _('Starred projects'), href: starred_dashboard_projects_path)
builder.add_primary_menu_item(id: 'explore', title: _('Explore projects'), href: explore_root_path)
builder.add_secondary_menu_item(id: 'create', title: _('Create new project'), href: new_project_path)
builder.build
end
end
end
Nav::TopNavHelper.prepend_ee_mod
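
For orientation, here is a sketch of the hash this helper produces for an anonymous user, with the shape inferred from the builder classes added later in this commit (values illustrative, not authoritative):

```ruby
# Approximate result of top_nav_view_model(project: nil) for an anonymous
# user, assuming the explore projects link is enabled (illustrative).
{
  primary: [
    {
      id: 'project',
      title: 'Projects',
      active: false,    # true when an explore/dashboard route is active
      icon: 'project',
      href: '/explore', # explore_root_path
      method: nil,
      view: '',
      css_class: '',
      data: {}
    }
  ],
  secondary: [],
  views: {},
  activeTitle: 'Menu'
}
```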

View file

@ -113,14 +113,29 @@ class MergeRequestDiff < ApplicationRecord
joins(merge_request: :metrics).where(condition)
end
# This scope uses LATERAL JOIN to find the most recent MR diff association for the given merge requests.
# To avoid joining the merge_requests table, we build an in-memory table using the merge request IDs.
# Example:
# SELECT ...
# FROM (VALUES (MR_ID_1),(MR_ID_2)) merge_requests (id)
# INNER JOIN LATERAL (...)
scope :latest_diff_for_merge_requests, -> (merge_requests) do
inner_select = MergeRequestDiff
.default_scoped
.distinct
.select("FIRST_VALUE(id) OVER (PARTITION BY merge_request_id ORDER BY created_at DESC) as id")
.where(merge_request: merge_requests)
mrs = Array(merge_requests)
return MergeRequestDiff.none if mrs.empty?
joins("INNER JOIN (#{inner_select.to_sql}) latest_diffs ON latest_diffs.id = merge_request_diffs.id")
merge_request_table = MergeRequest.arel_table
merge_request_diff_table = MergeRequestDiff.arel_table
join_query = MergeRequestDiff
.where(merge_request_table[:id].eq(merge_request_diff_table[:merge_request_id]))
.order(created_at: :desc)
.limit(1)
mr_id_list = mrs.map { |mr| "(#{Integer(mr.id)})" }.join(",")
MergeRequestDiff
.from("(VALUES #{mr_id_list}) merge_requests (id)")
.joins("INNER JOIN LATERAL (#{join_query.to_sql}) #{MergeRequestDiff.table_name} ON TRUE")
.includes(:merge_request_diff_commits)
end
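
A sketch of how the rewritten scope is called and the rough shape of the SQL it generates (simplified from the comment above; identifiers illustrative):

```ruby
# Illustrative usage: load the latest diff per merge request in one query.
diffs = MergeRequestDiff.latest_diff_for_merge_requests(MergeRequest.where(id: [1, 2]))

# Roughly the SQL produced (simplified):
#   SELECT "merge_request_diffs".*
#   FROM (VALUES (1),(2)) merge_requests (id)
#   INNER JOIN LATERAL (
#     SELECT * FROM "merge_request_diffs"
#     WHERE "merge_requests"."id" = "merge_request_diffs"."merge_request_id"
#     ORDER BY "created_at" DESC
#     LIMIT 1
#   ) merge_request_diffs ON TRUE
```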

View file

@ -0,0 +1,6 @@
---
title: Send email to oncall rotation participants and project owners when user is
removed
merge_request: 59331
author:
type: added

View file

@ -466,15 +466,16 @@ The replication process is now complete.
[PgBouncer](https://www.pgbouncer.org/) may be used with GitLab Geo to pool
PostgreSQL connections. We recommend using PgBouncer if you use GitLab in a
high-availability configuration with a cluster of nodes supporting a Geo
**primary** node and another cluster of nodes supporting a Geo **secondary** node. For more
information, see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
**primary** site and two other clusters of nodes supporting a Geo **secondary** site:
one for the main database and the other for the tracking database. For more information,
see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
## Patroni support
Support for Patroni is intended to replace `repmgr` as a
[highly available PostgreSQL solution](../../postgresql/replication_and_failover.md)
on the primary node, but it can also be used for PostgreSQL HA on a secondary
site.
site. Similar to `repmgr`, using Patroni on a secondary node is optional.
Starting with GitLab 13.5, Patroni is available for _experimental_ use with Geo
primary and secondary sites. Due to its experimental nature, Patroni support is
@ -490,6 +491,10 @@ This experimental implementation has the following limitations:
For instructions about how to set up Patroni on the primary site, see the
[PostgreSQL replication and failover with Omnibus GitLab](../../postgresql/replication_and_failover.md#patroni) page.
### Configuring Patroni cluster for a Geo secondary site
In a Geo secondary site, the main PostgreSQL database is a read-only replica of the primary site's PostgreSQL database.
If you are currently using `repmgr` on your Geo primary site, see [these instructions](#migrating-from-repmgr-to-patroni) for migrating from `repmgr` to Patroni.
A production-ready and secure setup requires at least three Consul nodes, three
@ -498,9 +503,7 @@ configuration for the secondary site. The internal load balancer provides a sing
endpoint for connecting to the Patroni cluster's leader whenever a new leader is
elected. Be sure to use [password credentials](../../postgresql/replication_and_failover.md#database-authorization-for-patroni) and other database best practices.
Similar to `repmgr`, using Patroni on a secondary node is optional.
### Step 1. Configure Patroni permanent replication slot on the primary site
#### Step 1. Configure Patroni permanent replication slot on the primary site
To set up database replication with Patroni on a secondary node, we need to
configure a _permanent replication slot_ on the primary node's Patroni cluster,
@ -520,7 +523,7 @@ Leader instance**:
```ruby
consul['enable'] = true
consul['configuration'] = {
retry_join: %w[CONSUL_PRIMARY1_IP CONSULT_PRIMARY2_IP CONSULT_PRIMARY3_IP]
retry_join: %w[CONSUL_PRIMARY1_IP CONSUL_PRIMARY2_IP CONSUL_PRIMARY3_IP]
}
repmgr['enable'] = false
@ -553,7 +556,7 @@ Leader instance**:
gitlab-ctl reconfigure
```
### Step 2. Configure the internal load balancer on the primary site
#### Step 2. Configure the internal load balancer on the primary site
To avoid reconfiguring the Standby Leader on the secondary site whenever a new
Leader is elected on the primary site, we'll need to set up a TCP internal load
@ -597,7 +600,65 @@ backend postgresql
Refer to your preferred Load Balancer's documentation for further guidance.
### Step 3. Configure a Standby cluster on the secondary site
#### Step 3. Configure a PgBouncer node on the secondary site
A production-ready and highly available configuration requires at least
three Consul nodes and a minimum of one PgBouncer node, but it's recommended to have
one per database node. An internal load balancer (TCP) is required when there is
more than one PgBouncer service node. The internal load balancer provides a single
endpoint for connecting to the PgBouncer cluster. For more information,
see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
Follow the minimal configuration for the PgBouncer node:
1. SSH into your PgBouncer node and log in as root:
```shell
sudo -i
```
1. Edit `/etc/gitlab/gitlab.rb` and add the following:
```ruby
# Disable all components except PgBouncer and Consul agent
roles ['pgbouncer_role']
# PgBouncer configuration
pgbouncer['users'] = {
'pgbouncer': {
password: 'PGBOUNCER_PASSWORD_HASH'
}
}
# Consul configuration
consul['watchers'] = %w(postgresql)
consul['configuration'] = {
retry_join: %w[CONSUL_SECONDARY1_IP CONSUL_SECONDARY2_IP CONSUL_SECONDARY3_IP]
}
consul['monitoring_service_discovery'] = true
```
1. Reconfigure GitLab for the changes to take effect:
```shell
gitlab-ctl reconfigure
```
1. Create a `.pgpass` file so Consul is able to reload PgBouncer. Enter the `PLAIN_TEXT_PGBOUNCER_PASSWORD` twice when asked:
```shell
gitlab-ctl write-pgpass --host 127.0.0.1 --database pgbouncer --user pgbouncer --hostuser gitlab-consul
```
1. Restart the PgBouncer service:
```shell
gitlab-ctl restart pgbouncer
```
#### Step 4. Configure a Standby cluster on the secondary site
NOTE:
If you are converting a secondary site to a Patroni Cluster, you must start
@ -619,7 +680,7 @@ For each Patroni instance on the secondary site:
consul['enable'] = true
consul['configuration'] = {
retry_join: %w[CONSUL_SECONDARY1_IP CONSULT_SECONDARY2_IP CONSULT_SECONDARY3_IP]
retry_join: %w[CONSUL_SECONDARY1_IP CONSUL_SECONDARY2_IP CONSUL_SECONDARY3_IP]
}
repmgr['enable'] = false
@ -669,14 +730,14 @@ For each Patroni instance on the secondary site:
gitlab-ctl reconfigure
```
## Migrating from repmgr to Patroni
### Migrating from repmgr to Patroni
1. Before migrating, ensure there is no replication lag between the primary and secondary sites, and pause replication. In GitLab 13.2 and later, you can pause and resume replication with `gitlab-ctl geo-replication-pause` and `gitlab-ctl geo-replication-resume` on a Geo secondary database node.
1. Follow the [instructions to migrate repmgr to Patroni](../../postgresql/replication_and_failover.md#switching-from-repmgr-to-patroni). When configuring Patroni on each primary site database node, add `patroni['replication_slots'] = { '<slot_name>' => 'physical' }`
to `gitlab.rb`, where `<slot_name>` is the name of the replication slot for your Geo secondary (see the sketch after this list). This ensures that Patroni recognizes the replication slot as permanent and does not drop it upon restarting.
1. If database replication to the secondary was paused before migration, resume replication once Patroni is confirmed working on the primary.
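
A minimal `gitlab.rb` sketch of the replication-slot step above, assuming a slot named `geo_secondary` (the slot name is illustrative; use the name of the slot your Geo secondary replicates from):

```ruby
# /etc/gitlab/gitlab.rb on each primary site database node (illustrative).
# 'geo_secondary' is a placeholder slot name; substitute your own.
patroni['replication_slots'] = {
  'geo_secondary' => 'physical'
}
```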
## Migrating a single PostgreSQL node to Patroni
### Migrating a single PostgreSQL node to Patroni
Before the introduction of Patroni, Geo had no Omnibus support for HA setups on the secondary node.
@ -685,12 +746,197 @@ With Patroni it's now possible to support that. In order to migrate the existing
1. Make sure you have a Consul cluster setup on the secondary (similar to how you set it up on the primary).
1. [Configure a permanent replication slot](#step-1-configure-patroni-permanent-replication-slot-on-the-primary-site).
1. [Configure the internal load balancer](#step-2-configure-the-internal-load-balancer-on-the-primary-site).
1. [Configure a Standby Cluster](#step-3-configure-a-standby-cluster-on-the-secondary-site)
1. [Configure a PgBouncer node](#step-3-configure-a-pgbouncer-node-on-the-secondary-site)
1. [Configure a Standby Cluster](#step-4-configure-a-standby-cluster-on-the-secondary-site)
on that single node machine.
You will end up with a "Standby Cluster" with a single node. That allows you to add
additional Patroni nodes later by following the same instructions above.
### Configuring Patroni cluster for the tracking PostgreSQL database
Secondary sites use a separate PostgreSQL installation as a tracking database to
keep track of replication status and automatically recover from potential replication issues.
Omnibus automatically configures a tracking database when `roles ['geo_secondary_role']` is set.
If you want to run this database in a highly available configuration, follow the instructions below.
A production-ready and secure setup requires at least three Consul nodes and three
Patroni nodes on the secondary site. Be sure to use [password credentials](../../postgresql/replication_and_failover.md#database-authorization-for-patroni) and other database best practices.
#### Step 1. Configure a PgBouncer node on the secondary site
A production-ready and highly available configuration requires at least
three Consul nodes, three PgBouncer nodes, and one internal load-balancing node.
The internal load balancer provides a single endpoint for connecting to the
PgBouncer cluster. For more information, see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
Follow the minimal configuration for the PgBouncer node for the tracking database:
1. SSH into your PgBouncer node and log in as root:
```shell
sudo -i
```
1. Edit `/etc/gitlab/gitlab.rb` and add the following:
```ruby
# Disable all components except PgBouncer and Consul agent
roles ['pgbouncer_role']
# PgBouncer configuration
pgbouncer['users'] = {
'pgbouncer': {
password: 'PGBOUNCER_PASSWORD_HASH'
}
}
pgbouncer['databases'] = {
gitlabhq_geo_production: {
user: 'pgbouncer',
password: 'PGBOUNCER_PASSWORD_HASH'
}
}
# Consul configuration
consul['watchers'] = %w(postgresql)
consul['configuration'] = {
retry_join: %w[CONSUL_TRACKINGDB1_IP CONSUL_TRACKINGDB2_IP CONSUL_TRACKINGDB3_IP]
}
consul['monitoring_service_discovery'] = true
# GitLab database settings
gitlab_rails['db_database'] = 'gitlabhq_geo_production'
gitlab_rails['db_username'] = 'gitlab_geo'
```
1. Reconfigure GitLab for the changes to take effect:
```shell
gitlab-ctl reconfigure
```
1. Create a `.pgpass` file so Consul is able to reload PgBouncer. Enter the `PLAIN_TEXT_PGBOUNCER_PASSWORD` twice when asked:
```shell
gitlab-ctl write-pgpass --host 127.0.0.1 --database pgbouncer --user pgbouncer --hostuser gitlab-consul
```
1. Restart the PgBouncer service:
```shell
gitlab-ctl restart pgbouncer
```
#### Step 2. Configure a Patroni cluster
For each Patroni instance on the secondary site for the tracking database:
1. SSH into your Patroni node and log in as root:
```shell
sudo -i
```
1. Edit `/etc/gitlab/gitlab.rb` and add the following:
```ruby
# Disable all components except PostgreSQL, Patroni, and Consul
roles ['patroni_role']
# Consul configuration
consul['services'] = %w(postgresql)
consul['configuration'] = {
server: true,
retry_join: %w[CONSUL_TRACKINGDB1_IP CONSUL_TRACKINGDB2_IP CONSUL_TRACKINGDB3_IP]
}
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['hot_standby'] = 'on'
postgresql['wal_level'] = 'replica'
postgresql['pgbouncer_user_password'] = 'PGBOUNCER_PASSWORD_HASH'
postgresql['sql_replication_password'] = 'POSTGRESQL_REPLICATION_PASSWORD_HASH'
postgresql['sql_user_password'] = 'POSTGRESQL_PASSWORD_HASH'
postgresql['md5_auth_cidr_addresses'] = [
'PATRONI_TRACKINGDB1_IP/32', 'PATRONI_TRACKINGDB2_IP/32', 'PATRONI_TRACKINGDB3_IP/32', 'PATRONI_TRACKINGDB_PGBOUNCER/32',
# Any other instance that needs access to the database as per documentation
]
# Patroni configuration
patroni['replication_password'] = 'PLAIN_TEXT_POSTGRESQL_REPLICATION_PASSWORD'
patroni['postgresql']['max_wal_senders'] = 5 # A minimum of three for one replica, plus two for each additional replica
# GitLab database settings
gitlab_rails['db_database'] = 'gitlabhq_geo_production'
gitlab_rails['db_username'] = 'gitlab_geo'
# Disable automatic database migrations
gitlab_rails['auto_migrate'] = false
```
1. Reconfigure GitLab for the changes to take effect.
This is required to bootstrap PostgreSQL users and settings:
```shell
gitlab-ctl reconfigure
```
#### Step 3. Configure the tracking database on the secondary nodes
For each node running the `gitlab-rails`, `sidekiq`, and `geo-logcursor` services:
1. SSH into your node and log in as root:
```shell
sudo -i
```
1. Edit `/etc/gitlab/gitlab.rb` and add the following attributes. You may have other attributes set, but the following must be set.
```ruby
# Tracking database settings
geo_secondary['db_username'] = 'gitlab_geo'
geo_secondary['db_password'] = 'PLAIN_TEXT_PGBOUNCER_PASSWORD'
geo_secondary['db_database'] = 'gitlabhq_geo_production'
geo_secondary['db_host'] = 'PATRONI_TRACKINGDB_PGBOUNCER_IP'
geo_secondary['db_port'] = 6432
geo_secondary['auto_migrate'] = false
# Disable the tracking database service
geo_postgresql['enable'] = false
```
1. Reconfigure GitLab for the changes to take effect.
```shell
gitlab-ctl reconfigure
```
1. Run the tracking database migrations:
```shell
gitlab-rake geo:db:migrate
```
### Migrating a single tracking database node to Patroni
Before the introduction of Patroni, Geo had no Omnibus support for HA setups on
the secondary node.
With Patroni, it's now possible to support that. Because the Patroni implementation
in Omnibus does not allow managing two different clusters on the same machine, we
recommend setting up a new Patroni cluster for the tracking database by following
the same instructions above.
The secondary nodes will backfill the new tracking database, and no data
synchronization will be required.
## Troubleshooting
Read the [troubleshooting document](../replication/troubleshooting.md).

View file

@ -173,9 +173,8 @@ https://gitlab.com/gitlab-org/gitlab/-/jobs/artifacts/master/file/htmlcov/index.
## When job artifacts are deleted
By default, the latest job artifacts from the most recent successful jobs are never deleted.
If a job is configured with [`expire_in`](../yaml/README.md#artifactsexpire_in),
its artifacts only expire if a more recent artifact exists.
See the [`expire_in`](../yaml/README.md#artifactsexpire_in) documentation for information on when
job artifacts are deleted.
### Keep artifacts from most recent successful jobs

View file

@ -17,4 +17,5 @@ Pipeline artifacts are saved to disk or object storage. They count towards a pro
## When pipeline artifacts are deleted
The latest artifacts for refs are locked against deletion, and kept regardless of the expiry time.
See the [`expire_in`](../yaml/README.md#artifactsexpire_in) documentation for information on when
pipeline artifacts are deleted.

View file

@ -3320,25 +3320,25 @@ Files matched by [`artifacts:untracked`](#artifactsuntracked) can be excluded us
#### `artifacts:expire_in`
Use `expire_in` to specify how long artifacts are active before they
expire and are deleted.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/16267) in GitLab 13.0 behind a disabled feature flag, the latest job artifacts are kept regardless of expiry time.
> - [Made default behavior](https://gitlab.com/gitlab-org/gitlab/-/issues/229936) in GitLab 13.4.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/241026) in GitLab 13.8, keeping latest job artifacts can be disabled at the project level.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/276583) in GitLab 13.9, keeping latest job artifacts can be disabled instance-wide.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/321323) in GitLab 13.12, the latest pipeline artifacts are kept regardless of expiry time.
The expiration time period begins when the artifact is uploaded and
stored on GitLab. If the expiry time is not defined, it defaults to the
[instance-wide setting](../../user/admin_area/settings/continuous_integration.md#default-artifacts-expiration)
(30 days by default).
Use `expire_in` to specify how long [job artifacts](../pipelines/job_artifacts.md) are stored before
they expire and are deleted. The `expire_in` setting does not affect:
To override the expiration date and protect artifacts from being automatically deleted:
- Artifacts from the latest job, unless keeping the latest job artifacts is:
- [Disabled at the project level](../pipelines/job_artifacts.md#keep-artifacts-from-most-recent-successful-jobs).
- [Disabled instance-wide](../../user/admin_area/settings/continuous_integration.md#keep-the-latest-artifacts-for-all-jobs-in-the-latest-successful-pipelines).
- [Pipeline artifacts](../pipelines/pipeline_artifacts.md). It's not possible to specify an
expiration date for these:
- Pipeline artifacts from the latest pipeline are kept forever.
- Other pipeline artifacts are erased after one week.
- Use the **Keep** button on the job page.
- Set the value of `expire_in` to `never`. [Available](https://gitlab.com/gitlab-org/gitlab/-/issues/22761)
in GitLab 13.3 and later.
After their expiry, artifacts are deleted hourly by default (via a cron job),
and are not accessible anymore.
The value of `expire_in` is an elapsed time in seconds, unless a unit is
provided. Examples of valid values:
The value of `expire_in` is an elapsed time in seconds, unless a unit is provided. Valid values
include:
- `'42'`
- `42 seconds`
@ -3350,7 +3350,7 @@ provided. Examples of valid values:
- `3 weeks and 2 days`
- `never`
To expire artifacts 1 week after being uploaded:
To expire artifacts one week after being uploaded:
```yaml
job:
@ -3358,12 +3358,19 @@ job:
expire_in: 1 week
```
The latest artifacts for refs are locked against deletion, and kept regardless of
the expiry time. [Introduced in](https://gitlab.com/gitlab-org/gitlab/-/issues/16267)
GitLab 13.0 behind a disabled feature flag, and [made the default behavior](https://gitlab.com/gitlab-org/gitlab/-/issues/229936)
in GitLab 13.4.
The expiration time period begins when the artifact is uploaded and stored on GitLab. If the expiry
time is not defined, it defaults to the
[instance-wide setting](../../user/admin_area/settings/continuous_integration.md#default-artifacts-expiration)
(30 days by default).
In [GitLab 13.8 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/241026), you can [disable this behavior at the project level in the CI/CD settings](../pipelines/job_artifacts.md#keep-artifacts-from-most-recent-successful-jobs). In [GitLab 13.9 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/276583), you can [disable this behavior instance-wide](../../user/admin_area/settings/continuous_integration.md#keep-the-latest-artifacts-for-all-jobs-in-the-latest-successful-pipelines).
To override the expiration date and protect artifacts from being automatically deleted:
- Use the **Keep** button on the job page.
- [In GitLab 13.3 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/22761), set the value of
`expire_in` to `never`.
After their expiry, artifacts are deleted hourly by default (using a cron job), and are not
accessible anymore.
#### `artifacts:expose_as`

View file

@ -582,6 +582,7 @@ repository's root as `.gitlab-api-fuzzing.yml`.
| CI/CD variable | Description |
|------------------------------------------------------|--------------------|
| `SECURE_ANALYZERS_PREFIX` | Specify the Docker registry base address from which to download the analyzer. |
| `FUZZAPI_VERSION` | Specify API Fuzzing container version. Defaults to `latest`. |
| `FUZZAPI_TARGET_URL` | Base URL of API testing target. |
|[`FUZZAPI_CONFIG`](#configuration-files) | API Fuzzing configuration file. Defaults to `.gitlab-apifuzzer.yml`. |
@ -1117,6 +1118,12 @@ Profiles:
UnicodeFuzzing: true
```
## Running API fuzzing in an offline environment
For self-managed GitLab instances in an environment with limited, restricted, or intermittent access
to external resources through the internet, some adjustments are required for the Web API Fuzz testing job to
successfully run. For more information, see [Offline environments](../offline_deployments/index.md).
## Troubleshooting
### Error, the OpenAPI document is not valid. Errors were found during validation of the document using the published OpenAPI schema

View file

@ -17,13 +17,14 @@ module Gitlab
Config::Yaml::Tags::TagError
].freeze
attr_reader :root, :context, :ref
attr_reader :root, :context, :ref, :source
def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil, ref: nil)
def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil, ref: nil, source: nil)
@context = build_context(project: project, sha: sha, user: user, parent_pipeline: parent_pipeline)
@context.set_deadline(TIMEOUT_SECONDS)
@ref = ref
@source = source
@config = expand_config(config)

View file

@ -16,6 +16,7 @@ module Gitlab
project: project,
ref: @pipeline.ref,
sha: @pipeline.sha,
source: @pipeline.source,
user: current_user,
parent_pipeline: parent_pipeline
}
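
Taken together with the previous file, the pipeline's source is now threaded into the config object and exposed through the new `attr_reader` (a sketch; the exact class path is assumed from context):

```ruby
# Illustrative: the processor above passes source: into the config
# constructor, which stores it for later use during expansion.
config = ::Gitlab::Ci::Config.new(
  yaml_content,
  project: project,
  ref: pipeline.ref,
  sha: pipeline.sha,
  source: pipeline.source, # for example 'push' or 'parent_pipeline'
  user: current_user
)
config.source # => 'push'
```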

View file

@ -0,0 +1,35 @@
# frozen_string_literal: true
module Gitlab
module Nav
class TopNavMenuBuilder
def initialize
@primary = []
@secondary = []
end
def add_primary_menu_item(**args)
add_menu_item(dest: @primary, **args)
end
def add_secondary_menu_item(**args)
add_menu_item(dest: @secondary, **args)
end
def build
{
primary: @primary,
secondary: @secondary
}
end
private
def add_menu_item(dest:, **args)
item = ::Gitlab::Nav::TopNavMenuItem.build(**args)
dest.push(item)
end
end
end
end

View file

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
module Nav
class TopNavMenuItem
# We want to have all keyword arguments for type safety.
# Ordinarily we could introduce a params object, but that's kind of what
# this is already :/. We could also take a hash and manually check every
# entry, but it's much more maintainable to rely on native Ruby.
# rubocop: disable Metrics/ParameterLists
def self.build(id:, title:, active: false, icon: '', href: '', method: nil, view: '', css_class: '', data: {})
{
id: id,
title: title,
active: active,
icon: icon,
href: href,
method: method,
view: view.to_s,
css_class: css_class,
data: data
}
end
# rubocop: enable Metrics/ParameterLists
end
end
end
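
Because `build` accepts only keyword arguments, a mistyped or missing key fails fast instead of silently producing a malformed menu item, for example:

```ruby
# Unknown keywords raise immediately:
::Gitlab::Nav::TopNavMenuItem.build(id: 'x', title: 'X', hreff: '/x')
# => ArgumentError: unknown keyword: :hreff

# So do missing required keywords:
::Gitlab::Nav::TopNavMenuItem.build(title: 'X')
# => ArgumentError: missing keyword: :id
```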

View file

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
module Nav
class TopNavViewModelBuilder
def initialize
@menu_builder = ::Gitlab::Nav::TopNavMenuBuilder.new
@views = {}
end
delegate :add_primary_menu_item, :add_secondary_menu_item, to: :@menu_builder
def add_view(name, props)
@views[name] = props
end
def build
menu = @menu_builder.build
menu.merge({
views: @views,
activeTitle: _('Menu')
})
end
end
end
end
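
A combined sketch showing how the three classes in this commit compose into a view model (values illustrative):

```ruby
# Illustrative composition of the builder classes above.
builder = ::Gitlab::Nav::TopNavViewModelBuilder.new

builder.add_primary_menu_item(id: 'project', title: 'Projects', icon: 'project', view: :projects)
builder.add_view(:projects, { namespace: 'projects' })

builder.build
# => {
#      primary: [{ id: 'project', title: 'Projects', active: false, icon: 'project',
#                  href: '', method: nil, view: 'projects', css_class: '', data: {} }],
#      secondary: [],
#      views: { projects: { namespace: 'projects' } },
#      activeTitle: 'Menu'
#    }
```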

View file

@ -800,6 +800,12 @@ msgstr ""
msgid "%{retryButtonStart}Try again%{retryButtonEnd} or %{newFileButtonStart}attach a new file%{newFileButtonEnd}."
msgstr ""
msgid "%{rotation} has been recalculated with the remaining participants. Please review the new setup for %{rotation_link}. It is recommended that you reach out to the current on-call responder to ensure continuity of on-call coverage."
msgstr ""
msgid "%{rotation} has been recalculated with the remaining participants. Please review the new setup for %{rotation}. It is recommended that you reach out to the current on-call responder to ensure continuity of on-call coverage."
msgstr ""
msgid "%{seconds}s"
msgstr ""
@ -964,6 +970,9 @@ msgstr ""
msgid "%{userName}'s avatar"
msgstr ""
msgid "%{user_name} (%{user_username}) was removed from %{rotation} in %{schedule} in %{project}. "
msgstr ""
msgid "%{user_name} profile page"
msgstr ""
@ -2165,6 +2174,9 @@ msgstr ""
msgid "Adjust your filters/search criteria above. If you believe this may be an error, please refer to the %{linkStart}Geo Troubleshooting%{linkEnd} documentation for more information."
msgstr ""
msgid "Admin"
msgstr ""
msgid "Admin Area"
msgstr ""
@ -9343,6 +9355,9 @@ msgstr ""
msgid "Create new label"
msgstr ""
msgid "Create new project"
msgstr ""
msgid "Create new..."
msgstr ""
@ -15288,6 +15303,9 @@ msgstr ""
msgid "Go to previous page"
msgstr ""
msgid "Go to primary node"
msgstr ""
msgid "Go to project"
msgstr ""
@ -20284,6 +20302,9 @@ msgstr ""
msgid "Memory Usage"
msgstr ""
msgid "Menu"
msgstr ""
msgid "Merge"
msgstr ""

View file

@ -0,0 +1,214 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Nav::TopNavHelper do
include ActionView::Helpers::UrlHelper
describe '#top_nav_view_model' do
let_it_be(:user) { build_stubbed(:user) }
let_it_be(:admin) { build_stubbed(:user, :admin) }
let(:current_user) { nil }
let(:current_project) { nil }
let(:with_current_settings_admin_mode) { false }
let(:with_header_link_admin_mode) { false }
let(:with_projects) { false }
let(:with_milestones) { false }
let(:subject) { helper.top_nav_view_model(project: current_project) }
let(:active_title) { 'Menu' }
before do
allow(helper).to receive(:current_user) { current_user }
allow(Gitlab::CurrentSettings).to receive(:admin_mode) { with_current_settings_admin_mode }
allow(helper).to receive(:header_link?).with(:admin_mode) { with_header_link_admin_mode }
# Defaulting all `dashboard_nav_link?` calls to false ensures the EE-specific behavior
# is not enabled in this CE spec
allow(helper).to receive(:dashboard_nav_link?).with(anything) { false }
allow(helper).to receive(:dashboard_nav_link?).with(:projects) { with_projects }
allow(helper).to receive(:dashboard_nav_link?).with(:milestones) { with_milestones }
end
it 'has :activeTitle' do
expect(subject[:activeTitle]).to eq(active_title)
end
context 'when current_user is nil (anonymous)' do
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
href: '/explore',
icon: 'project',
id: 'project',
title: 'Projects'
)
expect(subject[:primary]).to eq([expected_primary])
end
end
context 'when current_user is non-admin' do
let(:current_user) { user }
it 'has no menu items or views by default' do
expect(subject).to eq({ activeTitle: active_title,
primary: [],
secondary: [],
views: {} })
end
context 'with projects' do
let(:with_projects) { true }
let(:projects_view) { subject[:views][:projects] }
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
css_class: 'qa-projects-dropdown',
data: {
track_event: 'click_dropdown',
track_experiment: 'new_repo',
track_label: 'projects_dropdown'
},
icon: 'project',
id: 'project',
title: 'Projects',
view: 'projects'
)
expect(subject[:primary]).to eq([expected_primary])
end
context 'projects' do
it 'has expected :currentUserName' do
expect(projects_view[:currentUserName]).to eq(current_user.username)
end
it 'has expected :namespace' do
expect(projects_view[:namespace]).to eq('projects')
end
it 'has expected :linksPrimary' do
expected_links_primary = [
::Gitlab::Nav::TopNavMenuItem.build(
href: '/dashboard/projects',
id: 'your',
title: 'Your projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
href: '/dashboard/projects/starred',
id: 'starred',
title: 'Starred projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
href: '/explore',
id: 'explore',
title: 'Explore projects'
)
]
expect(projects_view[:linksPrimary]).to eq(expected_links_primary)
end
it 'has expected :linksSecondary' do
expected_links_secondary = [
::Gitlab::Nav::TopNavMenuItem.build(
href: '/projects/new',
id: 'create',
title: 'Create new project'
)
]
expect(projects_view[:linksSecondary]).to eq(expected_links_secondary)
end
context 'with persisted project' do
let_it_be(:project) { build_stubbed(:project) }
let(:current_project) { project }
let(:avatar_url) { 'avatar_url' }
before do
allow(project).to receive(:persisted?) { true }
allow(project).to receive(:avatar_url) { avatar_url }
end
it 'has project as :container' do
expected_container = {
avatarUrl: avatar_url,
id: project.id,
name: project.name,
namespace: project.full_name,
webUrl: project_path(project)
}
expect(projects_view[:currentItem]).to eq(expected_container)
end
end
end
end
context 'with milestones' do
let(:with_milestones) { true }
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
data: {
qa_selector: 'milestones_link'
},
href: '/dashboard/milestones',
icon: 'clock',
id: 'milestones',
title: 'Milestones'
)
expect(subject[:primary]).to eq([expected_primary])
end
end
end
context 'when current_user is admin' do
let_it_be(:current_user) { admin }
let(:with_current_settings_admin_mode) { true }
it 'has admin as first :secondary item' do
expected_admin_item = ::Gitlab::Nav::TopNavMenuItem.build(
id: 'admin',
title: 'Admin',
icon: 'admin',
href: '/admin',
css_class: 'qa-admin-area-link'
)
expect(subject[:secondary].first).to eq(expected_admin_item)
end
context 'with header link admin_mode true' do
let(:with_header_link_admin_mode) { true }
it 'has leave_admin_mode as last :secondary item' do
expected_leave_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
id: 'leave_admin_mode',
title: 'Leave Admin Mode',
icon: 'lock-open',
href: '/admin/session/destroy',
method: :post
)
expect(subject[:secondary].last).to eq(expected_leave_admin_mode_item)
end
end
context 'with header link admin_mode false' do
let(:with_header_link_admin_mode) { false }
it 'has enter_admin_mode as last :secondary item' do
expected_enter_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
id: 'enter_admin_mode',
title: 'Enter Admin Mode',
icon: 'lock',
href: '/admin/session/new'
)
expect(subject[:secondary].last).to eq(expected_enter_admin_mode_item)
end
end
end
end
end

View file

@ -0,0 +1,23 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::Nav::TopNavMenuItem do
describe '.build' do
it 'builds a hash from the given args' do
item = {
id: 'id',
title: 'Title',
active: true,
icon: 'icon',
href: 'href',
method: 'method',
view: 'view',
css_class: 'css_class',
data: {}
}
expect(described_class.build(**item)).to eq(item)
end
end
end

View file

@ -1166,5 +1166,9 @@ RSpec.describe MergeRequestDiff do
it 'loads nothing if the merge request has no diff record' do
expect(described_class.latest_diff_for_merge_requests(merge_request_3)).to be_empty
end
it 'loads nothing if nil was passed as merge_request' do
expect(described_class.latest_diff_for_merge_requests(nil)).to be_empty
end
end
end