Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-02-16 15:09:50 +00:00
parent 8215fc964a
commit b4e854a900
96 changed files with 1570 additions and 661 deletions

View file

@ -1 +1 @@
8.62.0
8.63.0

View file

@ -1,5 +1,5 @@
<script>
import { GlButton, GlLoadingIcon, GlIntersectionObserver, GlModal } from '@gitlab/ui';
import { GlButton, GlLoadingIcon, GlIntersectionObserver, GlModal, GlFormInput } from '@gitlab/ui';
import { mapActions, mapState, mapGetters } from 'vuex';
import { n__, __, sprintf } from '~/locale';
import ProviderRepoTableRow from './provider_repo_table_row.vue';
@ -12,6 +12,7 @@ export default {
GlButton,
GlModal,
GlIntersectionObserver,
GlFormInput,
},
props: {
providerTitle: {
@ -115,13 +116,13 @@ export default {
<template>
<div>
<p class="light text-nowrap mt-2">
<p class="gl-text-gray-900 gl-white-space-nowrap gl-mt-3">
{{ s__('ImportProjects|Select the repositories you want to import') }}
</p>
<template v-if="hasIncompatibleRepos">
<slot name="incompatible-repos-warning"></slot>
</template>
<div class="d-flex justify-content-between align-items-end flex-wrap mb-3">
<div class="gl-display-flex gl-justify-content-space-between gl-flex-wrap gl-mb-5">
<gl-button
variant="success"
:loading="isImportingAnyRepo"
@ -148,24 +149,29 @@ export default {
<slot name="actions"></slot>
<form v-if="filterable" class="gl-ml-auto" novalidate @submit.prevent>
<input
<gl-form-input
data-qa-selector="githubish_import_filter_field"
class="form-control"
name="filter"
:placeholder="__('Filter your repositories by name')"
autofocus
size="40"
size="lg"
@keyup.enter="setFilter($event.target.value)"
/>
</form>
</div>
<div v-if="repositories.length" class="table-responsive">
<table class="table import-table">
<thead>
<th class="import-jobs-from-col">{{ fromHeaderText }}</th>
<th class="import-jobs-to-col">{{ __('To GitLab') }}</th>
<th class="import-jobs-status-col">{{ __('Status') }}</th>
<th class="import-jobs-cta-col"></th>
<div v-if="repositories.length" class="gl-w-full">
<table>
<thead class="gl-border-0 gl-border-solid gl-border-t-1 gl-border-gray-100">
<th class="import-jobs-from-col gl-p-4 gl-vertical-align-top gl-border-b-1">
{{ fromHeaderText }}
</th>
<th class="import-jobs-to-col gl-p-4 gl-vertical-align-top gl-border-b-1">
{{ __('To GitLab') }}
</th>
<th class="import-jobs-status-col gl-p-4 gl-vertical-align-top gl-border-b-1">
{{ __('Status') }}
</th>
<th class="import-jobs-cta-col gl-p-4 gl-vertical-align-top gl-border-b-1"></th>
</thead>
<tbody>
<template v-for="repo in repositories">
@ -183,9 +189,9 @@ export default {
:key="pagePaginationStateKey"
@appear="fetchRepos"
/>
<gl-loading-icon v-if="isLoading" class="import-projects-loading-icon" size="md" />
<gl-loading-icon v-if="isLoading" class="gl-mt-7" size="md" />
<div v-if="!isLoading && repositories.length === 0" class="text-center">
<div v-if="!isLoading && repositories.length === 0" class="gl-text-center">
<strong>{{ emptyStateText }}</strong>
</div>
</div>

View file

@ -1,5 +1,5 @@
<script>
import { GlIcon, GlBadge } from '@gitlab/ui';
import { GlIcon, GlBadge, GlFormInput, GlButton, GlLink } from '@gitlab/ui';
import { mapState, mapGetters, mapActions } from 'vuex';
import { __ } from '~/locale';
import Select2Select from '~/vue_shared/components/select2_select.vue';
@ -12,8 +12,11 @@ export default {
components: {
Select2Select,
ImportStatus,
GlFormInput,
GlButton,
GlIcon,
GlBadge,
GlLink,
},
props: {
repo: {
@ -61,7 +64,7 @@ export default {
select2Options() {
return {
data: this.availableNamespaces,
containerCssClass: 'import-namespace-select qa-project-namespace-select w-auto',
containerCssClass: 'import-namespace-select qa-project-namespace-select gl-w-auto',
};
},
@ -97,52 +100,56 @@ export default {
</script>
<template>
<tr class="qa-project-import-row import-row">
<td>
<a
:href="repo.importSource.providerLink"
rel="noreferrer noopener"
target="_blank"
data-testid="providerLink"
<tr
class="qa-project-import-row gl-h-11 gl-border-0 gl-border-solid gl-border-t-1 gl-border-gray-100 gl-h-11"
>
<td class="gl-p-4">
<gl-link :href="repo.importSource.providerLink" target="_blank" data-testid="providerLink"
>{{ repo.importSource.fullName }}
<gl-icon v-if="repo.importSource.providerLink" name="external-link" />
</a>
</gl-link>
</td>
<td class="d-flex flex-wrap flex-lg-nowrap" data-testid="fullPath">
<td
class="gl-display-flex gl-flex-sm-wrap gl-p-4 gl-pt-5 gl-vertical-align-top"
data-testid="fullPath"
>
<template v-if="repo.importSource.target">{{ repo.importSource.target }}</template>
<template v-else-if="isImportNotStarted">
<div class="import-entities-target-select gl-display-flex gl-align-items-stretch gl-w-full">
<select2-select v-model="targetNamespaceSelect" :options="select2Options" />
<span class="px-2 import-slash-divider d-flex justify-content-center align-items-center"
>/</span
<div
class="import-entities-target-select-separator gl-px-3 gl-display-flex gl-align-items-center gl-border-solid gl-border-0 gl-border-t-1 gl-border-b-1"
>
<input
/
</div>
<gl-form-input
v-model="newNameInput"
type="text"
class="form-control import-project-name-input qa-project-path-field"
class="gl-rounded-top-left-none gl-rounded-bottom-left-none qa-project-path-field"
/>
</div>
</template>
<template v-else-if="repo.importedProject">{{ displayFullPath }}</template>
</td>
<td>
<td class="gl-p-4">
<import-status :status="importStatus" />
</td>
<td data-testid="actions">
<a
<gl-button
v-if="isFinished"
class="btn btn-default"
:href="repo.importedProject.fullPath"
rel="noreferrer noopener"
target="_blank"
>{{ __('Go to project') }}
</a>
<button
</gl-button>
<gl-button
v-if="isImportNotStarted"
type="button"
class="qa-import-button btn btn-default"
class="qa-import-button"
@click="fetchImport(repo.importSource.id)"
>
{{ importButtonText }}
</button>
</gl-button>
<gl-badge v-else-if="isIncompatible" variant="danger">{{
__('Incompatible project')
}}</gl-badge>

View file

@ -333,15 +333,19 @@ export default {
this.fetchIssuables();
},
handleFilter(filters) {
let search = null;
const searchTokens = [];
filters.forEach((filter) => {
if (typeof filter === 'string') {
search = filter;
if (filter.type === 'filtered-search-term') {
if (filter.value.data) {
searchTokens.push(filter.value.data);
}
}
});
this.filters.search = search;
if (searchTokens.length) {
this.filters.search = searchTokens.join(' ');
}
this.page = 1;
this.refetchIssuables();

View file

@ -1,6 +1,9 @@
import { joinPaths } from '~/lib/utils/url_utility';
// tell webpack to load assets from origin so that web workers don't break
/**
* Tell webpack to load assets from origin so that web workers don't break
* See https://gitlab.com/gitlab-org/gitlab/-/issues/321656 for a fix
*/
export function resetServiceWorkersPublicPath() {
// __webpack_public_path__ is a global variable that can be used to adjust
// the webpack publicPath setting at runtime.

View file

@ -2,12 +2,17 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { resetServiceWorkersPublicPath } from '../lib/utils/webpack';
import { resolvers } from './graphql/resolvers';
import typeDefs from './graphql/typedefs.graphql';
import PipelineEditorApp from './pipeline_editor_app.vue';
export const initPipelineEditor = (selector = '#js-pipeline-editor') => {
// Prevent issues loading syntax validation workers
// Fixes https://gitlab.com/gitlab-org/gitlab/-/issues/297252
// TODO Remove when https://gitlab.com/gitlab-org/gitlab/-/issues/321656 is resolved
resetServiceWorkersPublicPath();
const el = document.querySelector(selector);
if (!el) {

View file

@ -12,35 +12,6 @@
width: 1%;
}
.import-project-name-input {
border-radius: 0 $border-radius-default $border-radius-default 0;
position: relative;
left: -1px;
max-width: 300px;
}
.import-slash-divider {
background-color: $gray-lightest;
border: 1px solid $border-color;
}
.import-row {
height: 55px;
}
.import-table {
.import-jobs-from-col,
.import-jobs-to-col,
.import-jobs-status-col,
.import-jobs-cta-col {
border-bottom-width: 1px;
padding-left: $gl-padding;
}
}
.import-projects-loading-icon {
margin-top: $gl-padding-32;
}
.import-entities-target-select {
&.disabled {

View file

@ -36,7 +36,6 @@ class ProjectsController < Projects::ApplicationController
end
before_action only: [:edit] do
push_frontend_feature_flag(:approval_suggestions, @project, default_enabled: true)
push_frontend_feature_flag(:allow_editing_commit_messages, @project)
end

View file

@ -214,6 +214,15 @@ module DiffHelper
)
end
# As the fork suggestion button is identical every time, we cache it for a full page load
def render_fork_suggestion
return unless current_user
strong_memoize(:fork_suggestion) do
render partial: "projects/fork_suggestion"
end
end
private
def diff_btn(title, name, selected)

View file

@ -768,7 +768,7 @@ class MergeRequestDiff < ApplicationRecord
end
def sort_diffs?
Feature.enabled?(:sort_diffs, project, default_enabled: false)
Feature.enabled?(:sort_diffs, project, default_enabled: :yaml)
end
end

View file

@ -1,4 +1,3 @@
- if current_user
.js-file-fork-suggestion-section.file-fork-suggestion.hidden
%span.file-fork-suggestion-note
You're not allowed to

View file

@ -20,5 +20,5 @@
= download_blob_button(blob)
= view_on_environment_button(@commit.sha, @path, @environment) if @environment
= render 'projects/fork_suggestion'
= render_fork_suggestion
= render_if_exists 'projects/blob/header_file_locks', project: @project, path: @path

View file

@ -30,6 +30,6 @@
= view_file_button(diff_file.content_sha, diff_file.file_path, project)
= view_on_environment_button(diff_file.content_sha, diff_file.file_path, environment) if environment
= render 'projects/fork_suggestion'
= render_fork_suggestion
= render 'projects/diffs/content', diff_file: diff_file

View file

@ -0,0 +1,5 @@
---
title: Enable sorting diffs by default
merge_request: 54210
author:
type: other

View file

@ -0,0 +1,5 @@
---
title: Fix /-/readiness probe for Puma Single
merge_request: 53708
author:
type: other

View file

@ -0,0 +1,5 @@
---
title: Fix search functionality in Jira issues list
merge_request: 54312
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Add post migration to backfill projects updated at after repository move
merge_request: 53845
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Memoize the fork suggestion button partial
merge_request: 53256
author:
type: performance

View file

@ -0,0 +1,5 @@
---
title: Fix force-routing to Gitaly primary with empty hook env
merge_request: 54317
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Track CI minutes for namespace on a monthly basis
merge_request: 52915
author:
type: added

View file

@ -0,0 +1,5 @@
---
title: Track CI minutes on a monthly basis at project level
merge_request: 53460
author:
type: added

View file

@ -0,0 +1,5 @@
---
title: Update GitLab Workhorse to v8.63.0
merge_request: 54315
author:
type: other

View file

@ -0,0 +1,5 @@
---
title: Apply new GitLab UI for buttons and card in admin/license
merge_request: 52408
author: Yogi (@yo)
type: other

View file

@ -1,8 +0,0 @@
---
name: approval_suggestions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38992
rollout_issue_url:
milestone: '13.3'
type: development
group: group::composition analysis
default_enabled: true

View file

@ -1,8 +0,0 @@
---
name: changelog_api
introduced_by_url: '13.9'
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/300043
milestone: '13.9'
type: development
group: group::source code
default_enabled: false

View file

@ -1,8 +0,0 @@
---
name: gitaly_catfile-cache
introduced_by_url:
rollout_issue_url:
milestone:
type: development
group:
default_enabled: false

View file

@ -1,8 +0,0 @@
---
name: gitaly_deny_disk_access
introduced_by_url:
rollout_issue_url:
milestone:
type: development
group:
default_enabled: false

View file

@ -1,7 +1,7 @@
---
name: pages_serve_with_zip_file_protocol
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46320
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321677
milestone: '13.6'
type: development
group: group::release

View file

@ -1,8 +1,8 @@
---
name: sort_diffs
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49118
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/293819
milestone: '13.7'
type: development
group: group::code review
default_enabled: false
default_enabled: true

View file

@ -4,7 +4,7 @@ def max_puma_workers
Puma.cli_config.options[:workers].to_i
end
if Gitlab::Runtime.puma? && max_puma_workers == 0
if Gitlab::Runtime.puma? && !Gitlab::Runtime.puma_in_clustered_mode?
raise 'Puma is only supported in Clustered mode (workers > 0)' if Gitlab.com?
warn 'WARNING: Puma is running in Single mode (workers = 0). Some features may not work. Please refer to https://gitlab.com/groups/gitlab-org/-/epics/5303 for info.'

View file

@ -0,0 +1,30 @@
# frozen_string_literal: true
class CreateCiNamespaceMonthlyUsage < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :ci_namespace_monthly_usages, if_not_exists: true do |t|
t.references :namespace, index: false, null: false
t.date :date, null: false
t.integer :additional_amount_available, null: false, default: 0
t.decimal :amount_used, null: false, default: 0.0, precision: 18, scale: 2
t.index [:namespace_id, :date], unique: true
end
end
add_check_constraint :ci_namespace_monthly_usages, "(date = date_trunc('month', date))", 'ci_namespace_monthly_usages_year_month_constraint'
end
def down
with_lock_retries do
drop_table :ci_namespace_monthly_usages
end
end
end

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
class CreateCiProjectMonthlyUsage < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
create_table :ci_project_monthly_usages, if_not_exists: true do |t|
t.references :project, foreign_key: { on_delete: :cascade }, index: false, null: false
t.date :date, null: false
t.decimal :amount_used, null: false, default: 0.0, precision: 18, scale: 2
t.index [:project_id, :date], unique: true
end
end
add_check_constraint :ci_project_monthly_usages, "(date = date_trunc('month', date))", 'ci_project_monthly_usages_year_month_constraint'
end
def down
with_lock_retries do
drop_table :ci_project_monthly_usages
end
end
end

View file

@ -0,0 +1,34 @@
# frozen_string_literal: true
class BackfillUpdatedAtAfterRepositoryStorageMove < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 10_000
INTERVAL = 2.minutes
MIGRATION_CLASS = 'BackfillProjectUpdatedAtAfterRepositoryStorageMove'
disable_ddl_transaction!
class ProjectRepositoryStorageMove < ActiveRecord::Base
include EachBatch
self.table_name = 'project_repository_storage_moves'
end
def up
ProjectRepositoryStorageMove.reset_column_information
ProjectRepositoryStorageMove.select(:project_id).distinct.each_batch(of: BATCH_SIZE, column: :project_id) do |batch, index|
migrate_in(
INTERVAL * index,
MIGRATION_CLASS,
batch.pluck(:project_id)
)
end
end
def down
# No-op
end
end

View file

@ -0,0 +1 @@
932509d18f1cfdfa09f1565e4ac2f197b7ca792263ff5da3e5b712fae7279925

View file

@ -0,0 +1 @@
5bd622f36126b06c7c585ee14a8140750843d36092e79b6cc35b62c06afb1178

View file

@ -0,0 +1 @@
961c147e9c8e35eac5b8dd33f879582e173b7f6e31659b2d00989bc38afc6f5a

View file

@ -10506,6 +10506,24 @@ CREATE SEQUENCE ci_job_variables_id_seq
ALTER SEQUENCE ci_job_variables_id_seq OWNED BY ci_job_variables.id;
CREATE TABLE ci_namespace_monthly_usages (
id bigint NOT NULL,
namespace_id bigint NOT NULL,
date date NOT NULL,
additional_amount_available integer DEFAULT 0 NOT NULL,
amount_used numeric(18,2) DEFAULT 0.0 NOT NULL,
CONSTRAINT ci_namespace_monthly_usages_year_month_constraint CHECK ((date = date_trunc('month'::text, (date)::timestamp with time zone)))
);
CREATE SEQUENCE ci_namespace_monthly_usages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ci_namespace_monthly_usages_id_seq OWNED BY ci_namespace_monthly_usages.id;
CREATE TABLE ci_pipeline_artifacts (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -10703,6 +10721,23 @@ CREATE SEQUENCE ci_platform_metrics_id_seq
ALTER SEQUENCE ci_platform_metrics_id_seq OWNED BY ci_platform_metrics.id;
CREATE TABLE ci_project_monthly_usages (
id bigint NOT NULL,
project_id bigint NOT NULL,
date date NOT NULL,
amount_used numeric(18,2) DEFAULT 0.0 NOT NULL,
CONSTRAINT ci_project_monthly_usages_year_month_constraint CHECK ((date = date_trunc('month'::text, (date)::timestamp with time zone)))
);
CREATE SEQUENCE ci_project_monthly_usages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ci_project_monthly_usages_id_seq OWNED BY ci_project_monthly_usages.id;
CREATE TABLE ci_refs (
id bigint NOT NULL,
project_id bigint NOT NULL,
@ -18747,6 +18782,8 @@ ALTER TABLE ONLY ci_job_artifacts ALTER COLUMN id SET DEFAULT nextval('ci_job_ar
ALTER TABLE ONLY ci_job_variables ALTER COLUMN id SET DEFAULT nextval('ci_job_variables_id_seq'::regclass);
ALTER TABLE ONLY ci_namespace_monthly_usages ALTER COLUMN id SET DEFAULT nextval('ci_namespace_monthly_usages_id_seq'::regclass);
ALTER TABLE ONLY ci_pipeline_artifacts ALTER COLUMN id SET DEFAULT nextval('ci_pipeline_artifacts_id_seq'::regclass);
ALTER TABLE ONLY ci_pipeline_chat_data ALTER COLUMN id SET DEFAULT nextval('ci_pipeline_chat_data_id_seq'::regclass);
@ -18765,6 +18802,8 @@ ALTER TABLE ONLY ci_pipelines_config ALTER COLUMN pipeline_id SET DEFAULT nextva
ALTER TABLE ONLY ci_platform_metrics ALTER COLUMN id SET DEFAULT nextval('ci_platform_metrics_id_seq'::regclass);
ALTER TABLE ONLY ci_project_monthly_usages ALTER COLUMN id SET DEFAULT nextval('ci_project_monthly_usages_id_seq'::regclass);
ALTER TABLE ONLY ci_refs ALTER COLUMN id SET DEFAULT nextval('ci_refs_id_seq'::regclass);
ALTER TABLE ONLY ci_resource_groups ALTER COLUMN id SET DEFAULT nextval('ci_resource_groups_id_seq'::regclass);
@ -19860,6 +19899,9 @@ ALTER TABLE ONLY ci_job_artifacts
ALTER TABLE ONLY ci_job_variables
ADD CONSTRAINT ci_job_variables_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ci_namespace_monthly_usages
ADD CONSTRAINT ci_namespace_monthly_usages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ci_pipeline_artifacts
ADD CONSTRAINT ci_pipeline_artifacts_pkey PRIMARY KEY (id);
@ -19887,6 +19929,9 @@ ALTER TABLE ONLY ci_pipelines
ALTER TABLE ONLY ci_platform_metrics
ADD CONSTRAINT ci_platform_metrics_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ci_project_monthly_usages
ADD CONSTRAINT ci_project_monthly_usages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ci_refs
ADD CONSTRAINT ci_refs_pkey PRIMARY KEY (id);
@ -21658,6 +21703,8 @@ CREATE INDEX index_ci_job_variables_on_job_id ON ci_job_variables USING btree (j
CREATE UNIQUE INDEX index_ci_job_variables_on_key_and_job_id ON ci_job_variables USING btree (key, job_id);
CREATE UNIQUE INDEX index_ci_namespace_monthly_usages_on_namespace_id_and_date ON ci_namespace_monthly_usages USING btree (namespace_id, date);
CREATE INDEX index_ci_pipeline_artifacts_on_expire_at ON ci_pipeline_artifacts USING btree (expire_at);
CREATE INDEX index_ci_pipeline_artifacts_on_pipeline_id ON ci_pipeline_artifacts USING btree (pipeline_id);
@ -21722,6 +21769,8 @@ CREATE INDEX index_ci_pipelines_on_user_id_and_created_at_and_config_source ON c
CREATE INDEX index_ci_pipelines_on_user_id_and_created_at_and_source ON ci_pipelines USING btree (user_id, created_at, source);
CREATE UNIQUE INDEX index_ci_project_monthly_usages_on_project_id_and_date ON ci_project_monthly_usages USING btree (project_id, date);
CREATE UNIQUE INDEX index_ci_refs_on_project_id_and_ref_path ON ci_refs USING btree (project_id, ref_path);
CREATE UNIQUE INDEX index_ci_resource_groups_on_project_id_and_key ON ci_resource_groups USING btree (project_id, key);
@ -25241,6 +25290,9 @@ ALTER TABLE ONLY resource_iteration_events
ALTER TABLE ONLY status_page_settings
ADD CONSTRAINT fk_rails_506e5ba391 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_project_monthly_usages
ADD CONSTRAINT fk_rails_508bcd4aa6 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_repository_storage_moves
ADD CONSTRAINT fk_rails_5106dbd44a FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View file

@ -43,6 +43,7 @@ exceptions:
- DSA
- DVCS
- ECDSA
- EFS
- EKS
- EOL
- EXIF

View file

@ -5,22 +5,29 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: reference, howto
---
# Repository storage paths
# Repository storage **(FREE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/4578) in GitLab 8.10.
GitLab allows you to define multiple repository storage paths (sometimes called
storage shards) to distribute the storage load between several mount points.
GitLab stores [repositories](../user/project/repository/index.md) on repository storage. Repository
storage is either:
> **Notes:**
>
> - You must have at least one storage path called `default`.
> - The paths are defined in key-value pairs. The key is an arbitrary name you
> can pick to name the file path.
> - The target directories and any of its sub-paths must not be a symlink.
> - No target directory may be a sub-directory of another; no nesting.
- A `gitaly_address`, which points to a [Gitaly node](gitaly/index.md).
- A `path`, which points directly a directory where the repository is stored.
Example: this is OK:
GitLab allows you to define multiple repository storages to distribute the storage load between
several mount points. For example:
- When using Gitaly (Omnibus GitLab-style configuration):
```ruby
git_data_dirs({
'default' => { 'gitaly_address' => 'tcp://gitaly1.internal:8075' },
'storage1' => { 'gitaly_address' => 'tcp://gitaly2.internal:8075' },
})
```
- When using direct repository storage (source install-style configuration):
```plaintext
default:
@ -29,7 +36,28 @@ storage2:
path: /mnt/git-storage-2
```
This is not OK because it nests storage paths:
For more information on
- Configuring Gitaly, see [Configure Gitaly](gitaly/index.md#configure-gitaly).
- Configuring direct repository access, see the following section below.
## Configure repository storage paths
To configure repository storage paths:
1. Edit the necessary configuration files:
- `/etc/gitlab/gitlab.rb`, for Omnibus GitLab installations.
- `gitlab.yml`, for installations from source.
1. Add the required repository storage paths.
For repository storage paths:
- You must have at least one storage path called `default`.
- The paths are defined in key-value pairs. Apart from `default`, the key can be any name you choose
to name the file path.
- The target directories and any of its sub paths must not be a symlink.
- No target directory may be a sub-directory of another. That is, no nesting. For example, the
following configuration is invalid:
```plaintext
default:
@ -38,40 +66,39 @@ storage2:
path: /mnt/git-storage-1/git-storage-2 # <- NOT OK because of nesting
```
## Configure GitLab
### Configure for backups
For [backups](../raketasks/backup_restore.md) to work correctly, the storage path cannot be a
mount point, and the GitLab user must have correct permissions for the parent
directory of the path. Omnibus GitLab takes care of these issues for you,
but for source installations you should be extra careful.
For [backups](../raketasks/backup_restore.md) to work correctly:
The thing is that for compatibility reasons `gitlab.yml` has a different
structure than Omnibus. In `gitlab.yml` you indicate the path for the
repositories, for example `/home/git/repositories`, while in Omnibus you
indicate `git_data_dirs`, which for the example above would be `/home/git`.
Then, Omnibus creates a `repositories` directory under that path to use with
- The repository storage path cannot be a mount point.
- The GitLab user must have correct permissions for the parent directory of the path.
Omnibus GitLab takes care of these issues for you, but for source installations you should be extra
careful.
While restoring a backup, the current contents of `/home/git/repositories` are moved to
`/home/git/repositories.old`. If `/home/git/repositories` is a mount point, then `mv` would be
moving things between mount points, and problems can occur.
Ideally, `/home/git` is the mount point, so things remain inside the same mount point. Omnibus
GitLab installations guarantee this because they don't specify the full repository path but instead
the parent path, but source installations do not.
### Example configuration
In the examples below, we add two additional repository storage paths configured to two additional
mount points.
For compatibility reasons `gitlab.yml` has a different structure than Omnibus GitLab configuration:
- In `gitlab.yml`, you indicate the path for the repositories, for example `/home/git/repositories`
- In Omnibus GitLab configuration you indicate `git_data_dirs`, which could be `/home/git` for
example. Then Omnibus GitLab creates a `repositories` directory under that path to use with
`gitlab.yml`.
WARNING:
This detail matters because while restoring a backup, the current
contents of `/home/git/repositories`
[are moved to](https://gitlab.com/gitlab-org/gitlab/blob/033e5423a2594e08a7ebcd2379bd2331f4c39032/lib/backup/repository.rb#L54-56)
`/home/git/repositories.old`.
If `/home/git/repositories` is the mount point, then `mv` would be moving
things between mount points, and bad things could happen. Ideally,
`/home/git` would be the mount point, so things would remain inside the
same mount point. Omnibus installations guarantee this, because they
don't specify the full repository path but instead the parent path, but source
installations do not.
Next, edit the configuration
files, and add the full paths of the alternative repository storage paths. In
the example below, we add two more mount points that are named `nfs_1` and `nfs_2`
respectively.
NOTE:
This example uses NFS. We do not recommend using EFS for storage as it may impact GitLab performance. Read
the [relevant documentation](nfs.md#avoid-using-awss-elastic-file-system-efs) for more details.
This example uses NFS. We do not recommend using EFS for storage as it may impact GitLab performance.
Read the [relevant documentation](nfs.md#avoid-using-awss-elastic-file-system-efs) for more details.
**For installations from source**
@ -81,7 +108,7 @@ the [relevant documentation](nfs.md#avoid-using-awss-elastic-file-system-efs) fo
repositories:
# Paths where repositories can be stored. Give the canonicalized absolute pathname.
# NOTE: REPOS PATHS MUST NOT CONTAIN ANY SYMLINK!!!
storages: # You must have at least a 'default' storage path.
storages: # You must have at least a 'default' repository storage path.
default:
path: /home/git/repositories
nfs_1:
@ -92,16 +119,9 @@ the [relevant documentation](nfs.md#avoid-using-awss-elastic-file-system-efs) fo
1. [Restart GitLab](restart_gitlab.md#installations-from-source) for the changes to take effect.
NOTE:
We plan to replace [`gitlab_shell: repos_path` entry](https://gitlab.com/gitlab-org/gitlab-foss/-/blob/8-9-stable/config/gitlab.yml.example#L457) in `gitlab.yml` with `repositories: storages`. If you
are upgrading from a version prior to 8.10, make sure to add the configuration
as described in the step above. After you make the changes and confirm they are
working, you can remove the `repos_path` line.
**For Omnibus installations**
1. Edit `/etc/gitlab/gitlab.rb` by appending the rest of the paths to the
default one:
Edit `/etc/gitlab/gitlab.rb` by appending the rest of the paths to the default one:
```ruby
git_data_dirs({
@ -111,23 +131,27 @@ working, you can remove the `repos_path` line.
})
```
Note that Omnibus stores the repositories in a `repositories` subdirectory
of the `git-data` directory.
NOTE:
Omnibus stores the repositories in a `repositories` subdirectory of the `git-data` directory.
## Choose where new repositories are stored
## Configure where new repositories are stored
After you set the multiple storage paths, you can choose where new repositories
are stored in the Admin Area under **Settings > Repository > Repository storage > Storage nodes for new repositories**.
After you [configure](#configure-repository-storage-paths) multiple repository storage paths, you
can choose where new repositories are stored:
Each storage can be assigned a weight from 0-100. When a new project is created, these
weights are used to determine the storage location the repository is created on.
1. Go to the Admin Area (**{admin}**).
1. Go to **Settings > Repository** and expand the **Repository storage** section.
1. Enter values in the **Storage nodes for new repositories** fields.
1. Select **Save changes**.
Each repository storage path can be assigned a weight from 0-100. When a new project is created,
these weights are used to determine the storage location the repository is created on. The higher
the weight of a given repository storage path relative to other repository storages paths, the more
often it is chosen. That is, `(storage weight) / (sum of all weights) * 100 = chance %`.
![Choose repository storage path in Admin Area](img/repository_storages_admin_ui_v13_1.png)
Beginning with GitLab 8.13.4, multiple paths can be chosen. New repositories
are randomly placed on one of the selected paths.
## Move a repository to a different repository path
## Move repositories
To move a repository to a different repository path, use the
[Project repository storage moves](../api/project_repository_storage_moves.md) API. Use

View file

@ -281,14 +281,7 @@ Example response:
## Generate changelog data
> - [Introduced](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/351) in GitLab 13.9.
> - It's [deployed behind a feature flag](../user/feature_flags.md), disabled by default.
> - It's disabled on GitLab.com.
> - It's not yet recommended for production use.
> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-generating-changelog-data).
WARNING:
This feature might not be available to you. Check the **version history** note above for details.
> [Introduced](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/351) in GitLab 13.9.
Generate changelog data based on commits in a repository.
@ -562,25 +555,6 @@ In an entry, the following variables are available (here `foo.bar` means that
- `merge_request.reference`: a reference to the merge request that first
introduced the change (for example, `gitlab-org/gitlab!50063`).
The `author` and `merge_request` objects might not be present if the data couldn't
be determined (for example, when a commit was created without a corresponding merge
request).
### Enable or disable generating changelog data **(CORE ONLY)**
This feature is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../administration/feature_flags.md)
can enable it.
To enable it for a project:
```ruby
Feature.enable(:changelog_api, Project.find(id_of_the_project))
```
To disable it for a project:
```ruby
Feature.disable(:changelog_api, Project.find(id_of_the_project))
```
The `author` and `merge_request` objects might not be present if the data
couldn't be determined. For example, when a commit is created without a
corresponding merge request, no merge request is displayed.

View file

@ -346,7 +346,7 @@ listed in the descriptions of the relevant settings.
| `receive_max_input_size` | integer | no | Maximum push size (MB). |
| `repository_checks_enabled` | boolean | no | GitLab periodically runs `git fsck` in all project and wiki repositories to look for silent disk corruption issues. |
| `repository_size_limit` | integer | no | **(PREMIUM)** Size limit per repository (MB) |
| `repository_storages_weighted` | hash of strings to integers | no | (GitLab 13.1 and later) Hash of names of taken from `gitlab.yml` to [weights](../administration/repository_storage_paths.md#choose-where-new-repositories-are-stored). New projects are created in one of these stores, chosen by a weighted random selection. |
| `repository_storages_weighted` | hash of strings to integers | no | (GitLab 13.1 and later) Hash of names taken from `gitlab.yml` to [weights](../administration/repository_storage_paths.md#configure-where-new-repositories-are-stored). New projects are created in one of these stores, chosen by a weighted random selection. |
| `repository_storages` | array of strings | no | (GitLab 13.0 and earlier) List of names of enabled storage paths, taken from `gitlab.yml`. New projects are created in one of these stores, chosen at random. |
| `require_admin_approval_after_user_signup` | boolean | no | When enabled, any user that signs up for an account using the registration form is placed under a **Pending approval** state and has to be explicitly [approved](../user/admin_area/approving_users.md) by an administrator. |
| `require_two_factor_authentication` | boolean | no | (**If enabled, requires:** `two_factor_grace_period`) Require all users to set up Two-factor authentication. |

View file

@ -733,9 +733,10 @@ the scan. You must start it manually.
An on-demand DAST scan:
- Uses settings in the site profile and scanner profile you select when you run the scan,
instead of those in the `.gitlab-ci.yml` file.
- Can run a specific combination of a [site profile](#site-profile) and a
[scanner profile](#scanner-profile).
- Is associated with your project's default branch.
- Is saved on creation so it can be run later.
### On-demand scan modes
@ -753,19 +754,75 @@ You must have permission to run an on-demand DAST scan against a protected branc
The default branch is automatically protected. For more information, see
[Pipeline security on protected branches](../../../ci/pipelines/index.md#pipeline-security-on-protected-branches).
To run an on-demand DAST scan, you need:
Prerequisites:
- A [scanner profile](#create-a-scanner-profile).
- A [site profile](#create-a-site-profile).
- If you are running an active scan, the site profile must be [validated](#validate-a-site-profile).
To run an on-demand scan, either:
- [Create and run an on-demand scan](#create-and-run-an-on-demand-scan).
- [Run a previously saved on-demand scan](#run-a-saved-on-demand-scan).
### Create and run an on-demand scan
1. From your project's home page, go to **Security & Compliance > On-demand Scans** in the left sidebar.
1. In **Scanner profile**, select a scanner profile from the dropdown.
1. In **Site profile**, select a site profile from the dropdown.
1. Click **Run scan**.
1. To run the on-demand scan now, select **Save and run scan**. Otherwise select **Save scan** to
[run](#run-a-saved-on-demand-scan) it later.
The on-demand DAST scan runs and the project's dashboard shows the results.
#### List saved on-demand scans
To list saved on-demand scans:
1. From your project's home page, go to **Security & Compliance > Configuration**.
1. Select the **Saved Scans** tab.
#### View details of an on-demand scan
To view details of an on-demand scan:
1. From your project's home page, go to **Security & Compliance > Configuration**.
1. Select **Manage** in the **DAST Profiles** row.
1. Select the **Saved Scans** tab.
1. In the saved scan's row select **More actions** (**{ellipsis_v}**), then select **Edit**.
#### Run a saved on-demand scan
To run a saved on-demand scan:
1. From your project's home page, go to **Security & Compliance > Configuration**.
1. Select **Manage** in the **DAST Profiles** row.
1. Select the **Saved Scans** tab.
1. In the scan's row select **Run scan**.
The on-demand DAST scan runs and the project's dashboard shows the results.
#### Edit an on-demand scan
To edit an on-demand scan:
1. From your project's home page, go to **Security & Compliance > Configuration**.
1. Select **Manage** in the **DAST Profiles** row.
1. Select the **Saved Scans** tab.
1. In the saved scan's row select **More actions** (**{ellipsis_v}**), then select **Edit**.
1. Edit the form.
1. Select **Save scan**.
#### Delete an on-demand scan
To delete an on-demand scan:
1. From your project's home page, go to **Security & Compliance > Configuration**.
1. Select **Manage** in the **DAST Profiles** row.
1. Select the **Saved Scans** tab.
1. In the saved scan's row select **More actions** (**{ellipsis_v}**), then select **Delete**.
1. Select **Delete** to confirm the deletion.
## Site profile
A site profile describes the attributes of a web site to scan on demand with DAST. A site profile is

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

View file

@ -126,14 +126,13 @@ any pods. The policy itself is still deployed to the corresponding deployment na
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/3403) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 13.4.
The policy editor allows you to create, edit, and delete policies. To
create a new policy click the **New policy** button located in the
**Policy** tab's header. To edit an existing policy, click**Edit
policy** in the selected policy drawer.
You can use the policy editor to create, edit, and delete policies.
Note that the policy editor only supports the
[CiliumNetworkPolicy](https://docs.cilium.io/en/v1.8/policy/)specification. Regular Kubernetes
[NetworkPolicy](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.19/#networkpolicy-v1-networking-k8s-io)
- To create a new policy, click the **New policy** button located in the **Policy** tab's header.
- To edit an existing policy, click **Edit policy** in the selected policy drawer.
The policy editor only supports the [CiliumNetworkPolicy](https://docs.cilium.io/en/v1.8/policy/)
specification. Regular Kubernetes [NetworkPolicy](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.19/#networkpolicy-v1-networking-k8s-io)
resources aren't supported.
The policy editor has two modes:
@ -163,3 +162,65 @@ Once your policy is complete, save it by pressing the **Save policy**
button at the bottom of the editor. Existing policies can also be
removed from the editor interface by clicking the **Delete policy**
button at the bottom of the editor.
### Configuring Network Policy Alerts
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/3438) and [enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/287676) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 13.9.
You can use policy alerts to track your policy's impact. Alerts are only available if you've
[installed](../../clusters/agent/repository.md)
and [configured](../../clusters/agent/index.md#create-an-agent-record-in-gitlab)
a Kubernetes Agent for this project.
There are two ways to create policy alerts:
- In the [policy editor UI](#container-network-policy-editor),
by clicking **Add alert**.
- In the policy editor's YAML mode, through the `metadata.annotations` property:
```yaml
metadata:
annotations:
app.gitlab.com/alert: 'true'
```
Once added, the UI updates and displays a warning about the dangers of too many alerts.
#### Enable or disable Policy Alerts **(FREE SELF)**
Policy Alerts is under development but ready for production use.
It is deployed behind a feature flag that is **enabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can opt to disable it.
To enable it:
```ruby
Feature.enable(:threat_monitoring_alerts)
```
To disable it:
```ruby
Feature.disable(:threat_monitoring_alerts)
```
### Container Network Policy Alert list
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/3438) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 13.9.
The policy alert list displays your policy's alert activity. You can sort the list by the
**Date and time** column, and the **Status** column. Use the selector menu in the **Status** column
to set the status for each alert:
- Unreviewed
- In review
- Resolved
- Dismissed
By default, the list doesn't display resolved or dismissed alerts. To show these alerts, clear the
checkbox **Hide dismissed alerts**.
![Policy Alert List](img/threat_monitoring_policy_alert_list_v13_9.png)
For information on work in progress for the alerts dashboard, see [this epic](https://gitlab.com/groups/gitlab-org/-/epics/5041).

View file

@ -296,6 +296,10 @@ WARNING:
Unlinking an account removes all roles assigned to that user in the group.
If a user re-links their account, roles need to be reassigned.
Groups require at least one owner. If your account is the only owner in the
group, you are not allowed to unlink the account. In that case, set up another user as a
group owner, and then you can unlink the account.
For example, to unlink the `MyOrg` account:
1. In the top-right corner, select your avatar.

View file

@ -124,7 +124,7 @@ module API
repository: repository.gitaly_repository.to_h,
address: Gitlab::GitalyClient.address(repository.shard),
token: Gitlab::GitalyClient.token(repository.shard),
features: Feature::Gitaly.server_feature_flags
features: Feature::Gitaly.server_feature_flags(repository.project)
}
end
end

View file

@ -211,8 +211,6 @@ module API
desc: 'The commit message to use when committing the changelog'
end
post ':id/repository/changelog' do
not_found! unless Feature.enabled?(:changelog_api, user_project)
branch = params[:branch] || user_project.default_branch_or_master
access = Gitlab::UserAccess.new(current_user, container: user_project)

View file

@ -5,25 +5,25 @@ class Feature
PREFIX = "gitaly_"
class << self
def enabled?(feature_flag)
def enabled?(feature_flag, project = nil)
return false unless Feature::FlipperFeature.table_exists?
Feature.enabled?("#{PREFIX}#{feature_flag}")
Feature.enabled?("#{PREFIX}#{feature_flag}", project)
rescue ActiveRecord::NoDatabaseError, PG::ConnectionBad
false
end
def server_feature_flags
def server_feature_flags(project = nil)
# We need to check that both the DB connection and table exists
return {} unless ::Gitlab::Database.cached_table_exists?(FlipperFeature.table_name)
Feature.persisted_names
.select { |f| f.start_with?(PREFIX) }
.map do |f|
.to_h do |f|
flag = f.delete_prefix(PREFIX)
["gitaly-feature-#{flag.tr('_', '-')}", enabled?(flag).to_s]
end.to_h
["gitaly-feature-#{flag.tr('_', '-')}", enabled?(flag, project).to_s]
end
end
end
end

View file

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Backfills `projects.updated_at` for projects whose repository was moved
    # to a different storage, so the timestamp reflects the most recent move.
    class BackfillProjectUpdatedAtAfterRepositoryStorageMove
      # @param project_ids [Array<Integer>] IDs of the projects to backfill.
      #   IDs without a corresponding storage-move record are left untouched
      #   by the UPDATE below (no matching row in the CTE).
      def perform(*project_ids)
        # Latest storage-move timestamp per project; rendered to SQL and used
        # as the CTE in the raw UPDATE statement below.
        updated_repository_storages = ProjectRepositoryStorageMove.select("project_id, MAX(updated_at) as updated_at").where(project_id: project_ids).group(:project_id)

        # Bump updated_at to just after the move (+1 second), but only when the
        # project's current updated_at is not already newer than the move —
        # this keeps manually-updated projects untouched.
        Project.connection.execute <<-SQL
          WITH repository_storage_cte as (
            #{updated_repository_storages.to_sql}
          )
          UPDATE projects
          SET updated_at = (repository_storage_cte.updated_at + interval '1 second')
          FROM repository_storage_cte
          WHERE projects.id = repository_storage_cte.project_id AND projects.updated_at <= repository_storage_cte.updated_at
        SQL
      end
    end
  end
end

View file

@ -117,7 +117,7 @@ module Gitlab
end
def sort_diffs(diffs)
return diffs unless Feature.enabled?(:sort_diffs, project, default_enabled: false)
return diffs unless Feature.enabled?(:sort_diffs, project, default_enabled: :yaml)
Gitlab::Diff::FileCollectionSorter.new(diffs).sort
end

View file

@ -248,6 +248,8 @@ module Gitlab
return {} unless Gitlab::SafeRequestStore[:gitlab_git_env]
return {} if Gitlab::SafeRequestStore[:gitlab_git_env].empty?
{ 'gitaly-route-repository-accessor-policy' => 'primary-only' }
end
private_class_method :route_to_primary

View file

@ -23,7 +23,6 @@ module Gitlab
MUTEX = Mutex.new
DISK_ACCESS_DENIED_FLAG = :deny_disk_access
ALLOW_KEY = :allow_disk_access
# If your code needs this method then your code needs to be fixed.
@ -34,7 +33,7 @@ module Gitlab
def self.disk_access_denied?
return false if rugged_enabled?
!temporarily_allowed?(ALLOW_KEY) && Feature::Gitaly.enabled?(DISK_ACCESS_DENIED_FLAG)
!temporarily_allowed?(ALLOW_KEY)
rescue
false # Err on the side of caution, don't break gitlab for people
end
@ -62,7 +61,7 @@ module Gitlab
def legacy_disk_path
if self.class.disk_access_denied?
raise DirectPathAccessError, "git disk access denied via the gitaly_#{DISK_ACCESS_DENIED_FLAG} feature"
raise DirectPathAccessError, "git disk access denied"
end
@legacy_disk_path

View file

@ -11,6 +11,10 @@ module Gitlab
name.sub(/_check$/, '').capitalize
end
def available?
true
end
def readiness
raise NotImplementedError
end

View file

@ -8,7 +8,16 @@ module Gitlab
extend SimpleAbstractCheck
class << self
extend ::Gitlab::Utils::Override
override :available?
def available?
Gitlab::Runtime.puma_in_clustered_mode?
end
def register_master
return unless available?
# when we fork, we pass the read pipe to child
# child can then react on whether the other end
# of pipe is still available
@ -16,11 +25,15 @@ module Gitlab
end
def finish_master
return unless available?
close_read
close_write
end
def register_worker
return unless available?
# fork needs to close the pipe
close_write
end

View file

@ -48,6 +48,7 @@ module Gitlab
def probe_readiness
checks
.select(&:available?)
.flat_map(&:readiness)
.compact
.group_by(&:name)

View file

@ -12,6 +12,10 @@ module Gitlab
Gitlab::HealthChecks::Result.new(
'web_exporter', exporter.running)
end
def available?
true
end
end
attr_reader :running

View file

@ -81,6 +81,10 @@ module Gitlab
puma? || sidekiq? || action_cable?
end
def puma_in_clustered_mode?
puma? && Puma.cli_config.options[:workers].to_i > 0
end
def max_threads
threads = 1 # main thread

View file

@ -32,7 +32,7 @@ module Gitlab
GitalyServer: {
address: Gitlab::GitalyClient.address(repository.storage),
token: Gitlab::GitalyClient.token(repository.storage),
features: Feature::Gitaly.server_feature_flags
features: Feature::Gitaly.server_feature_flags(repository.project)
}
}
@ -231,7 +231,7 @@ module Gitlab
{
address: Gitlab::GitalyClient.address(repository.shard),
token: Gitlab::GitalyClient.token(repository.shard),
features: Feature::Gitaly.server_feature_flags
features: Feature::Gitaly.server_feature_flags(repository.project)
}
end

View file

@ -26,6 +26,7 @@ RSpec.describe 'Database schema' do
chat_names: %w[chat_id team_id user_id],
chat_teams: %w[team_id],
ci_builds: %w[erased_by_id runner_id trigger_request_id user_id],
ci_namespace_monthly_usages: %w[namespace_id],
ci_pipelines: %w[user_id],
ci_runner_projects: %w[runner_id],
ci_trigger_requests: %w[commit_id],

View file

@ -52,6 +52,6 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
end
def second_row
page.all('table.import-table tbody tr')[1]
page.all('table tbody tr')[1]
end
end

View file

@ -1,4 +1,4 @@
import { GlLoadingIcon, GlButton, GlIntersectionObserver } from '@gitlab/ui';
import { GlLoadingIcon, GlButton, GlIntersectionObserver, GlFormInput } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
@ -12,7 +12,9 @@ describe('ImportProjectsTable', () => {
let wrapper;
const findFilterField = () =>
wrapper.find('input[data-qa-selector="githubish_import_filter_field"]');
wrapper
.findAllComponents(GlFormInput)
.wrappers.find((w) => w.attributes('placeholder') === 'Filter your repositories by name');
const providerTitle = 'THE PROVIDER';
const providerRepo = {
@ -205,7 +207,7 @@ describe('ImportProjectsTable', () => {
it('does not render filtering input field when filterable is false', () => {
createComponent({ filterable: false });
expect(findFilterField().exists()).toBe(false);
expect(findFilterField()).toBeUndefined();
});
describe('when paginatable is set to true', () => {

View file

@ -1,4 +1,4 @@
import { GlBadge } from '@gitlab/ui';
import { GlBadge, GlButton } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
@ -34,7 +34,7 @@ describe('ProviderRepoTableRow', () => {
}
const findImportButton = () => {
const buttons = wrapper.findAll('button').filter((node) => node.text() === 'Import');
const buttons = wrapper.findAllComponents(GlButton).filter((node) => node.text() === 'Import');
return buttons.length ? buttons.at(0) : buttons;
};
@ -91,7 +91,7 @@ describe('ProviderRepoTableRow', () => {
});
it('imports repo when clicking import button', async () => {
findImportButton().trigger('click');
findImportButton().vm.$emit('click');
await nextTick();

View file

@ -591,5 +591,75 @@ describe('Issuables list component', () => {
expect(findFilteredSearchBar().props('initialFilterValue')).toEqual(['free text']);
});
});
describe('on filter search', () => {
beforeEach(() => {
factory({ type: 'jira' });
window.history.pushState = jest.fn();
});
afterEach(() => {
window.history.pushState.mockRestore();
});
const emitOnFilter = (filter) => findFilteredSearchBar().vm.$emit('onFilter', filter);
describe('empty filter', () => {
const mockFilter = [];
it('updates URL with correct params', () => {
emitOnFilter(mockFilter);
expect(window.history.pushState).toHaveBeenCalledWith(
{},
'',
`${TEST_LOCATION}?state=opened`,
);
});
});
describe('filter with search term', () => {
const mockFilter = [
{
type: 'filtered-search-term',
value: { data: 'free' },
},
];
it('updates URL with correct params', () => {
emitOnFilter(mockFilter);
expect(window.history.pushState).toHaveBeenCalledWith(
{},
'',
`${TEST_LOCATION}?state=opened&search=free`,
);
});
});
describe('filter with multiple search terms', () => {
const mockFilter = [
{
type: 'filtered-search-term',
value: { data: 'free' },
},
{
type: 'filtered-search-term',
value: { data: 'text' },
},
];
it('updates URL with correct params', () => {
emitOnFilter(mockFilter);
expect(window.history.pushState).toHaveBeenCalledWith(
{},
'',
`${TEST_LOCATION}?state=opened&search=free+text`,
);
});
});
});
});
});

View file

@ -378,4 +378,28 @@ RSpec.describe DiffHelper do
expect(subject).to match(/foo\/bar\/-\/commit\/#{commit.sha}\/diff_for_path/)
end
end
describe "#render_fork_suggestion" do
subject { helper.render_fork_suggestion }
before do
allow(helper).to receive(:current_user).and_return(current_user)
end
context "user signed in" do
let(:current_user) { build(:user) }
it "renders the partial" do
expect(helper).to receive(:render).with(partial: "projects/fork_suggestion").exactly(:once)
5.times { subject }
end
end
context "guest" do
let(:current_user) { nil }
it { is_expected.to be_nil }
end
end
end

View file

@ -3,12 +3,20 @@
require 'spec_helper'
RSpec.describe Feature::Gitaly do
let(:feature_flag) { "mep_mep" }
let_it_be(:project) { create(:project) }
let_it_be(:project_2) { create(:project) }
before do
skip_feature_flags_yaml_validation
end
describe ".enabled?" do
context 'when the flag is set globally' do
let(:feature_flag) { 'global_flag' }
context 'when the gate is closed' do
before do
stub_feature_flags(gitaly_mep_mep: false)
stub_feature_flags(gitaly_global_flag: false)
end
it 'returns false' do
@ -23,15 +31,50 @@ RSpec.describe Feature::Gitaly do
end
end
context 'when the flag is enabled for a particular project' do
let(:feature_flag) { 'project_flag' }
before do
stub_feature_flags(gitaly_project_flag: project)
end
it 'returns true for that project' do
expect(described_class.enabled?(feature_flag, project)).to be(true)
end
it 'returns false for any other project' do
expect(described_class.enabled?(feature_flag, project_2)).to be(false)
end
it 'returns false when no project is passed' do
expect(described_class.enabled?(feature_flag)).to be(false)
end
end
end
describe ".server_feature_flags" do
before do
stub_feature_flags(gitaly_mep_mep: true, foo: true)
stub_feature_flags(gitaly_global_flag: true, gitaly_project_flag: project, non_gitaly_flag: false)
end
subject { described_class.server_feature_flags }
it { is_expected.to be_a(Hash) }
it { is_expected.to eq("gitaly-feature-mep-mep" => "true") }
it 'returns a hash of flags starting with the prefix, with dashes instead of underscores' do
expect(subject).to eq('gitaly-feature-global-flag' => 'true',
'gitaly-feature-project-flag' => 'false')
end
context 'when a project is passed' do
it 'returns the value for the flag on the given project' do
expect(described_class.server_feature_flags(project))
.to eq('gitaly-feature-global-flag' => 'true',
'gitaly-feature-project-flag' => 'true')
expect(described_class.server_feature_flags(project_2))
.to eq('gitaly-feature-global-flag' => 'true',
'gitaly-feature-project-flag' => 'false')
end
end
context 'when table does not exist' do
before do

View file

@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210210093901 do
  # Raw table helpers pin the spec to the schema at the migration's point in
  # time instead of the current ActiveRecord models.
  let(:projects) { table(:projects) }
  let(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
  let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }

  subject { described_class.new }

  describe '#perform' do
    it 'updates project updated_at column if they were moved to a different repository storage' do
      # freeze_time so the `+ 1 second` expectations compare against stable
      # timestamps.
      freeze_time do
        # project_1 and project_2: have moves newer than their updated_at,
        # so both should be bumped to move.updated_at + 1 second.
        project_1 = projects.create!(id: 1, namespace_id: namespace.id, updated_at: 1.day.ago)
        project_2 = projects.create!(id: 2, namespace_id: namespace.id, updated_at: Time.current)

        # project_3: its updated_at is already newer than its move, so it
        # must be left unchanged.
        original_project_3_updated_at = 2.minutes.from_now
        project_3 = projects.create!(id: 3, namespace_id: namespace.id, updated_at: original_project_3_updated_at)

        # project_4: no storage-move record at all, so it must be left
        # unchanged even though its id is passed to #perform.
        original_project_4_updated_at = 10.days.ago
        project_4 = projects.create!(id: 4, namespace_id: namespace.id, updated_at: original_project_4_updated_at)

        repository_storage_move_1 = project_repository_storage_moves.create!(project_id: project_1.id, updated_at: 2.hours.ago, source_storage_name: 'default', destination_storage_name: 'default')
        repository_storage_move_2 = project_repository_storage_moves.create!(project_id: project_2.id, updated_at: Time.current, source_storage_name: 'default', destination_storage_name: 'default')
        project_repository_storage_moves.create!(project_id: project_3.id, updated_at: Time.current, source_storage_name: 'default', destination_storage_name: 'default')

        # non_existing_record_id verifies unknown ids are a safe no-op.
        subject.perform([1, 2, 3, 4, non_existing_record_id])

        expect(project_1.reload.updated_at).to eq(repository_storage_move_1.updated_at + 1.second)
        expect(project_2.reload.updated_at).to eq(repository_storage_move_2.updated_at + 1.second)
        expect(project_3.reload.updated_at).to eq(original_project_3_updated_at)
        expect(project_4.reload.updated_at).to eq(original_project_4_updated_at)
      end
    end
  end
end

View file

@ -267,11 +267,6 @@ RSpec.describe Gitlab::GitalyClient do
end
describe '.request_kwargs' do
context 'when catfile-cache feature is enabled' do
before do
stub_feature_flags('gitaly_catfile-cache': true)
end
it 'sets the gitaly-session-id in the metadata' do
results = described_class.request_kwargs('default', timeout: 1)
expect(results[:metadata]).to include('gitaly-session-id')
@ -294,7 +289,6 @@ RSpec.describe Gitlab::GitalyClient do
end
end
end
end
context 'gitlab_git_env' do
let(:policy) { 'gitaly-route-repository-accessor-policy' }
@ -311,9 +305,21 @@ RSpec.describe Gitlab::GitalyClient do
end
end
context 'when RequestStore is enabled with git_env', :request_store do
context 'when RequestStore is enabled with empty git_env', :request_store do
before do
Gitlab::SafeRequestStore[:gitlab_git_env] = true
Gitlab::SafeRequestStore[:gitlab_git_env] = {}
end
it 'disables force-routing to primary' do
expect(described_class.request_kwargs('default', timeout: 1)[:metadata][policy]).to be_nil
end
end
context 'when RequestStore is enabled with populated git_env', :request_store do
before do
Gitlab::SafeRequestStore[:gitlab_git_env] = {
"GIT_OBJECT_DIRECTORY_RELATIVE" => "foo/bar"
}
end
it 'enables force-routing to primary' do

View file

@ -4,11 +4,15 @@ require 'spec_helper'
require_relative './simple_check_shared'
RSpec.describe Gitlab::HealthChecks::MasterCheck do
let(:result_class) { Gitlab::HealthChecks::Result }
before do
stub_const('SUCCESS_CODE', 100)
stub_const('FAILURE_CODE', 101)
end
context 'when Puma runs in Clustered mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(true)
described_class.register_master
end
@ -16,7 +20,11 @@ RSpec.describe Gitlab::HealthChecks::MasterCheck do
described_class.finish_master
end
describe '#readiness' do
describe '.available?' do
specify { expect(described_class.available?).to be true }
end
describe '.readiness' do
context 'when master is running' do
it 'worker does return success' do
_, child_status = run_worker
@ -48,3 +56,15 @@ RSpec.describe Gitlab::HealthChecks::MasterCheck do
end
end
end
# '.readiness' check is not invoked if '.available?' returns false
context 'when Puma runs in Single mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(false)
end
describe '.available?' do
specify { expect(described_class.available?).to be false }
end
end
end

View file

@ -61,6 +61,35 @@ RSpec.describe Gitlab::HealthChecks::Probes::Collection do
expect(subject.json[:message]).to eq('Redis::CannotConnectError : Redis down')
end
end
context 'when some checks are not available' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(false)
end
let(:checks) do
[
Gitlab::HealthChecks::MasterCheck
]
end
it 'asks for check availability' do
expect(Gitlab::HealthChecks::MasterCheck).to receive(:available?)
subject
end
it 'does not call `readiness` on checks that are not available' do
expect(Gitlab::HealthChecks::MasterCheck).not_to receive(:readiness)
subject
end
it 'does not fail collection check' do
expect(subject.http_status).to eq(200)
expect(subject.json[:status]).to eq('ok')
end
end
end
context 'without checks' do

View file

@ -44,10 +44,11 @@ RSpec.describe Gitlab::Runtime do
context "puma" do
let(:puma_type) { double('::Puma') }
let(:max_workers) { 2 }
before do
stub_const('::Puma', puma_type)
allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2, workers: max_workers)
stub_env('ACTION_CABLE_IN_APP', 'false')
end
@ -70,6 +71,20 @@ RSpec.describe Gitlab::Runtime do
it_behaves_like "valid runtime", :puma, 11
end
describe ".puma_in_clustered_mode?" do
context 'when Puma is set up with workers > 0' do
let(:max_workers) { 4 }
specify { expect(described_class.puma_in_clustered_mode?).to be true }
end
context 'when Puma is set up with workers = 0' do
let(:max_workers) { 0 }
specify { expect(described_class.puma_in_clustered_mode?).to be false }
end
end
end
context "unicorn" do

View file

@ -15,9 +15,7 @@ RSpec.describe Gitlab::Workhorse do
end
before do
allow(Feature::Gitaly).to receive(:server_feature_flags).and_return({
'gitaly-feature-foobar' => 'true'
})
stub_feature_flags(gitaly_enforce_requests_limits: true)
end
describe ".send_git_archive" do
@ -43,7 +41,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq('git-archive')
expect(params).to eq({
'GitalyServer' => {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
@ -73,7 +71,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq('git-archive')
expect(params).to eq({
'GitalyServer' => {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
@ -124,7 +122,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq("git-format-patch")
expect(params).to eq({
'GitalyServer' => {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
@ -187,7 +185,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq("git-diff")
expect(params).to eq({
'GitalyServer' => {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
@ -274,7 +272,7 @@ RSpec.describe Gitlab::Workhorse do
let(:gitaly_params) do
{
GitalyServer: {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address('default'),
token: Gitlab::GitalyClient.token('default')
}
@ -310,6 +308,35 @@ RSpec.describe Gitlab::Workhorse do
it { is_expected.to include(ShowAllRefs: true) }
end
context 'when a feature flag is set for a single project' do
before do
stub_feature_flags(gitaly_mep_mep: project)
end
it 'sets the flag to true for that project' do
response = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
expect(response.dig(:GitalyServer, :features)).to eq('gitaly-feature-enforce-requests-limits' => 'true',
'gitaly-feature-mep-mep' => 'true')
end
it 'sets the flag to false for other projects' do
other_project = create(:project, :public, :repository)
response = described_class.git_http_ok(other_project.repository, Gitlab::GlRepository::PROJECT, user, action)
expect(response.dig(:GitalyServer, :features)).to eq('gitaly-feature-enforce-requests-limits' => 'true',
'gitaly-feature-mep-mep' => 'false')
end
it 'sets the flag to false when there is no project' do
snippet = create(:personal_snippet, :repository)
response = described_class.git_http_ok(snippet.repository, Gitlab::GlRepository::SNIPPET, user, action)
expect(response.dig(:GitalyServer, :features)).to eq('gitaly-feature-enforce-requests-limits' => 'true',
'gitaly-feature-mep-mep' => 'false')
end
end
end
context "when git_receive_pack action is passed" do
@ -423,7 +450,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq('git-blob')
expect(params).to eq({
'GitalyServer' => {
features: { 'gitaly-feature-foobar' => 'true' },
features: { 'gitaly-feature-enforce-requests-limits' => 'true' },
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
},
@ -485,7 +512,7 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq('git-snapshot')
expect(params).to eq(
'GitalyServer' => {
'features' => { 'gitaly-feature-foobar' => 'true' },
'features' => { 'gitaly-feature-enforce-requests-limits' => 'true' },
'address' => Gitlab::GitalyClient.address(project.repository_storage),
'token' => Gitlab::GitalyClient.token(project.repository_storage)
},

View file

@ -0,0 +1,47 @@
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20210210093901_backfill_updated_at_after_repository_storage_move.rb')
# Spec for the post-deploy migration that backfills `updated_at` on
# project_repository_storage_moves records (required above from db/post_migrate).
RSpec.describe BackfillUpdatedAtAfterRepositoryStorageMove, :sidekiq do
# Raw table helpers — migration specs avoid application models on purpose.
let_it_be(:projects) { table(:projects) }
let_it_be(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
describe '#up' do
it 'schedules background jobs for all distinct projects in batches' do
# Force small batches so 7 distinct project ids produce 3 delayed jobs.
stub_const("#{described_class}::BATCH_SIZE", 3)
# Projects 1..7 have storage moves; project 8 has none and must be skipped.
project_1 = projects.create!(id: 1, namespace_id: namespace.id)
project_2 = projects.create!(id: 2, namespace_id: namespace.id)
project_3 = projects.create!(id: 3, namespace_id: namespace.id)
project_4 = projects.create!(id: 4, namespace_id: namespace.id)
project_5 = projects.create!(id: 5, namespace_id: namespace.id)
project_6 = projects.create!(id: 6, namespace_id: namespace.id)
project_7 = projects.create!(id: 7, namespace_id: namespace.id)
projects.create!(id: 8, namespace_id: namespace.id)
# Some projects (1, 3, 4) have multiple moves — scheduling must de-duplicate.
project_repository_storage_moves.create!(id: 1, project_id: project_1.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 2, project_id: project_1.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 3, project_id: project_2.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 4, project_id: project_3.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 5, project_id: project_3.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 6, project_id: project_4.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 7, project_id: project_4.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 8, project_id: project_5.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 9, project_id: project_6.id, source_storage_name: 'default', destination_storage_name: 'default')
project_repository_storage_moves.create!(id: 10, project_id: project_7.id, source_storage_name: 'default', destination_storage_name: 'default')
Sidekiq::Testing.fake! do
# freeze_time makes the staggered scheduling delays deterministic.
freeze_time do
migrate!
# 7 distinct project ids / batch size 3 => 3 jobs, 2 minutes apart.
expect(BackgroundMigrationWorker.jobs.size).to eq(3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, 1, 2, 3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, 4, 5, 6)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, 7)
end
end
end
end
end

View file

@ -543,6 +543,7 @@ RSpec.describe API::Internal::Base do
end
context "git pull" do
context "with a feature flag enabled globally" do
before do
stub_feature_flags(gitaly_mep_mep: true)
end
@ -565,6 +566,31 @@ RSpec.describe API::Internal::Base do
end
end
# Per-project flag gating must flow through the /internal API "git pull"
# response: the gitaly features hash reads true only for the flag's actor.
context "with a feature flag enabled for a project" do
before do
# Enable `gitaly_mep_mep` for this one project (actor-based gating).
stub_feature_flags(gitaly_mep_mep: project)
end
it "has the flag set to true for that project" do
pull(key, project)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-mep-mep' => 'true')
end
it "has the flag set to false for other projects" do
# Any project other than the flag's actor must see the flag disabled.
other_project = create(:project, :public, :repository)
pull(key, other_project)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response["gl_repository"]).to eq("project-#{other_project.id}")
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-mep-mep' => 'false')
end
end
end
context "git push" do
context 'project as namespace/project' do
it do

View file

@ -612,7 +612,6 @@ RSpec.describe API::Repositories do
end
describe 'POST /projects/:id/repository/changelog' do
context 'when the changelog_api feature flag is enabled' do
it 'generates the changelog for a version' do
spy = instance_spy(Repositories::ChangelogService)
@ -692,20 +691,4 @@ RSpec.describe API::Repositories do
expect(json_response['message']).to eq('Failed to generate the changelog: oops')
end
end
context 'when the changelog_api feature flag is disabled' do
before do
stub_feature_flags(changelog_api: false)
end
it 'responds with a 404 Not Found' do
post(
api("/projects/#{project.id}/repository/changelog", user),
params: { version: '1.0.0', from: 'foo', to: 'bar' }
)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end

View file

@ -77,6 +77,11 @@ RSpec.describe HealthController do
shared_context 'endpoint responding with readiness data' do
context 'when requesting instance-checks' do
context 'when Puma runs in Clustered mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(true)
end
it 'responds with readiness checks data' do
expect(Gitlab::HealthChecks::MasterCheck).to receive(:check) { true }
@ -100,7 +105,23 @@ RSpec.describe HealthController do
end
end
context 'when Puma runs in Single mode' do
  before do
    allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(false)
  end

  # Single-mode Puma has no master process, so the readiness probe must
  # skip MasterCheck entirely rather than report it as failing.
  it 'does not invoke MasterCheck, succeeds' do
    # Fix: description typo ("succeedes") and a meaningless `{ true }`
    # implementation block on a negative message expectation (the block
    # can never run when the message must NOT be received).
    expect(Gitlab::HealthChecks::MasterCheck).not_to receive(:check)

    subject

    expect(json_response).to eq('status' => 'ok')
  end
end
end
context 'when requesting all checks' do
shared_context 'endpoint responding with readiness data for all checks' do
before do
params.merge!(all: true)
end
@ -164,6 +185,23 @@ RSpec.describe HealthController do
end
end
end
context 'when Puma runs in Clustered mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(true)
end
it_behaves_like 'endpoint responding with readiness data for all checks'
end
context 'when Puma runs in Single mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(false)
end
it_behaves_like 'endpoint responding with readiness data for all checks'
end
end
end
context 'accessed from whitelisted ip' do

View file

@ -31,6 +31,8 @@ module Spec
fill_in('note[note]', with: new_note_text)
find('.js-comment-button').click
end
wait_for_requests
end
def preview_note(text)

View file

@ -4,7 +4,7 @@ require_relative 'test_env'
# This file is specific to specs in spec/lib/gitlab/git/
SEED_STORAGE_PATH = TestEnv.repos_path
SEED_STORAGE_PATH = Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path }
TEST_REPO_PATH = 'gitlab-git-test.git'
TEST_NORMAL_REPO_PATH = 'not-bare-repo.git'
TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'

View file

@ -149,7 +149,9 @@ module TestEnv
end
end
FileUtils.mkdir_p(repos_path)
FileUtils.mkdir_p(
Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path }
)
FileUtils.mkdir_p(SECOND_STORAGE_PATH)
FileUtils.mkdir_p(backup_path)
FileUtils.mkdir_p(pages_path)

View file

@ -148,7 +148,6 @@ RSpec.shared_examples 'clone quick action' do
expect(issue.reload).not_to be_closed
edit_note("/cloe #{target_project.full_path}", "test note.\n/clone #{target_project.full_path}")
wait_for_all_requests
expect(page).to have_content 'test note.'
expect(issue.reload).to be_open
@ -166,7 +165,6 @@ RSpec.shared_examples 'clone quick action' do
expect(page).not_to have_content 'Commands applied'
edit_note("/cloe #{target_project.full_path}", "/clone #{target_project.full_path}")
wait_for_all_requests
expect(page).not_to have_content "/clone #{target_project.full_path}"
expect(issue.reload).to be_open

View file

@ -106,7 +106,6 @@ RSpec.shared_examples 'move quick action' do
expect(issue.reload).not_to be_closed
edit_note("/mvoe #{target_project.full_path}", "test note.\n/move #{target_project.full_path}")
wait_for_all_requests
expect(page).to have_content 'test note.'
expect(issue.reload).to be_closed
@ -125,7 +124,6 @@ RSpec.shared_examples 'move quick action' do
expect(issue.reload).not_to be_closed
edit_note("/mvoe #{target_project.full_path}", "/move #{target_project.full_path}")
wait_for_all_requests
expect(page).not_to have_content "/move #{target_project.full_path}"
expect(issue.reload).to be_closed

View file

@ -1,5 +1,15 @@
# Changelog for gitlab-workhorse
## v8.63.0
### Added
- Accept more paths as Git HTTP
https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/684
### Other
- Migrate error tracking from raven to labkit
https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/671
## v8.62.0
### Added

View file

@ -1 +1 @@
8.62.0
8.63.0

View file

@ -9,6 +9,7 @@ import (
"math/rand"
"net"
"net/http"
"net/url"
"os"
"os/exec"
"path"
@ -169,6 +170,62 @@ func TestGetInfoRefsProxiedToGitalyInterruptedStream(t *testing.T) {
waitDone(t, done)
}
// TestGetInfoRefsRouting checks which request paths Workhorse routes to the
// Git HTTP info/refs handler. Paths ending in ".git" (projects, wikis,
// snippets) with a valid service parameter must be proxied to the test
// Gitaly server; anything else must be refused before reaching Gitaly.
func TestGetInfoRefsRouting(t *testing.T) {
	gitaly, socketPath := startGitalyServer(t, codes.OK)
	defer gitaly.GracefulStop()

	okBody := gitOkBody(t)
	okBody.GitalyServer.Address = "unix:" + socketPath

	authServer := testAuthServer(t, nil, url.Values{"service": {"git-receive-pack"}}, 200, okBody)
	defer authServer.Close()

	workhorse := startWorkhorseServer(authServer.URL)
	defer workhorse.Close()

	cases := []struct {
		verb       string
		resource   string
		wantStatus int
	}{
		// valid requests
		{"GET", "/toplevel.git/info/refs?service=git-receive-pack", 200},
		{"GET", "/toplevel.wiki.git/info/refs?service=git-receive-pack", 200},
		{"GET", "/toplevel/child/project.git/info/refs?service=git-receive-pack", 200},
		{"GET", "/toplevel/child/project.wiki.git/info/refs?service=git-receive-pack", 200},
		{"GET", "/toplevel/child/project/snippets/123.git/info/refs?service=git-receive-pack", 200},
		{"GET", "/snippets/123.git/info/refs?service=git-receive-pack", 200},
		// failing due to missing service parameter
		{"GET", "/foo/bar.git/info/refs", 403},
		// failing due to invalid service parameter
		{"GET", "/foo/bar.git/info/refs?service=git-zzz-pack", 403},
		// failing due to invalid repository path
		{"GET", "/.git/info/refs?service=git-receive-pack", 204},
		// failing due to invalid request method
		{"POST", "/toplevel.git/info/refs?service=git-receive-pack", 204},
	}

	for _, c := range cases {
		t.Run(c.resource, func(t *testing.T) {
			req, err := http.NewRequest(c.verb, workhorse.URL+c.resource, nil)
			require.NoError(t, err)

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			defer resp.Body.Close()

			body := string(testhelper.ReadAll(t, resp.Body))

			if c.wantStatus == 200 {
				require.Equal(t, 200, resp.StatusCode)
				// The NUL byte marks the canned reply from the test Gitaly server.
				require.Contains(t, body, "\x00", "expect response generated by test gitaly server")
			} else {
				require.Equal(t, c.wantStatus, resp.StatusCode)
				require.Empty(t, body, "normal request has empty response body")
			}
		})
	}
}
func waitDone(t *testing.T, done chan struct{}) {
t.Helper()
select {
@ -259,6 +316,65 @@ func TestPostReceivePackProxiedToGitalyInterrupted(t *testing.T) {
waitDone(t, done)
}
// TestPostReceivePackRouting verifies that only POST requests to *.git
// git-receive-pack paths carrying the matching content type are proxied to
// the test Gitaly server; mismatched paths, services, content types, or
// methods fall through and receive an empty 204.
func TestPostReceivePackRouting(t *testing.T) {
	gitaly, socketPath := startGitalyServer(t, codes.OK)
	defer gitaly.GracefulStop()

	okBody := gitOkBody(t)
	okBody.GitalyServer.Address = "unix:" + socketPath

	authServer := testAuthServer(t, nil, nil, 200, okBody)
	defer authServer.Close()

	workhorse := startWorkhorseServer(authServer.URL)
	defer workhorse.Close()

	cases := []struct {
		verb     string
		resource string
		mimeType string
		routed   bool
	}{
		{"POST", "/toplevel.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/toplevel.wiki.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/toplevel/child/project.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/toplevel/child/project.wiki.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/toplevel/child/project/snippets/123.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/snippets/123.git/git-receive-pack", "application/x-git-receive-pack-request", true},
		{"POST", "/foo/bar/git-receive-pack", "application/x-git-receive-pack-request", false},
		{"POST", "/foo/bar.git/git-zzz-pack", "application/x-git-receive-pack-request", false},
		{"POST", "/.git/git-receive-pack", "application/x-git-receive-pack-request", false},
		{"POST", "/toplevel.git/git-receive-pack", "application/x-git-upload-pack-request", false},
		{"GET", "/toplevel.git/git-receive-pack", "application/x-git-receive-pack-request", false},
	}

	for _, c := range cases {
		t.Run(c.resource, func(t *testing.T) {
			req, err := http.NewRequest(
				c.verb,
				workhorse.URL+c.resource,
				bytes.NewReader(testhelper.GitalyReceivePackResponseMock),
			)
			require.NoError(t, err)
			req.Header.Set("Content-Type", c.mimeType)

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			defer resp.Body.Close()

			body := string(testhelper.ReadAll(t, resp.Body))

			if c.routed {
				require.Equal(t, 200, resp.StatusCode)
				require.Contains(t, body, "\x00", "expect response generated by test gitaly server")
			} else {
				// Unrouted requests reach the stub auth server, which answers
				// 204 with no body when the signing JWT header is absent.
				require.Equal(t, 204, resp.StatusCode)
				require.Empty(t, body, "normal request has empty response body")
			}
		})
	}
}
// ReaderFunc is an adapter to turn a conforming function into an io.Reader.
type ReaderFunc func(b []byte) (int, error)
@ -376,6 +492,65 @@ func TestPostUploadPackProxiedToGitalyInterrupted(t *testing.T) {
waitDone(t, done)
}
// TestPostUploadPackRouting verifies that only POST requests to *.git
// git-upload-pack paths carrying the matching content type are proxied to
// the test Gitaly server; everything else falls through and receives an
// empty 204.
func TestPostUploadPackRouting(t *testing.T) {
	gitaly, socketPath := startGitalyServer(t, codes.OK)
	defer gitaly.GracefulStop()

	okBody := gitOkBody(t)
	okBody.GitalyServer.Address = "unix:" + socketPath

	authServer := testAuthServer(t, nil, nil, 200, okBody)
	defer authServer.Close()

	workhorse := startWorkhorseServer(authServer.URL)
	defer workhorse.Close()

	cases := []struct {
		verb     string
		resource string
		mimeType string
		routed   bool
	}{
		{"POST", "/toplevel.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/toplevel.wiki.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/toplevel/child/project.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/toplevel/child/project.wiki.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/toplevel/child/project/snippets/123.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/snippets/123.git/git-upload-pack", "application/x-git-upload-pack-request", true},
		{"POST", "/foo/bar/git-upload-pack", "application/x-git-upload-pack-request", false},
		{"POST", "/foo/bar.git/git-zzz-pack", "application/x-git-upload-pack-request", false},
		{"POST", "/.git/git-upload-pack", "application/x-git-upload-pack-request", false},
		{"POST", "/toplevel.git/git-upload-pack", "application/x-git-receive-pack-request", false},
		{"GET", "/toplevel.git/git-upload-pack", "application/x-git-upload-pack-request", false},
	}

	for _, c := range cases {
		t.Run(c.resource, func(t *testing.T) {
			req, err := http.NewRequest(
				c.verb,
				workhorse.URL+c.resource,
				bytes.NewReader(testhelper.GitalyReceivePackResponseMock),
			)
			require.NoError(t, err)
			req.Header.Set("Content-Type", c.mimeType)

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			defer resp.Body.Close()

			body := string(testhelper.ReadAll(t, resp.Body))

			if c.routed {
				require.Equal(t, 200, resp.StatusCode)
				require.Contains(t, body, "\x00", "expect response generated by test gitaly server")
			} else {
				// Unrouted requests reach the stub auth server, which answers
				// 204 with no body when the signing JWT header is absent.
				require.Equal(t, 204, resp.StatusCode)
				require.Empty(t, body, "normal request has empty response body")
			}
		})
	}
}
func TestGetDiffProxiedToGitalySuccessfully(t *testing.T) {
gitalyServer, socketPath := startGitalyServer(t, codes.OK)
defer gitalyServer.GracefulStop()

View file

@ -8,10 +8,8 @@ require (
github.com/FZambia/sentinel v1.0.0
github.com/alecthomas/chroma v0.7.3
github.com/aws/aws-sdk-go v1.36.1
github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 // indirect
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/disintegration/imaging v1.6.2
github.com/getsentry/raven-go v0.2.0
github.com/golang/gddo v0.0.0-20190419222130-af0f2af80721
github.com/golang/protobuf v1.4.3
github.com/gomodule/redigo v2.0.0+incompatible

View file

@ -145,8 +145,6 @@ github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/certifi/gocertifi v0.0.0-20180905225744-ee1a9a0726d2/go.mod h1:GJKEexRPVJrBSOjoqN5VNOIKJ5Q3RViH6eu3puDRwx4=
github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054 h1:uH66TXeswKn5PW5zdZ39xEwfS9an067BirqA+P4QaLI=
github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=

View file

@ -0,0 +1,60 @@
package errortracker
import (
"fmt"
"net/http"
"os"
"runtime/debug"
"gitlab.com/gitlab-org/labkit/errortracking"
"gitlab.com/gitlab-org/labkit/log"
)
// NewHandler wraps next so that a panic raised while serving a request is
// logged with the request's structured (correlation) fields and reported
// to the error tracker as an error, instead of tearing the process down
// without a report. Note: the panic is absorbed, not re-raised.
func NewHandler(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			recovered := recover()
			if recovered == nil {
				return
			}

			fields := log.ContextFields(r.Context())
			log.WithFields(fields).Error(recovered)
			debug.PrintStack()

			// recover() may yield any value; coerce non-errors into an error.
			err, isErr := recovered.(error)
			if !isErr {
				err = fmt.Errorf("%v", recovered)
			}
			TrackFailedRequest(r, err, fields)
		}()

		next.ServeHTTP(w, r)
	})
}
// TrackFailedRequest reports err to the error tracker, attaching the
// request, its context, and every structured log field as a stringified
// custom attribute.
func TrackFailedRequest(r *http.Request, err error, fields log.Fields) {
	opts := make([]errortracking.CaptureOption, 0, len(fields)+2)
	opts = append(opts,
		errortracking.WithContext(r.Context()),
		errortracking.WithRequest(r),
	)
	for key, value := range fields {
		opts = append(opts, errortracking.WithField(key, fmt.Sprintf("%v", value)))
	}

	errortracking.Capture(err, opts...)
}
// Initialize configures the labkit error tracker from workhorse-specific
// environment variables and tags all reports with the given version.
// It returns any error from the underlying tracker setup.
func Initialize(version string) error {
	// Use a custom environment variable (not SENTRY_DSN) to prevent
	// clashes with gitlab-rails.
	dsn := os.Getenv("GITLAB_WORKHORSE_SENTRY_DSN")
	environment := os.Getenv("GITLAB_WORKHORSE_SENTRY_ENVIRONMENT")

	return errortracking.Initialize(
		errortracking.WithSentryDSN(dsn),
		errortracking.WithSentryEnvironment(environment),
		errortracking.WithVersion(version),
	)
}

View file

@ -14,50 +14,31 @@ import (
"syscall"
"github.com/sebest/xff"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/labkit/mask"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/log"
)
const NginxResponseBufferHeader = "X-Accel-Buffering"
func logErrorWithFields(r *http.Request, err error, fields log.Fields) {
if err != nil {
CaptureRavenError(r, err, fields)
}
printError(r, err, fields)
}
func CaptureAndFail(w http.ResponseWriter, r *http.Request, err error, msg string, code int) {
func CaptureAndFail(w http.ResponseWriter, r *http.Request, err error, msg string, code int, loggerCallbacks ...log.ConfigureLogger) {
http.Error(w, msg, code)
logErrorWithFields(r, err, nil)
logger := log.WithRequest(r).WithError(err)
for _, cb := range loggerCallbacks {
logger = cb(logger)
}
func Fail500(w http.ResponseWriter, r *http.Request, err error) {
CaptureAndFail(w, r, err, "Internal server error", http.StatusInternalServerError)
logger.Error(msg)
}
func Fail500WithFields(w http.ResponseWriter, r *http.Request, err error, fields log.Fields) {
http.Error(w, "Internal server error", http.StatusInternalServerError)
logErrorWithFields(r, err, fields)
func Fail500(w http.ResponseWriter, r *http.Request, err error, loggerCallbacks ...log.ConfigureLogger) {
CaptureAndFail(w, r, err, "Internal server error", http.StatusInternalServerError, loggerCallbacks...)
}
func RequestEntityTooLarge(w http.ResponseWriter, r *http.Request, err error) {
CaptureAndFail(w, r, err, "Request Entity Too Large", http.StatusRequestEntityTooLarge)
}
func printError(r *http.Request, err error, fields log.Fields) {
if r != nil {
entry := log.WithContextFields(r.Context(), log.Fields{
"method": r.Method,
"uri": mask.URL(r.RequestURI),
})
entry.WithFields(fields).WithError(err).Error()
} else {
log.WithFields(fields).WithError(err).Error("unknown error")
}
}
func SetNoCacheHeaders(header http.Header) {
header.Set("Cache-Control", "no-cache, no-store, max-age=0, must-revalidate")
header.Set("Pragma", "no-cache")
@ -97,7 +78,7 @@ func OpenFile(path string) (file *os.File, fi os.FileInfo, err error) {
func URLMustParse(s string) *url.URL {
u, err := url.Parse(s)
if err != nil {
log.WithError(err).WithField("url", s).Fatal("urlMustParse")
log.WithError(err).WithFields(log.Fields{"url": s}).Error("urlMustParse")
}
return u
}

View file

@ -1,58 +0,0 @@
package helper
import (
"net/http"
"reflect"
raven "github.com/getsentry/raven-go"
//lint:ignore SA1019 this was recently deprecated. Update workhorse to use labkit errortracking package.
correlation "gitlab.com/gitlab-org/labkit/correlation/raven"
"gitlab.com/gitlab-org/labkit/log"
)
var ravenHeaderBlacklist = []string{
"Authorization",
"Private-Token",
}
// CaptureRavenError reports err to Sentry through the default raven
// client, with a stack trace, any structured log fields as extra data,
// and — when a request is supplied — the credential-scrubbed HTTP request
// plus its correlation ID.
func CaptureRavenError(r *http.Request, err error, fields log.Fields) {
	extra := raven.Extra{}
	for key, value := range fields {
		extra[key] = value
	}

	interfaces := []raven.Interface{}
	if r != nil {
		// Scrub credential headers in place before attaching the request.
		CleanHeadersForRaven(r)
		interfaces = append(interfaces, raven.NewHttp(r))
		//lint:ignore SA1019 this was recently deprecated. Update workhorse to use labkit errortracking package.
		extra = correlation.SetExtra(r.Context(), extra)
	}

	exception := &raven.Exception{
		Stacktrace: raven.NewStacktrace(2, 3, nil),
		Value:      err.Error(),
		Type:       reflect.TypeOf(err).String(),
	}
	interfaces = append(interfaces, exception)

	packet := raven.NewPacketWithExtra(err.Error(), extra, interfaces...)
	raven.DefaultClient.Capture(packet, nil)
}
// CleanHeadersForRaven redacts sensitive credential headers in place so
// they are never shipped to Sentry. A nil request is a no-op.
func CleanHeadersForRaven(r *http.Request) {
	if r == nil {
		return
	}

	for _, header := range ravenHeaderBlacklist {
		if r.Header.Get(header) != "" {
			r.Header.Set(header, "[redacted]")
		}
	}
}

View file

@ -428,16 +428,18 @@ func logFields(startTime time.Time, params *resizeParams, outcome *resizeOutcome
func handleOutcome(w http.ResponseWriter, req *http.Request, startTime time.Time, params *resizeParams, outcome *resizeOutcome) {
fields := logFields(startTime, params, outcome)
log := log.WithRequest(req).WithFields(fields)
logger := log.WithRequest(req).WithFields(fields)
switch outcome.status {
case statusRequestFailure:
if outcome.bytesWritten <= 0 {
helper.Fail500WithFields(w, req, outcome.err, fields)
helper.Fail500(w, req, outcome.err, func(b *log.Builder) *log.Builder {
return b.WithFields(fields)
})
} else {
log.WithError(outcome.err).Error(outcome.status)
logger.WithError(outcome.err).Error(outcome.status)
}
default:
log.Info(outcome.status)
logger.Info(outcome.status)
}
}

View file

@ -8,11 +8,13 @@ import (
"gitlab.com/gitlab-org/labkit/mask"
"golang.org/x/net/context"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/errortracker"
)
type Fields = log.Fields
type ConfigureLogger func(*Builder) *Builder
type Builder struct {
entry *logrus.Entry
fields log.Fields
@ -83,6 +85,6 @@ func (b *Builder) Error(args ...interface{}) {
b.entry.Error(args...)
if b.req != nil && b.err != nil {
helper.CaptureRavenError(b.req, b.err, b.fields)
errortracker.TrackFailedRequest(b.req, b.err, b.fields)
}
}

View file

@ -55,7 +55,7 @@ type uploadPreparers struct {
const (
apiPattern = `^/api/`
ciAPIPattern = `^/ci/api/`
gitProjectPattern = `^/([^/]+/){1,}[^/]+\.git/`
gitProjectPattern = `^/.+\.git/`
projectPattern = `^/([^/]+/){1,}[^/]+/`
snippetUploadPattern = `^/uploads/personal_snippet`
userUploadPattern = `^/uploads/user`

View file

@ -16,6 +16,7 @@ import (
"gitlab.com/gitlab-org/labkit/correlation"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/config"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/errortracker"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/rejectmethods"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/upload"
@ -63,7 +64,7 @@ func NewUpstream(cfg config.Config, accessLogger *logrus.Logger) http.Handler {
correlationOpts = append(correlationOpts, correlation.WithPropagation())
}
handler := correlation.InjectCorrelationID(&up, correlationOpts...)
handler := correlation.InjectCorrelationID(errortracker.NewHandler(&up), correlationOpts...)
// TODO: move to LabKit https://gitlab.com/gitlab-org/gitlab-workhorse/-/issues/339
handler = rejectmethods.NewMiddleware(handler)
return handler

View file

@ -16,6 +16,7 @@ import (
"gitlab.com/gitlab-org/labkit/tracing"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/config"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/errortracker"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/queueing"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/redis"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/secret"
@ -156,6 +157,8 @@ func run(boot bootConfig, cfg config.Config) error {
}
defer closer.Close()
errortracker.Initialize(cfg.Version)
tracing.Initialize(tracing.WithServiceName("gitlab-workhorse"))
log.WithField("version", Version).WithField("build_time", BuildTime).Print("Starting")
@ -223,7 +226,7 @@ func run(boot bootConfig, cfg config.Config) error {
}
defer accessCloser.Close()
up := wrapRaven(upstream.NewUpstream(cfg, accessLogger))
up := upstream.NewUpstream(cfg, accessLogger)
go func() { finalErrors <- http.Serve(listener, up) }()

View file

@ -694,6 +694,12 @@ func testAuthServer(t *testing.T, url *regexp.Regexp, params url.Values, code in
return testhelper.TestServerWithHandler(url, func(w http.ResponseWriter, r *http.Request) {
require.NotEmpty(t, r.Header.Get("X-Request-Id"))
// return a 204 No Content response if we don't receive the JWT header
if r.Header.Get(secret.RequestHeader) == "" {
w.WriteHeader(204)
return
}
w.Header().Set("Content-Type", api.ResponseContentType)
logEntry := log.WithFields(log.Fields{

View file

@ -1,40 +0,0 @@
package main
import (
"net/http"
"os"
raven "github.com/getsentry/raven-go"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/helper"
)
// wrapRaven wraps h in raven's panic-recovery handler so panics are
// reported to Sentry, scrubbing credential headers before re-raising the
// panic into raven's handler. With no DSN configured, h is returned
// unwrapped.
func wrapRaven(h http.Handler) http.Handler {
	// Use a custom environment variable (not SENTRY_DSN) to prevent
	// clashes with gitlab-rails.
	dsn := os.Getenv("GITLAB_WORKHORSE_SENTRY_DSN")
	environment := os.Getenv("GITLAB_WORKHORSE_SENTRY_ENVIRONMENT")

	raven.SetDSN(dsn) // dsn may be empty
	if environment != "" {
		raven.SetEnvironment(environment)
	}

	if dsn == "" {
		return h
	}

	raven.DefaultClient.SetRelease(Version)

	return http.HandlerFunc(raven.RecoveryHandler(
		func(w http.ResponseWriter, r *http.Request) {
			defer func() {
				if p := recover(); p != nil {
					// Strip credentials before raven serializes the request.
					helper.CleanHeadersForRaven(r)
					panic(p)
				}
			}()

			h.ServeHTTP(w, r)
		}))
}

View file

@ -292,6 +292,74 @@ func TestLfsUpload(t *testing.T) {
require.Equal(t, rspBody, string(rspData))
}
// TestLfsUploadRouting checks which LFS object upload paths Workhorse
// treats as Git LFS requests: PUT requests with octet-stream bodies on
// *.git paths are forwarded with the signing JWT header (the stub upstream
// then replies with a body), while anything else arrives unsigned and gets
// an empty 204.
func TestLfsUploadRouting(t *testing.T) {
	uploadBody := "test data"
	successBody := "test success"
	oid := "916f0027a575074ce72a331777c3478d6513f786a591bd892da1a577bf2335f9"

	upstream := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
		// Only signed (JWT-carrying) requests are acknowledged with a body.
		if r.Header.Get(secret.RequestHeader) == "" {
			w.WriteHeader(204)
		} else {
			fmt.Fprint(w, successBody)
		}
	})
	defer upstream.Close()

	workhorse := startWorkhorseServer(upstream.URL)
	defer workhorse.Close()

	cases := []struct {
		verb     string
		path     string
		mimeType string
		routed   bool
	}{
		{"PUT", "/toplevel.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/toplevel.wiki.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/toplevel/child/project.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/toplevel/child/project.wiki.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/toplevel/child/project/snippets/123.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/snippets/123.git/gitlab-lfs/objects", "application/octet-stream", true},
		{"PUT", "/foo/bar/gitlab-lfs/objects", "application/octet-stream", false},
		{"PUT", "/foo/bar.git/gitlab-lfs/objects/zzz", "application/octet-stream", false},
		{"PUT", "/.git/gitlab-lfs/objects", "application/octet-stream", false},
		{"PUT", "/toplevel.git/gitlab-lfs/objects", "application/zzz", false},
		{"POST", "/toplevel.git/gitlab-lfs/objects", "application/octet-stream", false},
	}

	for _, c := range cases {
		t.Run(c.path, func(t *testing.T) {
			resource := fmt.Sprintf(c.path+"/%s/%d", oid, len(uploadBody))

			req, err := http.NewRequest(
				c.verb,
				workhorse.URL+resource,
				strings.NewReader(uploadBody),
			)
			require.NoError(t, err)
			req.Header.Set("Content-Type", c.mimeType)
			req.ContentLength = int64(len(uploadBody))

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			defer resp.Body.Close()

			rspData, err := ioutil.ReadAll(resp.Body)
			require.NoError(t, err)

			if c.routed {
				require.Equal(t, 200, resp.StatusCode)
				require.Equal(t, successBody, string(rspData), "expect response generated by test upstream server")
			} else {
				require.Equal(t, 204, resp.StatusCode)
				require.Empty(t, rspData, "normal request has empty response body")
			}
		})
	}
}
func packageUploadTestServer(t *testing.T, method string, resource string, reqBody string, rspBody string) *httptest.Server {
return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, r.Method, method)