Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-02-14 09:13:38 +00:00
parent a1c59f6d02
commit 41482e5dce
19 changed files with 114 additions and 97 deletions

View File

@@ -11,7 +11,6 @@ Database/MultipleDatabases:
- ee/spec/services/ee/merge_requests/update_service_spec.rb
- lib/backup/database.rb
- lib/backup/manager.rb
- lib/gitlab/current_settings.rb
- lib/gitlab/database/load_balancing/load_balancer.rb
- lib/gitlab/database/load_balancing.rb
- lib/gitlab/database/load_balancing/sticking.rb
@@ -29,7 +28,6 @@ Database/MultipleDatabases:
- spec/db/schema_spec.rb
- spec/initializers/database_config_spec.rb
- spec/lib/backup/manager_spec.rb
- spec/lib/gitlab/current_settings_spec.rb
- spec/lib/gitlab/database_spec.rb
- spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
- spec/lib/gitlab/profiler_spec.rb

View File

@@ -8,6 +8,7 @@ query searchMilestones($fullPath: ID!, $search: String, $isProject: Boolean = fa
includeAncestors: true
includeDescendants: true
sort: EXPIRED_LAST_DUE_DATE_ASC
state: active
) {
nodes {
...Milestone
@@ -16,7 +17,12 @@ query searchMilestones($fullPath: ID!, $search: String, $isProject: Boolean = fa
}
project(fullPath: $fullPath) @include(if: $isProject) {
id
milestones(searchTitle: $search, includeAncestors: true, sort: EXPIRED_LAST_DUE_DATE_ASC) {
milestones(
searchTitle: $search
includeAncestors: true
sort: EXPIRED_LAST_DUE_DATE_ASC
state: active
) {
nodes {
...Milestone
}

View File

@@ -15,6 +15,7 @@ const VueDraggable = () => import('vuedraggable');
export default {
vueDraggableAttributes: {
animation: 200,
forceFallback: true,
ghostClass: 'gl-visibility-hidden',
tag: 'ul',
},

View File

@@ -49,6 +49,10 @@ class DeploymentsFinder
private
def raise_for_inefficient_updated_at_query?
params.fetch(:raise_for_inefficient_updated_at_query, Rails.env.development? || Rails.env.test?)
end
def validate!
if filter_by_updated_at? && filter_by_finished_at?
raise InefficientQueryError, 'Both `updated_at` filter and `finished_at` filter can not be specified'
@@ -57,9 +61,11 @@ class DeploymentsFinder
# Currently, the inefficient parameters are allowed in order to avoid breaking changes in Deployment API.
# We'll switch to a hard error in https://gitlab.com/gitlab-org/gitlab/-/issues/328500.
if (filter_by_updated_at? && !order_by_updated_at?) || (!filter_by_updated_at? && order_by_updated_at?)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
InefficientQueryError.new('`updated_at` filter and `updated_at` sorting must be paired')
)
error = InefficientQueryError.new('`updated_at` filter and `updated_at` sorting must be paired')
Gitlab::ErrorTracking.log_exception(error)
raise error if raise_for_inefficient_updated_at_query?
end
if (filter_by_finished_at? && !order_by_finished_at?) || (!filter_by_finished_at? && order_by_finished_at?)

View File

@@ -1255,7 +1255,7 @@ POST /projects
| `repository_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `repository_storage` | string | **{dotted-circle}** No | Which storage shard the repository is on. _(administrator only)_ |
| `request_access_enabled` | boolean | **{dotted-circle}** No | Allow users to request member access. |
| `requirements_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, `enabled` or `public` |
| `requirements_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |
| `resolve_outdated_diff_discussions` | boolean | **{dotted-circle}** No | Automatically resolve merge request diffs discussions on lines changed with a push. |
| `shared_runners_enabled` | boolean | **{dotted-circle}** No | Enable shared runners for this project. |
| `snippets_access_level` | string | **{dotted-circle}** No | One of `disabled`, `private`, or `enabled`. |

View File

@@ -62,7 +62,7 @@ module Gitlab
# need to be added to the application settings. To prevent Rake tasks
# and other callers from failing, use any loaded settings and return
# defaults for missing columns.
if Gitlab::Runtime.rake? && ActiveRecord::Base.connection.migration_context.needs_migration?
if Gitlab::Runtime.rake? && ::ApplicationSetting.connection.migration_context.needs_migration?
db_attributes = current_settings&.attributes || {}
fake_application_settings(db_attributes)
elsif current_settings.present?
@@ -82,7 +82,7 @@ module Gitlab
def connect_to_db?
# When the DBMS is not available, an exception (e.g. PG::ConnectionBad) is raised
active_db_connection = ActiveRecord::Base.connection.active? rescue false
active_db_connection = ::ApplicationSetting.connection.active? rescue false
active_db_connection &&
ApplicationSetting.database.cached_table_exists?

View File

@@ -222,11 +222,7 @@ RSpec.describe DeploymentsFinder do
end
describe 'enforce sorting to `updated_at` sorting' do
let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc' } }
before do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
end
let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc', raise_for_inefficient_updated_at_query: false } }
it 'sorts by only one column' do
expect(subject.order_values.size).to eq(2)

View File

@@ -118,7 +118,7 @@ RSpec.describe Gitlab::CurrentSettings do
allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
# For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
# during the initialization phase of the test suite, so instead let's mock the internals of it
allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
allow(ApplicationSetting.connection).to receive(:active?).and_return(false)
end
context 'and no settings in cache' do
@@ -150,8 +150,8 @@ RSpec.describe Gitlab::CurrentSettings do
it 'fetches the settings from cache' do
# For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
# during the initialization phase of the test suite, so instead let's mock the internals of it
expect(ActiveRecord::Base.connection).not_to receive(:active?)
expect(ActiveRecord::Base.connection).not_to receive(:cached_table_exists?)
expect(ApplicationSetting.connection).not_to receive(:active?)
expect(ApplicationSetting.connection).not_to receive(:cached_table_exists?)
expect_any_instance_of(ActiveRecord::MigrationContext).not_to receive(:needs_migration?)
expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
end
@@ -159,8 +159,8 @@ RSpec.describe Gitlab::CurrentSettings do
context 'and no settings in cache' do
before do
allow(ActiveRecord::Base.connection).to receive(:active?).and_return(true)
allow(ActiveRecord::Base.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
allow(ApplicationSetting.connection).to receive(:active?).and_return(true)
allow(ApplicationSetting.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
end
context 'with RequestStore enabled', :request_store do
@@ -181,7 +181,7 @@ RSpec.describe Gitlab::CurrentSettings do
context 'when ApplicationSettings does not have a primary key' do
before do
allow(ActiveRecord::Base.connection).to receive(:primary_key).with('application_settings').and_return(nil)
allow(ApplicationSetting.connection).to receive(:primary_key).with('application_settings').and_return(nil)
end
it 'raises an exception if ApplicationSettings does not have a primary key' do

View File

@@ -121,7 +121,6 @@ project_feature:
- created_at
- metrics_dashboard_access_level
- project_id
- requirements_access_level
- security_and_compliance_access_level
- updated_at
computed_attributes:

View File

@@ -162,6 +162,6 @@ func UploadArtifacts(myAPI *api.API, h http.Handler, p upload.Preparer) http.Han
format := r.URL.Query().Get(ArtifactFormatKey)
mg := &artifactsUploadProcessor{opts: opts, format: format, SavedFileTracker: upload.SavedFileTracker{Request: r}}
upload.HandleFileUploads(w, r, h, a, mg, opts)
upload.InterceptMultipartFiles(w, r, h, a, mg, opts)
}, "/authorize")
}

View File

@@ -91,12 +91,12 @@ func TestInject(t *testing.T) {
}))
defer originResourceServer.Close()
// BodyUploader expects http.Handler as its second param, we can create a stub function and verify that
// RequestBody expects http.Handler as its second param, we can create a stub function and verify that
// it's only called for successful requests
handlerIsCalled := false
handlerFunc := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { handlerIsCalled = true })
bodyUploader := upload.BodyUploader(&fakePreAuthHandler{}, handlerFunc, &upload.DefaultPreparer{})
bodyUploader := upload.RequestBody(&fakePreAuthHandler{}, handlerFunc, &upload.DefaultPreparer{})
injector := NewInjector()
injector.SetUploadHandler(bodyUploader)

View File

@@ -6,7 +6,6 @@ package lfs
import (
"fmt"
"net/http"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@@ -49,7 +48,3 @@ func (l *uploadPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, uplo
return opts, &object{oid: a.LfsOid, size: a.LfsSize}, nil
}
func PutStore(a *api.API, h http.Handler, p upload.Preparer) http.Handler {
return upload.BodyUploader(a, h, p)
}

View File

@@ -17,16 +17,21 @@ type MultipartClaims struct {
jwt.StandardClaims
}
func Accelerate(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
// Multipart is a request middleware. If the request has a MIME multipart
// request body, the middleware will iterate through the multipart parts.
// When it finds a file part (filename != ""), the middleware will save
// the file contents to a temporary location and replace the file part
// with a reference to the temporary location.
func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return rails.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
s := &SavedFileTracker{Request: r}
opts, _, err := p.Prepare(a)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("Accelerate: error preparing file storage options"))
helper.Fail500(w, r, fmt.Errorf("Multipart: error preparing file storage options"))
return
}
HandleFileUploads(w, r, h, a, s, opts)
InterceptMultipartFiles(w, r, h, a, s, opts)
}, "/authorize")
}
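
For illustration, a minimal sketch (not taken from this commit) of wiring the renamed Multipart middleware in front of a downstream handler, reusing the SkipRailsAuthorizer and DefaultPreparer types that appear elsewhere in this diff; the standalone main function, port, and TempPath value are assumptions.

package main

import (
	"net/http"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
)

func main() {
	// Stand-in for the proxy to gitlab-rails: by the time this runs, each
	// multipart file part has been saved under TempPath and replaced with a
	// reference to the saved file.
	next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})

	// SkipRailsAuthorizer grants every upload without calling the Rails API;
	// DefaultPreparer derives the storage options from its fake response.
	handler := upload.Multipart(
		&upload.SkipRailsAuthorizer{TempPath: "/tmp/workhorse-uploads"}, // path is an assumption
		next,
		&upload.DefaultPreparer{},
	)

	http.ListenAndServe(":8181", handler)
}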

View File

@@ -16,16 +16,23 @@ type PreAuthorizer interface {
PreAuthorizeHandler(next api.HandleFunc, suffix string) http.Handler
}
// Verifier allows to check an upload before sending it to rails
// Verifier is an optional pluggable behavior for upload paths. If
// Verify() returns an error, Workhorse will return an error response to
// the client instead of propagating the request to Rails. The motivating
// use case is Git LFS, where Workhorse checks the size and SHA256
// checksum of the uploaded file.
type Verifier interface {
// Verify can abort the upload returning an error
// Verify can abort the upload by returning an error
Verify(handler *filestore.FileHandler) error
}
// Preparer allows to customize BodyUploader configuration
// Preparer is a pluggable behavior that interprets a Rails API response
// and either tells Workhorse how to handle the upload, via the
// SaveFileOpts and Verifier, or it rejects the request by returning a
// non-nil error. Its intended use is to make sure the upload gets stored
// in the right location: either a local directory, or one of several
// supported object storage backends.
type Preparer interface {
// Prepare converts api.Response into a *SaveFileOpts; it can optionally return a Verifier that will be
// invoked after the real upload, before the finalization with rails
Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error)
}
@@ -36,26 +43,26 @@ func (s *DefaultPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Ver
return opts, nil, err
}
// BodyUploader is an http.Handler that perform a pre authorization call to rails before hijacking the request body and
// uploading it.
// Providing an Preparer allows to customize the upload process
func BodyUploader(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
// RequestBody is a request middleware. It will store the request body to
// a location determined by an api.Response value. It then forwards the
// request to gitlab-rails without the original request body.
func RequestBody(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return rails.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
opts, verifier, err := p.Prepare(a)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("BodyUploader: preparation failed: %v", err))
helper.Fail500(w, r, fmt.Errorf("RequestBody: preparation failed: %v", err))
return
}
fh, err := filestore.SaveFileFromReader(r.Context(), r.Body, r.ContentLength, opts)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("BodyUploader: upload failed: %v", err))
helper.Fail500(w, r, fmt.Errorf("RequestBody: upload failed: %v", err))
return
}
if verifier != nil {
if err := verifier.Verify(fh); err != nil {
helper.Fail500(w, r, fmt.Errorf("BodyUploader: verification failed: %v", err))
helper.Fail500(w, r, fmt.Errorf("RequestBody: verification failed: %v", err))
return
}
}
@@ -63,7 +70,7 @@ func BodyUploader(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler
data := url.Values{}
fields, err := fh.GitLabFinalizeFields("file")
if err != nil {
helper.Fail500(w, r, fmt.Errorf("BodyUploader: finalize fields failed: %v", err))
helper.Fail500(w, r, fmt.Errorf("RequestBody: finalize fields failed: %v", err))
return
}
@@ -80,7 +87,7 @@ func BodyUploader(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler
sft := SavedFileTracker{Request: r}
sft.Track("file", fh.LocalPath)
if err := sft.Finalize(r.Context()); err != nil {
helper.Fail500(w, r, fmt.Errorf("BodyUploader: finalize failed: %v", err))
helper.Fail500(w, r, fmt.Errorf("RequestBody: finalize failed: %v", err))
return
}
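
As a hedged sketch of the Preparer and Verifier contracts documented above (not part of this commit): a Preparer that builds the storage options from the pre-authorization response and attaches a size-checking Verifier, loosely mirroring the Git LFS use case. The filestore.GetOpts helper and the FileHandler.Size field are assumptions inferred from DefaultPreparer and the LFS verifier in this diff.

package uploadexample

import (
	"fmt"
	"net/http"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
)

// sizeVerifier aborts the upload when the stored file size differs from
// what the pre-authorization response promised.
type sizeVerifier struct{ expected int64 }

func (v *sizeVerifier) Verify(fh *filestore.FileHandler) error {
	if fh.Size != v.expected { // Size field assumed, as in the LFS object verifier
		return fmt.Errorf("size mismatch: expected %d, got %d", v.expected, fh.Size)
	}
	return nil
}

// sizeCheckingPreparer converts the api.Response into SaveFileOpts and
// returns the Verifier that RequestBody invokes after storing the body.
type sizeCheckingPreparer struct{}

func (p *sizeCheckingPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, upload.Verifier, error) {
	opts, err := filestore.GetOpts(a) // helper assumed; DefaultPreparer appears to do the same
	if err != nil {
		return nil, nil, err
	}
	return opts, &sizeVerifier{expected: a.LfsSize}, nil
}

// uploadHandler wires the pieces together: railsAPI pre-authorizes the
// request, RequestBody stores the body and runs the verifier, and only then
// forwards the request to next without its original body.
func uploadHandler(railsAPI upload.PreAuthorizer, next http.Handler) http.Handler {
	return upload.RequestBody(railsAPI, next, &sizeCheckingPreparer{})
}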

View File

@@ -24,7 +24,7 @@ const (
fileLen = len(fileContent)
)
func TestBodyUploader(t *testing.T) {
func TestRequestBody(t *testing.T) {
testhelper.ConfigureSecret()
body := strings.NewReader(fileContent)
@@ -38,7 +38,7 @@ func TestBodyUploader(t *testing.T) {
require.Equal(t, fileContent, string(uploadEcho))
}
func TestBodyUploaderCustomPreparer(t *testing.T) {
func TestRequestBodyCustomPreparer(t *testing.T) {
body := strings.NewReader(fileContent)
resp := testUpload(&rails{}, &alwaysLocalPreparer{}, echoProxy(t, fileLen), body)
@@ -49,7 +49,7 @@ func TestBodyUploaderCustomPreparer(t *testing.T) {
require.Equal(t, fileContent, string(uploadEcho))
}
func TestBodyUploaderCustomVerifier(t *testing.T) {
func TestRequestBodyCustomVerifier(t *testing.T) {
body := strings.NewReader(fileContent)
verifier := &mockVerifier{}
@@ -62,11 +62,11 @@ func TestBodyUploaderCustomVerifier(t *testing.T) {
require.True(t, verifier.invoked, "Verifier.Verify not invoked")
}
func TestBodyUploaderAuthorizationFailure(t *testing.T) {
func TestRequestBodyAuthorizationFailure(t *testing.T) {
testNoProxyInvocation(t, http.StatusUnauthorized, &rails{unauthorized: true}, &alwaysLocalPreparer{})
}
func TestBodyUploaderErrors(t *testing.T) {
func TestRequestBodyErrors(t *testing.T) {
tests := []struct {
name string
preparer *alwaysLocalPreparer
@@ -95,7 +95,7 @@ func testUpload(auth PreAuthorizer, preparer Preparer, proxy http.Handler, body
req := httptest.NewRequest("POST", "http://example.com/upload", body)
w := httptest.NewRecorder()
BodyUploader(auth, proxy, preparer).ServeHTTP(w, req)
RequestBody(auth, proxy, preparer).ServeHTTP(w, req)
return w.Result()
}

View File

@@ -6,15 +6,15 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
)
// SkipRailsAuthorizer implements a fake PreAuthorizer that do not calls rails API and
// authorize each call as a local only upload to TempPath
// SkipRailsAuthorizer implements a fake PreAuthorizer that does not call
// the gitlab-rails API. It must be fast because it gets called on each
// request proxied to Rails.
type SkipRailsAuthorizer struct {
// TempPath is the temporary path for a local only upload
// TempPath is a directory where workhorse can store files that can later
// be accessed by gitlab-rails.
TempPath string
}
// PreAuthorizeHandler implements PreAuthorizer. It always grant the upload.
// The fake API response contains only TempPath
func (l *SkipRailsAuthorizer) PreAuthorizeHandler(next api.HandleFunc, _ string) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
next(w, r, &api.Response{TempPath: l.TempPath})

View File

@@ -15,7 +15,8 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
// These methods are allowed to have thread-unsafe implementations.
// MultipartFormProcessor abstracts away implementation differences
// between generic MIME multipart file uploads and CI artifact uploads.
type MultipartFormProcessor interface {
ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error
ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error
@@ -24,7 +25,10 @@ type MultipartFormProcessor interface {
Count() int
}
func HandleFileUploads(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) {
// InterceptMultipartFiles is the core of the implementation of
// Multipart. Because it is also used for CI artifact uploads it is a
// public function.
func InterceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) {
var body bytes.Buffer
writer := multipart.NewWriter(&body)
defer writer.Close()
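
Below, an illustrative sketch (not from this commit) of calling the renamed InterceptMultipartFiles from another package, the way the artifacts handler in this diff does, using SavedFileTracker as the MultipartFormProcessor; the wrapper function and its parameter names are assumptions.

package uploadexample

import (
	"net/http"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
)

// interceptUpload saves each multipart file part according to opts, rewrites
// the multipart body to reference the saved files, and forwards the rewritten
// request to next. preauth and opts would normally come from the Rails
// pre-authorization call and a Preparer.
func interceptUpload(w http.ResponseWriter, r *http.Request, next http.Handler, preauth *api.Response, opts *filestore.SaveFileOpts) {
	tracker := &upload.SavedFileTracker{Request: r}
	upload.InterceptMultipartFiles(w, r, next, preauth, tracker, opts)
}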

View File

@@ -78,7 +78,7 @@ func TestUploadHandlerForwardingRawData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, handler, apiResponse, nil, opts)
InterceptMultipartFiles(response, httpRequest, handler, apiResponse, nil, opts)
require.Equal(t, 202, response.Code)
require.Equal(t, "RESPONSE", response.Body.String(), "response body")
@@ -149,7 +149,7 @@ func TestUploadHandlerRewritingMultiPartData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 202, response.Code)
cancel() // this will trigger an async cleanup
@@ -218,7 +218,7 @@ func TestUploadHandlerDetectingInjectedMultiPartData(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, test.response, response.Code)
cancel() // this will trigger an async cleanup
@@ -248,7 +248,7 @@ func TestUploadProcessingField(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 500, response.Code)
}
@@ -279,7 +279,7 @@ func TestUploadingMultipleFiles(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 400, response.Code)
require.Equal(t, "upload request contains more than 10 files\n", response.Body.String())
@@ -335,7 +335,7 @@ func TestUploadProcessingFile(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, 200, response.Code)
})
@@ -381,7 +381,7 @@ func TestInvalidFileNames(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
InterceptMultipartFiles(response, httpRequest, nilHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
require.Equal(t, testCase.code, response.Code)
require.Equal(t, testCase.expectedPrefix, opts.TempFilePrefix)
}
@@ -447,7 +447,7 @@ func TestContentDispositionRewrite(t *testing.T) {
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, customHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
InterceptMultipartFiles(response, httpRequest, customHandler, apiResponse, &SavedFileTracker{Request: httpRequest}, opts)
upstreamRequest, err := http.ReadRequest(bufio.NewReader(&upstreamRequestBuffer))
require.NoError(t, err)
@@ -570,7 +570,7 @@ func runUploadTest(t *testing.T, image []byte, filename string, httpCode int, ts
opts, _, err := preparer.Prepare(apiResponse)
require.NoError(t, err)
HandleFileUploads(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
InterceptMultipartFiles(response, httpRequest, handler, apiResponse, &testFormProcessor{}, opts)
require.Equal(t, httpCode, response.Code)
}

View File

@@ -228,17 +228,17 @@ func configureRoutes(u *upstream) {
preparers := createUploadPreparers(u.Config)
uploadPath := path.Join(u.DocumentRoot, "uploads/tmp")
uploadAccelerateProxy := upload.Accelerate(&upload.SkipRailsAuthorizer{TempPath: uploadPath}, proxy, preparers.uploads)
ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", uploadAccelerateProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
tempfileMultipartProxy := upload.Multipart(&upload.SkipRailsAuthorizer{TempPath: uploadPath}, proxy, preparers.uploads)
ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", tempfileMultipartProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
ciAPILongPolling := builds.RegisterHandler(ciAPIProxyQueue, redis.WatchKey, u.APICILongPollingDuration)
dependencyProxyInjector.SetUploadHandler(upload.BodyUploader(api, signingProxy, preparers.packages))
dependencyProxyInjector.SetUploadHandler(upload.RequestBody(api, signingProxy, preparers.packages))
// Serve static files or forward the requests
defaultUpstream := static.ServeExisting(
u.URLPrefix,
staticpages.CacheDisabled,
static.DeployPage(static.ErrorPagesUnless(u.DevelopmentMode, staticpages.ErrorFormatHTML, uploadAccelerateProxy)),
static.DeployPage(static.ErrorPagesUnless(u.DevelopmentMode, staticpages.ErrorFormatHTML, tempfileMultipartProxy)),
)
probeUpstream := static.ErrorPagesUnless(u.DevelopmentMode, staticpages.ErrorFormatJSON, proxy)
healthUpstream := static.ErrorPagesUnless(u.DevelopmentMode, staticpages.ErrorFormatText, proxy)
@@ -248,7 +248,7 @@ func configureRoutes(u *upstream) {
u.route("GET", gitProjectPattern+`info/refs\z`, git.GetInfoRefsHandler(api)),
u.route("POST", gitProjectPattern+`git-upload-pack\z`, contentEncodingHandler(git.UploadPack(api)), withMatcher(isContentType("application/x-git-upload-pack-request"))),
u.route("POST", gitProjectPattern+`git-receive-pack\z`, contentEncodingHandler(git.ReceivePack(api)), withMatcher(isContentType("application/x-git-receive-pack-request"))),
u.route("PUT", gitProjectPattern+`gitlab-lfs/objects/([0-9a-f]{64})/([0-9]+)\z`, lfs.PutStore(api, signingProxy, preparers.lfs), withMatcher(isContentType("application/octet-stream"))),
u.route("PUT", gitProjectPattern+`gitlab-lfs/objects/([0-9a-f]{64})/([0-9]+)\z`, upload.RequestBody(api, signingProxy, preparers.lfs), withMatcher(isContentType("application/octet-stream"))),
// CI Artifacts
u.route("POST", apiPattern+`v4/jobs/[0-9]+/artifacts\z`, contentEncodingHandler(artifacts.UploadArtifacts(api, signingProxy, preparers.artifacts))),
@@ -276,56 +276,56 @@ func configureRoutes(u *upstream) {
// https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56731.
// Maven Artifact Repository
u.route("PUT", apiProjectPattern+`packages/maven/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/maven/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// Conan Artifact Repository
u.route("PUT", apiPattern+`v4/packages/conan/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/conan/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiPattern+`v4/packages/conan/`, upload.RequestBody(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/conan/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// Generic Packages Repository
u.route("PUT", apiProjectPattern+`packages/generic/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/generic/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// NuGet Artifact Repository
u.route("PUT", apiProjectPattern+`packages/nuget/`, upload.Accelerate(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/nuget/`, upload.Multipart(api, signingProxy, preparers.packages)),
// PyPI Artifact Repository
u.route("POST", apiProjectPattern+`packages/pypi`, upload.Accelerate(api, signingProxy, preparers.packages)),
u.route("POST", apiProjectPattern+`packages/pypi`, upload.Multipart(api, signingProxy, preparers.packages)),
// Debian Artifact Repository
u.route("PUT", apiProjectPattern+`packages/debian/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/debian/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// Gem Artifact Repository
u.route("POST", apiProjectPattern+`packages/rubygems/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("POST", apiProjectPattern+`packages/rubygems/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// Terraform Module Package Repository
u.route("PUT", apiProjectPattern+`packages/terraform/modules/`, upload.BodyUploader(api, signingProxy, preparers.packages)),
u.route("PUT", apiProjectPattern+`packages/terraform/modules/`, upload.RequestBody(api, signingProxy, preparers.packages)),
// Helm Artifact Repository
u.route("POST", apiProjectPattern+`packages/helm/api/[^/]+/charts\z`, upload.Accelerate(api, signingProxy, preparers.packages)),
u.route("POST", apiProjectPattern+`packages/helm/api/[^/]+/charts\z`, upload.Multipart(api, signingProxy, preparers.packages)),
// We are porting API to disk acceleration
// we need to declare each route until we have fixed all the routes on the rails codebase.
// Overall status can be seen at https://gitlab.com/groups/gitlab-org/-/epics/1802#current-status
u.route("POST", apiProjectPattern+`wikis/attachments\z`, uploadAccelerateProxy),
u.route("POST", apiPattern+`graphql\z`, uploadAccelerateProxy),
u.route("POST", apiTopicPattern, uploadAccelerateProxy),
u.route("PUT", apiTopicPattern, uploadAccelerateProxy),
u.route("POST", apiPattern+`v4/groups/import`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", apiPattern+`v4/projects/import`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", apiProjectPattern+`wikis/attachments\z`, tempfileMultipartProxy),
u.route("POST", apiPattern+`graphql\z`, tempfileMultipartProxy),
u.route("POST", apiTopicPattern, tempfileMultipartProxy),
u.route("PUT", apiTopicPattern, tempfileMultipartProxy),
u.route("POST", apiPattern+`v4/groups/import`, upload.Multipart(api, signingProxy, preparers.uploads)),
u.route("POST", apiPattern+`v4/projects/import`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Project Import via UI upload acceleration
u.route("POST", importPattern+`gitlab_project`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", importPattern+`gitlab_project`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Group Import via UI upload acceleration
u.route("POST", importPattern+`gitlab_group`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", importPattern+`gitlab_group`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Metric image upload
u.route("POST", apiProjectPattern+`issues/[0-9]+/metric_images\z`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", apiProjectPattern+`issues/[0-9]+/metric_images\z`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Requirements Import via UI upload acceleration
u.route("POST", projectPattern+`requirements_management/requirements/import_csv`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", projectPattern+`requirements_management/requirements/import_csv`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Uploads via API
u.route("POST", apiProjectPattern+`uploads\z`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", apiProjectPattern+`uploads\z`, upload.Multipart(api, signingProxy, preparers.uploads)),
// Explicitly proxy API requests
u.route("", apiPattern, proxy),
@@ -343,9 +343,9 @@ func configureRoutes(u *upstream) {
),
// Uploads
u.route("POST", projectPattern+`uploads\z`, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", snippetUploadPattern, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", userUploadPattern, upload.Accelerate(api, signingProxy, preparers.uploads)),
u.route("POST", projectPattern+`uploads\z`, upload.Multipart(api, signingProxy, preparers.uploads)),
u.route("POST", snippetUploadPattern, upload.Multipart(api, signingProxy, preparers.uploads)),
u.route("POST", userUploadPattern, upload.Multipart(api, signingProxy, preparers.uploads)),
// health checks don't intercept errors and go straight to rails
// TODO: We should probably not return a HTML deploy page?