Add latest changes from gitlab-org/gitlab@master

parent 6c26db46b8
commit 9a10662981

Gemfile
@@ -407,7 +407,7 @@ group :development, :test do
 end
 
 group :development, :test, :danger do
-  gem 'gitlab-dangerfiles', '~> 3.3.0', require: false
+  gem 'gitlab-dangerfiles', '~> 3.4.0', require: false
 end
 
 group :development, :test, :coverage do
@@ -475,7 +475,7 @@ GEM
     terminal-table (~> 1.5, >= 1.5.1)
     gitlab-chronic (0.10.5)
       numerizer (~> 0.2)
-    gitlab-dangerfiles (3.3.0)
+    gitlab-dangerfiles (3.4.0)
       danger (>= 8.4.5)
       danger-gitlab (>= 8.0.0)
       rake
@@ -1534,7 +1534,7 @@ DEPENDENCIES
   gitaly (~> 15.1.0.pre.rc1)
   github-markup (~> 1.7.0)
   gitlab-chronic (~> 0.10.5)
-  gitlab-dangerfiles (~> 3.3.0)
+  gitlab-dangerfiles (~> 3.4.0)
   gitlab-experiment (~> 0.7.1)
   gitlab-fog-azure-rm (~> 1.3.0)
   gitlab-labkit (~> 0.22.0)
@@ -72,7 +72,7 @@ export default {
     category="primary"
     variant="default"
     icon="pencil"
-    class="gl-mr-3 js-edit-button ml-2 pb-2"
+    class="gl-mr-3 js-edit-button gl-ml-3 gl-pb-3"
     :title="$options.i18n.editButton"
     :aria-label="$options.i18n.editButton"
     :href="editLink"
@@ -1,12 +1,6 @@
 - page_title s_("UsageQuota|Usage")
 
-- presenter_class = Namespaces::FreeUserCap::Projects::UsageQuotaLimitationsBannerPresenter
-- usage_quota_limits_banner_presenter = presenter_class.new(@project, current_user: current_user)
-
-- if usage_quota_limits_banner_presenter.visible?
-  = render Pajamas::AlertComponent.new(**usage_quota_limits_banner_presenter.alert_component_attributes) do |c|
-    - c.body do
-      = usage_quota_limits_banner_presenter.body_text
+= render_if_exists 'namespaces/free_user_cap/projects/usage_quota_limitations_banner'
 
 = render Pajamas::AlertComponent.new(title: _('Repository usage recalculation started'),
   variant: :info,
@@ -1,7 +1,7 @@
 ---
 name: refactor_security_extension
 introduced_by_url:
-rollout_issue_url:
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365320
 milestone: '14.10'
 type: development
 group: group::threat insights
@@ -4,5 +4,5 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87541
 rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/363410
 milestone: '15.1'
 type: development
-group: 'group::source code'
-default_enabled: false
+group: 'group::authentication and authorization'
+default_enabled: true
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage_monthly.manage.groups
 description: Number of users who are group members for last 28 days
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: subgroups
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage_monthly.manage.users_created
 description: Number of users created in the month
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: users
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage_monthly.manage.omniauth_providers
 description: List of unique OmniAuth providers
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: object
 status: active
@@ -4,7 +4,7 @@ name: count_distinct_users_using_two_factor_authentication
 description: Number of unique user logins using two factor authentication for available providers
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: object
 status: active
@@ -4,7 +4,7 @@ key_path: counts.groups
 description: Total count of groups as of usage ping snapshot
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: subgroups
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: counts.keys
 description: Number of keys.
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage.manage.groups
 description: Number of users who are group members.
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: subgroups
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage.manage.users_created
 description: Number of users
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category:
 value_type: number
 status: active
@@ -4,7 +4,7 @@ key_path: usage_activity_by_stage.manage.omniauth_providers
 description: List of unique OmniAuth providers
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: object
 status: active
@@ -4,7 +4,7 @@ name: count_distinct_users_using_two_factor_authentication
 description: Number of unique user logins using two factor authentication for available providers
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: object
 status: active
@@ -4,7 +4,7 @@ key_path: gravatar_enabled
 description: Whether gravatar is enabled
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: users
 value_type: boolean
 status: active
@@ -4,7 +4,7 @@ key_path: ldap_enabled
 description: Whether LDAP is enabled
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: boolean
 status: active
@@ -4,7 +4,7 @@ key_path: omniauth_enabled
 description: Whether OmniAuth is enabled
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: boolean
 status: active
@@ -4,7 +4,7 @@ key_path: signup_enabled
 description: Whether public signup is enabled
 product_section: dev
 product_stage: manage
-product_group: group::access
+product_group: group::authentication and authorization
 product_category: authentication_and_authorization
 value_type: boolean
 status: active
@@ -111,6 +111,10 @@ if changes.any?
     markdown_row_for_spin(spin.category, spin)
   end
 
+  roulette.required_approvals.each do |approval|
+    rows << markdown_row_for_spin(approval.category, approval.spin)
+  end
+
   markdown(REVIEW_ROULETTE_SECTION)
 
   if rows.empty?
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class AddIndexForOpenIssuesCount < Gitlab::Database::Migration[2.0]
+  disable_ddl_transaction!
+
+  INDEX_NAME = 'idx_open_issues_on_project_id_and_confidential'
+
+  def up
+    add_concurrent_index :issues, [:project_id, :confidential], where: 'state_id = 1', name: INDEX_NAME
+  end
+
+  def down
+    remove_concurrent_index_by_name :issues, INDEX_NAME
+  end
+end
@@ -0,0 +1 @@
+aff0fb4359747e74fd8275774f1644f0d0acd9f22469c88874cfcd0c7d44752f
@@ -26875,6 +26875,8 @@ CREATE UNIQUE INDEX idx_on_external_status_checks_project_id_external_url ON ext
 
 CREATE UNIQUE INDEX idx_on_external_status_checks_project_id_name ON external_status_checks USING btree (project_id, name);
 
+CREATE INDEX idx_open_issues_on_project_id_and_confidential ON issues USING btree (project_id, confidential) WHERE (state_id = 1);
+
 CREATE INDEX idx_packages_debian_group_component_files_on_architecture_id ON packages_debian_group_component_files USING btree (architecture_id);
 
 CREATE INDEX idx_packages_debian_project_component_files_on_architecture_id ON packages_debian_project_component_files USING btree (architecture_id);
@@ -718,6 +718,91 @@ variables:
 | `CACHE_COMPRESSION_LEVEL` | To adjust compression ratio, set to `fastest`, `fast`, `default`, `slow`, or `slowest`. This setting works with the Fastzip archiver only, so the GitLab Runner feature flag [`FF_USE_FASTZIP`](https://docs.gitlab.com/runner/configuration/feature-flags.html#available-feature-flags) must also be enabled. |
 | `CACHE_REQUEST_TIMEOUT` | Configure the maximum duration of cache upload and download operations for a single job in minutes. Default is `10` minutes. |
 
+## Artifact attestation
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28940) in GitLab Runner 15.1.
+
+GitLab Runner can generate attestation metadata for all build artifacts. To enable this feature, you must set the `RUNNER_GENERATE_ARTIFACTS_METADATA` environment variable to `true`. This variable can either be set globally or it can be set for individual jobs. The metadata is rendered in a plain text `.json` file that's stored with the artifact. The file name is as follows: `{JOB_ID}-artifacts-metadata.json`.
+
+### Attestation format
+
+The attestation metadata is generated in the [in-toto attestation format](https://github.com/in-toto/attestation) for spec version [v0.1](https://in-toto.io/Statement/v0.1). The following fields are populated by default:
+
+| Field | Value |
+| ------ | ------ |
+| `_type` | `https://in-toto.io/Statement/v0.1` |
+| `subject.name` | The filename of the artifact. |
+| `subject.digest.sha256` | The artifact's `sha256` checksum. |
+| `predicateType` | `https://slsa.dev/provenance/v0.2` |
+| `predicate.buildType` | `https://gitlab.com/gitlab-org/gitlab-runner/-/blob/{GITLAB_RUNNER_VERSION}/PROVENANCE.md`, where `{GITLAB_RUNNER_VERSION}` is the runner version, for example `v15.0.0`. |
+| `predicate.builder.id` | A URI pointing to the runner details page, for example `https://gitlab.com/gitlab-com/www-gitlab-com/-/runners/3785264`. |
+| `predicate.invocation.configSource.uri` | ``https://gitlab.example.com/.../{PROJECT_NAME}`` |
+| `predicate.invocation.configSource.digest.sha256` | The repository's `sha256` checksum. |
+| `predicate.invocation.configSource.entryPoint` | The name of the CI job that triggered the build. |
+| `predicate.invocation.environment.name` | The name of the runner. |
+| `predicate.invocation.environment.executor` | The runner executor. |
+| `predicate.invocation.environment.architecture` | The architecture on which the CI job is run. |
+| `predicate.invocation.parameters` | The names of any CI/CD or environment variables that were present when the build command was run. The value is always represented as an empty string to avoid leaking any secrets. |
+| `metadata.buildStartedOn` | The time when the build was started. `RFC3339` formatted. |
+| `metadata.buildFinishedOn` | The time when the build ended. Because metadata generation happens during the build, this moment is slightly earlier than the one reported in GitLab. `RFC3339` formatted. |
+| `metadata.reproducible` | Whether the build is reproducible by gathering all the generated metadata. Always `false`. |
+| `metadata.completeness.parameters` | Whether the parameters are supplied. Always `true`. |
+| `metadata.completeness.environment` | Whether the builder's environment is reported. Always `true`. |
+| `metadata.completeness.materials` | Whether the build materials are reported. Always `false`. |
+
+An example of an attestation that GitLab Runner might generate is as follows:
+
+```json
+{
+  "_type": "https://in-toto.io/Statement/v0.1",
+  "subject": [
+    {
+      "name": "script.sh",
+      "digest": {
+        "sha256": "f5ae5ced234922eebe6461d32228ba8ab9c3d0c0f3983a3bef707e6e1a1ab52a"
+      }
+    }
+  ],
+  "predicateType": "https://slsa.dev/provenance/v0.2",
+  "predicate": {
+    "buildType": "https://gitlab.com/gitlab-org/gitlab-runner/-/blob/v15.1.0/PROVENANCE.md",
+    "builder": {
+      "id": "https://gitlab.com/ggeorgiev_gitlab/playground/-/runners/14811533"
+    },
+    "invocation": {
+      "configSource": {
+        "uri": "https://gitlab.com/ggeorgiev_gitlab/playground",
+        "digest": {
+          "sha256": "f0582e2c9a16b5cc2cde90e8be8f1b50fd67c631"
+        },
+        "entryPoint": "whoami shell"
+      },
+      "environment": {
+        "name": "local",
+        "executor": "shell",
+        "architecture": "amd64"
+      },
+      "parameters": {
+        "CI_PIPELINE_ID": "",
+        "CI_PIPELINE_URL": "",
+        // All other CI variable names are listed here. Values are always represented as empty strings to avoid leaking secrets.
+      }
+    },
+    "metadata": {
+      "buildStartedOn": "2022-06-17T00:47:27+03:00",
+      "buildFinishedOn": "2022-06-17T00:47:28+03:00",
+      "completeness": {
+        "parameters": true,
+        "environment": true,
+        "materials": false
+      },
+      "reproducible": false
+    },
+    "materials": []
+  }
+}
+```
+
 ### Staging directory
 
 > [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3403) in GitLab Runner 15.0.
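As a hedged illustration (not part of this commit), a consumer of the metadata file could recompute an artifact's checksum and compare it with the `subject.digest.sha256` field; the artifact name below is taken from the example above:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"log"
	"os"
)

func main() {
	// Hypothetical local copy of the artifact from the example attestation.
	f, err := os.Open("script.sh")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Stream the file through a SHA-256 hasher.
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		log.Fatal(err)
	}

	// Compare this value with subject.digest.sha256 in
	// {JOB_ID}-artifacts-metadata.json.
	fmt.Println(hex.EncodeToString(h.Sum(nil)))
}
```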
@@ -185,11 +185,31 @@ GitLab Pages access control is disabled by default. To enable it:
      - 'gitlab-pages/gitlab-pages.conf'
    ```
 
+### Enabling object storage
+
+GitLab Pages supports using object storage for storing artifacts, but object storage
+is disabled by default. You can enable it in the GDK:
+
+1. Edit `gdk.yml` to enable the object storage in GitLab itself:
+
+   ```yaml
+   # $GDK_ROOT/gdk.yml
+   object_store:
+     enabled: true
+   ```
+
+1. Reconfigure and restart GitLab by running the commands `gdk reconfigure` and `gdk restart`.
+
+For more information, refer to the [GDK documentation](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/configuration.md#object-storage-configuration).
+
 ## Linting
 
 ```shell
 # Run the linter locally
 make lint
 
 # Run linter and fix issues (if supported by the linter)
 make format
 ```
 
 ## Testing
@@ -289,6 +289,18 @@ In the event of an emergency, or false positive from this job, add the
 `pipeline:skip-undercoverage` label to the merge request to allow this job to
 fail.
 
+### Troubleshooting `rspec:undercoverage` failures
+
+The `rspec:undercoverage` job has [known bugs](https://gitlab.com/groups/gitlab-org/-/epics/8254)
+that can cause false positive failures. You can test coverage locally to determine if it's
+safe to apply `~"pipeline:skip-undercoverage"`. For example, using `<spec>` as the name of the
+test causing the failure:
+
+1. Run `SIMPLECOV=1 bundle exec rspec <spec>`.
+1. Run `scripts/undercoverage`.
+
+If these commands return `undercover: ✅ No coverage is missing in latest changes` then you can apply `~"pipeline:skip-undercoverage"` to bypass pipeline failures.
+
 ## Ruby versions testing
 
 Our test suite runs against Ruby 2 in merge requests and default branch pipelines.
@@ -45,12 +45,24 @@ Here are some problems with current issues usage and why we are looking into wor
 - Codebase maintainability and feature development become bigger challenges as we grow issues
   beyond its core role of issue tracking into supporting the different types and subtle differences between them.
 
-## Work item and work item type terms
+## Work item terminology
 
-Using the terms "issue" or "issuable" to reference the types of collaboration objects
-(for example, issue, bug, feature, or epic) often creates confusion. To avoid confusion, we will use the term
-work item type (WIT) when referring to the type of a collaboration object.
-An instance of a WIT is a work item (WI). For example, `issue#123`, `bug#456`, `requirement#789`.
+To avoid confusion and ensure communication is efficient, we will use the following terms exclusively when discussing work items.
+
+| Term | Description | Example of misuse | Should be |
+| --- | --- | --- | --- |
+| work item type | Classes of work item; for example: issue, requirement, test case, incident, or task | _Epics will eventually become issues_ | _Epics will eventually become a **work item type**_ |
+| work item | An instance of a work item type | | |
+| work item view | The new frontend view that renders work items of any type | | |
+| legacy issue view | The existing view used to render issues and incidents | | |
+| issue | The existing issue model | | |
+| issuable | Any model currently using the issuable module (issues, epics and MRs) | _Incidents are an **issuable**_ | _Incidents are a **work item type**_ |
+
+Some terms have been used in the past but have since become confusing and are now discouraged.
+
+| Term | Description | Example of misuse | Should be |
+| --- | --- | --- | --- |
+| issue type | A former way to refer to classes of work item | _Tasks are an **issue type**_ | _Tasks are a **work item type**_ |
 
 ### Migration strategy
@@ -50,13 +50,12 @@ By default, the GitLab.com and self-managed settings for the
 
 ### Block banned or compromised keys **(FREE)**
 
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24614) in GitLab 15.1 [with a flag](../administration/feature_flags.md) named `ssh_banned_key`. Disabled by default.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24614) in GitLab 15.1 [with a flag](../administration/feature_flags.md) named `ssh_banned_key`. Enabled by default.
 
 FLAG:
-On self-managed GitLab, by default this feature is not available. To make it available,
-ask an administrator to [enable the feature flag](../administration/feature_flags.md) named `ssh_banned_key`.
-On GitLab.com, this feature is available but can be configured by GitLab.com administrators only.
-The feature is not ready for production use.
+On self-managed GitLab, by default this feature is available. To hide the feature per user,
+ask an administrator to [disable the feature flag](../administration/feature_flags.md) named `ssh_banned_key`.
+On GitLab.com, this feature is available.
 
 When users attempt to [add a new SSH key](../user/ssh.md#add-an-ssh-key-to-your-gitlab-account)
 to GitLab accounts, the key is checked against a list of SSH keys which are known
@@ -156,8 +156,6 @@ type Response struct {
 	ShowAllRefs bool
 	// Detects whether an artifact is used for code intelligence
 	ProcessLsif bool
-	// Detects whether LSIF artifact will be parsed with references
-	ProcessLsifReferences bool
 	// The maximum accepted size in bytes of the upload
 	MaximumSize int64
 }
@@ -14,8 +14,8 @@ type cache struct {
 	chunkSize int64
 }
 
-func newCache(tempDir, filename string, data interface{}) (*cache, error) {
-	f, err := os.CreateTemp(tempDir, filename)
+func newCache(filename string, data interface{}) (*cache, error) {
+	f, err := os.CreateTemp("", filename)
 	if err != nil {
 		return nil, err
 	}
@@ -13,7 +13,7 @@ type chunk struct {
 }
 
 func TestCache(t *testing.T) {
-	cache, err := newCache("", "test-chunks", chunk{})
+	cache, err := newCache("test-chunks", chunk{})
 	require.NoError(t, err)
 	defer cache.Close()
 
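The two hunks above drop the caller-supplied temp directory in favor of `os.CreateTemp`'s default. For context, a minimal standalone sketch (not part of the commit) of the standard-library behavior this relies on: an empty `dir` argument places the file in `os.TempDir()`.

```go
package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	// An empty dir argument means "use the default directory for temporary
	// files", i.e. os.TempDir() -- the property the newCache change relies on.
	f, err := os.CreateTemp("", "test-chunks")
	if err != nil {
		log.Fatal(err)
	}
	defer os.Remove(f.Name()) // clean up the temp file on exit
	defer f.Close()

	fmt.Println(f.Name()) // e.g. /tmp/test-chunks1234567890
}
```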
@@ -35,8 +35,8 @@ type Metadata struct {
 	Root string `json:"projectRoot"`
 }
 
-func NewDocs(config Config) (*Docs, error) {
-	ranges, err := NewRanges(config)
+func NewDocs() (*Docs, error) {
+	ranges, err := NewRanges()
 	if err != nil {
 		return nil, err
 	}
@@ -14,7 +14,7 @@ func createLine(id, label, uri string) []byte {
 }
 
 func TestParse(t *testing.T) {
-	d, err := NewDocs(Config{})
+	d, err := NewDocs()
 	require.NoError(t, err)
 	defer d.Close()
 
@@ -31,7 +31,7 @@ func TestParse(t *testing.T) {
 }
 
 func TestParseContainsLine(t *testing.T) {
-	d, err := NewDocs(Config{})
+	d, err := NewDocs()
 	require.NoError(t, err)
 	defer d.Close()
 
@@ -44,7 +44,7 @@ func TestParseContainsLine(t *testing.T) {
 }
 
 func TestParsingVeryLongLine(t *testing.T) {
-	d, err := NewDocs(Config{})
+	d, err := NewDocs()
 	require.NoError(t, err)
 	defer d.Close()
 
@@ -35,10 +35,8 @@ type ResultSetRef struct {
 	RefId Id `json:"inV"`
 }
 
-func NewHovers(config Config) (*Hovers, error) {
-	tempPath := config.TempPath
-
-	file, err := os.CreateTemp(tempPath, "hovers")
+func NewHovers() (*Hovers, error) {
+	file, err := os.CreateTemp("", "hovers")
 	if err != nil {
 		return nil, err
 	}
@@ -47,7 +45,7 @@ func NewHovers(config Config) (*Hovers, error) {
 		return nil, err
 	}
 
-	offsets, err := newCache(tempPath, "hovers-indexes", Offset{})
+	offsets, err := newCache("hovers-indexes", Offset{})
 	if err != nil {
 		return nil, err
 	}
@@ -19,7 +19,7 @@ func TestHoversRead(t *testing.T) {
 }
 
 func setupHovers(t *testing.T) *Hovers {
-	h, err := NewHovers(Config{})
+	h, err := NewHovers()
 	require.NoError(t, err)
 
 	require.NoError(t, h.Read("hoverResult", []byte(`{"id":"2","label":"hoverResult","result":{"contents": ["hello"]}}`)))
@@ -21,18 +21,14 @@ type Parser struct {
 	pr *io.PipeReader
 }
 
-type Config struct {
-	TempPath string
-}
-
-func NewParser(ctx context.Context, r io.Reader, config Config) (io.ReadCloser, error) {
-	docs, err := NewDocs(config)
+func NewParser(ctx context.Context, r io.Reader) (io.ReadCloser, error) {
+	docs, err := NewDocs()
 	if err != nil {
 		return nil, err
 	}
 
 	// ZIP files need to be seekable. Don't hold it all in RAM, use a tempfile
-	tempFile, err := os.CreateTemp(config.TempPath, Lsif)
+	tempFile, err := os.CreateTemp("", Lsif)
 	if err != nil {
 		return nil, err
 	}
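A minimal usage sketch of the simplified parser API, assuming a caller that owns the input reader; the file name is hypothetical, and the import only resolves inside the workhorse module because the package is internal:

```go
package main

import (
	"context"
	"io"
	"log"
	"os"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
)

func main() {
	// Hypothetical LSIF dump; NewParser consumes any io.Reader.
	f, err := os.Open("dump.lsif")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// No parser.Config anymore: temp files land in os.TempDir().
	p, err := parser.NewParser(context.Background(), f)
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()

	// The returned ReadCloser streams the transformed output.
	if _, err := io.Copy(io.Discard, p); err != nil {
		log.Fatal(err)
	}
}
```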
@@ -42,7 +42,7 @@ func createFiles(t *testing.T, filePath, tmpDir string) {
 	file, err := os.Open(filePath)
 	require.NoError(t, err)
 
-	parser, err := NewParser(context.Background(), file, Config{})
+	parser, err := NewParser(context.Background(), file)
 	require.NoError(t, err)
 
 	zipFileName := tmpDir + ".zip"
@@ -21,7 +21,7 @@ func BenchmarkGenerate(b *testing.B) {
 	file, err := os.Open(filePath)
 	require.NoError(b, err)
 
-	parser, err := NewParser(context.Background(), file, Config{})
+	parser, err := NewParser(context.Background(), file)
 	require.NoError(b, err)
 
 	_, err = io.Copy(io.Discard, parser)
@@ -50,18 +50,18 @@ type SerializedRange struct {
 	References []SerializedReference `json:"references,omitempty"`
 }
 
-func NewRanges(config Config) (*Ranges, error) {
-	hovers, err := NewHovers(config)
+func NewRanges() (*Ranges, error) {
+	hovers, err := NewHovers()
 	if err != nil {
 		return nil, err
 	}
 
-	references, err := NewReferences(config)
+	references, err := NewReferences()
 	if err != nil {
 		return nil, err
 	}
 
-	cache, err := newCache(config.TempPath, "ranges", Range{})
+	cache, err := newCache("ranges", Range{})
 	if err != nil {
 		return nil, err
 	}
@@ -42,7 +42,7 @@ func TestSerialize(t *testing.T) {
 }
 
 func setup(t *testing.T) (*Ranges, func()) {
-	r, err := NewRanges(Config{})
+	r, err := NewRanges()
 	require.NoError(t, err)
 
 	require.NoError(t, r.Read("range", []byte(`{"id":1,"label":"range","start":{"line":1,"character":2}}`)))
@@ -19,15 +19,13 @@ type SerializedReference struct {
 	Path string `json:"path"`
 }
 
-func NewReferences(config Config) (*References, error) {
-	tempPath := config.TempPath
-
-	items, err := newCache(tempPath, "references", Item{})
+func NewReferences() (*References, error) {
+	items, err := newCache("references", Item{})
 	if err != nil {
 		return nil, err
 	}
 
-	offsets, err := newCache(tempPath, "references-offsets", ReferencesOffset{})
+	offsets, err := newCache("references-offsets", ReferencesOffset{})
 	if err != nil {
 		return nil, err
 	}
@@ -12,7 +12,7 @@ func TestReferencesStore(t *testing.T) {
 		refId = 3
 	)
 
-	r, err := NewReferences(Config{})
+	r, err := NewReferences()
 	require.NoError(t, err)
 
 	err = r.Store(refId, []Item{{Line: 2, DocId: docId}, {Line: 3, DocId: docId}})
@@ -30,7 +30,7 @@ func TestReferencesStore(t *testing.T) {
 func TestReferencesStoreEmpty(t *testing.T) {
 	const refId = 3
 
-	r, err := NewReferences(Config{})
+	r, err := NewReferences()
 	require.NoError(t, err)
 
 	err = r.Store(refId, []Item{})
@@ -3,6 +3,7 @@ package upload
 import (
 	"context"
 	"fmt"
 	"io"
 	"mime/multipart"
 	"net/http"
+	"os"
@@ -16,6 +17,7 @@ import (
 
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+	"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
 )
@@ -34,8 +36,9 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
 	}, []string{"error"})
 
 type artifactsUploadProcessor struct {
-	opts   *destination.UploadOpts
-	format string
+	format      string
+	processLSIF bool
+	tempDir     string
 
 	SavedFileTracker
 }
@@ -43,16 +46,11 @@ type artifactsUploadProcessor struct {
 // Artifacts is like a Multipart but specific for artifacts upload.
 func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
 	return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) {
-		opts, err := p.Prepare(a)
-		if err != nil {
-			helper.Fail500(w, r, fmt.Errorf("UploadArtifacts: error preparing file storage options"))
-			return
-		}
-
 		format := r.URL.Query().Get(ArtifactFormatKey)
 		mg := &artifactsUploadProcessor{
-			opts:             opts,
 			format:           format,
+			processLSIF:      a.ProcessLsif,
+			tempDir:          a.TempPath,
 			SavedFileTracker: SavedFileTracker{Request: r},
 		}
 		interceptMultipartFiles(w, r, h, mg, &eagerAuthorizer{a}, p)
@@ -61,7 +59,7 @@ func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
 
 func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
 	metaOpts := &destination.UploadOpts{
-		LocalTempPath: a.opts.LocalTempPath,
+		LocalTempPath: a.tempDir,
 	}
 	if metaOpts.LocalTempPath == "" {
 		metaOpts.LocalTempPath = os.TempDir()
@@ -115,10 +113,10 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
 
 func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
 	// ProcessFile for artifacts requires file form-data field name to eq `file`
-
 	if formName != "file" {
 		return fmt.Errorf("invalid form field: %q", formName)
 	}
+
 	if a.Count() > 0 {
 		return fmt.Errorf("artifacts request contains more than one file")
 	}
@@ -134,7 +132,6 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
 		return nil
 	}
 
-	// TODO: can we rely on disk for shipping metadata? Not if we split workhorse and rails in 2 different PODs
 	metadata, err := a.generateMetadataFromZip(ctx, file)
 	if err != nil {
 		return err
@@ -156,6 +153,12 @@ func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName str
 	return nil
 }
 
-func (a *artifactsUploadProcessor) Name() string {
-	return "artifacts"
-}
+func (a *artifactsUploadProcessor) Name() string { return "artifacts" }
+
+func (a *artifactsUploadProcessor) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+	if a.processLSIF {
+		return parser.NewParser(ctx, r)
+	}
+
+	return a.SavedFileTracker.TransformContents(ctx, filename, r)
+}
@@ -0,0 +1,91 @@
+package upload
+
+import (
+	"context"
+	"io"
+	"net/http"
+	"os"
+
+	"gitlab.com/gitlab-org/labkit/log"
+	"golang.org/x/image/tiff"
+
+	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
+)
+
+func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
+	tmpfile, err := os.CreateTemp("", "exifremove")
+	if err != nil {
+		return nil, err
+	}
+	go func() {
+		<-ctx.Done()
+		tmpfile.Close()
+	}()
+	if err := os.Remove(tmpfile.Name()); err != nil {
+		return nil, err
+	}
+
+	_, err = io.Copy(tmpfile, r)
+	if err != nil {
+		return nil, err
+	}
+
+	if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+		return nil, err
+	}
+
+	isValidType := false
+	switch imageType {
+	case exif.TypeJPEG:
+		isValidType = isJPEG(tmpfile)
+	case exif.TypeTIFF:
+		isValidType = isTIFF(tmpfile)
+	}
+
+	if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
+		return nil, err
+	}
+
+	if !isValidType {
+		log.WithContextFields(ctx, log.Fields{
+			"filename":  filename,
+			"imageType": imageType,
+		}).Info("invalid content type, not running exiftool")
+
+		return tmpfile, nil
+	}
+
+	log.WithContextFields(ctx, log.Fields{
+		"filename": filename,
+	}).Info("running exiftool to remove any metadata")
+
+	cleaner, err := exif.NewCleaner(ctx, tmpfile)
+	if err != nil {
+		return nil, err
+	}
+
+	return cleaner, nil
+}
+
+func isTIFF(r io.Reader) bool {
+	_, err := tiff.DecodeConfig(r)
+	if err == nil {
+		return true
+	}
+
+	if _, unsupported := err.(tiff.UnsupportedError); unsupported {
+		return true
+	}
+
+	return false
+}
+
+func isJPEG(r io.Reader) bool {
+	// Only the first 512 bytes are used to sniff the content type.
+	buf, err := io.ReadAll(io.LimitReader(r, 512))
+	if err != nil {
+		return false
+	}
+
+	return http.DetectContentType(buf) == "image/jpeg"
+}
@@ -9,18 +9,15 @@ import (
 	"mime/multipart"
 	"net/http"
 	"net/textproto"
-	"os"
 	"path/filepath"
 	"strings"
 
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
-	"golang.org/x/image/tiff"
 
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
 
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
-	"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
 )
@@ -151,22 +148,11 @@ func (rew *rewriter) handleFilePart(r *http.Request, name string, p *multipart.P
 		return err
 	}
 
-	var inputReader io.ReadCloser
 	ctx := r.Context()
-	if imageType := exif.FileTypeFromSuffix(filename); imageType != exif.TypeUnknown {
-		inputReader, err = handleExifUpload(ctx, p, filename, imageType)
-		if err != nil {
-			return err
-		}
-	} else if apiResponse.ProcessLsif {
-		inputReader, err = handleLsifUpload(ctx, p, opts.LocalTempPath, filename)
-		if err != nil {
-			return err
-		}
-	} else {
-		inputReader = io.NopCloser(p)
-	}
-
+	inputReader, err := rew.filter.TransformContents(ctx, filename, p)
+	if err != nil {
+		return err
+	}
 	defer inputReader.Close()
 
 	fh, err := destination.Upload(ctx, inputReader, -1, filename, opts)
@@ -194,92 +180,6 @@ func (rew *rewriter) handleFilePart(r *http.Request, name string, p *multipart.P
 	return rew.filter.ProcessFile(ctx, name, fh, rew.writer)
 }
 
-func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageType exif.FileType) (io.ReadCloser, error) {
-	tmpfile, err := os.CreateTemp("", "exifremove")
-	if err != nil {
-		return nil, err
-	}
-	go func() {
-		<-ctx.Done()
-		tmpfile.Close()
-	}()
-	if err := os.Remove(tmpfile.Name()); err != nil {
-		return nil, err
-	}
-
-	_, err = io.Copy(tmpfile, r)
-	if err != nil {
-		return nil, err
-	}
-
-	if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
-		return nil, err
-	}
-
-	isValidType := false
-	switch imageType {
-	case exif.TypeJPEG:
-		isValidType = isJPEG(tmpfile)
-	case exif.TypeTIFF:
-		isValidType = isTIFF(tmpfile)
-	}
-
-	if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
-		return nil, err
-	}
-
-	if !isValidType {
-		log.WithContextFields(ctx, log.Fields{
-			"filename":  filename,
-			"imageType": imageType,
-		}).Info("invalid content type, not running exiftool")
-
-		return tmpfile, nil
-	}
-
-	log.WithContextFields(ctx, log.Fields{
-		"filename": filename,
-	}).Info("running exiftool to remove any metadata")
-
-	cleaner, err := exif.NewCleaner(ctx, tmpfile)
-	if err != nil {
-		return nil, err
-	}
-
-	return cleaner, nil
-}
-
-func isTIFF(r io.Reader) bool {
-	_, err := tiff.DecodeConfig(r)
-	if err == nil {
-		return true
-	}
-
-	if _, unsupported := err.(tiff.UnsupportedError); unsupported {
-		return true
-	}
-
-	return false
-}
-
-func isJPEG(r io.Reader) bool {
-	// Only the first 512 bytes are used to sniff the content type.
-	buf, err := io.ReadAll(io.LimitReader(r, 512))
-	if err != nil {
-		return false
-	}
-
-	return http.DetectContentType(buf) == "image/jpeg"
-}
-
-func handleLsifUpload(ctx context.Context, reader io.Reader, tempPath, filename string) (io.ReadCloser, error) {
-	parserConfig := parser.Config{
-		TempPath: tempPath,
-	}
-
-	return parser.NewParser(ctx, reader, parserConfig)
-}
-
 func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
 	np, err := rew.writer.CreatePart(p.Header)
 	if err != nil {
@@ -3,11 +3,13 @@ package upload
 import (
 	"context"
 	"fmt"
+	"io"
 	"mime/multipart"
 	"net/http"
 
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
 	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
 )
 
 type SavedFileTracker struct {
@@ -54,6 +56,12 @@ func (s *SavedFileTracker) Finalize(_ context.Context) error {
 	return nil
 }
 
-func (s *SavedFileTracker) Name() string {
-	return "accelerate"
-}
+func (s *SavedFileTracker) Name() string { return "accelerate" }
+
+func (*SavedFileTracker) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
+	if imageType := exif.FileTypeFromSuffix(filename); imageType != exif.TypeUnknown {
+		return handleExifUpload(ctx, r, filename, imageType)
+	}
+
+	return io.NopCloser(r), nil
+}
@@ -36,6 +36,7 @@ type MultipartFormProcessor interface {
 	Finalize(ctx context.Context) error
 	Name() string
 	Count() int
+	TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error)
 }
 
 // interceptMultipartFiles is the core of the implementation of
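For orientation, a hedged sketch of what the widened interface asks of implementers; the `noopProcessor` name is hypothetical, the other interface methods are elided, and the pass-through mirrors the `io.NopCloser` fallback in `SavedFileTracker` above:

```go
package upload_test // hypothetical package; sketches the contract only

import (
	"context"
	"io"
)

// noopProcessor sketches the part of MultipartFormProcessor this commit adds.
type noopProcessor struct{}

// TransformContents receives each uploaded file's stream and may replace it
// with a transformed one (EXIF stripping and LSIF parsing in this commit);
// this sketch hands the bytes through to the uploader unchanged.
func (noopProcessor) TransformContents(ctx context.Context, filename string, r io.Reader) (io.ReadCloser, error) {
	return io.NopCloser(r), nil
}
```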