Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-08-05 09:10:01 +00:00
parent bd4eece38d
commit 88ad172d04
25 changed files with 619 additions and 238 deletions

@@ -44,6 +44,22 @@ PreCommit:
# on_warn: fail # Treat all warnings as failures
ScssLint:
enabled: true
MarkdownLint:
enabled: true
description: 'Lint documentation for Markdown errors'
required_executable: 'node_modules/.bin/markdownlint'
flags: ['--config', '.markdownlint.json', 'doc/**/*.md']
install_command: 'yarn install'
include:
- 'doc/**/*.md'
Vale:
enabled: true
description: 'Lint documentation for grammatical and formatting errors'
required_executable: 'vale'
flags: ['--config', '.vale.ini', '--minAlertLevel', 'error', 'doc']
install_command: 'brew install vale # (or use another package manager)'
include:
- 'doc/**/*.md'
CommitMsg:
TextWidth:

@@ -4,6 +4,7 @@ import { GlIcon, GlTooltip, GlTooltipDirective } from '@gitlab/ui';
import { sprintf } from '~/locale';
import IssueMilestone from './issue_milestone.vue';
import IssueAssignees from './issue_assignees.vue';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
import relatedIssuableMixin from '../../mixins/related_issuable_mixin';
import CiIcon from '../ci_icon.vue';
@@ -15,6 +16,8 @@ export default {
CiIcon,
GlIcon,
GlTooltip,
IssueWeight: () => import('ee_component/boards/components/issue_card_weight.vue'),
IssueDueDate,
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -120,8 +123,21 @@ export default {
/>
<!-- Flex order for slots is defined in the parent component: e.g. related_issues_block.vue -->
<slot name="dueDate"></slot>
<slot name="weight"></slot>
<span v-if="weight > 0" class="order-md-1">
<issue-weight
:weight="weight"
class="item-weight gl-display-flex gl-align-items-center"
tag-name="span"
/>
</span>
<span v-if="dueDate" class="order-md-1">
<issue-due-date
:date="dueDate"
tooltip-placement="top"
css-class="item-due-date gl-display-flex gl-align-items-center"
/>
</span>
<issue-assignees
v-if="hasAssignees"

@@ -311,10 +311,6 @@
content: '\f1b3';
}
.fa-times-circle::before {
content: '\f057';
}
.fa-skype::before {
content: '\f17e';
}

@@ -249,7 +249,7 @@ input[type='checkbox']:hover {
.search-clear {
position: absolute;
right: 10px;
top: 10px;
top: 9px;
padding: 0;
color: $gray-darkest;
line-height: 0;

@@ -22,6 +22,7 @@ module Packages
package_detail[:maven_metadatum] = @package.maven_metadatum if @package.maven_metadatum
package_detail[:nuget_metadatum] = @package.nuget_metadatum if @package.nuget_metadatum
package_detail[:composer_metadatum] = @package.composer_metadatum if @package.composer_metadatum
package_detail[:dependency_links] = @package.dependency_links.map(&method(:build_dependency_links))
package_detail[:pipeline] = build_pipeline_info(@package.build_info.pipeline) if @package.build_info

@@ -169,10 +169,6 @@ module ObjectStorage
object_store_options.connection.to_hash.deep_symbolize_keys
end
def consolidated_settings?
object_store_options.fetch('consolidated_settings', false)
end
def remote_store_path
object_store_options.remote_directory
end
@@ -193,14 +189,18 @@ module ObjectStorage
File.join(self.root, TMP_UPLOAD_PATH)
end
def object_store_config
ObjectStorage::Config.new(object_store_options)
end
def workhorse_remote_upload_options(has_length:, maximum_size: nil)
return unless self.object_store_enabled?
return unless self.direct_upload_enabled?
id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
upload_path = File.join(TMP_UPLOAD_PATH, id)
direct_upload = ObjectStorage::DirectUpload.new(self.object_store_credentials, remote_store_path, upload_path,
has_length: has_length, maximum_size: maximum_size, consolidated_settings: consolidated_settings?)
direct_upload = ObjectStorage::DirectUpload.new(self.object_store_config, upload_path,
has_length: has_length, maximum_size: maximum_size)
direct_upload.to_hash.merge(ID: id)
end
@@ -283,6 +283,10 @@ module ObjectStorage
self.class.object_store_credentials
end
def fog_attributes
@fog_attributes ||= self.class.object_store_config.fog_attributes
end
# Set ACL of uploaded objects to not-public (fog-aws)[1] or no ACL at all
# (fog-google). Value is ignored by other supported backends (fog-aliyun,
# fog-openstack, fog-rackspace)

@@ -72,10 +72,10 @@
.btn-group
- if runner.active?
= link_to pause_group_runner_path(@group, runner), method: :post, class: 'btn btn-default has-tooltip', title: _('Pause'), ref: 'tooltip', aria: { label: _('Pause') }, data: { placement: 'top', container: 'body', confirm: _('Are you sure?') } do
= icon('pause')
= sprite_icon('pause')
- else
= link_to resume_group_runner_path(@group, runner), method: :post, class: 'btn btn-default has-tooltip', title: _('Resume'), ref: 'tooltip', aria: { label: _('Resume') }, data: { placement: 'top', container: 'body'} do
= icon('play')
= sprite_icon('play')
- if runner.belongs_to_more_than_one_project?
.btn-group
.btn.btn-danger.has-tooltip{ 'aria-label' => 'Remove', 'data-container' => 'body', 'data-original-title' => _('Multi-project Runners cannot be removed'), 'data-placement' => 'top', disabled: 'disabled' }

@@ -11,7 +11,7 @@
= search_field_tag :search, params[:search], placeholder: _("Search for projects, issues, etc."), class: "form-control search-text-input js-search-input", id: "dashboard_search", autofocus: true, spellcheck: false
= icon("search", class: "search-icon")
%button.search-clear.js-search-clear{ class: ("hidden" if !params[:search].present?), type: "button", tabindex: "-1" }
= icon("times-circle")
= sprite_icon('clear', size: 16)
%span.sr-only
= _("Clear search")
- unless params[:snippets].eql? 'true'

@@ -0,0 +1,5 @@
---
title: Replace fa-play/pause icons with svg
merge_request: 38535
author:
type: other

@@ -0,0 +1,5 @@
---
title: Replace times-circle with GitLab SVG clear icon
merge_request: 38409
author:
type: other

@@ -0,0 +1,5 @@
---
title: Add support for specifying AWS S3 Server Side Encryption (AWS-KMS)
merge_request: 38240
author:
type: added

@@ -218,6 +218,9 @@ production: &base
# region: us-east-1
# aws_signature_version: 4 # For creation of signed URLs. Set to 2 if provider does not support v4.
# endpoint: 'https://s3.amazonaws.com' # default: nil - Useful for S3 compliant services such as DigitalOcean Spaces
# storage_options:
# server_side_encryption: AES256 # AES256, aws:kms
# server_side_encryption_kms_key_id: # Amazon Resource Name. See https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html
# objects:
# artifacts:
# bucket: artifacts

@@ -0,0 +1,22 @@
# frozen_string_literal: true
require "carrierwave/storage/fog"
# This pulls in https://github.com/carrierwaveuploader/carrierwave/pull/2504 to support
# sending AWS S3 encryption headers when copying objects.
module CarrierWave
module Storage
class Fog < Abstract
class File
def copy_to(new_path)
connection.copy_object(@uploader.fog_directory, file.key, @uploader.fog_directory, new_path, copy_to_options)
CarrierWave::Storage::Fog::File.new(@uploader, @base, new_path)
end
def copy_to_options
acl_header.merge(@uploader.fog_attributes)
end
end
end
end
end

@@ -13,6 +13,7 @@ class ObjectStoreSettings
object_store['direct_upload'] = false if object_store['direct_upload'].nil?
object_store['background_upload'] = true if object_store['background_upload'].nil?
object_store['proxy_download'] = false if object_store['proxy_download'].nil?
object_store['storage_options'] ||= {}
# Convert upload connection settings to use string keys, to make Fog happy
object_store['connection']&.deep_stringify_keys!
@@ -37,6 +38,8 @@ class ObjectStoreSettings
# region: gdk
# endpoint: 'http://127.0.0.1:9000'
# path_style: true
# storage_options:
# server_side_encryption: AES256
# proxy_download: true
# objects:
# artifacts:
@@ -49,7 +52,7 @@ class ObjectStoreSettings
#
# Settings.artifacts['object_store'] = {
# "enabled" => true,
# "connection"=> {
# "connection" => {
# "provider" => "AWS",
# "aws_access_key_id" => "minio",
# "aws_secret_access_key" => "gdk-minio",
@@ -57,6 +60,9 @@ class ObjectStoreSettings
# "endpoint" => "http://127.0.0.1:9000",
# "path_style" => true
# },
# "storage_options" => {
# "server_side_encryption" => "AES256"
# },
# "direct_upload" => true,
# "background_upload" => false,
# "proxy_download" => false,
@@ -73,6 +79,9 @@ class ObjectStoreSettings
# "endpoint" => "http://127.0.0.1:9000",
# "path_style" => true
# },
# "storage_options" => {
# "server_side_encryption" => "AES256"
# },
# "direct_upload" => true,
# "background_upload" => false,
# "proxy_download" => true,
@@ -91,12 +100,13 @@ class ObjectStoreSettings
return unless use_consolidated_settings?
main_config = settings['object_store']
common_config = main_config.slice('enabled', 'connection', 'proxy_download')
common_config = main_config.slice('enabled', 'connection', 'proxy_download', 'storage_options')
# Convert connection settings to use string keys, to make Fog happy
common_config['connection']&.deep_stringify_keys!
# These are no longer configurable if common config is used
common_config['direct_upload'] = true
common_config['background_upload'] = false
common_config['storage_options'] ||= {}
SUPPORTED_TYPES.each do |store_type|
overrides = main_config.dig('objects', store_type) || {}

@@ -387,6 +387,7 @@ reverify
Rubix
Rubocop
Rubular
ruleset
runbook
runbooks
runit

@@ -10,26 +10,23 @@ we suggest investigating to see if a plugin exists. For instance here is the
## Pre-commit static analysis
You're strongly advised to install
[Overcommit](https://github.com/sds/overcommit) to automatically check for
You should install [`overcommit`](https://github.com/sds/overcommit) to automatically check for
static analysis offenses before committing locally.
In your GitLab source directory run:
After installing `overcommit`, run the following in your GitLab source directory:
```shell
make -C tooling/overcommit
```
Then before a commit is created, Overcommit will automatically check for
RuboCop (and other checks) offenses on every modified file.
Then before a commit is created, `overcommit` automatically checks for RuboCop (and other checks)
offenses on every modified file.
This saves you time as you don't have to wait for the same errors to be detected
by the CI.
This saves you time as you don't have to wait for the same errors to be detected by CI/CD.
Overcommit relies on a pre-commit hook to prevent commits that violate its ruleset.
If you wish to override this behavior, it can be done by passing the ENV variable
`OVERCOMMIT_DISABLE`; i.e. `OVERCOMMIT_DISABLE=1 git rebase master` to rebase while
disabling the Git hook.
`overcommit` relies on a pre-commit hook to prevent commits that violate its ruleset. To override
this behavior, pass the `OVERCOMMIT_DISABLE` environment variable. For example,
`OVERCOMMIT_DISABLE=1 git rebase master` to rebase while disabling the Git hook.
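Putting the commands from this section together, a typical local run looks like this:

```shell
# Install the overcommit-managed Git hooks (run from your GitLab source directory)
make -C tooling/overcommit

# Bypass the hooks for a single Git operation, such as a rebase
OVERCOMMIT_DISABLE=1 git rebase master
```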
## Ruby, Rails, RSpec

@@ -624,6 +624,7 @@ You can use markdownlint:
- [On the command line](https://github.com/igorshubovych/markdownlint-cli#markdownlint-cli--) (see the sketch after this list).
- [Within a code editor](#configure-editors).
- [In a `pre-commit` hook](#configure-pre-commit-hooks).
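For illustration, a command-line run that mirrors the `MarkdownLint` hook configuration added to
`.overcommit.yml.example` in this commit might look like the following (quoting the glob is an
editorial assumption, so the shell passes it through to markdownlint unexpanded):

```shell
# Install the linter, then check the documentation tree with the repository
# configuration, using the same flags as the pre-commit hook.
yarn install
node_modules/.bin/markdownlint --config .markdownlint.json 'doc/**/*.md'
```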
#### Vale
@@ -650,6 +651,9 @@ You can use Vale:
- [On the command line](https://errata-ai.gitbook.io/vale/getting-started/usage) (see the sketch after this list).
- [Within a code editor](#configure-editors).
- [In a `pre-commit` hook](#configure-pre-commit-hooks). Vale only reports errors in the
`pre-commit` hook (the same configuration as the CI/CD pipelines), and does not report suggestions
or warnings.
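For illustration, the equivalent command-line run with the same flags as the `pre-commit` hook
(and therefore errors only) might be:

```shell
# Install Vale (macOS example taken from the hook's install_command; other
# package managers work too), then lint doc/ at the error level only.
brew install vale
vale --config .vale.ini --minAlertLevel error doc
```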
#### Install linters
@@ -703,6 +707,22 @@ To configure Vale within your editor, install one of the following as appropriat
We don't use [Vale Server](https://errata-ai.github.io/vale/#using-vale-with-a-text-editor-or-another-third-party-application).
#### Configure pre-commit hooks
Git [pre-commit hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) allow Git users to
run tests or other processes before committing to a branch, and to block the commit if any of
those checks fail.
[`overcommit`](https://github.com/sds/overcommit) is a Git hooks manager, making configuring,
installing, and removing Git hooks easy.
Sample configuration for `overcommit` is available in the
[`.overcommit.yml.example`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.overcommit.yml.example)
file for the [`gitlab`](https://gitlab.com/gitlab-org/gitlab) project.
To set up `overcommit` for documentation linting, see
[Pre-commit static analysis](../contributing/style_guides.md#pre-commit-static-analysis).
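As a rough sketch only, assuming the `overcommit` gem is already installed, a manual alternative to
the linked setup instructions might be to copy the sample configuration into place and let
`overcommit` install the hooks it defines:

```shell
# Hypothetical manual setup: copy the sample configuration, then install the
# Git hooks that overcommit manages.
cp .overcommit.yml.example .overcommit.yml
overcommit --install
```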
#### Disable Vale tests
You can disable a specific Vale linting rule or all Vale linting rules for any portion of a

@@ -0,0 +1,79 @@
# frozen_string_literal: true
module ObjectStorage
class Config
attr_reader :options
def initialize(options)
@options = options.to_hash.deep_symbolize_keys
end
def credentials
@credentials ||= options[:connection] || {}
end
def storage_options
@storage_options ||= options[:storage_options] || {}
end
def enabled?
options[:enabled]
end
def bucket
options[:remote_directory]
end
def consolidated_settings?
options.fetch(:consolidated_settings, false)
end
# AWS-specific options
def aws?
provider == 'AWS'
end
def use_iam_profile?
credentials.fetch(:use_iam_profile, false)
end
def use_path_style?
credentials.fetch(:path_style, false)
end
def server_side_encryption
storage_options[:server_side_encryption]
end
def server_side_encryption_kms_key_id
storage_options[:server_side_encryption_kms_key_id]
end
def provider
credentials[:provider].to_s
end
# End AWS-specific options
def google?
provider == 'Google'
end
def fog_attributes
@fog_attributes ||= begin
return {} unless enabled? && aws?
return {} unless server_side_encryption.present?
aws_server_side_encryption_headers.compact
end
end
private
def aws_server_side_encryption_headers
{
'x-amz-server-side-encryption' => server_side_encryption,
'x-amz-server-side-encryption-aws-kms-key-id' => server_side_encryption_kms_key_id
}
end
end
end

@@ -22,20 +22,20 @@ module ObjectStorage
MAXIMUM_MULTIPART_PARTS = 100
MINIMUM_MULTIPART_SIZE = 5.megabytes
attr_reader :credentials, :bucket_name, :object_name
attr_reader :has_length, :maximum_size, :consolidated_settings
attr_reader :config, :credentials, :bucket_name, :object_name
attr_reader :has_length, :maximum_size
def initialize(credentials, bucket_name, object_name, has_length:, maximum_size: nil, consolidated_settings: false)
def initialize(config, object_name, has_length:, maximum_size: nil)
unless has_length
raise ArgumentError, 'maximum_size has to be specified if length is unknown' unless maximum_size
end
@credentials = credentials
@bucket_name = bucket_name
@config = config
@credentials = config.credentials
@bucket_name = config.bucket
@object_name = object_name
@has_length = has_length
@maximum_size = maximum_size
@consolidated_settings = consolidated_settings
end
def to_hash
@@ -62,7 +62,7 @@ module ObjectStorage
end
def workhorse_client_hash
return {} unless aws?
return {} unless config.aws?
{
UseWorkhorseClient: use_workhorse_s3_client?,
@@ -73,16 +73,18 @@
Bucket: bucket_name,
Region: credentials[:region],
Endpoint: credentials[:endpoint],
PathStyle: credentials.fetch(:path_style, false),
UseIamProfile: credentials.fetch(:use_iam_profile, false)
}
PathStyle: config.use_path_style?,
UseIamProfile: config.use_iam_profile?,
ServerSideEncryption: config.server_side_encryption,
SSEKMSKeyID: config.server_side_encryption_kms_key_id
}.compact
}
}
end
def use_workhorse_s3_client?
return false unless Feature.enabled?(:use_workhorse_s3_client, default_enabled: true)
return false unless credentials.fetch(:use_iam_profile, false) || consolidated_settings
return false unless config.use_iam_profile? || config.consolidated_settings?
# The Golang AWS SDK does not support V2 signatures
return false unless credentials.fetch(:aws_signature_version, 4).to_i >= 4
@@ -95,7 +97,7 @@
# Implements https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html
def get_url
if google?
if config.google?
connection.get_object_https_url(bucket_name, object_name, expire_at)
else
connection.get_object_url(bucket_name, object_name, expire_at)
@@ -169,23 +171,15 @@
].min
end
def aws?
provider == 'AWS'
end
def google?
provider == 'Google'
end
def requires_multipart_upload?
aws? && !has_length
config.aws? && !has_length
end
def upload_id
return unless requires_multipart_upload?
strong_memoize(:upload_id) do
new_upload = connection.initiate_multipart_upload(bucket_name, object_name)
new_upload = connection.initiate_multipart_upload(bucket_name, object_name, config.fog_attributes)
new_upload.body["UploadId"]
end
end

@@ -1,7 +1,7 @@
import Vue from 'vue';
import { mount } from '@vue/test-utils';
import { formatDate } from '~/lib/utils/datetime_utility';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
import { defaultAssignees, defaultMilestone } from './related_issuable_mock_data';
import { TEST_HOST } from 'jest/helpers/test_constants';
@@ -71,85 +71,65 @@ describe('RelatedIssuableItem', () => {
});
describe('token state', () => {
let tokenState;
const tokenState = () => wrapper.find({ ref: 'iconElementXL' });
beforeEach(done => {
beforeEach(() => {
wrapper.setProps({ state: 'opened' });
Vue.nextTick(() => {
tokenState = wrapper.find('.issue-token-state-icon-open');
done();
});
});
it('renders if hasState', () => {
expect(tokenState.exists()).toBe(true);
expect(tokenState().exists()).toBe(true);
});
it('renders state title', () => {
const stateTitle = tokenState.attributes('title');
const stateTitle = tokenState().attributes('title');
const formattedCreateDate = formatDate(props.createdAt);
expect(stateTitle).toContain('<span class="bold">Opened</span>');
expect(stateTitle).toContain(`<span class="text-tertiary">${formattedCreateDate}</span>`);
});
it('renders aria label', () => {
expect(tokenState.attributes('aria-label')).toEqual('opened');
expect(tokenState().attributes('aria-label')).toEqual('opened');
});
it('renders open icon when open state', () => {
expect(tokenState.classes('issue-token-state-icon-open')).toBe(true);
expect(tokenState().classes('issue-token-state-icon-open')).toBe(true);
});
it('renders close icon when close state', done => {
it('renders close icon when close state', async () => {
wrapper.setProps({
state: 'closed',
closedAt: '2018-12-01T00:00:00.00Z',
});
await wrapper.vm.$nextTick();
Vue.nextTick(() => {
expect(tokenState.classes('issue-token-state-icon-closed')).toBe(true);
done();
});
expect(tokenState().classes('issue-token-state-icon-closed')).toBe(true);
});
});
describe('token metadata', () => {
let tokenMetadata;
beforeEach(done => {
Vue.nextTick(() => {
tokenMetadata = wrapper.find('.item-meta');
done();
});
});
const tokenMetadata = () => wrapper.find('.item-meta');
it('renders item path and ID', () => {
const pathAndID = tokenMetadata.find('.item-path-id').text();
const pathAndID = tokenMetadata()
.find('.item-path-id')
.text();
expect(pathAndID).toContain('gitlab-org/gitlab-test');
expect(pathAndID).toContain('#1');
});
it('renders milestone icon and name', () => {
const milestoneIcon = tokenMetadata.find('.item-milestone svg use');
const milestoneTitle = tokenMetadata.find('.item-milestone .milestone-title');
const milestoneIcon = tokenMetadata().find('.item-milestone svg use');
const milestoneTitle = tokenMetadata().find('.item-milestone .milestone-title');
expect(milestoneIcon.attributes('href')).toContain('clock');
expect(milestoneTitle.text()).toContain('Milestone title');
});
it('renders due date component', () => {
expect(tokenMetadata.find('.js-due-date-slot').exists()).toBe(true);
});
it('renders weight component', () => {
expect(tokenMetadata.find('.js-weight-slot').exists()).toBe(true);
it('renders due date component with correct due date', () => {
expect(wrapper.find(IssueDueDate).props('date')).toBe(props.dueDate);
});
});
@@ -163,40 +143,30 @@ describe('RelatedIssuableItem', () => {
});
describe('remove button', () => {
let removeBtn;
const removeButton = () => wrapper.find({ ref: 'removeButton' });
beforeEach(done => {
beforeEach(() => {
wrapper.setProps({ canRemove: true });
Vue.nextTick(() => {
removeBtn = wrapper.find({ ref: 'removeButton' });
done();
});
});
it('renders if canRemove', () => {
expect(removeBtn.exists()).toBe(true);
expect(removeButton().exists()).toBe(true);
});
it('renders disabled button when removeDisabled', done => {
wrapper.vm.removeDisabled = true;
it('renders disabled button when removeDisabled', async () => {
wrapper.setData({ removeDisabled: true });
await wrapper.vm.$nextTick();
Vue.nextTick(() => {
expect(removeBtn.attributes('disabled')).toEqual('disabled');
done();
});
expect(removeButton().attributes('disabled')).toEqual('disabled');
});
it('triggers onRemoveRequest when clicked', () => {
removeBtn.trigger('click');
it('triggers onRemoveRequest when clicked', async () => {
removeButton().trigger('click');
await wrapper.vm.$nextTick();
const { relatedIssueRemoveRequest } = wrapper.emitted();
return wrapper.vm.$nextTick().then(() => {
const { relatedIssueRemoveRequest } = wrapper.emitted();
expect(relatedIssueRemoveRequest.length).toBe(1);
expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
});
expect(relatedIssueRemoveRequest.length).toBe(1);
expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
});
});
});

@@ -0,0 +1,150 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe ObjectStorage::Config do
let(:region) { 'us-east-1' }
let(:bucket_name) { 'test-bucket' }
let(:path_style) { false }
let(:use_iam_profile) { false }
let(:credentials) do
{
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
region: region,
path_style: path_style,
use_iam_profile: use_iam_profile
}
end
let(:storage_options) do
{
server_side_encryption: 'AES256',
server_side_encryption_kms_key_id: 'arn:aws:12345'
}
end
let(:raw_config) do
{
enabled: true,
connection: credentials,
remote_directory: bucket_name,
storage_options: storage_options
}
end
subject { described_class.new(raw_config.as_json) }
describe '#credentials' do
it { expect(subject.credentials).to eq(credentials) }
end
describe '#storage_options' do
it { expect(subject.storage_options).to eq(storage_options) }
end
describe '#enabled?' do
it { expect(subject.enabled?).to eq(true) }
end
describe '#bucket' do
it { expect(subject.bucket).to eq(bucket_name) }
end
context 'with unconsolidated settings' do
describe 'consolidated_settings? returns false' do
it { expect(subject.consolidated_settings?).to be false }
end
end
context 'with consolidated settings' do
before do
raw_config[:consolidated_settings] = true
end
describe 'consolidated_settings? returns true' do
it { expect(subject.consolidated_settings?).to be true }
end
end
context 'with IAM profile in use' do
let(:use_iam_profile) { true }
it '#use_iam_profile? returns true' do
expect(subject.use_iam_profile?).to be true
end
end
context 'with IAM profile not in use' do
it '#use_iam_profile? returns false' do
expect(subject.use_iam_profile?).to be false
end
end
context 'with path style' do
let(:path_style) { true }
it '#use_path_style? returns true' do
expect(subject.use_path_style?).to be true
end
end
context 'with hostname style access' do
it '#use_path_style? returns false' do
expect(subject.use_path_style?).to be false
end
end
context 'with AWS credentials' do
it { expect(subject.provider).to eq('AWS') }
it { expect(subject.aws?).to be true }
it { expect(subject.google?).to be false }
end
context 'with Google credentials' do
let(:credentials) do
{
provider: 'Google',
google_client_email: 'foo@gcp-project.example.com',
google_json_key_location: '/path/to/gcp.json'
}
end
it { expect(subject.provider).to eq('Google') }
it { expect(subject.aws?).to be false }
it { expect(subject.google?).to be true }
it { expect(subject.fog_attributes).to eq({}) }
end
context 'with SSE-KMS enabled' do
it { expect(subject.server_side_encryption).to eq('AES256') }
it { expect(subject.server_side_encryption_kms_key_id).to eq('arn:aws:12345') }
it { expect(subject.fog_attributes.keys).to match_array(%w(x-amz-server-side-encryption x-amz-server-side-encryption-aws-kms-key-id)) }
end
context 'with only server side encryption enabled' do
let(:storage_options) { { server_side_encryption: 'AES256' } }
it { expect(subject.server_side_encryption).to eq('AES256') }
it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
it { expect(subject.fog_attributes).to eq({ 'x-amz-server-side-encryption' => 'AES256' }) }
end
context 'without encryption enabled' do
let(:storage_options) { {} }
it { expect(subject.server_side_encryption).to be_nil }
it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
it { expect(subject.fog_attributes).to eq({}) }
end
context 'with object storage disabled' do
before do
raw_config['enabled'] = false
end
it { expect(subject.enabled?).to be false }
it { expect(subject.fog_attributes).to eq({}) }
end
end

@@ -18,13 +18,25 @@ RSpec.describe ObjectStorage::DirectUpload do
}
end
let(:storage_options) { {} }
let(:raw_config) do
{
enabled: true,
connection: credentials,
remote_directory: bucket_name,
storage_options: storage_options,
consolidated_settings: consolidated_settings
}
end
let(:config) { ObjectStorage::Config.new(raw_config) }
let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }
let(:bucket_name) { 'uploads' }
let(:object_name) { 'tmp/uploads/my-file' }
let(:maximum_size) { 1.gigabyte }
let(:direct_upload) { described_class.new(credentials, bucket_name, object_name, has_length: has_length, maximum_size: maximum_size, consolidated_settings: consolidated_settings) }
let(:direct_upload) { described_class.new(config, object_name, has_length: has_length, maximum_size: maximum_size) }
before do
Fog.unmock!
@@ -62,7 +74,7 @@ RSpec.describe ObjectStorage::DirectUpload do
end
describe '#get_url' do
subject { described_class.new(credentials, bucket_name, object_name, has_length: true) }
subject { described_class.new(config, object_name, has_length: true) }
context 'when AWS is used' do
it 'calls the proper method' do
@@ -111,6 +123,7 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(s3_config[:Region]).to eq(region)
expect(s3_config[:PathStyle]).to eq(path_style)
expect(s3_config[:UseIamProfile]).to eq(use_iam_profile)
expect(s3_config.keys).not_to include(%i(ServerSideEncryption SSEKMSKeyID))
end
context 'when feature flag is disabled' do
@@ -150,6 +163,33 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(subject[:UseWorkhorseClient]).to be true
end
end
context 'when only server side encryption is used' do
let(:storage_options) { { server_side_encryption: 'AES256' } }
it 'sends server side encryption settings' do
s3_config = subject[:ObjectStorage][:S3Config]
expect(s3_config[:ServerSideEncryption]).to eq('AES256')
expect(s3_config.keys).not_to include(:SSEKMSKeyID)
end
end
context 'when SSE-KMS is used' do
let(:storage_options) do
{
server_side_encryption: 'AES256',
server_side_encryption_kms_key_id: 'arn:aws:12345'
}
end
it 'sends server side encryption settings' do
s3_config = subject[:ObjectStorage][:S3Config]
expect(s3_config[:ServerSideEncryption]).to eq('AES256')
expect(s3_config[:SSEKMSKeyID]).to eq('arn:aws:12345')
end
end
end
shared_examples 'a valid Google upload' do

@@ -10,13 +10,14 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
let_it_be(:user_info) { { name: user.name, avatar_url: user.avatar_url } }
let!(:expected_package_files) do
npm_file = package.package_files.first
[{
created_at: npm_file.created_at,
download_path: npm_file.download_path,
file_name: npm_file.file_name,
size: npm_file.size
}]
package.package_files.map do |file|
{
created_at: file.created_at,
download_path: file.download_path,
file_name: file.file_name,
size: file.size
}
end
end
let(:pipeline_info) do
pipeline = package.build_info.pipeline
@@ -67,6 +68,15 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
end
end
context 'with composer metadata' do
let(:package) { create(:composer_package, :with_metadatum, sha: '123', project: project) }
let(:expected_package_details) { super().merge(composer_metadatum: package.composer_metadatum) }
it 'returns composer_metadatum' do
expect(presenter.detail_view).to eq expected_package_details
end
end
context 'with nuget_metadatum' do
let_it_be(:package) { create(:nuget_package, project: project) }
let_it_be(:nuget_metadatum) { create(:nuget_metadatum, package: package) }

@@ -13,135 +13,133 @@ RSpec.describe API::GroupPackages do
let(:url) { "/groups/#{group.id}/packages" }
let(:package_schema) { 'public_api/v4/packages/group_packages' }
context 'without the need for a license' do
context 'with sorting' do
let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
context 'with sorting' do
let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
before do
travel_to(1.day.ago) do
package3
end
end
context 'without sorting params' do
let(:packages) { [package3, package1, package2] }
it 'sorts by created_at asc' do
subject
expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
end
end
it_behaves_like 'package sorting', 'name' do
let(:packages) { [package1, package2, package3] }
end
it_behaves_like 'package sorting', 'created_at' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting', 'version' do
let(:packages) { [package3, package2, package1] }
end
it_behaves_like 'package sorting', 'type' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting', 'project_path' do
let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
let(:packages) { [package1, package2, package3, package4] }
before do
travel_to(1.day.ago) do
package3
end
end
context 'with private group' do
let!(:package1) { create(:package, project: project) }
let!(:package2) { create(:package, project: project) }
context 'without sorting params' do
let(:packages) { [package3, package1, package2] }
let(:group) { create(:group, :private) }
let(:subgroup) { create(:group, :private, parent: group) }
let(:project) { create(:project, :private, namespace: group) }
let(:subproject) { create(:project, :private, namespace: subgroup) }
it 'sorts by created_at asc' do
subject
context 'with unauthenticated user' do
it_behaves_like 'rejects packages access', :group, :no_type, :not_found
expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
end
end
context 'with authenticated user' do
subject { get api(url, user) }
it_behaves_like 'package sorting', 'name' do
let(:packages) { [package1, package2, package3] }
end
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'package sorting', 'created_at' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting', 'version' do
let(:packages) { [package3, package2, package1] }
end
it_behaves_like 'package sorting', 'type' do
let(:packages) { [package3, package1, package2] }
end
it_behaves_like 'package sorting', 'project_path' do
let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
let(:packages) { [package1, package2, package3, package4] }
end
end
context 'with private group' do
let!(:package1) { create(:package, project: project) }
let!(:package2) { create(:package, project: project) }
let(:group) { create(:group, :private) }
let(:subgroup) { create(:group, :private, parent: group) }
let(:project) { create(:project, :private, namespace: group) }
let(:subproject) { create(:project, :private, namespace: subgroup) }
context 'with unauthenticated user' do
it_behaves_like 'rejects packages access', :group, :no_type, :not_found
end
context 'with authenticated user' do
subject { get api(url, user) }
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'with subgroup' do
let(:subgroup) { create(:group, :private, parent: group) }
let(:subproject) { create(:project, :private, namespace: subgroup) }
let!(:package3) { create(:npm_package, project: subproject) }
it_behaves_like 'returns packages with subgroups', :group, :owner
it_behaves_like 'returns packages with subgroups', :group, :maintainer
it_behaves_like 'returns packages with subgroups', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'with subgroup' do
let(:subgroup) { create(:group, :private, parent: group) }
let(:subproject) { create(:project, :private, namespace: subgroup) }
let!(:package3) { create(:npm_package, project: subproject) }
context 'excluding subgroup' do
let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
it_behaves_like 'returns packages with subgroups', :group, :owner
it_behaves_like 'returns packages with subgroups', :group, :maintainer
it_behaves_like 'returns packages with subgroups', :group, :developer
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'excluding subgroup' do
let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
end
end
end
end
end
context 'with public group' do
let_it_be(:package1) { create(:package, project: project) }
let_it_be(:package2) { create(:package, project: project) }
context 'with public group' do
let_it_be(:package1) { create(:package, project: project) }
let_it_be(:package2) { create(:package, project: project) }
context 'with unauthenticated user' do
it_behaves_like 'returns packages', :group, :no_type
end
context 'with authenticated user' do
subject { get api(url, user) }
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'returns packages', :group, :guest
end
context 'with unauthenticated user' do
it_behaves_like 'returns packages', :group, :no_type
end
context 'with pagination params' do
let_it_be(:package1) { create(:package, project: project) }
let_it_be(:package2) { create(:package, project: project) }
let_it_be(:package3) { create(:npm_package, project: project) }
let_it_be(:package4) { create(:npm_package, project: project) }
context 'with authenticated user' do
subject { get api(url, user) }
it_behaves_like 'returns paginated packages'
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'returns packages', :group, :guest
end
end
it_behaves_like 'filters on each package_type', is_project: false
context 'with pagination params' do
let_it_be(:package1) { create(:package, project: project) }
let_it_be(:package2) { create(:package, project: project) }
let_it_be(:package3) { create(:npm_package, project: project) }
let_it_be(:package4) { create(:npm_package, project: project) }
context 'does not accept non supported package_type value' do
include_context 'package filter context'
it_behaves_like 'returns paginated packages'
end
let(:url) { group_filter_url(:type, 'foo') }
it_behaves_like 'filters on each package_type', is_project: false
it_behaves_like 'returning response status', :bad_request
end
context 'does not accept non supported package_type value' do
include_context 'package filter context'
let(:url) { group_filter_url(:type, 'foo') }
it_behaves_like 'returning response status', :bad_request
end
end
end

@@ -382,6 +382,32 @@ RSpec.describe ObjectStorage do
it { is_expected.to eq(nil) }
end
describe '#fog_attributes' do
subject { uploader.fog_attributes }
it { is_expected.to eq({}) }
context 'with encryption configured' do
let(:raw_options) do
{
"enabled" => true,
"connection" => { "provider" => 'AWS' },
"storage_options" => { "server_side_encryption" => "AES256" }
}
end
let(:options) { Settingslogic.new(raw_options) }
before do
allow(uploader_class).to receive(:options) do
double(object_store: options)
end
end
it { is_expected.to eq({ "x-amz-server-side-encryption" => "AES256" }) }
end
end
describe '.workhorse_authorize' do
let(:has_length) { true }
let(:maximum_size) { nil }
@@ -459,13 +485,18 @@ RSpec.describe ObjectStorage do
context 'uses AWS' do
let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
let(:credentials) do
{
provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
region: "eu-central-1"
}
end
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
region: "eu-central-1" }
expect_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -502,12 +533,17 @@ RSpec.describe ObjectStorage do
context 'uses Google' do
let(:storage_url) { "https://storage.googleapis.com/uploads/" }
let(:credentials) do
{
provider: "Google",
google_storage_access_key_id: 'ACCESS_KEY_ID',
google_storage_secret_access_key: 'SECRET_ACCESS_KEY'
}
end
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "Google",
google_storage_access_key_id: 'ACCESS_KEY_ID',
google_storage_secret_access_key: 'SECRET_ACCESS_KEY' }
expect_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -537,15 +573,18 @@ RSpec.describe ObjectStorage do
context 'uses GDK/minio' do
let(:storage_url) { "http://minio:9000/uploads/" }
let(:credentials) do
{ provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
endpoint: 'http://minio:9000',
path_style: true,
region: "gdk" }
end
before do
expect(uploader_class).to receive(:object_store_credentials) do
{ provider: "AWS",
aws_access_key_id: "AWS_ACCESS_KEY_ID",
aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
endpoint: 'http://minio:9000',
path_style: true,
region: "gdk" }
expect_next_instance_of(ObjectStorage::Config) do |instance|
allow(instance).to receive(:credentials).and_return(credentials)
end
end