Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-25 12:10:36 +00:00
parent 919f0e86ba
commit f471d83619
31 changed files with 464 additions and 92 deletions

View File

@ -27,6 +27,8 @@ stages:
QA_KNAPSACK_REPORT_PATH: $CI_PROJECT_DIR/qa/knapsack
.ruby-image:
# Because this pipeline template can be included directly in other projects,
# image path and registry needs to be defined explicitly
image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/gitlab-build-images/debian-bullseye-ruby-${RUBY_VERSION}:bundler-2.3
.qa-install:
@ -161,7 +163,7 @@ cache-gems:
extends:
- .qa-install
- .ruby-image
- .rules:prepare
- .rules:update-cache
stage: .pre
tags:
- e2e

View File

@ -28,6 +28,9 @@
.process-test-results: &process-test-results
if: $PROCESS_TEST_RESULTS == "true"
.not-canonical-project: &not-canonical-project
if: '$CI_PROJECT_PATH != "gitlab-org/gitlab" && $CI_PROJECT_PATH != "gitlab-cn/gitlab"'
# Selective test execution against an omnibus instance has the following execution scenarios:
# * only e2e spec files changed - runs only changed specs
# * qa framework changes - runs full test suite
@ -55,6 +58,12 @@
when: never
- when: always
.rules:update-cache:
rules:
- <<: *not-canonical-project
when: never
- when: always
# ------------------------------------------
# Test
# ------------------------------------------
@ -124,4 +133,6 @@
.rules:report:process-results:
rules:
- <<: *not-canonical-project
when: never
- *process-test-results

View File

@ -1,6 +1,8 @@
# Default variables for package-and-test
variables:
REGISTRY_HOST: "registry.gitlab.com"
REGISTRY_GROUP: "gitlab-org"
SKIP_REPORT_IN_ISSUES: "true"
OMNIBUS_GITLAB_CACHE_UPDATE: "false"
OMNIBUS_GITLAB_RUBY3_BUILD: "false"

View File

@ -109,11 +109,11 @@ module DiffHelper
end
def inline_diff_btn
diff_btn('Inline', 'inline', diff_view == :inline)
diff_btn(s_('Diffs|Inline'), 'inline', diff_view == :inline)
end
def parallel_diff_btn
diff_btn('Side-by-side', 'parallel', diff_view == :parallel)
diff_btn(s_('Diffs|Side-by-side'), 'parallel', diff_view == :parallel)
end
def submodule_link(blob, ref, repository = @repository)
@ -283,7 +283,8 @@ module DiffHelper
def toggle_whitespace_link(url, options)
options[:class] = [*options[:class], 'btn gl-button btn-default'].join(' ')
link_to "#{hide_whitespace? ? 'Show' : 'Hide'} whitespace changes", url, class: options[:class]
toggle_text = hide_whitespace? ? s_('Diffs|Show whitespace changes') : s_('Diffs|Hide whitespace changes')
link_to toggle_text, url, class: options[:class]
end
def code_navigation_path(diffs)

View File

@ -242,13 +242,13 @@ module MergeRequestsHelper
''
end
link_to branch, branch_path, title: branch, class: 'gl-text-blue-500! gl-font-monospace gl-bg-blue-50 gl-rounded-base gl-font-sm gl-px-2 gl-display-inline-block gl-text-truncate gl-max-w-26 gl-mb-n2'
link_to branch, branch_path, title: branch, class: 'gl-text-blue-500! gl-font-monospace gl-bg-blue-50 gl-rounded-base gl-font-sm gl-px-2 gl-display-inline-block gl-text-truncate gl-max-w-26 gl-mx-2'
end
def merge_request_header(project, merge_request)
link_to_author = link_to_member(project, merge_request.author, size: 24, extra_class: 'gl-font-weight-bold', avatar: false)
link_to_author = link_to_member(project, merge_request.author, size: 24, extra_class: 'gl-font-weight-bold gl-mr-2', avatar: false)
copy_button = clipboard_button(text: merge_request.source_branch, title: _('Copy branch name'), class: 'btn btn-default btn-sm gl-button btn-default-tertiary btn-icon gl-display-none! gl-md-display-inline-block! js-source-branch-copy')
target_branch = link_to merge_request.target_branch, project_tree_path(merge_request.target_project, merge_request.target_branch), title: merge_request.target_branch, class: 'gl-text-blue-500! gl-font-monospace gl-bg-blue-50 gl-rounded-base gl-font-sm gl-px-2 gl-display-inline-block gl-text-truncate gl-max-w-26 gl-mb-n2'
target_branch = link_to merge_request.target_branch, project_tree_path(merge_request.target_project, merge_request.target_branch), title: merge_request.target_branch, class: 'gl-text-blue-500! gl-font-monospace gl-bg-blue-50 gl-rounded-base gl-font-sm gl-px-2 gl-display-inline-block gl-text-truncate gl-max-w-26 gl-mx-2'
_('%{author} requested to merge %{source_branch} %{copy_button} into %{target_branch} %{created_at}').html_safe % { author: link_to_author.html_safe, source_branch: merge_request_source_branch(merge_request).html_safe, copy_button: copy_button.html_safe, target_branch: target_branch.html_safe, created_at: time_ago_with_tooltip(merge_request.created_at, html_class: 'gl-display-inline-block').html_safe }
end

View File

@ -38,7 +38,7 @@ module MergeRequests
def failure_reason
raise 'Execute needs to be called before' if results.nil?
results.find(&:failed?)&.payload&.fetch(:reason)
results.find(&:failed?)&.payload&.fetch(:reason)&.to_sym
end
private

View File

@ -5,6 +5,7 @@ module Namespaces
RefresherError = Class.new(StandardError)
def execute(root_namespace)
root_namespace = root_namespace.root_ancestor # just in case the true root isn't passed
root_storage_statistics = find_or_create_root_storage_statistics(root_namespace.id)
root_storage_statistics.recalculate!

View File

@ -1,6 +1,6 @@
- display_issuable_type = issuable_display_type(@merge_request)
.float-left.btn-group.gl-md-ml-3.gl-display-flex.dropdown.gl-new-dropdown.gl-md-w-auto.gl-w-full
.btn-group.gl-md-ml-3.gl-display-flex.dropdown.gl-new-dropdown.gl-md-w-auto.gl-w-full
= button_tag type: 'button', class: "btn dropdown-toggle btn-default btn-md gl-button gl-dropdown-toggle btn-default-tertiary dropdown-icon-only dropdown-toggle-no-caret has-tooltip gl-display-none! gl-md-display-inline-flex!", data: { toggle: 'dropdown', title: _('Merge request actions'), testid: 'merge-request-actions' } do
= sprite_icon "ellipsis_v", size: 16, css_class: "dropdown-icon gl-icon"
= button_tag type: 'button', class: "btn dropdown-toggle btn-default btn-md btn-block gl-button gl-dropdown-toggle gl-md-display-none!", data: { 'toggle' => 'dropdown' } do

View File

@ -1,3 +1,3 @@
.detail-page-description.py-2{ class: "#{'is-merge-request' if moved_mr_sidebar_enabled? && !fluid_layout}" }
.detail-page-description.py-2.gl-display-flex.gl-align-items-center.gl-flex-wrap{ class: "#{'is-merge-request' if moved_mr_sidebar_enabled? && !fluid_layout}" }
= render 'shared/issuable/status_box', issuable: @merge_request
= merge_request_header(@project, @merge_request)

View File

@ -0,0 +1,8 @@
---
name: workhorse_google_client
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96891
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/372596
milestone: '15.6'
type: development
group: 'group::package registry'
default_enabled: false

View File

@ -329,7 +329,7 @@ A similar thing can be done for all Models handled by the [Geo Self-Service Fram
NOTE:
`GroupWikiRepository` is not in the previous list since verification is not implemented.
There is an [issue to implement this functionality in the Admin UI](https://gitlab.com/gitlab-org/gitlab/-/issues/364729).
There is an [issue to implement this functionality in the Admin Area UI](https://gitlab.com/gitlab-org/gitlab/-/issues/364729).
### Artifacts

View File

@ -111,8 +111,8 @@ Users are available globally and not restricted to a single Pod. Users can be me
- Users can create multiple top-level namespaces
- Users can be a member of multiple top-level namespaces
- Users can be a member of multiple organizations
- Users can administrate organizations
- User activity is aggregated within an organization
- Users can administer organizations
- User activity is aggregated in an organization
- Every user has one personal namespace
## Goals

View File

@ -187,10 +187,10 @@ load any global pages like `/dashboard` and will end up being redirected to
for legacy APIs and such users may only ever be able to use APIs scoped to an
organization.
## Detailed explanation of admin settings
## Detailed explanation of Admin Area settings
We believe that maintaining and synchronizing admin settings will be
frustrating and painful so to avoid this we will decompose and share all admin
We believe that maintaining and synchronizing Admin Area settings will be
frustrating and painful so to avoid this we will decompose and share all Admin Area
settings in the `gitlab_admin` schema. This should be safe (similar to other
shared schemas) because these receive very little write traffic.
@ -582,11 +582,11 @@ TODO
1. Router picks a random pod `Pod US0`
1. Pod US0 redirects user to `/admin/pods/podus0`
1. Pod US0 renders an admin page and also returns a cache header to cache `/admin/podss/podus0/* => Pod US0`. The admin page contains a dropdown showing other pods they could select and it changes the query parameter.
1. Pod US0 renders an Admin Area page and also returns a cache header to cache `/admin/pods/podus0/* => Pod US0`. The Admin Area page contains a dropdown list showing other pods they could select and it changes the query parameter.
Admin settings are in Postgres are all shared across all pods to avoid
Admin Area settings in Postgres are all shared across all pods to avoid
divergence but we still make it clear in the URL and UI which pod is serving
the admin page as there is dynamic data being generated from these pages and
the Admin Area page as there is dynamic data being generated from these pages and
the operator may want to view a specific pod.
## More Technical Problems To Solve

View File

@ -43,7 +43,7 @@ To point a domain to your remote machine, create an `A` record from `example.rem
#### Install Certbot
[Certbot](https://certbot.eff.org/) is a free and open-source software tool that automatically uses Let's Encrypt certificates on manually administrated websites to enable HTTPS.
[Certbot](https://certbot.eff.org/) is a free and open-source software tool that automatically uses Let's Encrypt certificates on manually administered websites to enable HTTPS.
To install Certbot, run the following command:

View File

@ -22,8 +22,8 @@ module Gitlab
def self.from_hash(data)
new(
status: data.fetch('status').to_sym,
payload: data.fetch('payload'))
status: data.fetch(:status).to_sym,
payload: data.fetch(:payload))
end
def initialize(status:, payload: {})

View File

@ -14,7 +14,7 @@ module Gitlab
def retrieve_check(merge_check:)
Gitlab::Redis::Cache.with do |redis|
Gitlab::Json.parse(redis.get(merge_check.cache_key + ":#{VERSION}"))
Gitlab::Json.parse(redis.get(merge_check.cache_key + ":#{VERSION}"), symbolize_keys: true)
end
end
end

View File

@ -66,6 +66,8 @@ module ObjectStorage
workhorse_aws_hash
elsif config.azure?
workhorse_azure_hash
elsif Feature.enabled?(:workhorse_google_client) && config.google?
workhorse_google_hash
else
{}
end
@ -111,6 +113,23 @@ module ObjectStorage
url
end
def workhorse_google_hash
{
UseWorkhorseClient: use_workhorse_google_client?,
RemoteTempObjectID: object_name,
ObjectStorage: {
Provider: 'Google',
GoCloudConfig: {
URL: google_gocloud_url
}
}
}
end
def google_gocloud_url
"gs://#{bucket_name}"
end
def use_workhorse_s3_client?
return false unless config.use_iam_profile? || config.consolidated_settings?
# The Golang AWS SDK does not support V2 signatures
@ -119,6 +138,15 @@ module ObjectStorage
true
end
def use_workhorse_google_client?
return false unless config.consolidated_settings?
return true if credentials[:google_application_default]
return true if credentials[:google_json_key_location]
return true if credentials[:google_json_key_string]
false
end
def provider
credentials[:provider].to_s
end

View File

@ -13983,6 +13983,12 @@ msgstr[1] ""
msgid "Diffs|Expand all lines"
msgstr ""
msgid "Diffs|Hide whitespace changes"
msgstr ""
msgid "Diffs|Inline"
msgstr ""
msgid "Diffs|Next 20 lines"
msgstr ""
@ -13998,11 +14004,17 @@ msgstr ""
msgid "Diffs|Show all unchanged lines"
msgstr ""
msgid "Diffs|Show whitespace changes"
msgstr ""
msgid "Diffs|Showing %{dropdownStart}%{count} changed file%{dropdownEnd}"
msgid_plural "Diffs|Showing %{dropdownStart}%{count} changed files%{dropdownEnd}"
msgstr[0] ""
msgstr[1] ""
msgid "Diffs|Side-by-side"
msgstr ""
msgid "Diffs|Something went wrong while fetching diff lines."
msgstr ""

View File

@ -18,20 +18,20 @@ namespace :knapsack do
desc "Download latest knapsack reports for parallel jobs"
task :download, [:stage_name] do |_, args|
test_stage_name = args[:stage_name]
knapsack_reports = ENV["QA_KNAPSACK_REPORTS"]&.split(",")
ci_token = ENV["QA_GITLAB_CI_TOKEN"]
# QA_KNAPSACK_REPORTS remains for changes to be backwards compatible
# TODO: remove and only use automated detection once changes are merged
unless ENV["QA_KNAPSACK_REPORTS"] || test_stage_name
QA::Runtime::Logger.warn("Missing QA_KNAPSACK_REPORTS environment variable or test stage name for autodetection")
next
end
reports = if test_stage_name
QA::Support::ParallelPipelineJobs
.fetch(stage_name: test_stage_name, access_token: ENV["QA_GITLAB_CI_TOKEN"])
.map { |job| job.tr(":", "-") }
reports = if knapsack_reports
knapsack_reports
else
ENV["QA_KNAPSACK_REPORTS"].split(",")
unless ci_token
QA::Runtime::Logger.error("Missing QA_GITLAB_CI_TOKEN for automatically detecting parallel jobs")
next
end
QA::Support::ParallelPipelineJobs
.fetch(stage_name: test_stage_name, access_token: ci_token)
.map { |job| job.tr(":", "-") }
end
reports.each do |report_name|

View File

@ -70,8 +70,8 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
let(:payload) { { test: 'test' } }
let(:hash) do
{
'status' => status,
'payload' => payload
status: status,
payload: payload
}
end

View File

@ -6,7 +6,7 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitla
subject(:redis_interface) { described_class.new }
let(:merge_check) { double(cache_key: '13') }
let(:result_hash) { { 'test' => 'test' } }
let(:result_hash) { { test: 'test' } }
let(:expected_key) { "#{merge_check.cache_key}:#{described_class::VERSION}" }
describe '#save_check' do

View File

@ -10,15 +10,15 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
let(:merge_request) { double }
describe '#read' do
let(:result_hash) { { 'status' => 'success', 'payload' => {} } }
let(:result_hash) { { status: 'success', payload: {} } }
it 'calls #retrieve_check on the interface' do
expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(result_hash)
cached_result = results_store.read(merge_check: merge_check)
expect(cached_result.status).to eq(result_hash['status'].to_sym)
expect(cached_result.payload).to eq(result_hash['payload'])
expect(cached_result.status).to eq(result_hash[:status].to_sym)
expect(cached_result.payload).to eq(result_hash[:payload])
end
context 'when #retrieve_check returns nil' do

View File

@ -192,11 +192,28 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
shared_examples 'a valid Google upload' do
shared_examples 'a valid Google upload' do |use_workhorse_client: true|
let(:gocloud_url) { "gs://#{bucket_name}" }
it_behaves_like 'a valid upload'
it 'does not set Workhorse client data' do
expect(subject.keys).not_to include(:UseWorkhorseClient, :RemoteTempObjectID, :ObjectStorage)
if use_workhorse_client
it 'enables the Workhorse client' do
expect(subject[:UseWorkhorseClient]).to be true
expect(subject[:RemoteTempObjectID]).to eq(object_name)
expect(subject[:ObjectStorage][:Provider]).to eq('Google')
expect(subject[:ObjectStorage][:GoCloudConfig]).to eq({ URL: gocloud_url })
end
end
context 'with workhorse_google_client disabled' do
before do
stub_feature_flags(workhorse_google_client: false)
end
it 'does not set Workhorse client data' do
expect(subject.keys).not_to include(:UseWorkhorseClient, :RemoteTempObjectID, :ObjectStorage)
end
end
end
@ -411,28 +428,88 @@ RSpec.describe ObjectStorage::DirectUpload do
end
context 'when Google is used' do
let(:credentials) do
{
provider: 'Google',
google_storage_access_key_id: 'GOOGLE_ACCESS_KEY_ID',
google_storage_secret_access_key: 'GOOGLE_SECRET_ACCESS_KEY'
}
let(:consolidated_settings) { true }
# We need to use fog mocks as using google_application_default
# will trigger network requests which we don't want in this spec.
# In turn, using fog mocks doesn't use a specific storage endpoint,
# hence the storage_url with the empty host.
let(:storage_url) { 'https:///uploads/' }
before do
Fog.mock!
end
let(:storage_url) { 'https://storage.googleapis.com/uploads/' }
context 'with google_application_default' do
let(:credentials) do
{
provider: 'Google',
google_project: 'GOOGLE_PROJECT',
google_application_default: true
}
end
context 'when length is known' do
let(:has_length) { true }
context 'when length is known' do
let(:has_length) { true }
it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
end
end
context 'when length is unknown' do
let(:has_length) { false }
context 'with google_json_key_location' do
let(:credentials) do
{
provider: 'Google',
google_project: 'GOOGLE_PROJECT',
google_json_key_location: 'LOCATION'
}
end
it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
context 'when length is known' do
let(:has_length) { true }
it_behaves_like 'a valid Google upload', use_workhorse_client: true
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
it_behaves_like 'a valid Google upload', use_workhorse_client: true
it_behaves_like 'a valid upload without multipart data'
end
end
context 'with google_json_key_string' do
let(:credentials) do
{
provider: 'Google',
google_project: 'GOOGLE_PROJECT',
google_json_key_string: 'STRING'
}
end
context 'when length is known' do
let(:has_length) { true }
it_behaves_like 'a valid Google upload', use_workhorse_client: true
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
it_behaves_like 'a valid Google upload', use_workhorse_client: true
it_behaves_like 'a valid upload without multipart data'
end
end
end
@ -466,4 +543,38 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
end
describe '#use_workhorse_google_client?' do
let(:direct_upload) { described_class.new(config, object_name, has_length: true) }
subject { direct_upload.use_workhorse_google_client? }
context 'with consolidated_settings' do
let(:consolidated_settings) { true }
[
{ google_application_default: true },
{ google_json_key_string: 'TEST' },
{ google_json_key_location: 'PATH' }
].each do |google_config|
context "with #{google_config.each_key.first}" do
let(:credentials) { google_config }
it { is_expected.to be_truthy }
end
end
context 'without any google setting' do
let(:credentials) { {} }
it { is_expected.to be_falsey }
end
end
context 'without consolidated_settings' do
let(:consolidated_settings) { true }
it { is_expected.to be_falsey }
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::RunChecksService do
RSpec.describe MergeRequests::Mergeability::RunChecksService, :clean_gitlab_redis_cache do
subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
describe '#execute' do
@ -161,11 +161,11 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let_it_be(:merge_request) { create(:merge_request) }
context 'when the execute method has been executed' do
before do
run_checks.execute
end
context 'when all the checks succeed' do
before do
run_checks.execute
end
it 'returns nil' do
expect(failure_reason).to eq(nil)
end

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Namespaces::StatisticsRefresherService, '#execute' do
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
let(:projects) { create_list(:project, 5, namespace: group) }
let(:service) { described_class.new }
@ -23,6 +24,14 @@ RSpec.describe Namespaces::StatisticsRefresherService, '#execute' do
service.execute(group)
end
context 'when given a subgroup' do
it 'does not create statistics for the subgroup' do
service.execute(subgroup)
expect(subgroup.reload.root_storage_statistics).not_to be_present
end
end
end
context 'with a root storage statistics relation', :sidekiq_might_not_need_inline do
@ -43,6 +52,16 @@ RSpec.describe Namespaces::StatisticsRefresherService, '#execute' do
service.execute(group)
end
context 'when given a subgroup' do
it "recalculates the root namespace's statistics" do
expect(Namespace::RootStorageStatistics)
.to receive(:safe_find_or_create_by!).with({ namespace_id: group.id })
.and_return(group.root_storage_statistics)
service.execute(subgroup)
end
end
end
context 'when something goes wrong' do

View File

@ -7,16 +7,23 @@
URL = "unix:/home/git/gitlab/redis/redis.socket"
[object_storage]
provider = "AWS" # Allowed options: AWS, AzureRM
provider = "AWS" # Allowed options: AWS, AzureRM, Google
[object_storage.s3]
aws_access_key_id = "YOUR AWS ACCESS KEY"
aws_secret_access_key = "YOUR AWS SECRET ACCESS KEY"
[object_store.azurerm]
[object_storage.azurerm]
azure_storage_account_name = "YOUR ACCOUNT NAME"
azure_storage_access_key = "YOUR ACCOUNT KEY"
[object_storage.google]
google_application_default = true # if the application default should be used
google_json_key_string = '''
JSON KEY STRING
'''
google_json_key_location = "PATH TO JSON KEY FILE"
[image_resizer]
max_scaler_procs = 4 # Recommendation: CPUs / 2
max_filesize = 250000

View File

@ -33,6 +33,7 @@ require (
golang.org/x/image v0.0.0-20220722155232-062f8c9fd539
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616
golang.org/x/net v0.0.0-20220722155237-a158d28d115b
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a
golang.org/x/tools v0.1.12
google.golang.org/grpc v1.50.1
google.golang.org/protobuf v1.28.1
@ -42,8 +43,10 @@ require (
require (
cloud.google.com/go v0.100.2 // indirect
cloud.google.com/go/compute v1.5.0 // indirect
cloud.google.com/go/iam v0.3.0 // indirect
cloud.google.com/go/monitoring v1.4.0 // indirect
cloud.google.com/go/profiler v0.1.0 // indirect
cloud.google.com/go/storage v1.21.0 // indirect
cloud.google.com/go/trace v1.2.0 // indirect
contrib.go.opencensus.io/exporter/stackdriver v0.13.10 // indirect
github.com/Azure/azure-pipeline-go v0.2.3 // indirect
@ -105,7 +108,6 @@ require (
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect
golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a // indirect
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect
golang.org/x/text v0.3.8 // indirect

View File

@ -1,17 +1,22 @@
package config
import (
"context"
"fmt"
"math"
"net/url"
"os"
"runtime"
"strings"
"time"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/BurntSushi/toml"
"gitlab.com/gitlab-org/labkit/log"
"gocloud.dev/blob"
"gocloud.dev/blob/azureblob"
"gocloud.dev/blob/gcsblob"
"gocloud.dev/gcp"
"golang.org/x/oauth2/google"
)
type TomlURL struct {
@ -37,8 +42,9 @@ func (d *TomlDuration) UnmarshalText(text []byte) error {
type ObjectStorageCredentials struct {
Provider string
S3Credentials S3Credentials `toml:"s3"`
AzureCredentials AzureCredentials `toml:"azurerm"`
S3Credentials S3Credentials `toml:"s3"`
AzureCredentials AzureCredentials `toml:"azurerm"`
GoogleCredentials GoogleCredentials `toml:"google"`
}
type ObjectStorageConfig struct {
@ -69,6 +75,12 @@ type AzureCredentials struct {
AccountKey string `toml:"azure_storage_access_key"`
}
type GoogleCredentials struct {
ApplicationDefault bool `toml:"google_application_default"`
JSONKeyString string `toml:"google_json_key_string"`
JSONKeyLocation string `toml:"google_json_key_location"`
}
type RedisConfig struct {
URL TomlURL
Sentinel []TomlURL
@ -143,27 +155,83 @@ func (c *Config) RegisterGoCloudURLOpeners() error {
creds := c.ObjectStorageCredentials
if strings.EqualFold(creds.Provider, "AzureRM") && creds.AzureCredentials.AccountName != "" && creds.AzureCredentials.AccountKey != "" {
accountName := azureblob.AccountName(creds.AzureCredentials.AccountName)
accountKey := azureblob.AccountKey(creds.AzureCredentials.AccountKey)
credential, err := azureblob.NewCredential(accountName, accountKey)
urlOpener, err := creds.AzureCredentials.getURLOpener()
if err != nil {
log.WithError(err).Error("error creating Azure credentials")
return err
}
pipeline := azureblob.NewPipeline(credential, azblob.PipelineOptions{})
c.ObjectStorageConfig.URLMux.RegisterBucket(azureblob.Scheme, urlOpener)
}
azureURLOpener := &azureURLOpener{
&azureblob.URLOpener{
AccountName: accountName,
Pipeline: pipeline,
Options: azureblob.Options{Credential: credential},
},
if strings.EqualFold(creds.Provider, "Google") && (creds.GoogleCredentials.JSONKeyLocation != "" || creds.GoogleCredentials.JSONKeyString != "" || creds.GoogleCredentials.ApplicationDefault) {
urlOpener, err := creds.GoogleCredentials.getURLOpener()
if err != nil {
return err
}
c.ObjectStorageConfig.URLMux.RegisterBucket(azureblob.Scheme, azureURLOpener)
c.ObjectStorageConfig.URLMux.RegisterBucket(gcsblob.Scheme, urlOpener)
}
return nil
}
func (creds *AzureCredentials) getURLOpener() (*azureURLOpener, error) {
accountName := azureblob.AccountName(creds.AccountName)
accountKey := azureblob.AccountKey(creds.AccountKey)
credential, err := azureblob.NewCredential(accountName, accountKey)
if err != nil {
return nil, fmt.Errorf("error creating Azure credentials: %w", err)
}
pipeline := azureblob.NewPipeline(credential, azblob.PipelineOptions{})
return &azureURLOpener{
&azureblob.URLOpener{
AccountName: accountName,
Pipeline: pipeline,
Options: azureblob.Options{Credential: credential},
},
}, nil
}
func (creds *GoogleCredentials) getURLOpener() (*gcsblob.URLOpener, error) {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) // lint:allow context.Background
defer cancel()
gcpCredentials, err := creds.getGCPCredentials(ctx)
if err != nil {
return nil, err
}
client, err := gcp.NewHTTPClient(
gcp.DefaultTransport(),
gcp.CredentialsTokenSource(gcpCredentials),
)
if err != nil {
return nil, fmt.Errorf("error creating Google HTTP client: %w", err)
}
return &gcsblob.URLOpener{
Client: client,
}, nil
}
func (creds *GoogleCredentials) getGCPCredentials(ctx context.Context) (*google.Credentials, error) {
const gcpCredentialsScope = "https://www.googleapis.com/auth/devstorage.read_write"
if creds.ApplicationDefault {
return gcp.DefaultCredentials(ctx)
}
if creds.JSONKeyLocation != "" {
b, err := os.ReadFile(creds.JSONKeyLocation)
if err != nil {
return nil, fmt.Errorf("error reading Google json key location: %w", err)
}
return google.CredentialsFromJSON(ctx, b, gcpCredentialsScope)
}
b := []byte(creds.JSONKeyString)
return google.CredentialsFromJSON(ctx, b, gcpCredentialsScope)
}

View File

@ -1,6 +1,8 @@
package config
import (
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
@ -15,6 +17,34 @@ azure_storage_account_name = "azuretester"
azure_storage_access_key = "deadbeef"
`
const googleConfigWithKeyLocation = `
[object_storage]
provider = "Google"
[object_storage.google]
google_json_key_location = "../../testdata/google_dummy_credentials.json"
`
const googleConfigWithKeyString = `
[object_storage]
provider = "Google"
[object_storage.google]
google_json_key_string = """
{
"type": "service_account"
}
"""
`
const googleConfigWithApplicationDefault = `
[object_storage]
provider = "Google"
[object_storage.google]
google_application_default = true
`
func TestLoadEmptyConfig(t *testing.T) {
config := ``
@ -55,12 +85,10 @@ aws_secret_access_key = "gdk-minio"
require.Equal(t, expected, cfg.ObjectStorageCredentials)
}
func TestRegisterGoCloudURLOpeners(t *testing.T) {
func TestRegisterGoCloudAzureURLOpeners(t *testing.T) {
cfg, err := LoadConfig(azureConfig)
require.NoError(t, err)
require.NotNil(t, cfg.ObjectStorageCredentials, "Expected object storage credentials")
expected := ObjectStorageCredentials{
Provider: "AzureRM",
AzureCredentials: AzureCredentials{
@ -70,13 +98,68 @@ func TestRegisterGoCloudURLOpeners(t *testing.T) {
}
require.Equal(t, expected, cfg.ObjectStorageCredentials)
require.Nil(t, cfg.ObjectStorageConfig.URLMux)
testRegisterGoCloudURLOpener(t, cfg, "azblob")
}
func TestRegisterGoCloudGoogleURLOpenersWithJSONKeyLocation(t *testing.T) {
cfg, err := LoadConfig(googleConfigWithKeyLocation)
require.NoError(t, err)
expected := ObjectStorageCredentials{
Provider: "Google",
GoogleCredentials: GoogleCredentials{
JSONKeyLocation: "../../testdata/google_dummy_credentials.json",
},
}
require.Equal(t, expected, cfg.ObjectStorageCredentials)
testRegisterGoCloudURLOpener(t, cfg, "gs")
}
func TestRegisterGoCloudGoogleURLOpenersWithJSONKeyString(t *testing.T) {
cfg, err := LoadConfig(googleConfigWithKeyString)
require.NoError(t, err)
expected := ObjectStorageCredentials{
Provider: "Google",
GoogleCredentials: GoogleCredentials{
JSONKeyString: `{
"type": "service_account"
}
`,
},
}
require.Equal(t, expected, cfg.ObjectStorageCredentials)
testRegisterGoCloudURLOpener(t, cfg, "gs")
}
func TestRegisterGoCloudGoogleURLOpenersWithApplicationDefault(t *testing.T) {
cfg, err := LoadConfig(googleConfigWithApplicationDefault)
require.NoError(t, err)
expected := ObjectStorageCredentials{
Provider: "Google",
GoogleCredentials: GoogleCredentials{
ApplicationDefault: true,
},
}
require.Equal(t, expected, cfg.ObjectStorageCredentials)
path, err := filepath.Abs("../../testdata/google_dummy_credentials.json")
require.NoError(t, err)
os.Setenv("GOOGLE_APPLICATION_CREDENTIALS", path)
defer os.Unsetenv("GOOGLE_APPLICATION_CREDENTIALS")
testRegisterGoCloudURLOpener(t, cfg, "gs")
}
func testRegisterGoCloudURLOpener(t *testing.T, cfg *Config, bucketScheme string) {
t.Helper()
require.NoError(t, cfg.RegisterGoCloudURLOpeners())
require.NotNil(t, cfg.ObjectStorageConfig.URLMux)
require.True(t, cfg.ObjectStorageConfig.URLMux.ValidBucketScheme("azblob"))
require.Equal(t, []string{"azblob"}, cfg.ObjectStorageConfig.URLMux.BucketSchemes())
require.Equal(t, []string{bucketScheme}, cfg.ObjectStorageConfig.URLMux.BucketSchemes())
}
func TestLoadImageResizerConfig(t *testing.T) {

View File

@ -42,10 +42,15 @@ func NewGoCloudObject(p *GoCloudObjectParams) (*GoCloudObject, error) {
return o, nil
}
const ChunkSize = 5 * 1024 * 1024
func (o *GoCloudObject) Upload(ctx context.Context, r io.Reader) error {
defer o.bucket.Close()
writer, err := o.bucket.NewWriter(ctx, o.objectName, nil)
writerOptions := &blob.WriterOptions{
BufferSize: ChunkSize,
}
writer, err := o.bucket.NewWriter(ctx, o.objectName, writerOptions)
if err != nil {
log.ContextLogger(ctx).WithError(err).Error("error creating GoCloud bucket")
return err

View File

@ -0,0 +1,12 @@
{
"type": "service_account",
"project_id": "test",
"private_key_id": "test",
"private_key": "-----BEGIN PRIVATE KEY-----\ntest\n-----END PRIVATE KEY-----\n",
"client_email": "test@test.iam.gserviceaccount.com",
"client_id": "1234567890",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test.iam.gserviceaccount.com"
}