Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-09-06 03:12:08 +00:00
parent f48ded4221
commit 091f928203
25 changed files with 251 additions and 181 deletions

.gitlab/ci/_skip.yml Normal file
View File

@@ -0,0 +1,11 @@
# no-op pipeline template for skipping whole child pipeline execution
no-op:
image: ${GITLAB_DEPENDENCY_PROXY}alpine:latest
stage: test
variables:
GIT_STRATEGY: none
script:
- echo "${SKIP_MESSAGE:-no-op run, nothing will be executed!}"
rules:
- when: always
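For context: `scripts/generate-e2e-pipeline` copies this template into the generated child-pipeline artifact whenever every E2E suite can be skipped, so the parent's trigger job ends up running only the `no-op` job above. A minimal sketch of that consumer, assembled from the `e2e:package-and-test` trigger job changed later in this commit (no new names are introduced):

```yaml
# Parent-pipeline trigger job (sketch): when the generator copies
# .gitlab/ci/_skip.yml into the artifact, the triggered child pipeline
# consists solely of the `no-op` job and echoes SKIP_MESSAGE.
e2e:package-and-test:
  stage: qa
  variables:
    SKIP_MESSAGE: Skipping package-and-test due to mr containing only quarantine changes!
  trigger:
    strategy: depend
    include:
      - artifact: package-and-test-pipeline.yml
        job: e2e-test-pipeline-generate
```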

View File

@@ -3,6 +3,7 @@
include:
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/package-and-test/rules.gitlab-ci.yml
- local: .gitlab/ci/package-and-test/variables.gitlab-ci.yml
- project: gitlab-org/quality/pipeline-common
ref: 1.2.1
file:

View File

@@ -1,14 +0,0 @@
# no-op pipeline triggered on quarantine only changes
stages:
- qa
no-op:
image: ${GITLAB_DEPENDENCY_PROXY}alpine:latest
stage: qa
variables:
GIT_STRATEGY: none
script:
- echo "Skipping E2E tests because the MR includes only quarantine changes"
rules:
- when: always

View File

@@ -0,0 +1,10 @@
# Default variables for package-and-test
variables:
RELEASE: "gitlab/gitlab-ee:nightly"
SKIP_REPORT_IN_ISSUES: "true"
OMNIBUS_GITLAB_CACHE_UPDATE: "false"
COLORIZED_LOGS: "true"
QA_LOG_LEVEL: "info"
QA_TESTS: ""
QA_FEATURE_FLAGS: ""

View File

@@ -66,48 +66,18 @@ qa:update-qa-cache:
script:
- echo "Cache has been updated and ready to be uploaded."
populate-e2e-test-vars:
extends:
- .qa-job-base
- .qa:rules:determine-e2e-tests
stage: prepare
variables:
ENV_FILE: $CI_PROJECT_DIR/qa_tests_vars.env
COLORIZED_LOGS: "true"
script:
- bundle exec rake "ci:detect_changes[$ENV_FILE]"
artifacts:
expire_in: 1 day
reports:
dotenv: $ENV_FILE
e2e-test-pipeline-generate:
extends:
- .qa:rules:determine-e2e-tests
stage: prepare
when: on_success
needs:
- populate-e2e-test-vars
variables:
PIPELINE_YML: package-and-test.yml
script:
- scripts/generate-e2e-pipeline $PIPELINE_YML
artifacts:
expire_in: 1 day
paths:
- $PIPELINE_YML
e2e:package-and-test:
extends:
- .qa:rules:package-and-test
stage: qa
when: on_success
needs:
- build-assets-image
- build-qa-image
- e2e-test-pipeline-generate
variables:
SKIP_MESSAGE: Skipping package-and-test due to mr containing only quarantine changes!
trigger:
strategy: depend
include:
- artifact: package-and-test.yml
- artifact: package-and-test-pipeline.yml
job: e2e-test-pipeline-generate

View File

@@ -1,13 +0,0 @@
stages:
- review
include:
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/rules.gitlab-ci.yml
no-op:
extends:
- .review:rules:start-review-app-pipeline
stage: review
script:
- echo "Skip Review App because the MR includes only quarantine changes"

View File

@@ -23,34 +23,13 @@ review-cleanup:
- ruby -rrubygems scripts/review_apps/automated_cleanup.rb
- gcp_cleanup
review-app-pipeline-generate:
image: ${GITLAB_DEPENDENCY_PROXY}ruby:${RUBY_VERSION}
stage: prepare
extends:
- .review:rules:start-review-app-pipeline
needs:
- populate-e2e-test-vars
script:
- |
if [ "$QA_SKIP_ALL_TESTS" == "true" ]; then
echo "Skip Review App because the MR includes only quarantine changes"
cp .gitlab/ci/review-apps/skip-qa.gitlab-ci.yml review-app-pipeline.yml
else
echo "Review App will use the full pipeline"
cp .gitlab/ci/review-apps/main.gitlab-ci.yml review-app-pipeline.yml
fi
artifacts:
expire_in: 7d
paths:
- review-app-pipeline.yml
start-review-app-pipeline:
extends:
- .review:rules:start-review-app-pipeline
resource_group: review/${CI_COMMIT_REF_SLUG}${SCHEDULE_TYPE} # CI_ENVIRONMENT_SLUG is not available here and we want this to be the same as the environment
stage: review
needs:
- review-app-pipeline-generate
- job: e2e-test-pipeline-generate
- job: build-assets-image
artifacts: false
# These variables are set in the pipeline schedules.
@@ -59,11 +38,12 @@ start-review-app-pipeline:
variables:
SCHEDULE_TYPE: $SCHEDULE_TYPE
DAST_RUN: $DAST_RUN
SKIP_MESSAGE: Skipping review-app due to mr containing only quarantine changes!
trigger:
strategy: depend
include:
- artifact: review-app-pipeline.yml
job: review-app-pipeline-generate
strategy: depend
job: e2e-test-pipeline-generate
danger-review:
extends:

View File

@@ -156,3 +156,23 @@ detect-previous-failed-tests:
expire_in: 7d
paths:
- ${PREVIOUS_FAILED_TESTS_DIR}
e2e-test-pipeline-generate:
extends:
- .qa-job-base
- .minimal-job
- .qa:rules:determine-e2e-tests
stage: prepare
variables:
ENV_FILE: $CI_PROJECT_DIR/qa_tests_vars.env
OMNIBUS_PIPELINE_YML: package-and-test-pipeline.yml
REVIEW_PIPELINE_YML: review-app-pipeline.yml
COLORIZED_LOGS: "true"
script:
- bundle exec rake "ci:detect_changes[$ENV_FILE]"
- cd $CI_PROJECT_DIR && scripts/generate-e2e-pipeline
artifacts:
expire_in: 1 day
paths:
- $OMNIBUS_PIPELINE_YML
- $REVIEW_PIPELINE_YML

View File

@@ -66,7 +66,7 @@ module Commits
validate_on_branch!
validate_branch_existence!
validate_new_branch_name! if different_branch?
validate_new_branch_name! if project.empty_repo? || different_branch?
end
def validate_permissions!

View File

@@ -607,6 +607,7 @@ job3:
stage: deploy
script:
- deploy_to_staging
environment: staging
```
In this example, `job1` and `job2` run in parallel:
@@ -1478,6 +1479,7 @@ test linux:
deploy:
stage: deploy
script: make deploy
environment: production
```
In this example, two jobs have artifacts: `build osx` and `build linux`. When `test osx` is executed,
@@ -2120,6 +2122,7 @@ mac:rspec:
production:
stage: deploy
script: echo "Running production..."
environment: production
```
This example creates four paths of execution:
@@ -2382,12 +2385,14 @@ deploy-job:
- job: test-job2
optional: true
- job: test-job1
environment: production
review-job:
stage: deploy
needs:
- job: test-job2
optional: true
environment: review
```
In this example:
@@ -2668,6 +2673,7 @@ pages:
- public
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
environment: production
```
This example moves all files from the root of the project to the `public/` directory.
@@ -2749,6 +2755,7 @@ deploystacks:
STACK: [monitoring, backup, app]
- PROVIDER: [gcp, vultr]
STACK: [data, processing]
environment: $PROVIDER/$STACK
```
The example generates 10 parallel `deploystacks` jobs, each with different values
@@ -3718,6 +3725,7 @@ job4:
stage: deploy
script:
- echo "This job deploys the code. It runs when the test stage completes."
environment: production
```
**Additional details**:
@@ -4065,6 +4073,7 @@ deploy_job:
stage: deploy
script:
- deploy-script --url $DEPLOY_SITE --path "/"
environment: production
deploy_review_job:
stage: deploy
@@ -4072,6 +4081,7 @@ deploy_review_job:
REVIEW_PATH: "/review"
script:
- deploy-review-script --url $DEPLOY_SITE --path $REVIEW_PATH
environment: production
```
**Additional details**:
@@ -4164,6 +4174,7 @@ deploy_job:
script:
- make deploy
when: manual
environment: production
cleanup_job:
stage: cleanup

View File

@@ -244,6 +244,7 @@ pages-job:
stage: deploy
script:
- curl --header 'PRIVATE-TOKEN: ${PRIVATE_TOKEN}' "https://gitlab.example.com/api/v4/projects"
environment: production
```
The YAML parser thinks the `:` defines a YAML keyword, and outputs the
@@ -257,6 +258,7 @@ pages-job:
stage: deploy
script:
- 'curl --header "PRIVATE-TOKEN: ${PRIVATE_TOKEN}" "https://gitlab.example.com/api/v4/projects"'
environment: production
```
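Quoting the whole command, as above, is the documented fix. As an additional sketch (not from this page), a folded block scalar achieves the same result by keeping the entire command a single string, so the colon is never interpreted as a key separator:

```yaml
pages-job:
  stage: deploy
  script:
    # With a folded scalar (`>`), these lines are joined into one string and
    # the colon inside the header is treated as plain text, not YAML syntax.
    - >
      curl --header "PRIVATE-TOKEN: ${PRIVATE_TOKEN}"
      "https://gitlab.example.com/api/v4/projects"
  environment: production
```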
### Job does not fail when using `&&` in a script

View File

@@ -20,21 +20,19 @@ module Gitlab
::RequestStore[call_duration_key] += duration
end
def add_call_details(duration, args)
def add_call_details(duration, commands)
return unless Gitlab::PerformanceBar.enabled_for_request?
# redis-rb passes an array (e.g. [[:get, key]])
return unless args.length == 1
detail_store << {
cmd: args.first,
commands: commands,
duration: duration,
backtrace: ::Gitlab::BacktraceCleaner.clean_backtrace(caller)
}
end
def increment_request_count
def increment_request_count(amount = 1)
::RequestStore[request_count_key] ||= 0
::RequestStore[request_count_key] += 1
::RequestStore[request_count_key] += amount
end
def increment_read_bytes(num_bytes)
@@ -78,9 +76,9 @@ module Gitlab
self
end
def instance_count_request
def instance_count_request(amount = 1)
@request_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_requests_total, 'Client side Redis request count, per Redis server')
@request_counter.increment({ storage: storage_key })
@request_counter.increment({ storage: storage_key }, amount)
end
def instance_count_exception(ex)

View File

@@ -13,27 +13,15 @@ module Gitlab
end
end
def call(*args, &block)
start = Gitlab::Metrics::System.monotonic_time # must come first so that 'start' is always defined
instrumentation_class.instance_count_request
instrumentation_class.redis_cluster_validate!(args.first)
super(*args, &block)
rescue ::Redis::BaseError => ex
instrumentation_class.instance_count_exception(ex)
raise ex
ensure
duration = Gitlab::Metrics::System.monotonic_time - start
unless APDEX_EXCLUDE.include?(command_from_args(args))
instrumentation_class.instance_observe_duration(duration)
def call(command)
instrument_call([command]) do
super
end
end
if ::RequestStore.active?
# These metrics measure total Redis usage per Rails request / job.
instrumentation_class.increment_request_count
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, args)
def call_pipeline(pipeline)
instrument_call(pipeline.commands) do
super
end
end
@@ -50,6 +38,31 @@ module Gitlab
private
def instrument_call(commands)
start = Gitlab::Metrics::System.monotonic_time # must come first so that 'start' is always defined
instrumentation_class.instance_count_request(commands.size)
commands.each { |c| instrumentation_class.redis_cluster_validate!(c) }
yield
rescue ::Redis::BaseError => ex
instrumentation_class.instance_count_exception(ex)
raise ex
ensure
duration = Gitlab::Metrics::System.monotonic_time - start
unless exclude_from_apdex?(commands)
commands.each { instrumentation_class.instance_observe_duration(duration / commands.size) }
end
if ::RequestStore.active?
# These metrics measure total Redis usage per Rails request / job.
instrumentation_class.increment_request_count(commands.size)
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, commands)
end
end
def measure_write_size(command)
size = 0
@@ -97,10 +110,8 @@ module Gitlab
@options[:instrumentation_class] # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def command_from_args(args)
command = args[0]
command = command[0] if command.is_a?(Array)
command.to_s.downcase
def exclude_from_apdex?(commands)
commands.any? { |command| APDEX_EXCLUDE.include?(command.first.to_s.downcase) }
end
end
end

View File

@@ -15,8 +15,10 @@ module Gitlab
keys = read(key).map { |value| "#{cache_namespace}:#{value}" }
keys << cache_key(key)
redis.pipelined do |pipeline|
keys.each_slice(1000) { |subset| pipeline.unlink(*subset) }
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
redis.pipelined do |pipeline|
keys.each_slice(1000) { |subset| pipeline.unlink(*subset) }
end
end
end
end

View File

@@ -112,10 +112,12 @@ module Gitlab
end
def delete!
with_redis do |redis|
redis.multi do |multi|
multi.del(idempotency_key, deduplicated_flag_key)
delete_wal_locations!(multi)
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
with_redis do |redis|
redis.multi do |multi|
multi.del(idempotency_key, deduplicated_flag_key)
delete_wal_locations!(multi)
end
end
end
end

View File

@@ -16,7 +16,11 @@ module Peek
private
def format_call_details(call)
super.merge(cmd: format_command(call[:cmd]),
cmd = call[:commands].map do |command|
format_command(command)
end.join(', ')
super.merge(cmd: cmd,
instance: call[:storage])
end

View File

@@ -144,7 +144,7 @@ namespace :gitlab do
rescue ActiveRecord::StatementInvalid => err
raise unless err.cause.is_a?(PG::ReadOnlySqlTransaction)
warn "WARNING: Could not write to the database #{db_config.name}: #{err.message}"
warn "WARNING: Could not write to the database #{db_config.name}: cannot execute UPSERT in a read-only transaction"
end
def get_db_identifier(db_config)

View File

@@ -11809,6 +11809,12 @@ msgstr ""
msgid "DastProfiles|A passive scan monitors all HTTP messages (requests and responses) sent to the target. An active scan attacks the target to find potential vulnerabilities."
msgstr ""
msgid "DastProfiles|A scanner profile defines the configuration details of a security scanner. %{linkStart}Learn more%{linkEnd}."
msgstr ""
msgid "DastProfiles|A site profile defines the attributes and configuration details of your deployed application, website, or API. %{linkStart}Learn more%{linkEnd}."
msgstr ""
msgid "DastProfiles|AJAX spider"
msgstr ""
@@ -12007,16 +12013,13 @@ msgstr ""
msgid "DastProfiles|Scan mode"
msgstr ""
msgid "DastProfiles|Scanner Profile"
msgstr ""
msgid "DastProfiles|Scanner Profiles"
msgstr ""
msgid "DastProfiles|Scanner name"
msgstr ""
msgid "DastProfiles|Scanner profiles define the configuration details of a security scanner. %{linkStart}Learn more%{linkEnd}."
msgid "DastProfiles|Scanner profile"
msgstr ""
msgid "DastProfiles|Scanner profiles"
msgstr ""
msgid "DastProfiles|Select a scanner profile to run a DAST scan"
@@ -12037,16 +12040,13 @@ msgstr ""
msgid "DastProfiles|Show debug messages"
msgstr ""
msgid "DastProfiles|Site Profile"
msgstr ""
msgid "DastProfiles|Site Profiles"
msgstr ""
msgid "DastProfiles|Site name"
msgstr ""
msgid "DastProfiles|Site profiles define the attributes and configuration details of your deployed application, website, or API. %{linkStart}Learn more%{linkEnd}."
msgid "DastProfiles|Site profile"
msgstr ""
msgid "DastProfiles|Site profiles"
msgstr ""
msgid "DastProfiles|Site type"

View File

@@ -27,33 +27,29 @@ namespace :ci do
next
end
# run all tests when framework changes detected
if qa_changes.framework_changes?
tests = qa_changes.qa_tests
if qa_changes.framework_changes? # run all tests when framework changes detected
logger.info(" merge request contains qa framework changes, full test suite will be executed")
append_to_file(env_file, <<~TXT)
QA_FRAMEWORK_CHANGES=true
TXT
end
# detect if any of the test suites would not execute any tests and populate environment variables
tests = qa_changes.qa_tests
if tests
logger.info(" following changed specs detected: '#{tests}'")
elsif tests
logger.info(" detected following specs to execute: '#{tests}'")
else
logger.info(" no specific spec changes detected")
logger.info(" no specific specs to execute detected")
end
# always check all test suites in case a suite is defined but doesn't have any runnable specs
suites = QA::Tools::Ci::NonEmptySuites.new(tests).fetch
append_to_file(env_file, <<~TXT)
QA_TESTS=#{tests}
QA_SUITES=#{suites}
QA_TESTS='#{tests}'
QA_SUITES='#{suites}'
TXT
# check if mr contains feature flag changes
feature_flags = QA::Tools::Ci::FfChanges.new(diff).fetch
append_to_file(env_file, <<~TXT)
QA_FEATURE_FLAGS=#{feature_flags}
QA_FEATURE_FLAGS='#{feature_flags}'
TXT
end
end

View File

@@ -5,21 +5,21 @@ set -e
# Script to generate e2e test child pipeline
# This is required because environment variables that are generated dynamically are not picked up by rules in child pipelines
pipeline_yml="${1:-package-and-test.yml}"
source $ENV_FILE
echo "Generating child pipeline yml definitions for review-app and package-and-test child pipelines"
if [ "$QA_SKIP_ALL_TESTS" == "true" ]; then
echo "Generated no-op child pipeline due to QA_SKIP_ALL_TESTS set to 'true'"
cp .gitlab/ci/package-and-test/skip.gitlab-ci.yml $pipeline_yml
skip_pipeline=".gitlab/ci/_skip.yml"
echo "Using ${skip_pipeline} due to QA_SKIP_ALL_TESTS set to 'true'"
cp $skip_pipeline "$OMNIBUS_PIPELINE_YML"
cp $skip_pipeline "$REVIEW_PIPELINE_YML"
exit
fi
variables=$(cat <<YML
common_variables=$(cat <<YML
variables:
RELEASE: "${CI_REGISTRY}/gitlab-org/build/omnibus-gitlab-mirror/gitlab-ee:${CI_COMMIT_SHA}"
SKIP_REPORT_IN_ISSUES: "${SKIP_REPORT_IN_ISSUES:-true}"
OMNIBUS_GITLAB_CACHE_UPDATE: "${OMNIBUS_GITLAB_CACHE_UPDATE:-false}"
COLORIZED_LOGS: "true"
QA_LOG_LEVEL: "info"
QA_TESTS: "$QA_TESTS"
QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
@@ -27,8 +27,20 @@ variables:
YML
)
echo "$variables" >$pipeline_yml
cat .gitlab/ci/package-and-test/main.gitlab-ci.yml >>$pipeline_yml
echo "Using .gitlab/ci/review-apps/main.gitlab-ci.yml and .gitlab/ci/package-and-test/main.gitlab-ci.yml"
echo "Generated e2e:package-and-test pipeline with following variables section:"
echo "$variables"
cp .gitlab/ci/review-apps/main.gitlab-ci.yml "$REVIEW_PIPELINE_YML"
echo "$common_variables" >>"$REVIEW_PIPELINE_YML"
echo "Successfully generated review-app pipeline with following variables section:"
echo -e "$common_variables"
omnibus_variables=$(cat <<YML
RELEASE: "${CI_REGISTRY}/gitlab-org/build/omnibus-gitlab-mirror/gitlab-ee:${CI_COMMIT_SHA}"
OMNIBUS_GITLAB_CACHE_UPDATE: "${OMNIBUS_GITLAB_CACHE_UPDATE:-false}"
YML
)
cp .gitlab/ci/package-and-test/main.gitlab-ci.yml "$OMNIBUS_PIPELINE_YML"
echo "$common_variables" >>"$OMNIBUS_PIPELINE_YML"
echo "$omnibus_variables" >>"$OMNIBUS_PIPELINE_YML"
echo "Successfully generated package-and-test pipeline with following variables section:"
echo -e "${common_variables}\n${omnibus_variables}"

View File

@@ -65,6 +65,13 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do
expect(instrumentation_class_b.get_request_count).to eq(2)
end
end
it 'increments by the given amount' do
instrumentation_class_a.increment_request_count(2)
instrumentation_class_a.increment_request_count(3)
expect(instrumentation_class_a.get_request_count).to eq(5)
end
end
describe '.increment_write_bytes' do
@@ -103,21 +110,21 @@ RSpec.describe Gitlab::Instrumentation::RedisBase, :request_store do
context 'storage key overlapping' do
it 'keys do not overlap across storages' do
2.times do
instrumentation_class_a.add_call_details(0.3, [:set])
instrumentation_class_b.add_call_details(0.4, [:set])
instrumentation_class_a.add_call_details(0.3, [[:set]])
instrumentation_class_b.add_call_details(0.4, [[:set]])
end
expect(instrumentation_class_a.detail_store).to match(
[
a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array)),
a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array))
a_hash_including(commands: [[:set]], duration: 0.3, backtrace: an_instance_of(Array)),
a_hash_including(commands: [[:set]], duration: 0.3, backtrace: an_instance_of(Array))
]
)
expect(instrumentation_class_b.detail_store).to match(
[
a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array)),
a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array))
a_hash_including(commands: [[:set]], duration: 0.4, backtrace: an_instance_of(Array)),
a_hash_including(commands: [[:set]], duration: 0.4, backtrace: an_instance_of(Array))
]
)
end

View File

@@ -47,11 +47,22 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
let(:instrumentation_class) { Gitlab::Redis::SharedState.instrumentation_class }
it 'counts successful requests' do
expect(instrumentation_class).to receive(:instance_count_request).and_call_original
expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original
Gitlab::Redis::SharedState.with { |redis| redis.call(:get, 'foobar') }
end
it 'counts successful pipelined requests' do
expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
Gitlab::Redis::SharedState.with do |redis|
redis.pipelined do |pipeline|
pipeline.call(:get, 'foobar')
pipeline.call(:get, 'foobarbaz')
end
end
end
it 'counts exceptions' do
expect(instrumentation_class).to receive(:instance_count_exception)
.with(instance_of(Redis::CommandError)).and_call_original
@@ -84,6 +95,20 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
Gitlab::Redis::SharedState.with { |redis| redis.call(*command) }
end
end
context 'with pipelined commands' do
it 'measures requests that do not have blocking commands' do
expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
.and_call_original
Gitlab::Redis::SharedState.with do |redis|
redis.pipelined do |pipeline|
pipeline.call(:get, 'foobar')
pipeline.call(:get, 'foobarbaz')
end
end
end
end
end
describe 'commands not in the apdex' do
@@ -109,6 +134,19 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
end
end
context 'with pipelined commands' do
it 'skips requests that have blocking commands' do
expect(instrumentation_class).not_to receive(:instance_observe_duration)
Gitlab::Redis::SharedState.with do |redis|
redis.pipelined do |pipeline|
pipeline.call(:get, 'foo')
pipeline.call(:brpop, 'foobar', '0.01')
end
end
end
end
end
end
end

View File

@@ -7,17 +7,19 @@ RSpec.describe Peek::Views::RedisDetailed, :request_store do
using RSpec::Parameterized::TableSyntax
where(:cmd, :expected) do
[:auth, 'test'] | 'auth <redacted>'
[:set, 'key', 'value'] | 'set key <redacted>'
[:set, 'bad'] | 'set bad'
[:hmset, 'key1', 'value1', 'key2', 'value2'] | 'hmset key1 <redacted>'
[:get, 'key'] | 'get key'
where(:commands, :expected) do
[[:auth, 'test']] | 'auth <redacted>'
[[:set, 'key', 'value']] | 'set key <redacted>'
[[:set, 'bad']] | 'set bad'
[[:hmset, 'key1', 'value1', 'key2', 'value2']] | 'hmset key1 <redacted>'
[[:get, 'key']] | 'get key'
[[:get, 'key1'], [:get, 'key2']] | 'get key1, get key2'
[[:set, 'key1', 'value'], [:set, 'key2', 'value']] | 'set key1 <redacted>, set key2 <redacted>'
end
with_them do
it 'scrubs Redis commands' do
Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: cmd, duration: 1.second }
Gitlab::Instrumentation::Redis::SharedState.detail_store << { commands: commands, duration: 1.second }
expect(subject.results[:details].count).to eq(1)
expect(subject.results[:details].first)
@@ -29,9 +31,9 @@ end
end
it 'returns aggregated results' do
Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 0.001 }
Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 1.second }
Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: [:get, 'test'], duration: 1.second }
Gitlab::Instrumentation::Redis::Cache.detail_store << { commands: [[:get, 'test']], duration: 0.001 }
Gitlab::Instrumentation::Redis::Cache.detail_store << { commands: [[:get, 'test']], duration: 1.second }
Gitlab::Instrumentation::Redis::SharedState.detail_store << { commands: [[:get, 'test']], duration: 1.second }
expect(subject.results[:calls]).to eq(3)
expect(subject.results[:duration]).to eq('2001.00ms')

View File

@@ -530,6 +530,26 @@ RSpec.describe API::Commits do
end
end
context 'when repository is empty' do
let!(:project) { create(:project, :empty_repo) }
context 'when params are valid' do
before do
post api(url, user), params: valid_c_params
end
it_behaves_like "successfully creates the commit"
end
context 'when branch name is invalid' do
before do
post api(url, user), params: valid_c_params.merge(branch: 'wrong:name')
end
it { expect(response).to have_gitlab_http_status(:bad_request) }
end
end
context 'a new file with utf8 chars in project repo' do
before do
post api(url, user), params: valid_utf8_c_params

View File

@@ -216,7 +216,7 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_sc
let(:exception) { ActiveRecord::StatementInvalid.new("READONLY") }
before do
allow(exception).to receive(:cause).and_return(PG::ReadOnlySqlTransaction.new("cannot execute INSERT in a read-only transaction"))
allow(exception).to receive(:cause).and_return(PG::ReadOnlySqlTransaction.new("cannot execute UPSERT in a read-only transaction"))
allow(ActiveRecord::InternalMetadata).to receive(:upsert).at_least(:once).and_raise(exception)
end