Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-09-19 12:13:05 +00:00
parent bffc536bf8
commit 7de8ed230a
19 changed files with 716 additions and 77 deletions

View File

@@ -14,13 +14,13 @@ build-qa-image:
- .build-images:rules:build-qa-image
stage: build-images
needs: []
variables:
# Default latest tag for particular branch
QA_IMAGE_BRANCH: ${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_REF_SLUG}
script:
# Tag with commit SHA by default
- export QA_IMAGE="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_SHA}"
# Auto-deploy tag format uses first 12 letters of commit SHA. Tag with
# that reference also
# For branches, tag with slugified branch name. For tags, use the tag directly
- export QA_IMAGE_BRANCH="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_TAG:-$CI_COMMIT_REF_SLUG}"
# Auto-deploy tag format uses first 12 letters of commit SHA. Tag with that
# reference also
- export QA_IMAGE_FOR_AUTO_DEPLOY="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab-ee-qa:${CI_COMMIT_SHA:0:11}"
- echo $QA_IMAGE
- echo $QA_IMAGE_BRANCH

View File

@@ -167,6 +167,9 @@
- ".gitlab/ci/frontend.gitlab-ci.yml"
- ".gitlab/ci/build-images.gitlab-ci.yml"
- ".gitlab/ci/qa.gitlab-ci.yml"
- ".gitlab/ci/package-and-test/*.yml"
- ".gitlab/ci/review-apps/qa.gitlab-ci.yml"
- ".gitlab/ci/review-apps/rules.gitlab-ci.yml"
.gitaly-patterns: &gitaly-patterns
- "GITALY_SERVER_VERSION"

View File

@@ -65,6 +65,17 @@ class SearchController < ApplicationController
)
increment_search_counters
ensure
if @search_type
# If we raise an error somewhere in the @global_search_duration_s benchmark block, we will end up here
# with a 200 status code, but an empty @global_search_duration_s.
Gitlab::Metrics::GlobalSearchSlis.record_error_rate(
error: @global_search_duration_s.nil? || (status < 200 || status >= 400),
search_type: @search_type,
search_level: @search_level,
search_scope: @scope
)
end
end
def count

View File

@@ -13,7 +13,7 @@ module Namespaces
urgency :high
idempotent!
deduplicate :until_executing
deduplicate :until_executed, if_deduplicated: :reschedule_once
def perform
results = ::Ci::ProcessSyncEventsService.new(

View File

@@ -13,7 +13,7 @@ module Projects
urgency :high
idempotent!
deduplicate :until_executing
deduplicate :until_executed, if_deduplicated: :reschedule_once
def perform
results = ::Ci::ProcessSyncEventsService.new(

View File

@@ -0,0 +1,8 @@
---
name: global_search_error_rate_sli
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96667
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373800
milestone: '15.4'
type: development
group: group::application performance
default_enabled: false

View File

@@ -0,0 +1,8 @@
---
name: graphql_keyset_pagination_without_next_page_query
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97509
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373792
milestone: '15.4'
type: development
group: group::optimize
default_enabled: false

View File

@@ -1311,54 +1311,3 @@ Prints the metrics saved in `conversational_development_index_metrics`.
```shell
rake gitlab:usage_data:generate_and_send
```
## Elasticsearch
### Configuration attributes
Open the rails console (`gitlab rails c`) and run the following command to see all the available attributes:
```ruby
ApplicationSetting.last.attributes
```
Among other attributes, the output contains all the settings available in the [Elasticsearch Integration page](../../integration/advanced_search/elasticsearch.md), such as `elasticsearch_indexing`, `elasticsearch_url`, `elasticsearch_replicas`, and `elasticsearch_pause_indexing`.
#### Setting attributes
You can then set any one of the Elasticsearch integration settings by issuing a command similar to:
```ruby
ApplicationSetting.last.update(elasticsearch_url: '<your ES URL and port>')
#or
ApplicationSetting.last.update(elasticsearch_indexing: false)
```
#### Getting attributes
You can then check if the settings have been set in the [Elasticsearch Integration page](../../integration/advanced_search/elasticsearch.md) or in the rails console by issuing:
```ruby
Gitlab::CurrentSettings.elasticsearch_url
#or
Gitlab::CurrentSettings.elasticsearch_indexing
```
#### Changing the Elasticsearch password
```ruby
es_url = Gitlab::CurrentSettings.current_application_settings
# Confirm the current ElasticSearch URL
es_url.elasticsearch_url
# Set the ElasticSearch URL
es_url.elasticsearch_url = "http://<username>:<password>@your.es.host:<port>"
# Save the change
es_url.save!
```

View File

@@ -9,6 +9,64 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Use the following information to troubleshoot Elasticsearch issues.
## Set configurations in the Rails console
See [Starting a Rails console session](../../administration/operations/rails_console.md#starting-a-rails-console-session).
### List attributes
To list all available attributes:
1. Open the Rails console (`gitlab rails c`).
1. Run the following command:
```ruby
ApplicationSetting.last.attributes
```
The output contains all the settings available in [Elasticsearch integration](../../integration/advanced_search/elasticsearch.md), such as `elasticsearch_indexing`, `elasticsearch_url`, `elasticsearch_replicas`, and `elasticsearch_pause_indexing`.
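To narrow that output to only the Elasticsearch-related settings, a minimal sketch (assuming the relevant attribute names all share the `elasticsearch` prefix) is:

```ruby
# Show only the Elasticsearch-related settings and their current values
ApplicationSetting.last.attributes.select { |name, _value| name.start_with?('elasticsearch') }
```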
### Set attributes
To set an Elasticsearch integration setting, run a command like:
```ruby
ApplicationSetting.last.update(elasticsearch_url: '<your ES URL and port>')
#or
ApplicationSetting.last.update(elasticsearch_indexing: false)
```
### Get attributes
To check if the settings have been set in [Elasticsearch integration](../../integration/advanced_search/elasticsearch.md) or in the Rails console, run a command like:
```ruby
Gitlab::CurrentSettings.elasticsearch_url
#or
Gitlab::CurrentSettings.elasticsearch_indexing
```
### Change the password
To change the Elasticsearch password, run the following commands:
```ruby
es_url = Gitlab::CurrentSettings.current_application_settings
# Confirm the current Elasticsearch URL
es_url.elasticsearch_url
# Set the Elasticsearch URL
es_url.elasticsearch_url = "http://<username>:<password>@your.es.host:<port>"
# Save the change
es_url.save!
```
## View logs
One of the most valuable tools for identifying issues with the Elasticsearch

View File

@@ -75,6 +75,16 @@ module API
Gitlab::UsageDataCounters::SearchCounter.count(:all_searches)
paginate(@results)
ensure
# If we raise an error somewhere in the @search_duration_s benchmark block, we will end up here
# with a 200 status code, but an empty @search_duration_s.
Gitlab::Metrics::GlobalSearchSlis.record_error_rate(
error: @search_duration_s.nil? || (status < 200 || status >= 400),
search_type: search_type,
search_level: search_service(additional_params).level,
search_scope: search_scope
)
end
def snippets?

View File

@@ -59,11 +59,15 @@ module Gitlab
if before
true
elsif first
case sliced_nodes
when Array
sliced_nodes.size > limit_value
if Feature.enabled?(:graphql_keyset_pagination_without_next_page_query)
limited_nodes.size > limit_value
else
sliced_nodes.limit(1).offset(limit_value).exists? # rubocop: disable CodeReuse/ActiveRecord
case sliced_nodes
when Array
sliced_nodes.size > limit_value
else
sliced_nodes.limit(1).offset(limit_value).exists? # rubocop: disable CodeReuse/ActiveRecord
end
end
else
false
@@ -89,7 +93,7 @@ module Gitlab
# So we're ok loading them into memory here as that's bound to happen
# anyway. Having them ready means we can modify the result while
# rendering the fields.
@nodes ||= limited_nodes.to_a
@nodes ||= limited_nodes.to_a.take(limit_value) # rubocop: disable CodeReuse/ActiveRecord
end
def items
@@ -122,9 +126,15 @@ module Gitlab
@has_previous_page = paginated_nodes.count > limit_value
@has_previous_page ? paginated_nodes.last(limit_value) : paginated_nodes
elsif loaded?(sliced_nodes)
sliced_nodes.take(limit_value) # rubocop: disable CodeReuse/ActiveRecord
if Feature.enabled?(:graphql_keyset_pagination_without_next_page_query)
sliced_nodes.take(limit_value + 1) # rubocop: disable CodeReuse/ActiveRecord
else
sliced_nodes.take(limit_value) # rubocop: disable CodeReuse/ActiveRecord
end
elsif Feature.enabled?(:graphql_keyset_pagination_without_next_page_query)
sliced_nodes.limit(limit_value + 1).to_a
else
sliced_nodes.limit(limit_value) # rubocop: disable CodeReuse/ActiveRecord
sliced_nodes.limit(limit_value)
end
end
end
@@ -141,7 +151,7 @@ module Gitlab
def limit_value
# note: only first _or_ last can be specified, not both
@limit_value ||= [first, last, max_page_size].compact.min
@limit_value ||= [first, last, max_page_size, GitlabSchema.default_max_page_size].compact.min
end
def loaded?(items)
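The hunks above boil down to a lookahead pattern: when `graphql_keyset_pagination_without_next_page_query` is enabled, the connection loads `limit_value + 1` records up front and uses the surplus row to answer `has_next_page`, rather than issuing a separate `LIMIT 1 OFFSET limit` existence query. A minimal sketch of that pattern in plain Ruby (the method and array below are illustrative, not the connection's API):

```ruby
# Lookahead pagination sketch: fetch one extra record so has_next_page
# can be answered without a second query. Names here are illustrative only.
def paginate_with_lookahead(records, limit)
  window = records.take(limit + 1)      # one more row than the caller asked for
  {
    nodes: window.take(limit),          # trim back to the requested page size
    has_next_page: window.size > limit  # the surplus row signals another page
  }
end

paginate_with_lookahead((1..5).to_a, 3)
# => { nodes: [1, 2, 3], has_next_page: true }
```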

View File

@@ -13,9 +13,13 @@ module Gitlab
ADVANCED_CODE_TARGET_S = 13.546
def initialize_slis!
return unless Feature.enabled?(:global_search_custom_slis)
if Feature.enabled?(:global_search_custom_slis)
Gitlab::Metrics::Sli::Apdex.initialize_sli(:global_search, possible_labels)
end
Gitlab::Metrics::Sli::Apdex.initialize_sli(:global_search, possible_labels)
return unless Feature.enabled?(:global_search_error_rate_sli)
Gitlab::Metrics::Sli::ErrorRate.initialize_sli(:global_search, possible_labels)
end
def record_apdex(elapsed:, search_type:, search_level:, search_scope:)
@@ -27,6 +31,15 @@
)
end
def record_error_rate(error:, search_type:, search_level:, search_scope:)
return unless Feature.enabled?(:global_search_error_rate_sli)
Gitlab::Metrics::Sli::ErrorRate[:global_search].increment(
labels: labels(search_type: search_type, search_level: search_level, search_scope: search_scope),
error: error
)
end
private
def duration_target(search_type, search_scope)

View File

@@ -281,6 +281,48 @@ RSpec.describe SearchController do
get :show, params: { scope: 'issues', search: 'hello world' }
end
context 'custom search sli error rate' do
context 'when the search is successful' do
it 'increments the custom search sli error rate with error: false' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: false,
search_scope: 'issues',
search_type: 'basic',
search_level: 'global'
)
get :show, params: { scope: 'issues', search: 'hello world' }
end
end
context 'when the search raises an error' do
before do
allow_next_instance_of(SearchService) do |service|
allow(service).to receive(:search_results).and_raise(ActiveRecord::QueryCanceled)
end
end
it 'increments the custom search sli error rate with error: true' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: true,
search_scope: 'issues',
search_type: 'basic',
search_level: 'global'
)
get :show, params: { scope: 'issues', search: 'hello world' }
end
end
context 'when something goes wrong before a search is done' do
it 'does not increment the error rate' do
expect(Gitlab::Metrics::GlobalSearchSlis).not_to receive(:record_error_rate)
get :show, params: { scope: 'issues' } # no search query
end
end
end
end
describe 'GET #count', :aggregate_failures do

View File

@@ -0,0 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
factory :sync_event, class: 'Namespaces::SyncEvent' do
association :namespace, factory: :namespace, strategy: :build
end
end

View File

@@ -49,6 +49,31 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
end
before do
stub_feature_flags(graphql_keyset_pagination_without_next_page_query: false)
end
it 'invokes an extra query for the next page check' do
arguments[:first] = 1
subject.nodes
count = ActiveRecord::QueryRecorder.new { subject.has_next_page }.count
expect(count).to eq(1)
end
context 'when the relation is loaded' do
it 'invokes no extra query' do
allow(subject).to receive(:sliced_nodes).and_return(Project.all.to_a)
arguments[:first] = 1
subject.nodes
count = ActiveRecord::QueryRecorder.new { subject.has_next_page }.count
expect(count).to eq(0)
end
end
describe "with generic keyset order support" do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
@@ -412,4 +437,382 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
end
# duplicated tests, remove with the removal of the graphql_keyset_pagination_without_next_page_query FF
context 'when the graphql_keyset_pagination_without_next_page_query is on' do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
before do
stub_feature_flags(graphql_keyset_pagination_without_next_page_query: true)
end
it 'does not invoke an extra query for the next page check' do
arguments[:first] = 1
subject.nodes
count = ActiveRecord::QueryRecorder.new { subject.has_next_page }.count
expect(count).to eq(0)
end
it_behaves_like 'a connection with collection methods'
it_behaves_like 'a redactable connection' do
let_it_be(:projects) { create_list(:project, 2) }
let(:unwanted) { projects.second }
end
describe '#cursor_for' do
let(:project) { create(:project) }
let(:cursor) { connection.cursor_for(project) }
it 'returns an encoded ID' do
expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
end
context 'when an order is specified' do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
it 'returns the encoded value of the order' do
expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
end
end
context 'when multiple orders are specified' do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
it 'returns the encoded value of the order' do
expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
end
end
describe '#sliced_nodes' do
let(:projects) { create_list(:project, 4) }
context 'when before is passed' do
let(:arguments) { { before: encoded_cursor(projects[1]) } }
it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
end
end
context 'when after is passed' do
let(:arguments) { { after: encoded_cursor(projects[1]) } }
it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
context 'when the sort order is descending' do
let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
it 'returns the correct nodes' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
end
end
context 'when both before and after are passed' do
let(:arguments) do
{
after: encoded_cursor(projects[1]),
before: encoded_cursor(projects[3])
}
end
it 'returns the expected set' do
expect(subject.sliced_nodes).to contain_exactly(projects[2])
end
end
shared_examples 'nodes are in ascending order' do
context 'when no cursor is passed' do
let(:arguments) { {} }
it 'returns projects in ascending order' do
expect(subject.sliced_nodes).to eq(ascending_nodes)
end
end
context 'when before cursor value is not NULL' do
let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
end
end
context 'when after cursor value is not NULL' do
let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
end
end
context 'when before and after cursor' do
let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
end
end
end
shared_examples 'nodes are in descending order' do
context 'when no cursor is passed' do
let(:arguments) { {} }
it 'only returns projects in descending order' do
expect(subject.sliced_nodes).to eq(descending_nodes)
end
end
context 'when before cursor value is not NULL' do
let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
end
end
context 'when after cursor value is not NULL' do
let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
end
end
context 'when before and after cursor' do
let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
end
end
end
context 'when multiple orders with nil values are defined' do
let_it_be(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
let_it_be(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
let_it_be(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
let_it_be(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
let_it_be(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
context 'when ascending' do
let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo, column_order_id]) }
let_it_be(:nodes) { Project.order(order) }
let_it_be(:ascending_nodes) { [project5, project1, project3, project2, project4] }
it_behaves_like 'nodes are in ascending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
end
end
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq([project4])
end
end
end
context 'when descending' do
let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo_desc, column_order_id]) }
let_it_be(:nodes) { Project.order(order) }
let_it_be(:descending_nodes) { [project3, project1, project5, project2, project4] }
it_behaves_like 'nodes are in descending order'
context 'when before cursor value is NULL' do
let(:arguments) { { before: encoded_cursor(project4) } }
it 'returns all projects before the cursor' do
expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
end
end
context 'when after cursor value is NULL' do
let(:arguments) { { after: encoded_cursor(project2) } }
it 'returns all projects after the cursor' do
expect(subject.sliced_nodes).to eq([project4])
end
end
end
end
context 'when ordering by similarity' do
let_it_be(:project1) { create(:project, name: 'test') }
let_it_be(:project2) { create(:project, name: 'testing') }
let_it_be(:project3) { create(:project, name: 'tests') }
let_it_be(:project4) { create(:project, name: 'testing stuff') }
let_it_be(:project5) { create(:project, name: 'test') }
let_it_be(:nodes) do
# Note: sorted_by_similarity_desc scope internally supports the generic keyset order.
Project.sorted_by_similarity_desc('test', include_in_select: true)
end
let_it_be(:descending_nodes) { nodes.to_a }
it_behaves_like 'nodes are in descending order'
end
context 'when an invalid cursor is provided' do
let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
it 'raises an error' do
expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
end
describe '#nodes' do
let_it_be(:all_nodes) { create_list(:project, 5) }
let(:paged_nodes) { subject.nodes }
it_behaves_like 'connection with paged nodes' do
let(:paged_nodes_size) { 3 }
end
context 'when both are passed' do
let(:arguments) { { first: 2, last: 2 } }
it 'raises an error' do
expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
end
context 'when primary key is not in original order' do
let(:nodes) { Project.order(last_repository_check_at: :desc) }
it 'is added to end' do
sliced = subject.sliced_nodes
order_sql = sliced.order_values.last.to_sql
expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
end
end
context 'when there is no primary key' do
before do
stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
NoPrimaryKey.class_eval do
self.table_name = 'no_primary_key'
self.primary_key = nil
end
end
let(:nodes) { NoPrimaryKey.all }
it 'raises an error' do
expect(NoPrimaryKey.primary_key).to be_nil
expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
end
end
end
describe '#has_previous_page and #has_next_page' do
# using a list of 5 items with a max_page of 3
let_it_be(:project_list) { create_list(:project, 5) }
let_it_be(:nodes) { Project.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
context 'when default query' do
let(:arguments) { {} }
it 'has no previous, but a next' do
expect(subject.has_previous_page).to be_falsey
expect(subject.has_next_page).to be_truthy
end
end
context 'when before is first item' do
let(:arguments) { { before: encoded_cursor(project_list.first) } }
it 'has no previous, but a next' do
expect(subject.has_previous_page).to be_falsey
expect(subject.has_next_page).to be_truthy
end
end
describe 'using `before`' do
context 'when before is the last item' do
let(:arguments) { { before: encoded_cursor(project_list.last) } }
it 'has no previous, but a next' do
expect(subject.has_previous_page).to be_falsey
expect(subject.has_next_page).to be_truthy
end
end
context 'when before and last specified' do
let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
it 'has a previous and a next' do
expect(subject.has_previous_page).to be_truthy
expect(subject.has_next_page).to be_truthy
end
end
context 'when before and last does request all remaining nodes' do
let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
it 'has a previous and a next' do
expect(subject.has_previous_page).to be_falsey
expect(subject.has_next_page).to be_truthy
expect(subject.nodes).to eq [project_list[0]]
end
end
end
describe 'using `after`' do
context 'when after is the first item' do
let(:arguments) { { after: encoded_cursor(project_list.first) } }
it 'has a previous, and a next' do
expect(subject.has_previous_page).to be_truthy
expect(subject.has_next_page).to be_truthy
end
end
context 'when after and first specified' do
let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
it 'has a previous and a next' do
expect(subject.has_previous_page).to be_truthy
expect(subject.has_next_page).to be_truthy
end
end
context 'when before and last does request all remaining nodes' do
let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
it 'has a previous but no next' do
expect(subject.has_previous_page).to be_truthy
expect(subject.has_next_page).to be_falsey
end
end
end
end
end
end

View File

@@ -5,15 +5,19 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
using RSpec::Parameterized::TableSyntax
let(:apdex_feature_flag_enabled) { true }
let(:error_rate_feature_flag_enabled) { true }
before do
stub_feature_flags(global_search_custom_slis: feature_flag_enabled)
stub_feature_flags(global_search_custom_slis: apdex_feature_flag_enabled)
stub_feature_flags(global_search_error_rate_sli: error_rate_feature_flag_enabled)
end
describe '#initialize_slis!' do
context 'when global_search_custom_slis feature flag is enabled' do
let(:feature_flag_enabled) { true }
let(:apdex_feature_flag_enabled) { true }
it 'initializes Apdex SLI for global_search' do
it 'initializes Apdex SLIs for global_search' do
expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(
:global_search,
a_kind_of(Array)
@@ -23,20 +27,43 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
end
end
context 'when global_search_custom_slis feature flag is disabled' do
let(:feature_flag_enabled) { false }
context 'when global_search_error_rate_sli feature flag is enabled' do
let(:error_rate_feature_flag_enabled) { true }
it 'does not initialize the Apdex SLI for global_search' do
it 'initializes ErrorRate SLIs for global_search' do
expect(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli).with(
:global_search,
a_kind_of(Array)
)
described_class.initialize_slis!
end
end
context 'when global_search_custom_slis feature flag is disabled' do
let(:apdex_feature_flag_enabled) { false }
it 'does not initialize the Apdex SLIs for global_search' do
expect(Gitlab::Metrics::Sli::Apdex).not_to receive(:initialize_sli)
described_class.initialize_slis!
end
end
context 'when global_search_error_rate_sli feature flag is disabled' do
let(:error_rate_feature_flag_enabled) { false }
it 'does not initialize the ErrorRate SLIs for global_search' do
expect(Gitlab::Metrics::Sli::ErrorRate).not_to receive(:initialize_sli)
described_class.initialize_slis!
end
end
end
describe '#record_apdex' do
context 'when global_search_custom_slis feature flag is enabled' do
let(:feature_flag_enabled) { true }
let(:apdex_feature_flag_enabled) { true }
where(:search_type, :code_search, :duration_target) do
'basic' | false | 7.031
@@ -97,7 +124,7 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
end
context 'when global_search_custom_slis feature flag is disabled' do
let(:feature_flag_enabled) { false }
let(:apdex_feature_flag_enabled) { false }
it 'does not call increment on the apdex SLI' do
expect(Gitlab::Metrics::Sli::Apdex[:global_search]).not_to receive(:increment)
@@ -111,4 +138,36 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
end
end
end
describe '#record_error_rate' do
context 'when global_search_error_rate_sli feature flag is enabled' do
let(:error_rate_feature_flag_enabled) { true }
it 'calls increment on the error rate SLI' do
expect(Gitlab::Metrics::Sli::ErrorRate[:global_search]).to receive(:increment)
described_class.record_error_rate(
error: true,
search_type: 'basic',
search_level: 'global',
search_scope: 'issues'
)
end
end
context 'when global_search_error_rate_sli feature flag is disabled' do
let(:error_rate_feature_flag_enabled) { false }
it 'does not call increment on the error rate SLI' do
expect(Gitlab::Metrics::Sli::ErrorRate[:global_search]).not_to receive(:increment)
described_class.record_error_rate(
error: true,
search_type: 'basic',
search_level: 'global',
search_scope: 'issues'
)
end
end
end
end

View File

@@ -363,6 +363,32 @@ RSpec.describe API::Search do
get api(endpoint, user), params: { scope: 'issues', search: 'john doe' }
end
it 'increments the custom search sli error rate with error false if no error occurred' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: false,
search_scope: 'issues',
search_type: 'basic',
search_level: 'global'
)
get api(endpoint, user), params: { scope: 'issues', search: 'john doe' }
end
it 'increments the custom search sli error rate with error true if an error occurred' do
allow_next_instance_of(SearchService) do |service|
allow(service).to receive(:search_results).and_raise(ActiveRecord::QueryCanceled)
end
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: true,
search_scope: 'issues',
search_type: 'basic',
search_level: 'global'
)
get api(endpoint, user), params: { scope: 'issues', search: 'john doe' }
end
it 'sets global search information for logging' do
expect(Gitlab::Instrumentation::GlobalSearchApi).to receive(:set_information).with(
type: 'basic',
@@ -630,7 +656,7 @@ RSpec.describe API::Search do
context 'when requesting basic search' do
it 'passes the parameter to search service' do
expect(SearchService).to receive(:new).with(user, hash_including(basic_search: 'true'))
expect(SearchService).to receive(:new).with(user, hash_including(basic_search: 'true')).twice
get api(endpoint, user), params: { scope: 'issues', search: 'awesome', basic_search: 'true' }
end

View File

@@ -11,6 +11,30 @@ RSpec.describe Namespaces::ProcessSyncEventsWorker do
include_examples 'an idempotent worker'
describe 'deduplication' do
before do
stub_const("Ci::ProcessSyncEventsService::BATCH_SIZE", 2)
end
it 'has the `until_executed` deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
end
it 'has an option to reschedule once if deduplicated' do
expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once })
end
it 'expects the job to enqueue itself again if there were more items to be processed', :sidekiq_inline do
Namespaces::SyncEvent.delete_all # delete the sync_events that have been created by triggers of previous groups
create_list(:sync_event, 3, namespace_id: group1.id)
# It's called more than twice, because the job deduplication and rescheduling calls the perform_async again
expect(described_class).to receive(:perform_async).at_least(:twice).and_call_original
expect do
described_class.perform_async
end.to change(Namespaces::SyncEvent, :count).from(3).to(0)
end
end
describe '#perform' do
subject(:perform) { worker.perform }

View File

@@ -10,6 +10,14 @@ RSpec.describe Projects::ProcessSyncEventsWorker do
include_examples 'an idempotent worker'
it 'has the `until_executed` deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
end
it 'has an option to reschedule once if deduplicated' do
expect(described_class.get_deduplication_options).to include({ if_deduplicated: :reschedule_once })
end
describe '#perform' do
subject(:perform) { worker.perform }