Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-11-24 18:14:31 +00:00
parent b8d516a687
commit 844eb8879a
77 changed files with 1178 additions and 513 deletions

View file

@ -151,16 +151,16 @@
parallel: 3
.rspec-unit-parallel:
parallel: 20
parallel: 22
.rspec-ee-unit-parallel:
parallel: 12
parallel: 14
.rspec-ee-unit-geo-parallel:
parallel: 2
.rspec-integration-parallel:
parallel: 8
parallel: 10
.rspec-ee-integration-parallel:
parallel: 4

View file

@ -56,9 +56,7 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
```ruby
# frozen_string_literal: true
class CreateCoolWidgetRegistry < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
class CreateCoolWidgetRegistry < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
@ -80,8 +78,8 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
t.boolean :missing_on_primary, default: false, null: false
t.binary :verification_checksum
t.binary :verification_checksum_mismatched
t.string :verification_failure, limit: 255 # rubocop:disable Migration/PreventStrings see https://gitlab.com/gitlab-org/gitlab/-/issues/323806
t.string :last_sync_failure, limit: 255 # rubocop:disable Migration/PreventStrings see https://gitlab.com/gitlab-org/gitlab/-/issues/323806
t.text :verification_failure, limit: 255
t.text :last_sync_failure, limit: 255
t.index :cool_widget_id, name: :index_cool_widget_registry_on_cool_widget_id, unique: true
t.index :retry_at
@ -126,9 +124,7 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
```ruby
# frozen_string_literal: true
class CreateCoolWidgetStates < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
class CreateCoolWidgetStates < Gitlab::Database::Migration[1.0]
VERIFICATION_STATE_INDEX_NAME = "index_cool_widget_states_on_verification_state"
PENDING_VERIFICATION_INDEX_NAME = "index_cool_widget_states_pending_verification"
FAILED_VERIFICATION_INDEX_NAME = "index_cool_widget_states_failed_verification"
@ -137,23 +133,21 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
disable_ddl_transaction!
def up
unless table_exists?(:cool_widget_states)
with_lock_retries do
create_table :cool_widget_states, id: false do |t|
t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.column :verification_started_at, :datetime_with_timezone
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.integer :verification_retry_count, limit: 2
t.binary :verification_checksum, using: 'verification_checksum::bytea'
t.text :verification_failure
with_lock_retries do
create_table :cool_widget_states, id: false do |t|
t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.column :verification_started_at, :datetime_with_timezone
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.integer :verification_retry_count, limit: 2
t.binary :verification_checksum, using: 'verification_checksum::bytea'
t.text :verification_failure
t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
end
t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
end
end
@ -209,6 +203,8 @@ That's all of the required database changes.
has_one :cool_widget_state, autosave: false, inverse_of: :cool_widget, class_name: 'Geo::CoolWidgetState'
after_save :save_verification_details
delegate :verification_retry_at, :verification_retry_at=,
:verified_at, :verified_at=,
:verification_checksum, :verification_checksum=,
@ -223,6 +219,8 @@ That's all of the required database changes.
scope :checksummed, -> { joins(:cool_widget_state).where.not(cool_widget_states: { verification_checksum: nil } ) }
scope :not_checksummed, -> { joins(:cool_widget_state).where(cool_widget_states: { verification_checksum: nil } ) }
scope :available_verifiables, -> { joins(:cool_widget_state) }
# Override the `all` default if not all records can be replicated. For an
# example of an existing Model that needs to do this, see
# `EE::MergeRequestDiff`.

View file

@ -58,42 +58,38 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
```ruby
# frozen_string_literal: true
class CreateCoolWidgetRegistry < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
class CreateCoolWidgetRegistry < Gitlab::Database::Migration[1.0]
disable_ddl_transaction!
def up
unless table_exists?(:cool_widget_registry)
ActiveRecord::Base.transaction do
create_table :cool_widget_registry, id: :bigserial, force: :cascade do |t|
t.bigint :cool_widget_id, null: false
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :last_synced_at
t.datetime_with_timezone :retry_at
t.datetime_with_timezone :verified_at
t.datetime_with_timezone :verification_started_at
t.datetime_with_timezone :verification_retry_at
t.integer :state, default: 0, null: false, limit: 2
t.integer :verification_state, default: 0, null: false, limit: 2
t.integer :retry_count, default: 0, limit: 2, null: false
t.integer :verification_retry_count, default: 0, limit: 2, null: false
t.boolean :checksum_mismatch, default: false, null: false
t.binary :verification_checksum
t.binary :verification_checksum_mismatched
t.string :verification_failure, limit: 255 # rubocop:disable Migration/PreventStrings see https://gitlab.com/gitlab-org/gitlab/-/issues/323806
t.string :last_sync_failure, limit: 255 # rubocop:disable Migration/PreventStrings see https://gitlab.com/gitlab-org/gitlab/-/issues/323806
ActiveRecord::Base.transaction do
create_table :cool_widget_registry, id: :bigserial, force: :cascade do |t|
t.bigint :cool_widget_id, null: false
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :last_synced_at
t.datetime_with_timezone :retry_at
t.datetime_with_timezone :verified_at
t.datetime_with_timezone :verification_started_at
t.datetime_with_timezone :verification_retry_at
t.integer :state, default: 0, null: false, limit: 2
t.integer :verification_state, default: 0, null: false, limit: 2
t.integer :retry_count, default: 0, limit: 2, null: false
t.integer :verification_retry_count, default: 0, limit: 2, null: false
t.boolean :checksum_mismatch, default: false, null: false
t.binary :verification_checksum
t.binary :verification_checksum_mismatched
t.text :verification_failure, limit: 255
t.text :last_sync_failure, limit: 255
t.index :cool_widget_id, name: :index_cool_widget_registry_on_cool_widget_id, unique: true
t.index :retry_at
t.index :state
# To optimize performance of CoolWidgetRegistry.verification_failed_batch
t.index :verification_retry_at, name: :cool_widget_registry_failed_verification, order: "NULLS FIRST", where: "((state = 2) AND (verification_state = 3))"
# To optimize performance of CoolWidgetRegistry.needs_verification_count
t.index :verification_state, name: :cool_widget_registry_needs_verification, where: "((state = 2) AND (verification_state = ANY (ARRAY[0, 3])))"
# To optimize performance of CoolWidgetRegistry.verification_pending_batch
t.index :verified_at, name: :cool_widget_registry_pending_verification, order: "NULLS FIRST", where: "((state = 2) AND (verification_state = 0))"
end
t.index :cool_widget_id, name: :index_cool_widget_registry_on_cool_widget_id, unique: true
t.index :retry_at
t.index :state
# To optimize performance of CoolWidgetRegistry.verification_failed_batch
t.index :verification_retry_at, name: :cool_widget_registry_failed_verification, order: "NULLS FIRST", where: "((state = 2) AND (verification_state = 3))"
# To optimize performance of CoolWidgetRegistry.needs_verification_count
t.index :verification_state, name: :cool_widget_registry_needs_verification, where: "((state = 2) AND (verification_state = ANY (ARRAY[0, 3])))"
# To optimize performance of CoolWidgetRegistry.verification_pending_batch
t.index :verified_at, name: :cool_widget_registry_pending_verification, order: "NULLS FIRST", where: "((state = 2) AND (verification_state = 0))"
end
end
end
@ -130,9 +126,7 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
```ruby
# frozen_string_literal: true
class CreateCoolWidgetStates < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
class CreateCoolWidgetStates < Gitlab::Database::Migration[1.0]
VERIFICATION_STATE_INDEX_NAME = "index_cool_widget_states_on_verification_state"
PENDING_VERIFICATION_INDEX_NAME = "index_cool_widget_states_pending_verification"
FAILED_VERIFICATION_INDEX_NAME = "index_cool_widget_states_failed_verification"
@ -141,23 +135,21 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
disable_ddl_transaction!
def up
unless table_exists?(:cool_widget_states)
with_lock_retries do
create_table :cool_widget_states, id: false do |t|
t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.column :verification_started_at, :datetime_with_timezone
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.integer :verification_retry_count, limit: 2
t.binary :verification_checksum, using: 'verification_checksum::bytea'
t.text :verification_failure
with_lock_retries do
create_table :cool_widget_states, id: false do |t|
t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
t.integer :verification_state, default: 0, limit: 2, null: false
t.column :verification_started_at, :datetime_with_timezone
t.datetime_with_timezone :verification_retry_at
t.datetime_with_timezone :verified_at
t.integer :verification_retry_count, limit: 2
t.binary :verification_checksum, using: 'verification_checksum::bytea'
t.text :verification_failure
t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
end
t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
end
end
@ -213,6 +205,8 @@ That's all of the required database changes.
has_one :cool_widget_state, autosave: false, inverse_of: :cool_widget, class_name: 'Geo::CoolWidgetState'
after_save :save_verification_details
delegate :verification_retry_at, :verification_retry_at=,
:verified_at, :verified_at=,
:verification_checksum, :verification_checksum=,
@ -227,6 +221,8 @@ That's all of the required database changes.
scope :checksummed, -> { joins(:cool_widget_state).where.not(cool_widget_states: { verification_checksum: nil } ) }
scope :not_checksummed, -> { joins(:cool_widget_state).where(cool_widget_states: { verification_checksum: nil } ) }
scope :available_verifiables, -> { joins(:cool_widget_state) }
# Override the `all` default if not all records can be replicated. For an
# example of an existing Model that needs to do this, see
# `EE::MergeRequestDiff`.

View file

@ -1,6 +1,6 @@
<script>
import { GlButton, GlLink, GlSprintf, GlTooltipDirective, GlTruncate } from '@gitlab/ui';
import { s__ } from '~/locale';
import { s__, __ } from '~/locale';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
import {
PACKAGE_ERROR_STATUS,
@ -64,6 +64,7 @@ export default {
},
i18n: {
erroredPackageText: s__('PackageRegistry|Invalid Package: failed metadata extraction'),
createdAt: __('Created %{timestamp}'),
},
};
</script>
@ -127,8 +128,8 @@ export default {
</template>
<template #right-secondary>
<span>
<gl-sprintf :message="__('Created %{timestamp}')">
<span data-testid="created-date">
<gl-sprintf :message="$options.i18n.createdAt">
<template #timestamp>
<timeago-tooltip :time="packageEntity.createdAt" />
</template>

View file

@ -10,7 +10,7 @@ fragment PackageData on Package {
name
}
}
pipelines {
pipelines(last: 1) {
nodes {
sha
ref

View file

@ -1,4 +0,0 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
const integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
integrationSettingsForm.init();

View file

@ -1,4 +0,0 @@
import PersistentUserCallout from '~/persistent_user_callout';
const callout = document.querySelector('.js-service-templates-deprecated');
PersistentUserCallout.factory(callout);

View file

@ -98,6 +98,7 @@ export default {
class="gl-w-full gl-h-9 gl-rounded-0! gl-border-none! gl-border-b-solid! gl-border-1! gl-border-gray-100 gl-text-decoration-none! gl-outline-0! gl-display-flex"
:class="buttonClass"
:title="__('Toggle sidebar')"
data-qa-selector="toggle_sidebar_collapse_button"
@click="toggleDrawer"
>
<span v-if="isExpanded" class="gl-text-gray-500 gl-mr-3" data-testid="collapse-text">
@ -105,7 +106,12 @@ export default {
</span>
<gl-icon data-testid="toggle-icon" :name="buttonIconName" />
</gl-button>
<div v-if="isExpanded" class="gl-h-full gl-p-5" data-testid="drawer-content">
<div
v-if="isExpanded"
class="gl-h-full gl-p-5"
data-testid="drawer-content"
data-qa-selector="drawer_content"
>
<getting-started-card class="gl-mb-4" />
<first-pipeline-card class="gl-mb-4" />
<visualize-and-lint-card class="gl-mb-4" />

View file

@ -247,6 +247,7 @@ export default {
<gl-infinite-scroll
:fetched-items="branches.length"
:max-list-height="250"
data-qa-selector="branch_menu_container"
@bottomReached="fetchNextBranches"
>
<template #items>
@ -255,7 +256,7 @@ export default {
:key="branch"
:is-checked="currentBranch === branch"
:is-check-item="true"
data-qa-selector="menu_branch_button"
data-qa-selector="branch_menu_item_button"
@click="selectBranch(branch)"
>
{{ branch }}

View file

@ -178,9 +178,11 @@ export default {
:widget="$options.label || $options.name"
:tertiary-buttons="tertiaryActionsButtons"
/>
<div class="gl-border-l-1 gl-border-l-solid gl-border-gray-100 gl-ml-3 gl-pl-3 gl-h-6">
<div
v-if="isCollapsible"
class="gl-border-l-1 gl-border-l-solid gl-border-gray-100 gl-ml-3 gl-pl-3 gl-h-6"
>
<gl-button
v-if="isCollapsible"
v-gl-tooltip
:title="collapseButtonLabel"
:aria-expanded="`${!isCollapsed}`"

View file

@ -117,11 +117,12 @@ export default {
</span>
<template v-else>
<span class="bold">
{{ s__('mrWidget|There are merge conflicts') }}<span v-if="!canMerge">.</span>
{{ s__('mrWidget|Merge blocked: merge conflicts must be resolved.') }}
<span v-if="!canMerge">
{{
s__(`mrWidget|Resolve these conflicts or ask someone
with write access to this repository to merge it locally`)
s__(
`mrWidget|Users who can write to the source or target branches can resolve the conflicts.`,
)
}}
</span>
</span>

View file

@ -96,6 +96,7 @@ export default {
:id="`source-editor-${fileGlobalId}`"
ref="editor"
data-editor-loading
data-qa-selector="source_editor_container"
@[$options.readyEvent]="$emit($options.readyEvent)"
>
<pre class="editor-loading-content">{{ value }}</pre>

View file

@ -21,6 +21,7 @@
#
class ActiveSession
include ActiveModel::Model
include ::Gitlab::Redis::SessionsStoreHelper
SESSION_BATCH_SIZE = 200
ALLOWED_NUMBER_OF_ACTIVE_SESSIONS = 100
@ -43,7 +44,7 @@ class ActiveSession
end
def self.set(user, request)
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
session_private_id = request.session.id.private_id
client = DeviceDetector.new(request.user_agent)
timestamp = Time.current
@ -76,7 +77,7 @@ class ActiveSession
end
def self.list(user)
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
cleaned_up_lookup_entries(redis, user).map do |raw_session|
load_raw_session(raw_session)
end
@ -84,7 +85,7 @@ class ActiveSession
end
def self.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
clean_up_old_sessions(redis, user)
cleaned_up_lookup_entries(redis, user)
end
@ -104,7 +105,7 @@ class ActiveSession
def self.destroy_session(user, session_id)
return unless session_id
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
destroy_sessions(redis, user, [session_id].compact)
end
end
@ -113,7 +114,7 @@ class ActiveSession
sessions = not_impersonated(user)
sessions.reject! { |session| session.current?(current_rack_session) } if current_rack_session
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
session_ids = (sessions.map(&:session_id) | sessions.map(&:session_private_id)).compact
destroy_sessions(redis, user, session_ids) if session_ids.any?
end
@ -124,15 +125,15 @@ class ActiveSession
end
def self.rack_key_name(session_id)
"#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}"
"#{Gitlab::Redis::Sessions::SESSION_NAMESPACE}:#{session_id}"
end
def self.key_name(user_id, session_id = '*')
"#{Gitlab::Redis::SharedState::USER_SESSIONS_NAMESPACE}:#{user_id}:#{session_id}"
"#{Gitlab::Redis::Sessions::USER_SESSIONS_NAMESPACE}:#{user_id}:#{session_id}"
end
def self.lookup_key_name(user_id)
"#{Gitlab::Redis::SharedState::USER_SESSIONS_LOOKUP_NAMESPACE}:#{user_id}"
"#{Gitlab::Redis::Sessions::USER_SESSIONS_LOOKUP_NAMESPACE}:#{user_id}"
end
def self.list_sessions(user)
@ -143,7 +144,7 @@ class ActiveSession
#
# Returns an array of strings
def self.session_ids_for_user(user_id)
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
redis.smembers(lookup_key_name(user_id))
end
end
@ -156,7 +157,7 @@ class ActiveSession
def self.sessions_from_ids(session_ids)
return [] if session_ids.empty?
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
session_keys = rack_session_keys(session_ids)
session_keys.each_slice(SESSION_BATCH_SIZE).flat_map do |session_keys_batch|
@ -228,9 +229,9 @@ class ActiveSession
# only the single key entries are automatically expired by redis, the
# lookup entries in the set need to be removed manually.
session_ids_and_entries = session_ids.zip(entries)
redis.pipelined do
redis.pipelined do |pipeline|
session_ids_and_entries.reject { |_session_id, entry| entry }.each do |session_id, _entry|
redis.srem(lookup_key_name(user.id), session_id)
pipeline.srem(lookup_key_name(user.id), session_id)
end
end

View file

@ -1075,6 +1075,16 @@ module Ci
runner&.instance_type?
end
def job_variables_attributes
strong_memoize(:job_variables_attributes) do
job_variables.map do |variable|
variable.attributes.except('id', 'job_id', 'encrypted_value', 'encrypted_value_iv').tap do |attrs|
attrs[:value] = variable.value
end
end
end
end
protected
def run_status_commit_hooks!

View file

@ -7,7 +7,8 @@ module PartitionedTable
attr_reader :partitioning_strategy
PARTITIONING_STRATEGIES = {
monthly: Gitlab::Database::Partitioning::MonthlyStrategy
monthly: Gitlab::Database::Partitioning::MonthlyStrategy,
sliding_list: Gitlab::Database::Partitioning::SlidingListStrategy
}.freeze
def partitioned_by(partitioning_key, strategy:, **kwargs)

View file

@ -7,7 +7,7 @@ module Ci
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
description tag_list protected needs_attributes
resource_group scheduling_type].freeze
resource_group scheduling_type job_variables_attributes].freeze
end
def self.extra_accessors

View file

@ -1,7 +1,7 @@
---
name: use_multi_store
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73660
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1429
milestone: '14.5'
type: development
group: group::memory

View file

@ -19,31 +19,22 @@ cookie_key = if Rails.env.development?
"_gitlab_session"
end
if Gitlab::Utils.to_boolean(ENV['GITLAB_REDIS_STORE_WITH_SESSION_STORE'], default: true)
store = Gitlab::Redis::SharedState.store(
namespace: Gitlab::Redis::SharedState::SESSION_NAMESPACE
)
store = if Gitlab::Utils.to_boolean(ENV['GITLAB_USE_REDIS_SESSIONS_STORE'], default: true)
Gitlab::Redis::Sessions.store(
namespace: Gitlab::Redis::Sessions::SESSION_NAMESPACE
)
else
Gitlab::Redis::SharedState.store(
namespace: Gitlab::Redis::Sessions::SESSION_NAMESPACE
)
end
Gitlab::Application.config.session_store(
:redis_store, # Using the cookie_store would enable session replay attacks.
redis_store: store,
key: cookie_key,
secure: Gitlab.config.gitlab.https,
httponly: true,
expires_in: Settings.gitlab['session_expire_delay'] * 60,
path: Rails.application.config.relative_url_root.presence || '/'
)
else
sessions_config = Gitlab::Redis::SharedState.params
sessions_config[:namespace] = Gitlab::Redis::SharedState::SESSION_NAMESPACE
Gitlab::Application.config.session_store(
:redis_store, # Using the cookie_store would enable session replay attacks.
servers: sessions_config,
key: cookie_key,
secure: Gitlab.config.gitlab.https,
httponly: true,
expires_in: Settings.gitlab['session_expire_delay'] * 60,
path: Rails.application.config.relative_url_root.presence || '/'
)
end
Gitlab::Application.config.session_store(
:redis_store, # Using the cookie_store would enable session replay attacks.
redis_store: store,
key: cookie_key,
secure: Gitlab.config.gitlab.https,
httponly: true,
expires_in: Settings.gitlab['session_expire_delay'] * 60,
path: Rails.application.config.relative_url_root.presence || '/'
)

View file

@ -163,6 +163,9 @@ const alias = {
// the following resolves files which are different between CE and JH
jh_else_ce: path.join(ROOT_PATH, 'app/assets/javascripts'),
// the following resolves files which are different between CE/EE/JH
any_else_ce: path.join(ROOT_PATH, 'app/assets/javascripts'),
// override loader path for icons.svg so we do not duplicate this asset
'@gitlab/svgs/dist/icons.svg': path.join(
ROOT_PATH,
@ -179,6 +182,8 @@ if (IS_EE) {
ee_images: path.join(ROOT_PATH, 'ee/app/assets/images'),
ee_jest: path.join(ROOT_PATH, 'ee/spec/frontend'),
ee_else_ce: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
jh_else_ee: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
any_else_ce: path.join(ROOT_PATH, 'ee/app/assets/javascripts'),
});
}
@ -190,7 +195,10 @@ if (IS_JH) {
jh_icons: path.join(ROOT_PATH, 'jh/app/views/shared/icons'),
jh_images: path.join(ROOT_PATH, 'jh/app/assets/images'),
jh_jest: path.join(ROOT_PATH, 'jh/spec/frontend'),
// jh path alias https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74305#note_732793956
jh_else_ce: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
jh_else_ee: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
any_else_ce: path.join(ROOT_PATH, 'jh/app/assets/javascripts'),
});
}

View file

@ -222,6 +222,51 @@ These LDAP sync configuration settings are available:
| `external_groups` | An array of CNs of groups containing users that should be considered external. Not `cn=interns` or the full DN. | **{dotted-circle}** No | `['interns', 'contractors']` |
| `sync_ssh_keys` | The LDAP attribute containing a user's public SSH key. | **{dotted-circle}** No | `'sshPublicKey'` or false if not set |
### Use multiple LDAP servers **(PREMIUM SELF)**
If you have users on multiple LDAP servers, you can configure GitLab to use them. To add additional LDAP servers:
1. Duplicate the [`main` LDAP configuration](#configure-ldap).
1. Edit each duplicate configuration with the details of the additional servers.
- For each additional server, choose a different provider ID, like `main`, `secondary`, or `tertiary`. Use lowercase
alphanumeric characters. GitLab uses the provider ID to associate each user with a specific LDAP server.
- For each entry, use a unique `label` value. These values are used for the tab names on the sign-in page.
#### Example of multiple LDAP servers
The following example shows how to configure three LDAP servers in `gitlab.rb`:
```ruby
gitlab_rails['ldap_enabled'] = true
gitlab_rails['ldap_servers'] = {
'main' => {
'label' => 'GitLab AD',
'host' => 'ad.example.org',
'port' => 636,
...
},
'secondary' => {
'label' => 'GitLab Secondary AD',
'host' => 'ad-secondary.example.net',
'port' => 636,
...
},
'tertiary' => {
'label' => 'GitLab Tertiary AD',
'host' => 'ad-tertiary.example.net',
'port' => 636,
...
}
}
```
This example results in the following sign-in page:
![Multiple LDAP servers sign in](img/multi_login.gif)
### Set up LDAP user filter
To limit all GitLab access to a subset of the LDAP users on your LDAP server, first narrow the
@ -452,56 +497,6 @@ If initially your LDAP configuration looked like:
1. [Restart GitLab](../../restart_gitlab.md#installations-from-source) for the changes to take effect.
## Multiple LDAP servers **(PREMIUM SELF)**
With GitLab, you can configure multiple LDAP servers that your GitLab instance
connects to.
To add another LDAP server:
1. Duplicate the settings under [the main configuration](#configure-ldap).
1. Edit them to match the additional LDAP server.
Be sure to choose a different provider ID made of letters a-z and numbers 0-9.
This ID is stored in the database so that GitLab can remember which LDAP
server a user belongs to.
![Multiple LDAP Servers Sign in](img/multi_login.gif)
Based on the example illustrated on the image above,
our `gitlab.rb` configuration would look like:
```ruby
gitlab_rails['ldap_enabled'] = true
gitlab_rails['ldap_servers'] = {
'main' => {
'label' => 'GitLab AD',
'host' => 'ad.example.org',
'port' => 636,
...
},
'secondary' => {
'label' => 'GitLab Secondary AD',
'host' => 'ad-secondary.example.net',
'port' => 636,
...
},
'tertiary' => {
'label' => 'GitLab Tertiary AD',
'host' => 'ad-tertiary.example.net',
'port' => 636,
...
}
}
```
If you configure multiple LDAP servers, use a unique naming convention for the
`label` section of each entry. That label is used as the display name of the tab
shown on the sign-in page.
## Disable anonymous LDAP authentication
GitLab doesn't support TLS client authentication. Complete these steps on your LDAP server.

View file

@ -582,8 +582,8 @@ for each of these users.
## Expired license causes errors with multiple LDAP servers
Using [multiple LDAP servers](index.md#multiple-ldap-servers) requires a valid license. An expired
license can cause:
Using [multiple LDAP servers](index.md#use-multiple-ldap-servers) requires a valid license. An expired license can
cause:
- `502` errors in the web interface.
- The following error in logs (the actual strategy name depends on the name configured in `/etc/gitlab/gitlab.rb`):

Binary file not shown (image diff; size before: 18 KiB)

View file

@ -17,7 +17,7 @@ through `/etc/gitlab/gitlab.rb`.
## PgBouncer as part of a fault-tolerant GitLab installation
This content has been moved to a [new location](replication_and_failover.md#configuring-the-pgbouncer-node).
This content has been moved to a [new location](replication_and_failover.md#configure-pgbouncer-nodes).
## PgBouncer as part of a non-fault-tolerant GitLab installation

View file

@ -19,13 +19,54 @@ replication and failover for GitLab.
## Architecture
The Omnibus GitLab recommended configuration for a PostgreSQL cluster with
replication and failover requires:
replication failover requires:
- A minimum of three database nodes.
- A minimum of three `Consul` server nodes.
- A minimum of one `pgbouncer` service node, but it's recommended to have one per database node. An internal load balancer (TCP) is required when there is more than one `pgbouncer` service node.
- A minimum of three PostgreSQL nodes.
- A minimum of three Consul server nodes.
- A minimum of three PgBouncer nodes that track and handle primary database reads and writes.
- An internal load balancer (TCP) to balance requests between the PgBouncer nodes.
- [Database Load Balancing](database_load_balancing.md) enabled.
- A local PgBouncer service configured on each PostgreSQL node. Note that this is separate from the main PgBouncer cluster that tracks the primary.
![PostgreSQL HA Architecture](img/pg_ha_architecture.png)
```plantuml
@startuml
card "**Internal Load Balancer**" as ilb #9370DB
skinparam linetype ortho
together {
collections "**GitLab Rails** x3" as gitlab #32CD32
collections "**Sidekiq** x4" as sidekiq #ff8dd1
}
collections "**Consul** x3" as consul #e76a9b
card "Database" as database {
collections "**PGBouncer x3**\n//Consul//" as pgbouncer #4EA7FF
card "**PostgreSQL** //Primary//\n//Patroni//\n//PgBouncer//\n//Consul//" as postgres_primary #4EA7FF
collections "**PostgreSQL** //Secondary// **x2**\n//Patroni//\n//PgBouncer//\n//Consul//" as postgres_secondary #4EA7FF
pgbouncer -[#4EA7FF]-> postgres_primary
postgres_primary .[#4EA7FF]r-> postgres_secondary
}
gitlab -[#32CD32]-> ilb
gitlab -[hidden]-> pgbouncer
gitlab .[#32CD32,norank]-> postgres_primary
gitlab .[#32CD32,norank]-> postgres_secondary
sidekiq -[#ff8dd1]-> ilb
sidekiq -[hidden]-> pgbouncer
sidekiq .[#ff8dd1,norank]-> postgres_primary
sidekiq .[#ff8dd1,norank]-> postgres_secondary
ilb -[#9370DB]-> pgbouncer
consul -[#e76a9b]r-> pgbouncer
consul .[#e76a9b,norank]r-> postgres_primary
consul .[#e76a9b,norank]r-> postgres_secondary
@enduml
```
You also need to take into consideration the underlying network topology, making
sure you have redundant connectivity between all Database and GitLab instances
@ -38,13 +79,14 @@ shipped with Omnibus GitLab, and thus Patroni becomes mandatory for replication
### Database node
Each database node runs three services:
Each database node runs four services:
- `PostgreSQL`: The database itself.
- `Patroni`: Communicates with other Patroni services in the cluster and handles failover when issues with the leader server occur. The failover procedure consists of:
- Selecting a new leader for the cluster.
- Promoting the new node to leader.
- Instructing remaining servers to follow the new leader node.
- `PgBouncer`: A local pooler for the node. Used for _read_ queries as part of [Database Load Balancing](database_load_balancing.md).
- `Consul` agent: To communicate with the Consul cluster, which stores the current Patroni state. The agent monitors the status of each node in the database cluster and tracks its health in a service definition on the Consul cluster.
### Consul server node
@ -62,8 +104,26 @@ Each PgBouncer node runs two services:
Each service in the package comes with a set of [default ports](../package_information/defaults.md#ports). You may need to make specific firewall rules for the connections listed below:
There are several connection flows in this setup:
- [Primary](#primary)
- [Database Load Balancing](#database-load-balancing)
- [Replication](#replication)
#### Primary
- Application servers connect to either PgBouncer directly via its [default port](../package_information/defaults.md) or via a configured Internal Load Balancer (TCP) that serves multiple PgBouncers.
- PgBouncer connects to the primary database servers [PostgreSQL default port](../package_information/defaults.md)
- PgBouncer connects to the primary database server's [PostgreSQL default port](../package_information/defaults.md).
#### Database Load Balancing
For read queries against data that hasn't been recently changed and is up to date on all database nodes:
- Application servers connect to the local PgBouncer service via its [default port](../package_information/defaults.md) on each database node in a round-robin approach.
- Local PgBouncer connects to the local database server's [PostgreSQL default port](../package_information/defaults.md).
#### Replication
- Patroni actively manages the running PostgreSQL processes and configuration.
- PostgreSQL secondaries connect to the primary database server's [PostgreSQL default port](../package_information/defaults.md).
- Consul servers and agents connect to each other's [Consul default ports](../package_information/defaults.md).
@ -203,8 +263,8 @@ repmgr-specific configuration as well. Especially, make sure that you remove `po
Here is an example:
```ruby
# Disable all components except Patroni and Consul
roles(['patroni_role'])
# Disable all components except Patroni, PgBouncer and Consul
roles(['patroni_role', 'pgbouncer_role'])
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
@ -245,6 +305,15 @@ patroni['allowlist'] = %w(XXX.XXX.XXX.XXX/YY 127.0.0.1/32)
# Replace XXX.XXX.XXX.XXX/YY with Network Address
postgresql['trust_auth_cidr_addresses'] = %w(XXX.XXX.XXX.XXX/YY 127.0.0.1/32)
# Local PgBouncer service for Database Load Balancing
pgbouncer['databases'] = {
gitlabhq_production: {
host: "127.0.0.1",
user: "PGBOUNCER_USERNAME",
password: 'PGBOUNCER_PASSWORD_HASH'
}
}
# Replace placeholders:
#
# Y.Y.Y.Y consul1.gitlab.example.com Z.Z.Z.Z
@ -342,7 +411,7 @@ You can use different certificates and keys for both API server and client on di
However, the CA certificate (`patroni['tls_ca_file']`), TLS certificate verification (`patroni['tls_verify']`), and client TLS
authentication mode (`patroni['tls_client_mode']`), must each have the same value on all nodes.
### Configuring the PgBouncer node
### Configure PgBouncer nodes
1. Make sure you collect [`CONSUL_SERVER_NODES`](#consul-information), [`CONSUL_PASSWORD_HASH`](#consul-information), and [`PGBOUNCER_PASSWORD_HASH`](#pgbouncer-information) before executing the next step.
@ -480,6 +549,7 @@ attributes set, but the following need to be set.
gitlab_rails['db_port'] = 6432
gitlab_rails['db_password'] = 'POSTGRESQL_USER_PASSWORD'
gitlab_rails['auto_migrate'] = false
gitlab_rails['db_load_balancing'] = { 'hosts' => ['POSTGRESQL_NODE_1', 'POSTGRESQL_NODE_2', 'POSTGRESQL_NODE_3'] }
```
1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
@ -595,8 +665,8 @@ An internal load balancer (TCP) is then required to be setup to serve each PgBou
On database nodes edit `/etc/gitlab/gitlab.rb`:
```ruby
# Disable all components except Patroni and Consul
roles(['patroni_role'])
# Disable all components except Patroni, PgBouncer and Consul
roles(['patroni_role', 'pgbouncer_role'])
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
@ -616,6 +686,15 @@ patroni['postgresql']['max_wal_senders'] = 7
patroni['allowlist'] = %w(10.6.0.0/16 127.0.0.1/32)
postgresql['trust_auth_cidr_addresses'] = %w(10.6.0.0/16 127.0.0.1/32)
# Local PgBouncer service for Database Load Balancing
pgbouncer['databases'] = {
gitlabhq_production: {
host: "127.0.0.1",
user: "pgbouncer",
password: '771a8625958a529132abe6f1a4acb19c'
}
}
# Configure the Consul agent
consul['services'] = %w(postgresql)
consul['configuration'] = {
@ -650,115 +729,6 @@ After deploying the configuration follow these steps:
gitlab-rake gitlab:db:configure
```
### Example minimal setup
This example uses 3 PostgreSQL servers and 1 application node (with PgBouncer set up alongside).
It differs from the [recommended setup](#example-recommended-setup) by moving the Consul servers onto the same servers we use for PostgreSQL.
The trade-off is a reduced server count against the increased operational complexity of needing to deal with PostgreSQL [failover](#manual-failover-procedure-for-patroni) procedures in addition to [Consul outage recovery](../consul.md#outage-recovery) on the same set of machines.
In this example, we start with all servers on the same 10.6.0.0/16 private network range; they can connect to each other freely on those addresses.
Here is a list and description of each machine and the assigned IP:
- `10.6.0.21`: PostgreSQL 1
- `10.6.0.22`: PostgreSQL 2
- `10.6.0.23`: PostgreSQL 3
- `10.6.0.31`: GitLab application
All passwords are set to `toomanysecrets`. Please do not use this password or derived hashes.
The `external_url` for GitLab is `http://gitlab.example.com`.
After the initial configuration, if a failover occurs, the PostgreSQL leader node changes to one of the available secondaries until it is failed back.
#### Example minimal configuration for database servers
On database nodes edit `/etc/gitlab/gitlab.rb`:
```ruby
# Disable all components except Patroni and Consul
roles(['patroni_role'])
# PostgreSQL configuration
postgresql['listen_address'] = '0.0.0.0'
postgresql['hot_standby'] = 'on'
postgresql['wal_level'] = 'replica'
# Disable automatic database migrations
gitlab_rails['auto_migrate'] = false
# Configure the Consul agent
consul['services'] = %w(postgresql)
postgresql['pgbouncer_user_password'] = '771a8625958a529132abe6f1a4acb19c'
postgresql['sql_user_password'] = '450409b85a0223a214b5fb1484f34d0f'
# Sets `max_replication_slots` to double the number of database nodes.
# Patroni uses one extra slot per node when initiating the replication.
patroni['postgresql']['max_replication_slots'] = 6
patroni['username'] = 'PATRONI_API_USERNAME'
patroni['password'] = 'PATRONI_API_PASSWORD'
# Set `max_wal_senders` to one more than the number of replication slots in the cluster.
# This is used to prevent replication from using up all of the
# available database connections.
patroni['postgresql']['max_wal_senders'] = 7
patroni['allowlist'] = %w(10.6.0.0/16 127.0.0.1/32)
postgresql['trust_auth_cidr_addresses'] = %w(10.6.0.0/16 127.0.0.1/32)
consul['configuration'] = {
server: true,
retry_join: %w(10.6.0.21 10.6.0.22 10.6.0.23)
}
```
[Reconfigure Omnibus GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
#### Example minimal configuration for application server
On the server edit `/etc/gitlab/gitlab.rb`:
```ruby
external_url 'http://gitlab.example.com'
gitlab_rails['db_host'] = '127.0.0.1'
gitlab_rails['db_port'] = 6432
gitlab_rails['db_password'] = 'toomanysecrets'
gitlab_rails['auto_migrate'] = false
postgresql['enable'] = false
pgbouncer['enable'] = true
consul['enable'] = true
# Configure PgBouncer
pgbouncer['admin_users'] = %w(pgbouncer gitlab-consul)
# Configure Consul agent
consul['watchers'] = %w(postgresql)
pgbouncer['users'] = {
'gitlab-consul': {
password: '5e0e3263571e3704ad655076301d6ebe'
},
'pgbouncer': {
password: '771a8625958a529132abe6f1a4acb19c'
}
}
consul['configuration'] = {
retry_join: %w(10.6.0.21 10.6.0.22 10.6.0.23)
}
```
[Reconfigure Omnibus GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect.
#### Example minimal setup manual steps
The manual steps for this configuration are the same as for the [example recommended setup](#example-recommended-setup-manual-steps).
## Patroni
NOTE:
@ -1047,7 +1017,7 @@ Here are a few key facts that you must consider before upgrading PostgreSQL:
configured replication method (`pg_basebackup` is the only available option). It might take some
time for the replica to catch up with the leader, depending on the size of your database.
- An overview of the upgrade procedure is outlined in [Patoni's documentation](https://patroni.readthedocs.io/en/latest/existing_data.html#major-upgrade-of-postgresql-version).
- An overview of the upgrade procedure is outlined in [Patroni's documentation](https://patroni.readthedocs.io/en/latest/existing_data.html#major-upgrade-of-postgresql-version).
You can still use `gitlab-ctl pg-upgrade` which implements this procedure with a few adjustments.
Considering these, you should carefully plan your PostgreSQL upgrade:

View file

@ -51,12 +51,12 @@ Session data can be accessed directly through Redis. This can let you check up o
```ruby
# Get a list of sessions
session_ids = Gitlab::Redis::SharedState.with do |redis|
redis.smembers("#{Gitlab::Redis::SharedState::USER_SESSIONS_LOOKUP_NAMESPACE}:#{user.id}")
session_ids = Gitlab::Redis::Sessions.with do |redis|
redis.smembers("#{Gitlab::Redis::Sessions::USER_SESSIONS_LOOKUP_NAMESPACE}:#{user.id}")
end
# Retrieve a specific session
session_data = Gitlab::Redis::SharedState.with { |redis| redis.get("#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}") }
session_data = Gitlab::Redis::Sessions.with { |redis| redis.get("#{Gitlab::Redis::Sessions::SESSION_NAMESPACE}:#{session_id}") }
Marshal.load(session_data)
```

View file

@ -51,6 +51,7 @@ module.exports = (path, options = {}) => {
'^shared_queries(/.*)$': '<rootDir>/app/graphql/queries$1',
'^ee_else_ce(/.*)$': '<rootDir>/app/assets/javascripts$1',
'^jh_else_ce(/.*)$': '<rootDir>/app/assets/javascripts$1',
'^any_else_ce(/.*)$': '<rootDir>/app/assets/javascripts$1',
'^helpers(/.*)$': '<rootDir>/spec/frontend/__helpers__$1',
'^vendor(/.*)$': '<rootDir>/vendor/assets/javascripts$1',
[TEST_FIXTURES_PATTERN]: '<rootDir>/tmp/tests/frontend/fixtures$1',
@ -72,6 +73,8 @@ module.exports = (path, options = {}) => {
'^ee_component(/.*)$': rootDirEE,
'^ee_else_ce(/.*)$': rootDirEE,
'^ee_jest/(.*)$': '<rootDir>/ee/spec/frontend/$1',
'^any_else_ce(/.*)$': rootDirEE,
'^jh_else_ee(/.*)$': rootDirEE,
[TEST_FIXTURES_PATTERN]: '<rootDir>/tmp/tests/frontend/fixtures-ee$1',
...extModuleNameMapperEE,
});
@ -84,8 +87,11 @@ module.exports = (path, options = {}) => {
Object.assign(moduleNameMapper, {
'^jh(/.*)$': rootDirJH,
'^jh_component(/.*)$': rootDirJH,
'^jh_else_ce(/.*)$': rootDirJH,
'^jh_jest/(.*)$': '<rootDir>/jh/spec/frontend/$1',
// jh path alias https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74305#note_732793956
'^jh_else_ce(/.*)$': rootDirJH,
'^jh_else_ee(/.*)$': rootDirJH,
'^any_else_ce(/.*)$': rootDirJH,
...extModuleNameMapperJH,
});

View file

@ -2,12 +2,14 @@
module Gitlab
class AnonymousSession
include ::Gitlab::Redis::SessionsStoreHelper
def initialize(remote_ip)
@remote_ip = remote_ip
end
def count_session_ip
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
redis.pipelined do
redis.incr(session_lookup_name)
redis.expire(session_lookup_name, 24.hours)
@ -16,13 +18,13 @@ module Gitlab
end
def session_count
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
redis.get(session_lookup_name).to_i
end
end
def cleanup_session_per_ip_count
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
redis.del(session_lookup_name)
end
end
@ -32,7 +34,7 @@ module Gitlab
attr_reader :remote_ip
def session_lookup_name
@session_lookup_name ||= "#{Gitlab::Redis::SharedState::IP_SESSIONS_LOOKUP_NAMESPACE}:#{remote_ip}"
@session_lookup_name ||= "#{Gitlab::Redis::Sessions::IP_SESSIONS_LOOKUP_NAMESPACE}:#{remote_ip}"
end
end
end

View file

@ -124,11 +124,8 @@ module Gitlab
strong_memoize(:runner_project) do
next unless runner&.project_type?
projects = ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/342147') do
runner.projects.take(2) # rubocop: disable CodeReuse/ActiveRecord
end
projects.first if projects.one?
runner_projects = runner.runner_projects.take(2) # rubocop: disable CodeReuse/ActiveRecord
runner_projects.first.project if runner_projects.one?
end
end

View file

@ -36,6 +36,10 @@ module Gitlab
partitions
end
def after_adding_partitions
# No-op, required by the partition manager
end
private
def desired_partitions

View file

@ -73,6 +73,7 @@ module Gitlab
partition_name: partition.partition_name,
table_name: partition.table)
end
model.partitioning_strategy.after_adding_partitions
end
end
end

View file

@ -0,0 +1,76 @@
# frozen_string_literal: true
module Gitlab
module Database
module Partitioning
class SingleNumericListPartition
include Comparable
def self.from_sql(table, partition_name, definition)
# A list partition can support multiple values, but we only support a single number
matches = definition.match(/FOR VALUES IN \('(?<value>\d+)'\)/)
raise ArgumentError, 'Unknown partition definition' unless matches
value = Integer(matches[:value])
new(table, value, partition_name: partition_name)
end
attr_reader :table, :value
def initialize(table, value, partition_name: nil )
@table = table
@value = value
@partition_name = partition_name
end
def partition_name
@partition_name || "#{table}_#{value}"
end
def to_sql
<<~SQL
CREATE TABLE IF NOT EXISTS #{fully_qualified_partition}
PARTITION OF #{conn.quote_table_name(table)}
FOR VALUES IN (#{conn.quote(value)})
SQL
end
def to_detach_sql
<<~SQL
ALTER TABLE #{conn.quote_table_name(table)}
DETACH PARTITION #{fully_qualified_partition}
SQL
end
def ==(other)
table == other.table &&
partition_name == other.partition_name &&
value == other.value
end
alias_method :eql?, :==
def hash
[table, partition_name, value].hash
end
def <=>(other)
return if table != other.table
value <=> other.value
end
private
def fully_qualified_partition
"%s.%s" % [conn.quote_table_name(Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA), conn.quote_table_name(partition_name)]
end
def conn
@conn ||= Gitlab::Database::SharedModel.connection
end
end
end
end
end
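
For illustration, a rough sketch of how `SingleNumericListPartition` could be exercised from a console; the table name below is an assumption, not something this commit creates:

```ruby
# Hypothetical usage of SingleNumericListPartition (table name is an assumption).
partition = Gitlab::Database::Partitioning::SingleNumericListPartition.new(
  :hypothetical_partitioned_table, 1
)

partition.partition_name # => "hypothetical_partitioned_table_1"

# Builds a CREATE TABLE ... PARTITION OF ... FOR VALUES IN (...) statement
# against the dynamic partitions schema (requires a database connection).
puts partition.to_sql
```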

View file

@ -0,0 +1,75 @@
# frozen_string_literal: true
module Gitlab
module Database
module Partitioning
class SlidingListStrategy
attr_reader :model, :partitioning_key, :next_partition_if, :detach_partition_if
delegate :table_name, to: :model
def initialize(model, partitioning_key, next_partition_if:, detach_partition_if:)
@model = model
@partitioning_key = partitioning_key
@next_partition_if = next_partition_if
@detach_partition_if = detach_partition_if
ensure_partitioning_column_ignored!
end
def current_partitions
Gitlab::Database::PostgresPartition.for_parent_table(table_name).map do |partition|
SingleNumericListPartition.from_sql(table_name, partition.name, partition.condition)
end.sort
end
def missing_partitions
if no_partitions_exist?
[initial_partition]
elsif next_partition_if.call(active_partition.value)
[next_partition]
else
[]
end
end
def initial_partition
SingleNumericListPartition.new(table_name, 1)
end
def next_partition
SingleNumericListPartition.new(table_name, active_partition.value + 1)
end
def extra_partitions
possibly_extra = current_partitions[0...-1] # Never consider the most recent partition
possibly_extra.take_while { |p| detach_partition_if.call(p.value) }
end
def after_adding_partitions
active_value = active_partition.value
model.connection.change_column_default(model.table_name, partitioning_key, active_value)
end
def active_partition
# The current partitions list is sorted, so the last partition has the highest value
# This is the only partition that receives inserts.
current_partitions.last
end
def no_partitions_exist?
current_partitions.empty?
end
private
def ensure_partitioning_column_ignored!
unless model.ignored_columns.include?(partitioning_key.to_s)
raise "Add #{partitioning_key} to #{model.name}.ignored_columns to use it with SlidingListStrategy"
end
end
end
end
end
end
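
A minimal sketch of how a model might opt into this strategy through the `partitioned_by` helper shown earlier in this commit; the model, table, column, and thresholds below are assumptions for illustration only:

```ruby
# Hypothetical model using the sliding_list strategy (all names are assumptions).
class HypotheticalRecord < ApplicationRecord
  include PartitionedTable

  self.table_name = :hypothetical_records

  # SlidingListStrategy requires the partitioning column to be ignored, because
  # the strategy manages its default value after new partitions are added.
  self.ignored_columns = %w[partition_id]

  partitioned_by :partition_id, strategy: :sliding_list,
    next_partition_if: ->(active_value) { HypotheticalRecord.where(partition_id: active_value).count > 100_000 },
    detach_partition_if: ->(value) { HypotheticalRecord.where(partition_id: value).none? }
end
```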

View file

@ -194,7 +194,7 @@ module Gitlab
def increment_method_missing_count(command_name)
@method_missing_counter ||= Gitlab::Metrics.counter(:gitlab_redis_multi_store_method_missing_total, 'Client side Redis MultiStore method missing')
@method_missing_counter.increment(command: command_name, innamece_name: instance_name)
@method_missing_counter.increment(command: command_name, instance_name: instance_name)
end
def validate_stores!

View file

@ -3,9 +3,45 @@
module Gitlab
module Redis
class Sessions < ::Gitlab::Redis::Wrapper
# The data we store on Sessions used to be stored on SharedState.
def self.config_fallback
SharedState
SESSION_NAMESPACE = 'session:gitlab'
USER_SESSIONS_NAMESPACE = 'session:user:gitlab'
USER_SESSIONS_LOOKUP_NAMESPACE = 'session:lookup:user:gitlab'
IP_SESSIONS_LOOKUP_NAMESPACE = 'session:lookup:ip:gitlab2'
OTP_SESSIONS_NAMESPACE = 'session:otp'
class << self
# The data we store on Sessions used to be stored on SharedState.
def config_fallback
SharedState
end
private
def redis
# Don't use multistore if redis.sessions configuration is not provided
return super if config_fallback?
primary_store = ::Redis.new(params)
secondary_store = ::Redis.new(config_fallback.params)
MultiStore.new(primary_store, secondary_store, name)
end
end
def store(extras = {})
# Don't use multistore if redis.sessions configuration is not provided
return super if self.class.config_fallback?
primary_store = create_redis_store(redis_store_options, extras)
secondary_store = create_redis_store(self.class.config_fallback.params, extras)
MultiStore.new(primary_store, secondary_store, self.class.name)
end
private
def create_redis_store(options, extras)
::Redis::Store.new(options.merge(extras))
end
end
end
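
As a usage sketch that mirrors the troubleshooting documentation updated elsewhere in this commit, reading a raw session through the new wrapper might look like the following; `session_id` is assumed to be obtained elsewhere:

```ruby
# Illustrative only: fetch and decode one session via the Sessions wrapper.
# Without a dedicated redis.sessions configuration this behaves like SharedState;
# otherwise reads and writes go through the MultiStore built above.
raw = Gitlab::Redis::Sessions.with do |redis|
  redis.get("#{Gitlab::Redis::Sessions::SESSION_NAMESPACE}:#{session_id}")
end

Marshal.load(raw) if raw
```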

View file

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
module Redis
module SessionsStoreHelper
extend ActiveSupport::Concern
module StoreMethods
def redis_store_class
use_redis_session_store? ? Gitlab::Redis::Sessions : Gitlab::Redis::SharedState
end
private
def use_redis_session_store?
Gitlab::Utils.to_boolean(ENV['GITLAB_USE_REDIS_SESSIONS_STORE'], default: true)
end
end
include StoreMethods
included do
extend StoreMethods
end
end
end
end

View file

@ -3,10 +3,6 @@
module Gitlab
module Redis
class SharedState < ::Gitlab::Redis::Wrapper
SESSION_NAMESPACE = 'session:gitlab'
USER_SESSIONS_NAMESPACE = 'session:user:gitlab'
USER_SESSIONS_LOOKUP_NAMESPACE = 'session:lookup:user:gitlab'
IP_SESSIONS_LOOKUP_NAMESPACE = 'session:lookup:ip:gitlab2'
end
end
end

View file

@ -28,7 +28,7 @@ module Gitlab
end
def pool
@pool ||= ConnectionPool.new(size: pool_size) { ::Redis.new(params) }
@pool ||= ConnectionPool.new(size: pool_size) { redis }
end
def pool_size
@ -67,6 +67,10 @@ module Gitlab
File.expand_path('../../..', __dir__)
end
def config_fallback?
config_file_name == config_fallback&.config_file_name
end
def config_file_name
[
# Instance specific config sources:
@ -100,6 +104,12 @@ module Gitlab
"::Gitlab::Instrumentation::Redis::#{store_name}".constantize
end
private
def redis
::Redis.new(params)
end
end
def initialize(rails_env = nil)

View file

@ -8,6 +8,7 @@ module Gitlab
EE_QUEUE_CONFIG_PATH = 'ee/app/workers/all_queues.yml'
JH_QUEUE_CONFIG_PATH = 'jh/app/workers/all_queues.yml'
SIDEKIQ_QUEUES_PATH = 'config/sidekiq_queues.yml'
JH_SIDEKIQ_QUEUES_PATH = 'jh/config/sidekiq_queues.yml'
QUEUE_CONFIG_PATHS = [
FOSS_QUEUE_CONFIG_PATH,
@ -100,18 +101,24 @@ module Gitlab
def queues_for_sidekiq_queues_yml
namespaces_with_equal_weights =
workers
.reject { |worker| worker.jh? }
.group_by(&:queue_namespace)
.map(&:last)
.select { |workers| workers.map(&:get_weight).uniq.count == 1 }
.map(&:first)
namespaces = namespaces_with_equal_weights.map(&:queue_namespace).to_set
remaining_queues = workers.reject { |worker| namespaces.include?(worker.queue_namespace) }
remaining_queues = workers.reject { |worker| worker.jh? }.reject { |worker| namespaces.include?(worker.queue_namespace) }
(namespaces_with_equal_weights.map(&:namespace_and_weight) +
remaining_queues.map(&:queue_and_weight)).sort
end
# Override in JH repo
def jh_queues_for_sidekiq_queues_yml
[]
end
# YAML.load_file is OK here as we control the file contents
def sidekiq_queues_yml_outdated?
config_queues = YAML.load_file(SIDEKIQ_QUEUES_PATH)[:queues]
@ -154,3 +161,5 @@ module Gitlab
end
end
end
Gitlab::SidekiqConfig.prepend_mod

View file

@ -10,9 +10,9 @@ module Sidebars
def configure_menu_items
return false unless can?(context.current_user, :read_analytics, context.project)
add_item(cycle_analytics_menu_item)
add_item(ci_cd_analytics_menu_item)
add_item(repository_analytics_menu_item)
add_item(cycle_analytics_menu_item)
true
end

View file

@ -100,13 +100,15 @@ namespace :gitlab do
namespace :sessions do
desc "GitLab | Cleanup | Sessions | Clean ActiveSession lookup keys"
task active_sessions_lookup_keys: :gitlab_environment do
session_key_pattern = "#{Gitlab::Redis::SharedState::USER_SESSIONS_LOOKUP_NAMESPACE}:*"
use_redis_session_store = Gitlab::Utils.to_boolean(ENV['GITLAB_USE_REDIS_SESSIONS_STORE'], default: true)
redis_store_class = use_redis_session_store ? Gitlab::Redis::Sessions : Gitlab::Redis::SharedState
session_key_pattern = "#{Gitlab::Redis::Sessions::USER_SESSIONS_LOOKUP_NAMESPACE}:*"
last_save_check = Time.at(0)
wait_time = 10.seconds
cursor = 0
total_users_scanned = 0
Gitlab::Redis::SharedState.with do |redis|
redis_store_class.with do |redis|
begin
cursor, keys = redis.scan(cursor, match: session_key_pattern)
total_users_scanned += keys.count

View file

@ -100,6 +100,10 @@ namespace :gitlab do
queues_and_weights = Gitlab::SidekiqConfig.queues_for_sidekiq_queues_yml
write_yaml(Gitlab::SidekiqConfig::SIDEKIQ_QUEUES_PATH, banner, queues: queues_and_weights)
if Gitlab.jh?
write_yaml(Gitlab::SidekiqConfig::JH_SIDEKIQ_QUEUES_PATH, banner, queues: Gitlab::SidekiqConfig.jh_queues_for_sidekiq_queues_yml)
end
end
desc 'GitLab | Sidekiq | Validate that sidekiq_queues.yml matches worker definitions'
@ -113,6 +117,7 @@ namespace :gitlab do
Then commit and push the changes from:
- #{Gitlab::SidekiqConfig::SIDEKIQ_QUEUES_PATH}
#{"- " + Gitlab::SidekiqConfig::JH_SIDEKIQ_QUEUES_PATH if Gitlab.jh?}
MSG
end

View file

@ -41549,6 +41549,9 @@ msgstr ""
msgid "mrWidget|Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally."
msgstr ""
msgid "mrWidget|Merge blocked: merge conflicts must be resolved."
msgstr ""
msgid "mrWidget|Merge blocked: pipeline must succeed. It's waiting for a manual action to continue."
msgstr ""
@ -41627,9 +41630,6 @@ msgstr ""
msgid "mrWidget|Resolve conflicts"
msgstr ""
msgid "mrWidget|Resolve these conflicts or ask someone with write access to this repository to merge it locally"
msgstr ""
msgid "mrWidget|Revert"
msgstr ""
@ -41675,9 +41675,6 @@ msgstr ""
msgid "mrWidget|The source branch is being deleted"
msgstr ""
msgid "mrWidget|There are merge conflicts"
msgstr ""
msgid "mrWidget|This merge request failed to be merged automatically"
msgstr ""
@ -41690,6 +41687,9 @@ msgstr ""
msgid "mrWidget|To merge, a Jira issue key must be mentioned in the title or description."
msgstr ""
msgid "mrWidget|Users who can write to the source or target branches can resolve the conflicts."
msgstr ""
msgid "mrWidget|What is a merge train?"
msgstr ""

View file

@ -6,37 +6,59 @@ module QA
module PipelineEditor
class Show < QA::Page::Base
view 'app/assets/javascripts/pipeline_editor/components/file_nav/branch_switcher.vue' do
element :branch_selector_button
element :menu_branch_button
element :branch_selector_button, require: true
element :branch_menu_item_button
element :branch_menu_container
end
view 'app/assets/javascripts/pipeline_editor/components/commit/commit_form.vue' do
element :target_branch_field
element :target_branch_field, require: true
end
def has_branch_selector_button?
has_element? :branch_selector_button
view 'app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue' do
element :toggle_sidebar_collapse_button
element :drawer_content
end
def click_branch_selector_button
wait_until(reload: false) do
has_element?(:branch_selector_button)
end
click_element(:branch_selector_button, skip_finished_loading_check: true)
view 'app/assets/javascripts/vue_shared/components/source_editor.vue' do
element :source_editor_container, require: true
end
def select_branch_from_dropdown(branch_to_switch_to)
wait_until(reload: false) do
has_element?(:menu_branch_button)
end
click_element(:menu_branch_button, text: branch_to_switch_to, skip_finished_loading_check: true)
def initialize
super
wait_for_requests
close_toggle_sidebar
end
def open_branch_selector_dropdown
click_element(:branch_selector_button)
end
def select_branch_from_dropdown(branch_name)
wait_for_animated_element(:branch_menu_container)
click_element(:branch_menu_item_button, text: branch_name)
wait_for_requests
end
def target_branch_name
wait_until(reload: false) do
has_element?(:target_branch_field)
end
find_element(:target_branch_field, skip_finished_loading_check: true).value
find_element(:target_branch_field).value
end
def editing_content
find_element(:source_editor_container).text
end
private
# If the page thinks the user has never opened the pipeline editor before,
# it expands the pipeline editor sidebar by default.
# Collapse the sidebar if it is expanded.
def close_toggle_sidebar
return unless has_element?(:drawer_content)
click_element(:toggle_sidebar_collapse_button)
end
end
end

View file

@ -2,7 +2,9 @@
module QA
RSpec.describe 'Verify' do
describe 'Pipeline editor', :requires_admin do
describe 'Pipeline editor' do
let(:random_test_string) { SecureRandom.hex(10) }
let(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'pipeline-editor-project'
@ -17,70 +19,71 @@ module QA
[
{
file_path: '.gitlab-ci.yml',
content: default_file_content
content: <<~YAML
stages:
- test
initialize:
stage: test
script:
- echo "I am now on branch #{project.default_branch}"
YAML
}
]
)
end
end
let!(:production_push) do
Resource::Repository::Push.fabricate! do |push|
push.repository_http_uri = project.repository_http_location.uri
push.branch_name = 'production'
push.file_name = '.gitlab-ci.yml'
push.file_content = production_file_content
let!(:test_branch) do
Resource::Repository::ProjectPush.fabricate! do |resource|
resource.project = project
resource.branch_name = random_test_string
resource.file_name = '.gitlab-ci.yml'
resource.file_content = <<~YAML
stages:
- test
initialize:
stage: test
script:
- echo "I am now on branch #{random_test_string}"
YAML
end
end
let(:default_file_content) do
<<~YAML
stages:
- test
initialize:
stage: test
script:
- echo "initialized in #{project.default_branch}"
YAML
end
let(:production_file_content) do
<<~YAML
stages:
- test
initialize:
stage: test
script:
- echo "initialized in production"
YAML
end
before do
Flow::Login.sign_in
project.visit!
Page::Project::Menu.perform(&:go_to_pipeline_editor)
# The project push sometimes takes a while to complete,
# so make sure the new branch exists before interacting with the page.
Support::Retrier.retry_until(max_duration: 15, sleep_interval: 3, reload_page: false, message: 'Ensuring project has branch') do
project.has_branch?(random_test_string)
end
end
after do
project.remove_via_api!
Page::Main::Menu.perform(&:sign_out)
end
it 'can switch branches and target branch field updates accordingly', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1891' do
Page::Project::Menu.perform(&:go_to_pipeline_editor)
Page::Project::PipelineEditor::Show.perform do |show|
expect(show).to have_branch_selector_button
show.open_branch_selector_dropdown
show.select_branch_from_dropdown(random_test_string)
show.click_branch_selector_button
show.select_branch_from_dropdown(production_push.branch_name)
aggregate_failures do
expect(show.target_branch_name).to eq(random_test_string), 'Target branch field is not showing expected branch name.'
expect(show.editing_content).to have_content(random_test_string), 'Editor content does not include expected test string.'
end
expect(show.target_branch_name).to eq(production_push.branch_name)
show.click_branch_selector_button
show.open_branch_selector_dropdown
show.select_branch_from_dropdown(project.default_branch)
expect(show.target_branch_name).to eq(project.default_branch)
aggregate_failures do
expect(show.target_branch_name).to eq(project.default_branch), 'Target branch field is not showing expected branch name.'
expect(show.editing_content).to have_content(project.default_branch), 'Editor content does not include expected test string.'
end
end
end
end

View file

@ -2,12 +2,12 @@
require 'spec_helper'
RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do
RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
let(:session_id) { Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') }
context 'when session cookie is set' do
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
end

View file

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do
RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do
it 'successful login adds a new active user login' do
now = Time.zone.parse('2018-03-12 09:06')
Timecop.freeze(now) do
@ -29,13 +29,13 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do
it 'successful login cleans up obsolete entries' do
user = create(:user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d')
end
gitlab_sign_in(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).not_to include '59822c7d9fcdfa03725eff41782ad97d'
end
end
@ -44,14 +44,14 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do
user = create(:user)
personal_access_token = create(:personal_access_token, user: user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d')
end
visit user_path(user, :atom, private_token: personal_access_token.token)
expect(page.status_code).to eq 200
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to include '59822c7d9fcdfa03725eff41782ad97d'
end
end

View file

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
RSpec.describe 'Login', :clean_gitlab_redis_sessions do
include TermsHelper
include UserLoginHelper
include SessionHelpers
@ -84,7 +84,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
expect(page).to have_content('Your account has been blocked.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
it 'does not update Devise trackable attributes' do
expect(authentication_metrics)
.to increment(:user_blocked_counter)
.and increment(:user_unauthenticated_counter)
@ -161,7 +161,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do
expect(page).to have_content('Invalid login or password.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
it 'does not update Devise trackable attributes' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)

View file

@ -77,7 +77,9 @@ exports[`packages_list_row renders 1`] = `
<div
class="gl-display-flex gl-align-items-center gl-min-h-6"
>
<span>
<span
data-testid="created-date"
>
Created
<timeago-tooltip-stub
cssclass=""

View file

@ -6,6 +6,8 @@ import PackagesListRow from '~/packages_and_registries/package_registry/componen
import PackagePath from '~/packages/shared/components/package_path.vue';
import PackageTags from '~/packages/shared/components/package_tags.vue';
import PackageIconAndName from '~/packages/shared/components/package_icon_and_name.vue';
import PublishMethod from '~/packages_and_registries/package_registry/components/list/publish_method.vue';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { PACKAGE_ERROR_STATUS } from '~/packages_and_registries/package_registry/constants';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
@ -29,6 +31,9 @@ describe('packages_list_row', () => {
const findPackageLink = () => wrapper.findComponent(GlLink);
const findWarningIcon = () => wrapper.findByTestId('warning-icon');
const findLeftSecondaryInfos = () => wrapper.findByTestId('left-secondary-infos');
const findPublishMethod = () => wrapper.findComponent(PublishMethod);
const findCreatedDateText = () => wrapper.findByTestId('created-date');
const findTimeAgoTooltip = () => wrapper.findComponent(TimeagoTooltip);
const mountComponent = ({
packageEntity = packageWithoutTags,
@ -153,4 +158,23 @@ describe('packages_list_row', () => {
expect(findPackageIconAndName().text()).toBe(packageWithoutTags.packageType.toLowerCase());
});
});
describe('right info', () => {
it('has publish method component', () => {
mountComponent({
packageEntity: { ...packageWithoutTags, pipelines: { nodes: packagePipelines() } },
});
expect(findPublishMethod().props('pipeline')).toEqual(packagePipelines()[0]);
});
it('has the created date', () => {
mountComponent();
expect(findCreatedDateText().text()).toMatchInterpolatedText(PackagesListRow.i18n.createdAt);
expect(findTimeAgoTooltip().props()).toMatchObject({
time: packageData().createdAt,
});
});
});
});

View file

@ -12,6 +12,14 @@ describe('MRWidgetConflicts', () => {
const findResolveButton = () => wrapper.findByTestId('resolve-conflicts-button');
const findMergeLocalButton = () => wrapper.findByTestId('merge-locally-button');
const mergeConflictsText = 'Merge blocked: merge conflicts must be resolved.';
const fastForwardMergeText =
'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.';
const userCannotMergeText =
'Users who can write to the source or target branches can resolve the conflicts.';
const resolveConflictsBtnText = 'Resolve conflicts';
const mergeLocallyBtnText = 'Merge locally';
function createComponent(propsData = {}) {
wrapper = extendedWrapper(
shallowMount(ConflictsComponent, {
@ -81,16 +89,16 @@ describe('MRWidgetConflicts', () => {
});
it('should tell you about conflicts without bothering other people', () => {
expect(wrapper.text()).toContain('There are merge conflicts');
expect(wrapper.text()).not.toContain('ask someone with write access');
expect(wrapper.text()).toContain(mergeConflictsText);
expect(wrapper.text()).not.toContain(userCannotMergeText);
});
it('should not allow you to resolve the conflicts', () => {
expect(wrapper.text()).not.toContain('Resolve conflicts');
expect(wrapper.text()).not.toContain(resolveConflictsBtnText);
});
it('should have merge buttons', () => {
expect(findMergeLocalButton().text()).toContain('Merge locally');
expect(findMergeLocalButton().text()).toContain(mergeLocallyBtnText);
});
});
@ -107,17 +115,17 @@ describe('MRWidgetConflicts', () => {
});
it('should tell you about conflicts', () => {
expect(wrapper.text()).toContain('There are merge conflicts');
expect(wrapper.text()).toContain('ask someone with write access');
expect(wrapper.text()).toContain(mergeConflictsText);
expect(wrapper.text()).toContain(userCannotMergeText);
});
it('should allow you to resolve the conflicts', () => {
expect(findResolveButton().text()).toContain('Resolve conflicts');
expect(findResolveButton().text()).toContain(resolveConflictsBtnText);
expect(findResolveButton().attributes('href')).toEqual(path);
});
it('should not have merge buttons', () => {
expect(wrapper.text()).not.toContain('Merge locally');
expect(wrapper.text()).not.toContain(mergeLocallyBtnText);
});
});
@ -134,17 +142,17 @@ describe('MRWidgetConflicts', () => {
});
it('should tell you about conflicts without bothering other people', () => {
expect(wrapper.text()).toContain('There are merge conflicts');
expect(wrapper.text()).not.toContain('ask someone with write access');
expect(wrapper.text()).toContain(mergeConflictsText);
expect(wrapper.text()).not.toContain(userCannotMergeText);
});
it('should allow you to resolve the conflicts', () => {
expect(findResolveButton().text()).toContain('Resolve conflicts');
expect(findResolveButton().text()).toContain(resolveConflictsBtnText);
expect(findResolveButton().attributes('href')).toEqual(path);
});
it('should have merge buttons', () => {
expect(findMergeLocalButton().text()).toContain('Merge locally');
expect(findMergeLocalButton().text()).toContain(mergeLocallyBtnText);
});
});
@ -158,9 +166,7 @@ describe('MRWidgetConflicts', () => {
},
});
expect(wrapper.text().trim().replace(/\s\s+/g, ' ')).toContain(
'ask someone with write access',
);
expect(wrapper.text().trim().replace(/\s\s+/g, ' ')).toContain(userCannotMergeText);
});
it('should not have action buttons', async () => {
@ -198,9 +204,7 @@ describe('MRWidgetConflicts', () => {
},
});
expect(removeBreakLine(wrapper.text()).trim()).toContain(
'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.',
);
expect(removeBreakLine(wrapper.text()).trim()).toContain(fastForwardMergeText);
});
});
@ -236,7 +240,7 @@ describe('MRWidgetConflicts', () => {
});
it('should allow you to resolve the conflicts', () => {
expect(findResolveButton().text()).toContain('Resolve conflicts');
expect(findResolveButton().text()).toContain(resolveConflictsBtnText);
expect(findResolveButton().attributes('href')).toEqual(TEST_HOST);
});
});

View file

@ -3,6 +3,7 @@
exports[`Source Editor component rendering matches the snapshot 1`] = `
<div
data-editor-loading=""
data-qa-selector="source_editor_container"
id="source-editor-snippet_777"
>
<pre

View file

@ -10,25 +10,37 @@ RSpec.describe 'Session initializer for GitLab' do
end
describe 'config#session_store' do
context 'when the GITLAB_REDIS_STORE_WITH_SESSION_STORE env is not set' do
context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is not set' do
before do
stub_env('GITLAB_REDIS_STORE_WITH_SESSION_STORE', nil)
stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', nil)
end
it 'initialized as a redis_store with a proper Redis::Store instance' do
it 'initialized with Multistore as ENV var defaults to true' do
expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(::Redis::Store)))
load_session_store
end
end
context 'when the GITLAB_REDIS_STORE_WITH_SESSION_STORE env is disabled' do
context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is disabled' do
before do
stub_env('GITLAB_REDIS_STORE_WITH_SESSION_STORE', false)
stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', false)
end
it 'initialized as a redis_store with a proper servers configuration' do
expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(servers: kind_of(Hash)))
expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(Redis::Store)))
load_session_store
end
end
context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is enabled' do
before do
stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', true)
end
it 'initialized as a redis_store with a proper servers configuration' do
expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(::Redis::Store)))
load_session_store
end

View file

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_sessions do
let(:default_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
let(:additional_session_id) { '7919a6f1bb119dd7396fadc38fd18d0d' }
@ -16,7 +16,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
it 'adds session id to proper key' do
subject.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq 1
end
end
@ -25,7 +25,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
freeze_time do
subject.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.ttl("session:lookup:ip:gitlab2:127.0.0.1")).to eq(24.hours.to_i)
end
end
@ -36,7 +36,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
subject.count_session_ip
new_anonymous_session.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq(2)
end
end
@ -45,7 +45,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
describe '#stored_sessions' do
it 'returns all anonymous sessions per ip' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
@ -54,13 +54,13 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
end
it 'removes obsolete lookup through ip entries' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
subject.cleanup_session_per_ip_count
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.exists("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false)
end
end

View file

@ -152,6 +152,38 @@ RSpec.describe Gitlab::ApplicationContext do
end
end
end
context 'when using a runner project' do
let_it_be_with_reload(:runner) { create(:ci_runner, :project) }
it 'sets project path from runner project' do
context = described_class.new(runner: runner)
expect(result(context)).to include(project: runner.runner_projects.first.project.full_path)
end
context 'when the runner serves multiple projects' do
before do
create(:ci_runner_project, runner: runner, project: create(:project))
end
it 'does not set project path' do
context = described_class.new(runner: runner)
expect(result(context)).to include(project: nil)
end
end
end
context 'when using an instance runner' do
let_it_be(:runner) { create(:ci_runner, :instance) }
it 'does not set project path' do
context = described_class.new(runner: runner)
expect(result(context)).to include(project: nil)
end
end
end
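The contexts above pin down when a runner contributes a project to the application context: only a project-type runner attached to exactly one project yields a path, while multi-project and instance runners yield `nil`. A minimal sketch of that rule, derived solely from the expectations in this spec (the helper name is illustrative, not the real `Gitlab::ApplicationContext` implementation):

```ruby
# Illustrative helper mirroring the spec expectations above, not the actual
# Gitlab::ApplicationContext code.
def runner_project_path(runner)
  projects = runner.runner_projects
  # Instance runners and runners serving several projects contribute no path.
  return unless projects.size == 1

  projects.first.project.full_path
end
```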
describe '#use' do

View file

@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
subject(:sync_partitions) { described_class.new(model).sync_partitions }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) }
let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "some_table" }
@ -83,7 +83,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:manager) { described_class.new(model) }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) }
let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "foo" }

View file

@ -0,0 +1,50 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::SingleNumericListPartition do
describe '.from_sql' do
subject(:parsed_partition) { described_class.from_sql(table, partition_name, definition) }
let(:table) { 'partitioned_table' }
let(:partition_value) { 0 }
let(:partition_name) { "partitioned_table_#{partition_value}" }
let(:definition) { "FOR VALUES IN ('#{partition_value}')" }
it 'uses specified table name' do
expect(parsed_partition.table).to eq(table)
end
it 'uses specified partition name' do
expect(parsed_partition.partition_name).to eq(partition_name)
end
it 'parses the definition' do
expect(parsed_partition.value).to eq(partition_value)
end
end
describe '#partition_name' do
it 'is the explicit name if provided' do
expect(described_class.new('table', 1, partition_name: 'some_other_name').partition_name).to eq('some_other_name')
end
it 'defaults to the table name followed by the partition value' do
expect(described_class.new('table', 1).partition_name).to eq('table_1')
end
end
context 'sorting' do
it 'is incomparable if the tables do not match' do
expect(described_class.new('table1', 1) <=> described_class.new('table2', 2)).to be_nil
end
it 'sorts by the value when the tables match' do
expect(described_class.new('table1', 1) <=> described_class.new('table1', 2)).to eq(1 <=> 2)
end
it 'sorts by numeric value rather than text value' do
expect(described_class.new('table', 10)).to be > described_class.new('table', 9)
end
end
end

View file

@ -0,0 +1,214 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
let(:connection) { ActiveRecord::Base.connection }
let(:table_name) { :_test_partitioned_test }
let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition]) }
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
subject(:strategy) do
described_class.new(model, :partition,
next_partition_if: next_partition_if,
detach_partition_if: detach_partition_if)
end
before do
connection.execute(<<~SQL)
create table #{table_name}
(
id serial not null,
partition bigint not null default 2,
created_at timestamptz not null,
primary key (id, partition)
)
partition by list(partition);
create table #{table_name}_1
partition of #{table_name} for values in (1);
create table #{table_name}_2
partition of #{table_name} for values in (2);
SQL
end
describe '#current_partitions' do
it 'detects both partitions' do
expect(strategy.current_partitions).to eq([
Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 1, partition_name: '_test_partitioned_test_1'),
Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 2, partition_name: '_test_partitioned_test_2')
])
end
end
describe '#active_partition' do
it 'is the partition with the largest value' do
expect(strategy.active_partition.value).to eq(2)
end
end
describe '#missing_partitions' do
context 'when next_partition_if returns true' do
let(:next_partition_if) { proc { true } }
it 'is a partition definition for the next partition in the series' do
extra = strategy.missing_partitions
expect(extra.length).to eq(1)
expect(extra.first.value).to eq(3)
end
end
context 'when next_partition_if returns false' do
let(:next_partition_if) { proc { false } }
it 'is empty' do
expect(strategy.missing_partitions).to be_empty
end
end
context 'when there are no partitions for the table' do
it 'returns a partition for value 1' do
connection.execute("drop table #{table_name}_1; drop table #{table_name}_2;")
missing_partitions = strategy.missing_partitions
expect(missing_partitions.size).to eq(1)
missing_partition = missing_partitions.first
expect(missing_partition.value).to eq(1)
end
end
end
describe '#extra_partitions' do
before do
(3..10).each do |i|
connection.execute("CREATE TABLE #{table_name}_#{i} PARTITION OF #{table_name} FOR VALUES IN (#{i})")
end
end
context 'when some partitions are true for detach_partition_if' do
let(:detach_partition_if) { ->(p) { p != 5 } }
it 'is the leading set of partitions before that value' do
expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4)
end
end
context 'when all partitions are true for detach_partition_if' do
let(:detach_partition_if) { proc { true } }
it 'is all but the most recent partition', :aggregate_failures do
expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4, 5, 6, 7, 8, 9)
expect(strategy.current_partitions.map(&:value).max).to eq(10)
end
end
end
describe '#initial_partition' do
it 'starts with the value 1', :aggregate_failures do
initial_partition = strategy.initial_partition
expect(initial_partition.value).to eq(1)
expect(initial_partition.table).to eq(strategy.table_name)
expect(initial_partition.partition_name).to eq("#{strategy.table_name}_1")
end
end
describe '#next_partition' do
it 'is one after the active partition', :aggregate_failures do
expect(strategy).to receive(:active_partition).and_return(double(value: 5))
next_partition = strategy.next_partition
expect(next_partition.value).to eq(6)
expect(next_partition.table).to eq(strategy.table_name)
expect(next_partition.partition_name).to eq("#{strategy.table_name}_6")
end
end
describe '#ensure_partitioning_column_ignored!' do
it 'raises when the column is not ignored' do
expect do
Class.new(ApplicationRecord) do
include PartitionedTable
partitioned_by :partition, strategy: :sliding_list,
next_partition_if: proc { false },
detach_partition_if: proc { false }
end
end.to raise_error(/ignored_columns/)
end
it 'does not raise when the column is ignored' do
expect do
Class.new(ApplicationRecord) do
include PartitionedTable
self.ignored_columns = [:partition]
partitioned_by :partition, strategy: :sliding_list,
next_partition_if: proc { false },
detach_partition_if: proc { false }
end
end.not_to raise_error
end
end
context 'redirecting inserts as the active partition changes' do
let(:model) do
Class.new(ApplicationRecord) do
include PartitionedTable
self.table_name = '_test_partitioned_test'
self.primary_key = :id
self.ignored_columns = %w[partition]
# method().call cannot be detected by rspec, so we add a layer of indirection here
def self.next_partition_if_wrapper(...)
next_partition?(...)
end
def self.detach_partition_if_wrapper(...)
detach_partition?(...)
end
partitioned_by :partition, strategy: :sliding_list,
next_partition_if: method(:next_partition_if_wrapper),
detach_partition_if: method(:detach_partition_if_wrapper)
def self.next_partition?(current_partition)
end
def self.detach_partition?(partition)
end
end
end
it 'redirects to the new partition', :aggregate_failures do
partition_2_model = model.create! # Goes in partition 2
allow(model).to receive(:next_partition?) do
model.partitioning_strategy.active_partition.value < 3
end
allow(model).to receive(:detach_partition?).and_return(false)
Gitlab::Database::Partitioning::PartitionManager.new(model).sync_partitions
partition_3_model = model.create!
# Rails doesn't pick up on database default changes, so we need to reload.
# We also want to grab the partition column to verify what it was set to.
# In normal operation we make Rails ignore it so that we can use a changing default,
# so we force a select * to load it.
all_columns = model.select(model.arel_table[Arel.star])
partition_2_model = all_columns.find(partition_2_model.id)
partition_3_model = all_columns.find(partition_3_model.id)
expect(partition_2_model.partition).to eq(2)
expect(partition_3_model.partition).to eq(3)
end
end
end
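Taken together, the options exercised in this spec correspond to a model declaration along the following lines; a minimal sketch assembled from the calls shown above (the class name is illustrative, and real callers pass procs that inspect the current partition rather than constant procs):

```ruby
class ExamplePartitionedModel < ApplicationRecord
  include PartitionedTable

  self.table_name = '_test_partitioned_test'
  self.primary_key = :id
  # The partitioning column must be ignored, otherwise
  # ensure_partitioning_column_ignored! raises.
  self.ignored_columns = %w[partition]

  partitioned_by :partition, strategy: :sliding_list,
    next_partition_if: proc { false },
    detach_partition_if: proc { false }
end
```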

View file

@ -114,6 +114,12 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
RSpec.shared_examples_for 'fallback read from the secondary store' do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
it 'fallback and execute on secondary instance' do
expect(secondary_store).to receive(name).with(*args).and_call_original
@ -128,7 +134,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
it 'increment read fallback count metrics' do
expect(multi_store).to receive(:increment_read_fallback_count).with(name)
expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
subject
end
@ -401,9 +407,12 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
context 'with unsupported command' do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
primary_store.flushdb
secondary_store.flushdb
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
let_it_be(:key) { "redis:counter" }
@ -426,7 +435,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
it 'increments method missing counter' do
expect(multi_store).to receive(:increment_method_missing_count).with(:incr)
expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
subject
end

View file

@ -4,4 +4,54 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::Sessions do
include_examples "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
describe 'redis instance used in connection pool' do
before do
clear_pool
end
context 'when redis.sessions configuration is not provided' do
it 'uses ::Redis instance' do
expect(described_class).to receive(:config_fallback?).and_return(true)
described_class.pool.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Redis)
end
end
end
context 'when redis.sessions configuration is provided' do
it 'instantiates an instance of MultiStore' do
expect(described_class).to receive(:config_fallback?).and_return(false)
described_class.pool.with do |redis_instance|
expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
end
end
end
def clear_pool
described_class.remove_instance_variable(:@pool)
rescue NameError
# raised if @pool was not set; ignore
end
end
describe '#store' do
subject { described_class.store(namespace: described_class::SESSION_NAMESPACE) }
context 'when redis.sessions configuration is not provided' do
it 'instantiates a ::Redis::Store instance' do
expect(described_class).to receive(:config_fallback?).and_return(true)
expect(subject).to be_instance_of(::Redis::Store)
end
end
context 'when redis.sessions configuration is provided' do
it 'instantiates an instance of MultiStore' do
expect(described_class).to receive(:config_fallback?).and_return(false)
expect(subject).to be_instance_of(::Gitlab::Redis::MultiStore)
end
end
end
end

View file

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
let(:user) do
create(:user).tap do |user|
user.current_sign_in_at = Time.current
@ -44,7 +44,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.list' do
it 'returns all sessions by user' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' }))
redis.set("session:user:gitlab:#{user.id}:59822c7d9fcdfa03725eff41782ad97d", Marshal.dump({ session_id: 'b' }))
redis.set("session:user:gitlab:9999:5c8611e4f9c69645ad1a1492f4131358", '')
@ -62,7 +62,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'does not return obsolete entries and cleans them up' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' }))
redis.sadd(
@ -76,7 +76,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
expect(ActiveSession.list(user)).to eq [{ session_id: 'a' }]
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.sscan_each("session:lookup:user:gitlab:#{user.id}").to_a).to eq ['6919a6f1bb119dd7396fadc38fd18d0d']
end
end
@ -88,7 +88,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.list_sessions' do
it 'uses the ActiveSession lookup to return original sessions' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
@ -109,7 +109,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'uses the user lookup table to return session ids' do
session_ids = ['59822c7d9fcdfa03725eff41782ad97d']
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.sadd("session:lookup:user:gitlab:#{user.id}", session_ids)
end
@ -119,7 +119,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.sessions_from_ids' do
it 'uses the ActiveSession lookup to return original sessions' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
end
@ -128,7 +128,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'avoids a redis lookup for an empty array' do
expect(Gitlab::Redis::SharedState).not_to receive(:with)
expect(Gitlab::Redis::Sessions).not_to receive(:with)
expect(ActiveSession.sessions_from_ids([])).to eq([])
end
@ -137,7 +137,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
stub_const('ActiveSession::SESSION_BATCH_SIZE', 1)
redis = double(:redis)
expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
expect(Gitlab::Redis::Sessions).to receive(:with).and_yield(redis)
sessions = %w[session-a session-b]
mget_responses = sessions.map { |session| [Marshal.dump(session)]}
@ -151,7 +151,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'sets a new redis entry for the user session and a lookup entry' do
ActiveSession.set(user, request)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each.to_a).to include(
"session:user:gitlab:#{user.id}:2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae",
"session:lookup:user:gitlab:#{user.id}"
@ -201,7 +201,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.destroy_session' do
shared_examples 'removes all session data' do
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:#{active_session_lookup_key}", '')
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", '')
@ -216,7 +216,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes the devise session' do
subject
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each(match: "session:gitlab:*").to_a).to be_empty
end
end
@ -224,7 +224,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes the lookup entry' do
subject
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each(match: "session:lookup:user:gitlab:#{user.id}").to_a).to be_empty
end
end
@ -232,7 +232,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes the ActiveSession' do
subject
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each(match: "session:user:gitlab:*").to_a).to be_empty
end
end
@ -269,7 +269,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
# setup for current user
[current_session_id, '59822c7d9fcdfa03725eff41782ad97d'].each do |session_public_id|
session_private_id = Rack::Session::SessionId.new(session_public_id).private_id
@ -303,7 +303,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
session_private_id = Rack::Session::SessionId.new(current_session_id).private_id
ActiveSession.destroy_all_but_current(user, request.session)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(
redis.smembers(described_class.lookup_key_name(user.id))
).to eq([session_private_id])
@ -312,7 +312,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'does not remove impersonated sessions' do
impersonated_session_id = '6919a6f1bb119dd7396fadc38fd18eee'
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set(described_class.key_name(user.id, impersonated_session_id),
Marshal.dump(ActiveSession.new(session_id: Rack::Session::SessionId.new(impersonated_session_id), is_impersonated: true)))
redis.sadd(described_class.lookup_key_name(user.id), impersonated_session_id)
@ -331,7 +331,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'removes obsolete lookup entries' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", '')
redis.sadd("session:lookup:user:gitlab:#{user.id}", '6919a6f1bb119dd7396fadc38fd18d0d')
redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d')
@ -339,7 +339,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to eq ['6919a6f1bb119dd7396fadc38fd18d0d']
end
end
@ -353,7 +353,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
(1..max_number_of_sessions_plus_two).each do |number|
redis.set(
"session:user:gitlab:#{user.id}:#{number}",
@ -370,7 +370,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes obsolete active sessions entries' do
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
@ -381,7 +381,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes obsolete lookup entries' do
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
@ -390,7 +390,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'removes obsolete lookup entries even without active session' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.sadd(
"session:lookup:user:gitlab:#{user.id}",
"#{max_number_of_sessions_plus_two + 1}"
@ -399,7 +399,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
@ -413,7 +413,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
context 'when the number of active sessions is lower than the limit' do
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
((max_number_of_sessions_plus_two - 4)..max_number_of_sessions_plus_two).each do |number|
redis.del("session:user:gitlab:#{user.id}:#{number}")
end
@ -421,17 +421,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'does not remove active session entries, but removes lookup entries' do
lookup_entries_before_cleanup = Gitlab::Redis::SharedState.with do |redis|
lookup_entries_before_cleanup = Gitlab::Redis::Sessions.with do |redis|
redis.smembers("session:lookup:user:gitlab:#{user.id}")
end
sessions_before_cleanup = Gitlab::Redis::SharedState.with do |redis|
sessions_before_cleanup = Gitlab::Redis::Sessions.with do |redis|
redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
end
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
expect(sessions.count).to eq(sessions_before_cleanup.count)
@ -446,7 +446,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
(1..max_number_of_sessions_plus_two).each do |number|
redis.set(
"session:user:gitlab:#{user.id}:#{number}",
@ -463,7 +463,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes obsolete active sessions entries' do
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)

View file

@ -833,8 +833,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
let(:expected_params) { { project: project.full_path, client_id: "runner/#{runner.id}" } }
end
it_behaves_like 'not executing any extra queries for the application context', 2 do
# Extra queries: Project, Route
it_behaves_like 'not executing any extra queries for the application context', 3 do
# Extra queries: Project, Route, RunnerProject
let(:subject_proc) { proc { request_job } }
end
end

View file

@ -377,11 +377,11 @@ RSpec.describe API::Commits do
end
context 'when using warden' do
it 'increments usage counters', :clean_gitlab_redis_shared_state do
it 'increments usage counters', :clean_gitlab_redis_sessions do
session_id = Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d')
session_hash = { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] }
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
end

View file

@ -47,7 +47,7 @@ RSpec.describe API::ProjectImport do
it 'executes a limited number of queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
expect(control_count).to be <= 101
expect(control_count).to be <= 102
end
it 'schedules an import using a namespace' do

View file

@ -125,6 +125,14 @@ RSpec.describe Ci::RetryBuildService do
expect(new_build.needs_attributes).to match(build.needs_attributes)
expect(new_build.needs).not_to match(build.needs)
end
it 'clones only the job variables attributes' do
expect(new_build.job_variables.exists?).to be_truthy
expect(build.job_variables.exists?).to be_truthy
expect(new_build.job_variables_attributes).to match(build.job_variables_attributes)
expect(new_build.job_variables).not_to match(build.job_variables)
end
end
describe 'reject accessors' do
@ -147,7 +155,7 @@ RSpec.describe Ci::RetryBuildService do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
[:tag_list, :needs_attributes] -
[:tag_list, :needs_attributes, :job_variables_attributes] -
# ee-specific accessors should be tested in ee/spec/services/ci/retry_build_service_spec.rb instead
described_class.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables

View file

@ -19,6 +19,17 @@ module NavbarStructureHelper
hash[:nav_sub_items].insert(index + 1, new_sub_nav_item_name)
end
def insert_before_sub_nav_item(after_sub_nav_item_name, within:, new_sub_nav_item_name:)
expect(structure).to include(a_hash_including(nav_item: within))
hash = structure.find { |h| h[:nav_item] == within if h }
expect(hash).to have_key(:nav_sub_items)
expect(hash[:nav_sub_items]).to include(after_sub_nav_item_name)
index = hash[:nav_sub_items].find_index(after_sub_nav_item_name)
hash[:nav_sub_items].insert(index, new_sub_nav_item_name)
end
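The new `insert_before_sub_nav_item` helper mirrors the "after" variant shown above, but places the new entry directly before the reference sub item instead of after it. A hypothetical call from a navbar structure spec, using item names that appear elsewhere in this commit (the exact call site is not part of this diff):

```ruby
# Hypothetical usage: put 'Value stream' ahead of 'CI/CD' in the Analytics menu.
insert_before_sub_nav_item(
  _('CI/CD'),                              # existing sub item to insert before
  within: _('Analytics'),                  # top-level nav item to modify
  new_sub_nav_item_name: _('Value stream') # entry being added
)
```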
def insert_package_nav(within)
insert_after_nav_item(
within,

View file

@ -17,10 +17,10 @@ module SessionHelpers
end
def get_session_keys
Gitlab::Redis::SharedState.with { |redis| redis.scan_each(match: 'session:gitlab:*').to_a }
Gitlab::Redis::Sessions.with { |redis| redis.scan_each(match: 'session:gitlab:*').to_a }
end
def get_ttl(key)
Gitlab::Redis::SharedState.with { |redis| redis.ttl(key) }
Gitlab::Redis::Sessions.with { |redis| redis.ttl(key) }
end
end

View file

@ -93,18 +93,23 @@ RSpec.shared_examples "redis_shared_examples" do
subject { described_class.new(rails_env).store }
shared_examples 'redis store' do
let(:redis_store) { ::Redis::Store }
let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database}" }
it 'instantiates Redis::Store' do
is_expected.to be_a(::Redis::Store)
expect(subject.to_s).to eq("Redis Client connected to #{host} against DB #{redis_database}")
is_expected.to be_a(redis_store)
expect(subject.to_s).to eq(redis_store_to_s)
end
context 'with the namespace' do
let(:namespace) { 'namespace_name' }
let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}" }
subject { described_class.new(rails_env).store(namespace: namespace) }
it "uses specified namespace" do
expect(subject.to_s).to eq("Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}")
expect(subject.to_s).to eq(redis_store_to_s)
end
end
end

View file

@ -94,11 +94,11 @@ RSpec.shared_context 'project navbar structure' do
{
nav_item: _('Analytics'),
nav_sub_items: [
_('Value stream'),
_('CI/CD'),
(_('Code review') if Gitlab.ee?),
(_('Merge request') if Gitlab.ee?),
_('Repository'),
_('Value stream')
_('Repository')
]
},
{

View file

@ -18,19 +18,19 @@ RSpec.shared_examples 'snippet edit usage data counters' do
end
end
context 'when user is not sessionless' do
context 'when user is not sessionless', :clean_gitlab_redis_sessions do
before do
session_id = Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d')
session_hash = { 'warden.user.user.key' => [[current_user.id], current_user.encrypted_password[0, 29]] }
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
end
cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id
end
it 'tracks usage data actions', :clean_gitlab_redis_shared_state do
it 'tracks usage data actions', :clean_gitlab_redis_sessions do
expect(::Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_snippet_editor_edit_action)
post_graphql_mutation(mutation)

View file

@ -166,14 +166,14 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
end
context 'sessions' do
describe 'gitlab:cleanup:sessions:active_sessions_lookup_keys', :clean_gitlab_redis_shared_state do
describe 'gitlab:cleanup:sessions:active_sessions_lookup_keys', :clean_gitlab_redis_sessions do
subject(:rake_task) { run_rake_task('gitlab:cleanup:sessions:active_sessions_lookup_keys') }
let!(:user) { create(:user) }
let(:existing_session_id) { '5' }
before do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:#{existing_session_id}",
Marshal.dump(true))
redis.sadd("session:lookup:user:gitlab:#{user.id}", (1..10).to_a)
@ -185,7 +185,7 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
end
it 'removes expired active session lookup keys' do
Gitlab::Redis::SharedState.with do |redis|
Gitlab::Redis::Sessions.with do |redis|
lookup_key = "session:lookup:user:gitlab:#{user.id}"
expect { subject }.to change { redis.scard(lookup_key) }.from(10).to(1)
expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to(

View file

@ -1,7 +1,6 @@
package main
import (
"archive/zip"
"context"
"errors"
"flag"
@ -9,6 +8,7 @@ import (
"io"
"os"
zip "gitlab.com/gitlab-org/golang-archive-zip"
"gitlab.com/gitlab-org/labkit/mask"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"

View file

@ -29,6 +29,7 @@ require (
github.com/smartystreets/goconvey v1.6.4
github.com/stretchr/testify v1.7.0
gitlab.com/gitlab-org/gitaly/v14 v14.3.0-rc2.0.20211007055622-df7dadcc3f74
gitlab.com/gitlab-org/golang-archive-zip v0.1.0
gitlab.com/gitlab-org/labkit v1.6.0
gocloud.dev v0.23.0
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8

View file

@ -835,6 +835,8 @@ gitlab.com/gitlab-org/gitaly/v14 v14.3.0-rc2.0.20211007055622-df7dadcc3f74 h1:7R
gitlab.com/gitlab-org/gitaly/v14 v14.3.0-rc2.0.20211007055622-df7dadcc3f74/go.mod h1:2McjFiZrwflPGtXSquCAXWzewmxTPytoI/vamNz/QPM=
gitlab.com/gitlab-org/gitlab-shell v1.9.8-0.20201117050822-3f9890ef73dc/go.mod h1:5QSTbpAHY2v0iIH5uHh2KA9w7sPUqPmnLjDApI/sv1U=
gitlab.com/gitlab-org/gitlab-shell v1.9.8-0.20210720163109-50da611814d2/go.mod h1:QWDYBwuy24qGMandtCngLRPzFgnGPg6LSNoJWPKmJMc=
gitlab.com/gitlab-org/golang-archive-zip v0.1.0 h1:7PoEX9KIr7dBuQDTE+hBQwlOs7PKYsekATrj/i/nR4c=
gitlab.com/gitlab-org/golang-archive-zip v0.1.0/go.mod h1:ZDtqpWPGPB9qBuZnZDrKQjIdJtkN7ZAoVwhT6H2o2kE=
gitlab.com/gitlab-org/labkit v0.0.0-20190221122536-0c3fc7cdd57c/go.mod h1:rYhLgfrbEcyfinG+R3EvKu6bZSsmwQqcXzLfHWSfUKM=
gitlab.com/gitlab-org/labkit v0.0.0-20200908084045-45895e129029/go.mod h1:SNfxkfUwVNECgtmluVayv0GWFgEjjBs5AzgsowPQuo0=
gitlab.com/gitlab-org/labkit v1.0.0/go.mod h1:nohrYTSLDnZix0ebXZrbZJjymRar8HeV2roWL5/jw2U=

View file

@ -1,7 +1,6 @@
package zipartifacts
import (
"archive/zip"
"compress/gzip"
"encoding/binary"
"encoding/json"
@ -9,6 +8,8 @@ import (
"path"
"sort"
"strconv"
zip "gitlab.com/gitlab-org/golang-archive-zip"
)
type metadata struct {

View file

@ -1,7 +1,6 @@
package zipartifacts_test
import (
"archive/zip"
"bytes"
"compress/gzip"
"context"
@ -12,6 +11,7 @@ import (
"testing"
"github.com/stretchr/testify/require"
zip "gitlab.com/gitlab-org/golang-archive-zip"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)

View file

@ -1,7 +1,6 @@
package zipartifacts
import (
"archive/zip"
"context"
"fmt"
"io"
@ -12,6 +11,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/httptransport"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/httprs"
zip "gitlab.com/gitlab-org/golang-archive-zip"
"gitlab.com/gitlab-org/labkit/mask"
)

View file

@ -1,7 +1,6 @@
package zipartifacts
import (
"archive/zip"
"bytes"
"context"
"fmt"
@ -10,11 +9,10 @@ import (
"net/http/httptest"
"os"
"path/filepath"
"runtime"
"strings"
"testing"
"github.com/stretchr/testify/require"
zip "gitlab.com/gitlab-org/golang-archive-zip"
)
func createArchive(t *testing.T, dir string) (map[string][]byte, int64) {
@ -73,10 +71,6 @@ func TestOpenHTTPArchive(t *testing.T) {
}
func TestMinimalRangeRequests(t *testing.T) {
if strings.HasPrefix(runtime.Version(), "go1.17") {
t.Skipf("skipped for go1.17: https://gitlab.com/gitlab-org/gitlab/-/issues/340778")
}
dir := t.TempDir()
entries, archiveSize := createArchive(t, dir)