Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-06-28 12:09:11 +00:00
parent d81f7fc0b3
commit eea806d673
80 changed files with 1212 additions and 202 deletions

View File

@ -90,6 +90,8 @@ export const UPDATED_ASC = 'UPDATED_ASC';
export const UPDATED_DESC = 'UPDATED_DESC';
export const WEIGHT_ASC = 'WEIGHT_ASC';
export const WEIGHT_DESC = 'WEIGHT_DESC';
export const CLOSED_ASC = 'CLOSED_AT_ASC';
export const CLOSED_DESC = 'CLOSED_AT_DESC';
export const urlSortParams = {
[PRIORITY_ASC]: 'priority',
@ -98,6 +100,8 @@ export const urlSortParams = {
[CREATED_DESC]: 'created_date',
[UPDATED_ASC]: 'updated_asc',
[UPDATED_DESC]: 'updated_desc',
[CLOSED_ASC]: 'closed_asc',
[CLOSED_DESC]: 'closed_desc',
[MILESTONE_DUE_ASC]: 'milestone',
[MILESTONE_DUE_DESC]: 'milestone_due_desc',
[DUE_DATE_ASC]: 'due_date',

View File

@ -13,6 +13,7 @@ fragment IssueFragment on Issue {
state
title
updatedAt
closedAt
upvotes
userDiscussionsCount @include(if: $isSignedIn)
webPath

View File

@ -44,6 +44,8 @@ import {
urlSortParams,
WEIGHT_ASC,
WEIGHT_DESC,
CLOSED_ASC,
CLOSED_DESC,
} from './constants';
export const getInitialPageParams = (
@ -92,6 +94,14 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
{
id: 4,
title: __('Closed date'),
sortDirection: {
ascending: CLOSED_ASC,
descending: CLOSED_DESC,
},
},
{
id: 5,
title: __('Milestone due date'),
sortDirection: {
ascending: MILESTONE_DUE_ASC,
@ -99,7 +109,7 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
},
{
id: 5,
id: 6,
title: __('Due date'),
sortDirection: {
ascending: DUE_DATE_ASC,
@ -107,7 +117,7 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
},
{
id: 6,
id: 7,
title: __('Popularity'),
sortDirection: {
ascending: POPULARITY_ASC,
@ -115,7 +125,7 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
},
{
id: 7,
id: 8,
title: __('Label priority'),
sortDirection: {
ascending: LABEL_PRIORITY_ASC,
@ -123,7 +133,7 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
},
{
id: 8,
id: 9,
title: __('Manual'),
sortDirection: {
ascending: RELATIVE_POSITION_ASC,
@ -131,7 +141,7 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
},
},
{
id: 9,
id: 10,
title: __('Title'),
sortDirection: {
ascending: TITLE_ASC,

View File

@ -1,9 +1,7 @@
import $ from 'jquery';
export const initDetailsButton = () => {
$('body').on('click', '.js-details-expand', function expand(e) {
document.querySelector('.commit-info').addEventListener('click', function expand(e) {
e.preventDefault();
$(this).next('.js-details-content').removeClass('hide');
$(this).hide();
this.querySelector('.js-details-content').classList.remove('hide');
this.querySelector('.js-details-expand').classList.add('gl-display-none');
});
};

View File

@ -35,7 +35,7 @@ const fetchData = (projectPath, path, ref, offset) => {
gon.relative_url_root || '/',
projectPath,
'/-/refs/',
ref,
encodeURIComponent(ref),
'/logs_tree/',
encodeURIComponent(removeLeadingSlash(path)),
);

View File

@ -2,6 +2,7 @@
import { GlSkeletonLoader, GlButton } from '@gitlab/ui';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { sprintf, __ } from '~/locale';
import { cleanLeadingSeparator } from '~/lib/utils/url_utility';
import getRefMixin from '../../mixins/get_ref';
import projectPathQuery from '../../queries/project_path.query.graphql';
import TableHeader from './header.vue';
@ -103,13 +104,14 @@ export default {
return this.rowNumbers[key];
},
getCommit(fileName, type) {
getCommit(flatPath, type) {
if (!this.glFeatures.lazyLoadCommits) {
return {};
}
return this.commits.find(
(commitEntry) => commitEntry.fileName === fileName && commitEntry.type === type,
(commitEntry) =>
cleanLeadingSeparator(commitEntry.filePath) === flatPath && commitEntry.type === type,
);
},
},
@ -152,7 +154,7 @@ export default {
:loading-path="loadingPath"
:total-entries="totalEntries"
:row-number="generateRowNumber(entry.flatPath, entry.id, index)"
:commit-info="getCommit(entry.name, entry.type)"
:commit-info="getCommit(entry.flatPath, entry.type)"
v-on="$listeners"
/>
</template>

View File

@ -194,6 +194,24 @@ export default {
poll.makeRequest();
},
initExtensionFullDataPolling() {
const poll = new Poll({
resource: {
fetchData: () => this.fetchFullData(this),
},
method: 'fetchData',
successCallback: (response) => {
this.headerCheck(response, (data) => {
this.setFullData(data);
});
},
errorCallback: (e) => {
this.setExpandedError(e);
},
});
poll.makeRequest();
},
headerCheck(response, callback) {
const headers = normalizeHeaders(response.headers);
@ -220,6 +238,10 @@ export default {
});
}
},
setFullData(data) {
this.loadingState = null;
this.fullData = data.map((x, i) => ({ id: i, ...x }));
},
setCollapsedData(data) {
this.collapsedData = data;
this.loadingState = null;
@ -229,21 +251,26 @@ export default {
Sentry.captureException(e);
},
setExpandedError(e) {
this.loadingState = LOADING_STATES.expandedError;
Sentry.captureException(e);
},
loadAllData() {
if (this.hasFullData) return;
this.loadingState = LOADING_STATES.expandedLoading;
this.fetchFullData(this)
.then((data) => {
this.loadingState = null;
this.fullData = data.map((x, i) => ({ id: i, ...x }));
})
.catch((e) => {
this.loadingState = LOADING_STATES.expandedError;
Sentry.captureException(e);
});
if (this.$options.enableExpandedPolling) {
this.initExtensionFullDataPolling();
} else {
this.fetchFullData(this)
.then((data) => {
this.setFullData(data);
})
.catch((e) => {
this.setExpandedError(e);
});
}
},
appear(index) {
if (index === this.fullData.length - 1) {

View File

@ -20,6 +20,7 @@ export const registerExtension = (extension) => {
i18n: extension.i18n,
expandEvent: extension.expandEvent,
enablePolling: extension.enablePolling,
enableExpandedPolling: extension.enableExpandedPolling,
modalComponent: extension.modalComponent,
computed: {
...extension.props.reduce(

View File

@ -86,7 +86,18 @@ export default {
createdAt() {
return getTimeago().format(this.issuable.createdAt);
},
updatedAt() {
timestamp() {
if (this.issuable.state === 'closed') {
return this.issuable.closedAt;
}
return this.issuable.updatedAt;
},
formattedTimestamp() {
if (this.issuable.state === 'closed') {
return sprintf(__('closed %{timeago}'), {
timeago: getTimeago().format(this.issuable.closedAt),
});
}
return sprintf(__('updated %{timeAgo}'), {
timeAgo: getTimeago().format(this.issuable.updatedAt),
});
@ -311,10 +322,10 @@ export default {
<div
v-gl-tooltip.bottom
class="gl-text-gray-500 gl-display-none gl-sm-display-inline-block"
:title="tooltipTitle(issuable.updatedAt)"
data-testid="issuable-updated-at"
:title="tooltipTitle(timestamp)"
data-testid="issuable-timestamp"
>
{{ updatedAt }}
{{ formattedTimestamp }}
</div>
</div>
</li>

View File

@ -709,10 +709,6 @@
line-height: 20px;
padding: 0;
}
.issue-updated-at {
line-height: 20px;
}
}
@include media-breakpoint-down(xs) {
@ -736,7 +732,7 @@
.issuable-milestone,
.issuable-info,
.task-status,
.issuable-updated-at {
.issuable-timestamp {
font-weight: $gl-font-weight-normal;
color: $gl-text-color-secondary;

View File

@ -82,6 +82,13 @@ class GraphqlController < ApplicationController
render_error(exception.message, status: :unprocessable_entity)
end
rescue_from ActiveRecord::QueryAborted do |exception|
log_exception(exception)
error = "Request timed out. Please try a less complex query or a smaller set of records."
render_error(error, status: :service_unavailable)
end
override :feature_category
def feature_category
::Gitlab::FeatureCategories.default.from_request(request) || super

View File

@ -52,10 +52,8 @@ class Oauth::ApplicationsController < Doorkeeper::ApplicationsController
end
def set_index_vars
@applications = current_user.oauth_applications
@authorized_tokens = current_user.oauth_authorized_tokens
@authorized_anonymous_tokens = @authorized_tokens.reject(&:application)
@authorized_apps = @authorized_tokens.map(&:application).uniq.reject(&:nil?)
@applications = current_user.oauth_applications.load
@authorized_tokens = current_user.oauth_authorized_tokens.preload(:application).order(created_at: :desc).load # rubocop: disable CodeReuse/ActiveRecord
# Don't overwrite a value possibly set by `create`
@application ||= Doorkeeper::Application.new

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

module Pages
  # Event published on Gitlab::EventStore after a Pages deployment
  # completes successfully (published via publish_deployed_event in the
  # Pages update service's #success hook — see the corresponding service
  # change in this commit).
  class PageDeployedEvent < ::Gitlab::EventStore::Event
    # JSON schema used by the event store to validate the event payload.
    # All three IDs are required so subscribers can scope their work to
    # the project, its direct namespace, and its root namespace without
    # extra lookups.
    def schema
      {
        'type' => 'object',
        'properties' => {
          'project_id' => { 'type' => 'integer' },
          'namespace_id' => { 'type' => 'integer' },
          'root_namespace_id' => { 'type' => 'integer' }
        },
        'required' => %w[project_id namespace_id root_namespace_id]
      }
    end
  end
end

View File

@ -254,6 +254,7 @@ module SortingHelper
options = [
{ value: sort_value_priority, text: sort_title_priority, href: page_filter_path(sort: sort_value_priority) },
{ value: sort_value_created_date, text: sort_title_created_date, href: page_filter_path(sort: sort_value_created_date) },
{ value: sort_value_closed_date, text: sort_title_closed_date, href: page_filter_path(sort: sort_value_closed_date) },
{ value: sort_value_recently_updated, text: sort_title_recently_updated, href: page_filter_path(sort: sort_value_recently_updated) },
{ value: sort_value_milestone, text: sort_title_milestone, href: page_filter_path(sort: sort_value_milestone) }
]
@ -261,7 +262,7 @@ module SortingHelper
options.concat([due_date_option]) if viewing_issues
options.concat([popularity_option, label_priority_option])
options.concat([merged_option, closed_option]) if viewing_merge_requests
options.concat([merged_option]) if viewing_merge_requests
options.concat([relative_position_option]) if viewing_issues
options.concat([title_option])
@ -287,10 +288,6 @@ module SortingHelper
{ value: sort_value_merged_date, text: sort_title_merged_date, href: page_filter_path(sort: sort_value_merged_date) }
end
def closed_option
{ value: sort_value_closed_date, text: sort_title_closed_date, href: page_filter_path(sort: sort_value_closed_date) }
end
def relative_position_option
{ value: sort_value_relative_position, text: sort_title_relative_position, href: page_filter_path(sort: sort_value_relative_position) }
end

View File

@ -2,6 +2,7 @@
module Ci
class Build < Ci::Processable
prepend Ci::BulkInsertableTags
include Ci::Metadatable
include Ci::Contextable
include TokenAuthenticatable
@ -434,10 +435,6 @@ module Ci
true
end
def save_tags
super unless Thread.current['ci_bulk_insert_tags']
end
def archived?
return true if degenerated?

View File

@ -2,6 +2,7 @@
module Ci
class Runner < Ci::ApplicationRecord
prepend Ci::BulkInsertableTags
include Gitlab::SQL::Pattern
include RedisCacheable
include ChronicDurationAttribute

View File

@ -220,10 +220,6 @@ class CommitStatus < Ci::ApplicationRecord
false
end
def self.bulk_insert_tags!(statuses)
Gitlab::Ci::Tags::BulkInsert.new(statuses).insert!
end
def locking_enabled?
will_save_change_to_status?
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true

module Ci
  # Lets acts-as-taggable models (prepended into Ci::Build and Ci::Runner
  # in this commit) temporarily skip per-record tag persistence so tags
  # can instead be bulk-inserted afterwards (see
  # Gitlab::Ci::Tags::BulkInsert.bulk_insert_tags!).
  module BulkInsertableTags
    extend ActiveSupport::Concern

    # Thread-local flag key. Thread-local (rather than a class attribute)
    # so enabling bulk-insert mode in one request/worker thread cannot
    # leak into concurrent threads.
    BULK_INSERT_TAG_THREAD_KEY = 'ci_bulk_insert_tags'

    class << self
      # Runs the block with tag saving suppressed for models that prepend
      # this module. The previous flag value is restored even if the block
      # raises, so nesting and error paths are safe.
      def with_bulk_insert_tags
        previous = Thread.current[BULK_INSERT_TAG_THREAD_KEY]
        Thread.current[BULK_INSERT_TAG_THREAD_KEY] = true
        yield
      ensure
        Thread.current[BULK_INSERT_TAG_THREAD_KEY] = previous
      end
    end

    # overrides save_tags from acts-as-taggable: skip the per-record
    # INSERT while bulk-insert mode is active on this thread.
    def save_tags
      super unless Thread.current[BULK_INSERT_TAG_THREAD_KEY]
    end
  end
end

View File

@ -332,7 +332,7 @@ class Issue < ApplicationRecord
when 'severity_desc' then order_severity_desc.with_order_id_desc
when 'escalation_status_asc' then order_escalation_status_asc.with_order_id_desc
when 'escalation_status_desc' then order_escalation_status_desc.with_order_id_desc
when 'closed_at_asc' then order_closed_at_asc
when 'closed_at', 'closed_at_asc' then order_closed_at_asc
when 'closed_at_desc' then order_closed_at_desc
else
super

View File

@ -69,7 +69,7 @@ class BlobPresenter < Gitlab::View::Presenter::Delegated
end
def find_file_path
url_helpers.project_find_file_path(project, ref_qualified_path)
url_helpers.project_find_file_path(project, blob.commit_id)
end
def blame_path

View File

@ -8,7 +8,19 @@ module Ci
return unless runner_type_attrs
::Ci::Runner.create(attributes.merge(runner_type_attrs))
runner = ::Ci::Runner.new(attributes.merge(runner_type_attrs))
Ci::BulkInsertableTags.with_bulk_insert_tags do
Ci::Runner.transaction do
if runner.save
Gitlab::Ci::Tags::BulkInsert.bulk_insert_tags!([runner])
else
raise ActiveRecord::Rollback
end
end
end
runner
end
private

View File

@ -53,6 +53,7 @@ module Projects
def success
@commit_status.success
@project.mark_pages_as_deployed
publish_deployed_event
super
end
@ -203,6 +204,16 @@ module Projects
def pages_file_entries_limit
project.actual_limits.pages_file_entries
end
def publish_deployed_event
event = ::Pages::PageDeployedEvent.new(data: {
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
})
Gitlab::EventStore.publish(event)
end
end
end

View File

@ -1,6 +1,6 @@
- page_title _("Find File"), @ref
.file-finder-holder.tree-holder.clearfix.js-file-finder{ 'data-file-find-url': "#{escape_javascript(project_files_path(@project, @ref, format: :json))}", 'data-find-tree-url': escape_javascript(project_tree_path(@project, @ref)), 'data-blob-url-template': escape_javascript(project_blob_path(@project, @id || @commit.id)) }
.file-finder-holder.tree-holder.clearfix.js-file-finder{ 'data-file-find-url': "#{escape_javascript(project_files_path(@project, @ref, format: :json))}", 'data-find-tree-url': escape_javascript(project_tree_path(@project, @ref)), 'data-blob-url-template': escape_javascript(project_blob_path(@project, @ref)) }
.nav-block
.tree-ref-holder
= render 'shared/ref_switcher', destination: 'find_file', path: @path

View File

@ -65,6 +65,9 @@
= render 'shared/issuable_meta_data', issuable: issue
.float-right.issuable-updated-at.d-none.d-sm-inline-block
.float-right.issuable-timestamp.d-none.d-sm-inline-block
%span
= _('updated %{time_ago}').html_safe % { time_ago: time_ago_with_tooltip(issue.updated_at, placement: 'bottom', html_class: 'issue_update_ago') }
- if issue.closed?
= _('closed %{timeago}').html_safe % { timeago: time_ago_with_tooltip(issue.closed_at, placement: 'bottom') }
- else
= _('updated %{time_ago}').html_safe % { time_ago: time_ago_with_tooltip(issue.updated_at, placement: 'bottom') }

View File

@ -55,7 +55,7 @@
.oauth-authorized-applications.prepend-top-20.gl-mb-3
- if oauth_applications_enabled
%h5
= _("Authorized applications (%{size})") % { size: @authorized_apps.size + @authorized_anonymous_tokens.size }
= _("Authorized applications (%{size})") % { size: @authorized_tokens.size }
- if @authorized_tokens.any?
.table-responsive
@ -67,22 +67,22 @@
%th= _('Scope')
%th
%tbody
- @authorized_apps.each do |app|
- token = app.authorized_tokens.order('created_at desc').first # rubocop: disable CodeReuse/ActiveRecord
%tr{ id: "application_#{app.id}" }
%td= app.name
%td= token.created_at
%td= token.scopes
%td= render 'doorkeeper/authorized_applications/delete_form', application: app
- @authorized_anonymous_tokens.each do |token|
%tr
- @authorized_tokens.each do |token|
%tr{ id: ("application_#{token.application.id}" if token.application) }
%td
= _('Anonymous')
.form-text.text-muted
%em= _("Authorization was granted by entering your username and password in the application.")
- if token.application
= token.application.name
- else
= _('Anonymous')
.form-text.text-muted
%em= _("Authorization was granted by entering your username and password in the application.")
%td= token.created_at
%td= token.scopes
%td= render 'doorkeeper/authorized_applications/delete_form', token: token
%td
- if token.application
= render 'doorkeeper/authorized_applications/delete_form', application: token.application
- else
= render 'doorkeeper/authorized_applications/delete_form', token: token
- else
.settings-message.text-center
= _("You don't have any authorized applications")

View File

@ -0,0 +1,24 @@
- name: "Self-managed certificate-based integration with Kubernetes feature flagged"
announcement_milestone: "14.5"
announcement_date: "2021-11-15"
removal_milestone: "15.0"
removal_date: "2022-05-22"
breaking_change: true
reporter: nagyv-gitlab
stage: Configure
issue_url: https://gitlab.com/groups/gitlab-org/configure/-/epics/8
body: | # (required) Do not modify this line, instead modify the lines below.
In 15.0 the certificate-based integration with Kubernetes will be disabled by default.
After 15.0, you should use the [agent for Kubernetes](https://docs.gitlab.com/ee/user/clusters/agent/) to connect Kubernetes clusters with GitLab. The agent for Kubernetes is a more robust, secure, and reliable integration with Kubernetes. [How do I migrate to the agent?](https://docs.gitlab.com/ee/user/infrastructure/clusters/migrate_to_gitlab_agent.html)
If you need more time to migrate, you can enable the `certificate_based_clusters` [feature flag](https://docs.gitlab.com/ee/administration/feature_flags.html), which re-enables the certificate-based integration.
In GitLab 16.0, we will [remove the feature, its related code, and the feature flag](https://about.gitlab.com/blog/2021/11/15/deprecating-the-cert-based-kubernetes-integration/). GitLab will continue to fix any security or critical issues until 16.0.
For updates and details, follow [this epic](https://gitlab.com/groups/gitlab-org/configure/-/epics/8).
#
# OPTIONAL FIELDS
#
tiers: [Core, Premium, Ultimate]
documentation_url: 'https://docs.gitlab.com/ee/user/infrastructure/clusters/#certificate-based-kubernetes-integration-deprecated'

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

# Replaces the plain index on oauth_access_tokens.resource_owner_id with a
# partial composite index on (resource_owner_id, created_at) restricted to
# non-revoked tokens, which matches the access pattern of the authorized
# applications page updated in this commit.
class AddPartialIndexOnOauthAccessTokensRevokedAt < Gitlab::Database::Migration[2.0]
  # add_concurrent_index/remove_concurrent_index cannot run inside a
  # transaction block.
  disable_ddl_transaction!

  INDEX_NAME = 'partial_index_resource_owner_id_created_at_token_not_revoked'
  EXISTING_INDEX_NAME = 'index_oauth_access_tokens_on_resource_owner_id'

  def up
    # Create the replacement index first so lookups stay indexed
    # throughout the migration, then drop the superseded one.
    add_concurrent_index :oauth_access_tokens, [:resource_owner_id, :created_at],
      name: INDEX_NAME, where: 'revoked_at IS NULL'

    remove_concurrent_index :oauth_access_tokens, :resource_owner_id, name: EXISTING_INDEX_NAME
  end

  def down
    # Mirror image of #up: restore the original index before dropping the
    # partial one.
    add_concurrent_index :oauth_access_tokens, :resource_owner_id, name: EXISTING_INDEX_NAME

    remove_concurrent_index :oauth_access_tokens, [:resource_owner_id, :created_at], name: INDEX_NAME
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true

# Queues the BackfillImportedIssueSearchData batched background migration
# (the job class added in this commit), resuming from where the earlier
# BackfillIssueSearchData migration left off.
class BackfillImportedIssueSearchData < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = 'BackfillImportedIssueSearchData'
  DELAY_INTERVAL = 120.seconds

  def up
    # Start at the max issue id already processed by BackfillIssueSearchData,
    # so only issues imported after that run are (re)visited; fall back to
    # BATCH_MIN_VALUE when that migration record does not exist.
    min_value = Gitlab::Database::BackgroundMigration::BatchedMigration.find_by(
      job_class_name: "BackfillIssueSearchData"
    )&.max_value || BATCH_MIN_VALUE

    queue_batched_background_migration(
      MIGRATION,
      :issues,
      :id,
      job_interval: DELAY_INTERVAL,
      batch_min_value: min_value
    )
  end

  def down
    # Remove the queued batched migration; no data rollback is needed since
    # the job only inserts derived search data.
    delete_batched_background_migration(MIGRATION, :issues, :id, [])
  end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

# Ensures the BackfillProjectMemberNamespaceId batched background migration
# has fully completed (synchronously finishing it if necessary) before
# later migrations depend on members.member_namespace_id being backfilled.
class FinaliseProjectNamespaceMembers < Gitlab::Database::Migration[2.0]
  MIGRATION = 'BackfillProjectMemberNamespaceId'

  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  def up
    # finalize: true runs any remaining batches inline instead of failing
    # when the background migration is unfinished.
    ensure_batched_background_migration_is_finished(
      job_class_name: MIGRATION,
      table_name: :members,
      column_name: :id,
      job_arguments: [],
      finalize: true
    )
  end

  def down
    # no-op
  end
end

View File

@ -0,0 +1 @@
effd82de862e39edcba7793010bdd377b8141c49edebdd380276a8b558886835

View File

@ -0,0 +1 @@
6567c86c14f741b7ea8f49b04c3ad82f226f04c0ab2e68212b5f6e7bf4ef615f

View File

@ -0,0 +1 @@
5881441f8a6c0f25cff00aa9e164a1c19bcc34d4db678fc50712824fff82b24e

View File

@ -28740,8 +28740,6 @@ CREATE INDEX index_oauth_access_tokens_on_application_id ON oauth_access_tokens
CREATE UNIQUE INDEX index_oauth_access_tokens_on_refresh_token ON oauth_access_tokens USING btree (refresh_token);
CREATE INDEX index_oauth_access_tokens_on_resource_owner_id ON oauth_access_tokens USING btree (resource_owner_id);
CREATE UNIQUE INDEX index_oauth_access_tokens_on_token ON oauth_access_tokens USING btree (token);
CREATE INDEX index_oauth_applications_on_owner_id_and_owner_type ON oauth_applications USING btree (owner_id, owner_type);
@ -30022,6 +30020,8 @@ CREATE INDEX partial_index_deployments_for_legacy_successful_deployments ON depl
CREATE INDEX partial_index_deployments_for_project_id_and_tag ON deployments USING btree (project_id) WHERE (tag IS TRUE);
CREATE INDEX partial_index_resource_owner_id_created_at_token_not_revoked ON oauth_access_tokens USING btree (resource_owner_id, created_at) WHERE (revoked_at IS NULL);
CREATE INDEX partial_index_slack_integrations_with_bot_user_id ON slack_integrations USING btree (id) WHERE (bot_user_id IS NOT NULL);
CREATE UNIQUE INDEX partial_index_sop_configs_on_namespace_id ON security_orchestration_policy_configurations USING btree (namespace_id) WHERE (namespace_id IS NOT NULL);

View File

@ -7185,6 +7185,29 @@ The edge type for [`IncidentManagementOncallShift`](#incidentmanagementoncallshi
| <a id="incidentmanagementoncallshiftedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="incidentmanagementoncallshiftedgenode"></a>`node` | [`IncidentManagementOncallShift`](#incidentmanagementoncallshift) | The item at the end of the edge. |
#### `IssuableResourceLinkConnection`
The connection type for [`IssuableResourceLink`](#issuableresourcelink).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="issuableresourcelinkconnectionedges"></a>`edges` | [`[IssuableResourceLinkEdge]`](#issuableresourcelinkedge) | A list of edges. |
| <a id="issuableresourcelinkconnectionnodes"></a>`nodes` | [`[IssuableResourceLink]`](#issuableresourcelink) | A list of nodes. |
| <a id="issuableresourcelinkconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `IssuableResourceLinkEdge`
The edge type for [`IssuableResourceLink`](#issuableresourcelink).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="issuableresourcelinkedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="issuableresourcelinkedgenode"></a>`node` | [`IssuableResourceLink`](#issuableresourcelink) | The item at the end of the edge. |
#### `IssueConnection`
The connection type for [`Issue`](#issue).
@ -11342,6 +11365,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| <a id="epicissuecurrentusertodosstate"></a>`state` | [`TodoStateEnum`](#todostateenum) | State of the to-do items. |
##### `EpicIssue.issuableResourceLinks`
Issuable resource links of the incident issue.
Returns [`IssuableResourceLinkConnection`](#issuableresourcelinkconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="epicissueissuableresourcelinksincidentid"></a>`incidentId` | [`IssueID!`](#issueid) | ID of the incident. |
##### `EpicIssue.reference`
Internal reference of the issue. Returned in shortened format by default.
@ -12717,6 +12756,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| <a id="issuecurrentusertodosstate"></a>`state` | [`TodoStateEnum`](#todostateenum) | State of the to-do items. |
##### `Issue.issuableResourceLinks`
Issuable resource links of the incident issue.
Returns [`IssuableResourceLinkConnection`](#issuableresourcelinkconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="issueissuableresourcelinksincidentid"></a>`incidentId` | [`IssueID!`](#issueid) | ID of the incident. |
##### `Issue.reference`
Internal reference of the issue. Returned in shortened format by default.

View File

@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Metadata API **(FREE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/357032) in GitLab 15.1.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/357032) in GitLab 15.2.
Retrieve metadata information for this GitLab instance.
@ -35,7 +35,7 @@ Example response:
```json
{
"version": "15.0-pre",
"version": "15.2-pre",
"revision": "c401a659d0c",
"kas": {
"enabled": true,

View File

@ -54,6 +54,7 @@ are very appreciative of the work done by translators and proofreaders!
- Andrei Jiroh Halili - [GitLab](https://gitlab.com/ajhalili2006), [Crowdin](https://crowdin.com/profile/AndreiJirohHaliliDev2006)
- French
- Davy Defaud - [GitLab](https://gitlab.com/DevDef), [Crowdin](https://crowdin.com/profile/DevDef)
- Germain Gorisse - [GitLab](https://gitlab.com/ggorisse), [Crowdin](https://crowdin.com/profile/germaingorisse)
- Galician
- Antón Méixome - [Crowdin](https://crowdin.com/profile/meixome)
- Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [Crowdin](https://crowdin.com/profile/breaking_pitt)

View File

@ -447,6 +447,22 @@ You can still customize the behavior of the Secret Detection analyzer using the
For further details, see [the deprecation issue for this change](https://gitlab.com/gitlab-org/gitlab/-/issues/352565).
### Self-managed certificate-based integration with Kubernetes feature flagged
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/contributing/#breaking-changes).
Review the details carefully before upgrading.
In 15.0 the certificate-based integration with Kubernetes will be disabled by default.
After 15.0, you should use the [agent for Kubernetes](https://docs.gitlab.com/ee/user/clusters/agent/) to connect Kubernetes clusters with GitLab. The agent for Kubernetes is a more robust, secure, and reliable integration with Kubernetes. [How do I migrate to the agent?](https://docs.gitlab.com/ee/user/infrastructure/clusters/migrate_to_gitlab_agent.html)
If you need more time to migrate, you can enable the `certificate_based_clusters` [feature flag](https://docs.gitlab.com/ee/administration/feature_flags.html), which re-enables the certificate-based integration.
In GitLab 16.0, we will [remove the feature, its related code, and the feature flag](https://about.gitlab.com/blog/2021/11/15/deprecating-the-cert-based-kubernetes-integration/). GitLab will continue to fix any security or critical issues until 16.0.
For updates and details, follow [this epic](https://gitlab.com/groups/gitlab-org/configure/-/epics/8).
### Sidekiq configuration for metrics and health checks
WARNING:

View File

@ -513,7 +513,7 @@ Alternatively, the SAML response may be missing the `InResponseTo` attribute in
The identity provider administrator should ensure that the login is
initiated by the service provider and not the identity provider.
### Message: "Login to a GitLab account to link with your SAML identity"
### Message: "Sign in to GitLab to connect your organization's account"
A user can see this message when they are trying to [manually link SAML to their existing GitLab.com account](#linking-saml-to-your-existing-gitlabcom-account).

View File

@ -26,7 +26,7 @@ module API
EOF
desc 'Get the metadata information of the GitLab instance.' do
detail 'This feature was introduced in GitLab 15.1.'
detail 'This feature was introduced in GitLab 15.2.'
end
get '/metadata' do
run_graphql!(

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true

# rubocop:disable Style/Documentation

module Gitlab
  module BackgroundMigration
    # Backfills the `issue_search_data` table for issues imported prior
    # to the fix for the imported issues search data bug:
    # https://gitlab.com/gitlab-org/gitlab/-/issues/361219
    class BackfillImportedIssueSearchData < BatchedMigrationJob
      # Processes each sub-batch with a single set-based INSERT; if the
      # batch trips PostgreSQL's tsvector size limit, falls back to
      # row-by-row processing so one oversized issue cannot block the rest.
      def perform
        each_sub_batch(
          operation_name: :update_search_data,
          # NOTE(review): the lambda ignores `relation` and returns the
          # Issue class itself — confirm this is intentional re-scoping and
          # not an expression truncated in transit.
          batching_scope: -> (relation) { Issue }
        ) do |sub_batch|
          update_search_data(sub_batch)
        rescue ActiveRecord::StatementInvalid => e
          # Only the "string is too long for tsvector" program-limit error is
          # recoverable here; anything else is re-raised.
          raise unless e.cause.is_a?(PG::ProgramLimitExceeded) && e.message.include?('string is too long for tsvector')

          update_search_data_individually(sub_batch)
        end
      end

      private

      # Inserts search data for every issue in `relation` in one statement.
      # Title is weighted 'A' and description 'B'; long base64-like runs
      # (50+ chars of [A-Za-z0-9+/@]) are stripped from the description and
      # it is truncated to 1 MiB before vectorisation. ON CONFLICT DO
      # NOTHING keeps rows that already have search data untouched.
      def update_search_data(relation)
        ApplicationRecord.connection.execute(
          <<~SQL
          INSERT INTO issue_search_data
          SELECT
            project_id,
            id,
            NOW(),
            NOW(),
            setweight(to_tsvector('english', LEFT(title, 255)), 'A') || setweight(to_tsvector('english', LEFT(REGEXP_REPLACE(description, '[A-Za-z0-9+/@]{50,}', ' ', 'g'), 1048576)), 'B')
          FROM issues
          WHERE issues.id IN (#{relation.select(:id).to_sql})
          ON CONFLICT DO NOTHING
          SQL
        )
      end

      # Fallback path: retries each issue on its own so a single issue whose
      # text still exceeds the tsvector limit is logged and skipped instead
      # of failing the whole sub-batch. Sleeps pause_ms between rows to
      # limit database pressure.
      def update_search_data_individually(relation)
        relation.pluck(:id).each do |issue_id|
          update_search_data(relation.klass.where(id: issue_id))

          sleep(pause_ms * 0.001)
        rescue ActiveRecord::StatementInvalid => e
          raise unless e.cause.is_a?(PG::ProgramLimitExceeded) && e.message.include?('string is too long for tsvector')

          logger.error(
            message: "Error updating search data: #{e.message}",
            class: relation.klass.name,
            model_id: issue_id
          )
        end
      end

      # Memoized structured logger for the skipped-issue error reports above.
      def logger
        @logger ||= Gitlab::BackgroundMigration::Logger.build
      end
    end
  end
end

View File

@ -8,7 +8,7 @@ module Gitlab
#
# If no more batches exist in the table, returns nil.
class BackfillIssueWorkItemTypeBatchingStrategy < PrimaryKeyBatchingStrategy
def apply_additional_filters(relation, job_arguments:)
def apply_additional_filters(relation, job_arguments:, job_class: nil)
issue_type = job_arguments.first
relation.where(issue_type: issue_type)

View File

@ -16,7 +16,7 @@ module Gitlab
# batch_min_value - The minimum value which the next batch will start at
# batch_size - The size of the next batch
# job_arguments - The migration job arguments
def next_batch(table_name, column_name, batch_min_value:, batch_size:, job_arguments:)
def next_batch(table_name, column_name, batch_min_value:, batch_size:, job_arguments:, job_class: nil)
next_batch_bounds = nil
model_class = ::Gitlab::BackgroundMigration::ProjectNamespaces::Models::Project
quoted_column_name = model_class.connection.quote_column_name(column_name)

View File

@ -18,12 +18,13 @@ module Gitlab
# batch_min_value - The minimum value which the next batch will start at
# batch_size - The size of the next batch
# job_arguments - The migration job arguments
def next_batch(table_name, column_name, batch_min_value:, batch_size:, job_arguments:)
# job_class - The migration job class
def next_batch(table_name, column_name, batch_min_value:, batch_size:, job_arguments:, job_class: nil)
model_class = define_batchable_model(table_name, connection: connection)
quoted_column_name = model_class.connection.quote_column_name(column_name)
relation = model_class.where("#{quoted_column_name} >= ?", batch_min_value)
relation = apply_additional_filters(relation, job_arguments: job_arguments)
relation = apply_additional_filters(relation, job_arguments: job_arguments, job_class: job_class)
next_batch_bounds = nil
relation.each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop
@ -35,19 +36,11 @@ module Gitlab
next_batch_bounds
end
# Strategies based on PrimaryKeyBatchingStrategy can use
# this method to easily apply additional filters.
#
# Example:
#
# class MatchingType < PrimaryKeyBatchingStrategy
# def apply_additional_filters(relation, job_arguments:)
# type = job_arguments.first
#
# relation.where(type: type)
# end
# end
def apply_additional_filters(relation, job_arguments: [])
# Applies job-specific filtering to the batching relation.
#
# If the migration job class responds to `batching_scope`, delegate to it
# so the job can narrow the batch using its own job_arguments; otherwise
# the relation is returned unchanged. `job_class` defaults to nil to stay
# backward-compatible with callers that do not pass it.
def apply_additional_filters(relation, job_arguments: [], job_class: nil)
if job_class.respond_to?(:batching_scope)
return job_class.batching_scope(relation, job_arguments: job_arguments)
end
relation
end
end

View File

@ -11,10 +11,10 @@ module Gitlab
def perform!
logger.instrument_with_sql(:pipeline_save) do
BulkInsertableAssociations.with_bulk_insert do
with_bulk_insert_tags do
::Ci::BulkInsertableTags.with_bulk_insert_tags do
pipeline.transaction do
pipeline.save!
CommitStatus.bulk_insert_tags!(statuses)
Gitlab::Ci::Tags::BulkInsert.bulk_insert_tags!(statuses)
end
end
end
@ -29,14 +29,6 @@ module Gitlab
private
def with_bulk_insert_tags
previous = Thread.current['ci_bulk_insert_tags']
Thread.current['ci_bulk_insert_tags'] = true
yield
ensure
Thread.current['ci_bulk_insert_tags'] = previous
end
def statuses
strong_memoize(:statuses) do
pipeline

View File

@ -9,33 +9,37 @@ module Gitlab
TAGGINGS_BATCH_SIZE = 1000
TAGS_BATCH_SIZE = 500
def initialize(statuses)
@statuses = statuses
# Bulk-inserts tags for the given taggable records by delegating to a
# fresh Gitlab::Ci::Tags::BulkInsert instance (see #insert!).
# Returns whatever #insert! returns (falsey when there are no tags).
def self.bulk_insert_tags!(taggables)
Gitlab::Ci::Tags::BulkInsert.new(taggables).insert!
end
def initialize(taggables)
@taggables = taggables
end
def insert!
return false if tag_list_by_status.empty?
return false if tag_list_by_taggable.empty?
persist_build_tags!
end
private
attr_reader :statuses
attr_reader :taggables
def tag_list_by_status
strong_memoize(:tag_list_by_status) do
statuses.each.with_object({}) do |status, acc|
tag_list = status.tag_list
def tag_list_by_taggable
strong_memoize(:tag_list_by_taggable) do
taggables.each.with_object({}) do |taggable, acc|
tag_list = taggable.tag_list
next unless tag_list
acc[status] = tag_list
acc[taggable] = tag_list
end
end
end
def persist_build_tags!
all_tags = tag_list_by_status.values.flatten.uniq.reject(&:blank?)
all_tags = tag_list_by_taggable.values.flatten.uniq.reject(&:blank?)
tag_records_by_name = create_tags(all_tags).index_by(&:name)
taggings = build_taggings_attributes(tag_records_by_name)
@ -65,24 +69,24 @@ module Gitlab
# rubocop: enable CodeReuse/ActiveRecord
def build_taggings_attributes(tag_records_by_name)
taggings = statuses.flat_map do |status|
tag_list = tag_list_by_status[status]
taggings = taggables.flat_map do |taggable|
tag_list = tag_list_by_taggable[taggable]
next unless tag_list
tags = tag_records_by_name.values_at(*tag_list)
taggings_for(tags, status)
taggings_for(tags, taggable)
end
taggings.compact!
taggings
end
def taggings_for(tags, status)
def taggings_for(tags, taggable)
tags.map do |tag|
{
tag_id: tag.id,
taggable_type: CommitStatus.name,
taggable_id: status.id,
taggable_type: taggable.class.base_class.name,
taggable_id: taggable.id,
created_at: Time.current,
context: 'tags'
}

View File

@ -128,7 +128,8 @@ module Gitlab
batched_migration.column_name,
batch_min_value: min_value,
batch_size: new_batch_size,
job_arguments: batched_migration.job_arguments
job_arguments: batched_migration.job_arguments,
job_class: batched_migration.job_class
)
midpoint = next_batch_bounds.last

View File

@ -101,7 +101,8 @@ module Gitlab
active_migration.column_name,
batch_min_value: batch_min_value,
batch_size: active_migration.batch_size,
job_arguments: active_migration.job_arguments)
job_arguments: active_migration.job_arguments,
job_class: active_migration.job_class)
return if next_batch_bounds.nil?

View File

@ -8220,6 +8220,9 @@ msgstr ""
msgid "Closed MRs"
msgstr ""
msgid "Closed date"
msgstr ""
msgid "Closed issues"
msgstr ""
@ -33478,6 +33481,21 @@ msgstr ""
msgid "SAML for %{group_name}"
msgstr ""
msgid "SAML|Selecting \"Authorize\" will transfer ownership of your GitLab account \"%{username}\" (%{email}) to your organization."
msgstr ""
msgid "SAML|Sign in to GitLab to connect your organization's account"
msgstr ""
msgid "SAML|The \"%{group_path}\" group allows you to sign in with your Single Sign-On Account."
msgstr ""
msgid "SAML|To access \"%{group_name}\" you must sign in with your Single Sign-On account, through an external sign-in page."
msgstr ""
msgid "SAML|Your organization's SSO has been connected to your GitLab account"
msgstr ""
msgid "SAST Configuration"
msgstr ""
@ -38177,9 +38195,6 @@ msgstr ""
msgid "That's it, well done!"
msgstr ""
msgid "The \"%{group_path}\" group allows you to sign in with your Single Sign-On Account"
msgstr ""
msgid "The %{link_start}true-up model%{link_end} allows having more users, and additional users will incur a retroactive charge on renewal."
msgstr ""
@ -39650,9 +39665,6 @@ msgstr ""
msgid "This will invalidate your registered applications and U2F devices."
msgstr ""
msgid "This will redirect you to an external sign in page."
msgstr ""
msgid "This will remove the fork relationship between this project and %{fork_source}."
msgstr ""
@ -45216,6 +45228,9 @@ msgstr ""
msgid "closed"
msgstr ""
msgid "closed %{timeago}"
msgstr ""
msgid "closed issue"
msgstr ""

View File

@ -318,11 +318,15 @@ module QA
end
def merge_immediately!
if has_element?(:merge_moment_dropdown)
click_element(:merge_moment_dropdown, skip_finished_loading_check: true)
click_element(:merge_immediately_menu_item, skip_finished_loading_check: true)
else
click_element(:merge_button, skip_finished_loading_check: true)
retry_until(reload: true, sleep_interval: 1, max_attempts: 12) do
if has_element?(:merge_moment_dropdown)
click_element(:merge_moment_dropdown, skip_finished_loading_check: true)
click_element(:merge_immediately_menu_item, skip_finished_loading_check: true)
else
click_element(:merge_button, skip_finished_loading_check: true)
end
merged?
end
end

View File

@ -27,6 +27,18 @@ RSpec.describe GraphqlController do
)
end
# A database statement timeout (ActiveRecord::QueryCanceled) raised during
# query execution must be rescued by the controller and surfaced to the
# client as a friendly "Request timed out" GraphQL error, not a 500.
it 'handles a timeout nicely' do
allow(subject).to receive(:execute) do
raise ActiveRecord::QueryCanceled, '**taps wristwatch**'
end
post :execute
expect(json_response).to include(
'errors' => include(a_hash_including('message' => /Request timed out/))
)
end
it 'handles StandardError' do
allow(subject).to receive(:execute) do
raise StandardError, message

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies the data-schema contract of Pages::PageDeployedEvent:
# construction succeeds only for hashes carrying integer project/namespace
# IDs, and raises Gitlab::EventStore::InvalidEvent for anything else.
RSpec.describe Pages::PageDeployedEvent do
# Table of [event payload, whether construction should succeed].
# NOTE(review): assumes the parameterized `where`/`with_them` helpers are
# loaded via spec_helper — confirm against the project's spec support.
where(:data, :valid) do
[
[{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
[{ project_id: 1 }, false],
[{ namespace_id: 1 }, false],
[{ project_id: 'foo', namespace_id: 2 }, false],
[{ project_id: 1, namespace_id: 'foo' }, false],
[{ project_id: [], namespace_id: 2 }, false],
[{ project_id: 1, namespace_id: [] }, false],
[{ project_id: {}, namespace_id: 2 }, false],
[{ project_id: 1, namespace_id: {} }, false],
['foo', false],
[123, false],
[[], false]
]
end
with_them do
it 'validates data' do
# Wrap construction in a lambda so both branches assert on the same
# described_class.new call without duplication.
constructor = -> { described_class.new(data: data) }
if valid
expect { constructor.call }.not_to raise_error
else
# Invalid payloads must raise the event store's schema error.
expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
end
end
end
end

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe "Admin > Admin sees background migrations" do
let_it_be(:admin) { create(:admin) }
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
let_it_be(:active_migration) { create(:batched_background_migration, :active, table_name: 'active') }
let_it_be(:failed_migration) { create(:batched_background_migration, :failed, table_name: 'failed', total_tuple_count: 100) }
@ -107,7 +108,8 @@ RSpec.describe "Admin > Admin sees background migrations" do
anything,
batch_min_value: 6,
batch_size: 5,
job_arguments: failed_migration.job_arguments
job_arguments: failed_migration.job_arguments,
job_class: job_class
).and_return([6, 10])
end
end

View File

@ -35,9 +35,59 @@ RSpec.describe 'Profile > Applications' do
expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
end
end
describe 'Authorized applications', :js do
let(:other_user) { create(:user) }
let(:application) { create(:oauth_application, owner: user) }
let(:created_at) { 2.days.ago }
let(:token) { create(:oauth_access_token, application: application, resource_owner: user) }
let(:anonymous_token) { create(:oauth_access_token, resource_owner: user) }
context 'with multiple access token types and multiple owners' do
let!(:other_user_token) { create(:oauth_access_token, application: application, resource_owner: other_user) }
before do
token.update_column(:created_at, created_at)
anonymous_token.update_columns(application_id: nil, created_at: 1.day.ago)
end
it 'displays the correct authorized applications' do
visit oauth_applications_path
expect(page).to have_content('Authorized applications (2)')
page.within('div.oauth-authorized-applications') do
# Ensure the correct user's token details are displayed
# when the application has more than one token
page.within("tr#application_#{application.id}") do
expect(page).to have_content(created_at)
end
expect(page).to have_content('Anonymous')
expect(page).not_to have_content(other_user_token.created_at)
end
end
end
it 'deletes an authorized application' do
create(:oauth_access_token, resource_owner: user)
token
visit oauth_applications_path
page.within('div.oauth-authorized-applications') do
page.within("tr#application_#{application.id}") do
click_button 'Revoke'
end
end
accept_gl_confirm(button_text: 'Revoke application')
expect(page).to have_content('The application was revoked access.')
expect(page).to have_content('Authorized applications (0)')
end
it 'deletes an anonymous authorized application' do
anonymous_token
visit oauth_applications_path
page.within('.oauth-authorized-applications') do
@ -48,7 +98,6 @@ RSpec.describe 'Profile > Applications' do
accept_gl_confirm(button_text: 'Revoke application')
expect(page).to have_content('The application was revoked access.')
expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
end
end

View File

@ -32,6 +32,7 @@ export const getIssuesQueryResponse = {
state: 'opened',
title: 'Issue title',
updatedAt: '2021-05-22T04:08:01Z',
closedAt: null,
upvotes: 3,
userDiscussionsCount: 4,
webPath: 'project/-/issues/789',

View File

@ -97,10 +97,10 @@ describe('isSortKey', () => {
describe('getSortOptions', () => {
describe.each`
hasIssueWeightsFeature | hasBlockedIssuesFeature | length | containsWeight | containsBlocking
${false} | ${false} | ${9} | ${false} | ${false}
${true} | ${false} | ${10} | ${true} | ${false}
${false} | ${true} | ${10} | ${false} | ${true}
${true} | ${true} | ${11} | ${true} | ${true}
${false} | ${false} | ${10} | ${false} | ${false}
${true} | ${false} | ${11} | ${true} | ${false}
${false} | ${true} | ${11} | ${false} | ${true}
${true} | ${true} | ${12} | ${true} | ${true}
`(
'when hasIssueWeightsFeature=$hasIssueWeightsFeature and hasBlockedIssuesFeature=$hasBlockedIssuesFeature',
({

View File

@ -39,10 +39,11 @@ describe('commits service', () => {
expect(axios.get).toHaveBeenCalledWith(testUrl, { params: { format: 'json', offset } });
});
it('encodes the path correctly', async () => {
await requestCommits(1, 'some-project', 'with $peci@l ch@rs/');
it('encodes the path and ref correctly', async () => {
await requestCommits(1, 'some-project', 'with $peci@l ch@rs/', 'r€f-#');
const encodedUrl = '/some-project/-/refs/main/logs_tree/with%20%24peci%40l%20ch%40rs%2F';
const encodedUrl =
'/some-project/-/refs/r%E2%82%ACf-%23/logs_tree/with%20%24peci%40l%20ch%40rs%2F';
expect(axios.get).toHaveBeenCalledWith(encodedUrl, expect.anything());
});

View File

@ -11,7 +11,7 @@ const MOCK_BLOBS = [
{
id: '123abc',
sha: '123abc',
flatPath: 'blob',
flatPath: 'main/blob.md',
name: 'blob.md',
type: 'blob',
webPath: '/blob',
@ -19,7 +19,7 @@ const MOCK_BLOBS = [
{
id: '124abc',
sha: '124abc',
flatPath: 'blob2',
flatPath: 'main/blob2.md',
name: 'blob2.md',
type: 'blob',
webUrl: 'http://test.com',
@ -27,7 +27,7 @@ const MOCK_BLOBS = [
{
id: '125abc',
sha: '125abc',
flatPath: 'blob3',
flatPath: 'main/blob3.md',
name: 'blob3.md',
type: 'blob',
webUrl: 'http://test.com',
@ -37,21 +37,21 @@ const MOCK_BLOBS = [
const MOCK_COMMITS = [
{
fileName: 'blob.md',
filePath: 'main/blob.md',
type: 'blob',
commit: {
message: 'Updated blob.md',
},
},
{
fileName: 'blob2.md',
filePath: 'main/blob2.md',
type: 'blob',
commit: {
message: 'Updated blob2.md',
},
},
{
fileName: 'blob3.md',
filePath: 'main/blob3.md',
type: 'blob',
commit: {
message: 'Updated blob3.md',

View File

@ -32,6 +32,7 @@ import {
fullReportExtension,
noTelemetryExtension,
pollingExtension,
pollingFullDataExtension,
pollingErrorExtension,
multiPollingExtension,
} from './test_extensions';
@ -1082,6 +1083,37 @@ describe('MrWidgetOptions', () => {
});
});
describe('success - full data polling', () => {
it('sets data when polling is complete', async () => {
registerExtension(pollingFullDataExtension);
createComponent();
await waitForPromises();
api.trackRedisHllUserEvent.mockClear();
api.trackRedisCounterEvent.mockClear();
findExtensionToggleButton().trigger('click');
// The default working extension is a "warning" type, which generates a second - more specific - telemetry event for expansions
expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(2);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
'i_merge_request_widget_test_extension_expand',
);
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
'i_merge_request_widget_test_extension_expand_warning',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(2);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
'i_merge_request_widget_test_extension_count_expand',
);
expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
'i_merge_request_widget_test_extension_count_expand_warning',
);
});
});
describe('error', () => {
let captureException;

View File

@ -109,6 +109,39 @@ export const pollingExtension = {
enablePolling: true,
};
// MR-widget test extension that polls until the FULL (expanded) data is
// available. Built on top of the default working extension, with
// `enableExpandedPolling` switched on.
export const pollingFullDataExtension = {
  ...workingExtension(),
  enableExpandedPolling: true,
  methods: {
    // Echo the target project path back with a fixed count so the widget
    // has collapsed-state data to render.
    async fetchCollapsedData({ targetProjectFullPath }) {
      return { targetProjectFullPath, count: 1 };
    },
    // Resolve one fully-populated row; a 'poll-interval' of 0 tells the
    // poller to stop after this response.
    async fetchFullData() {
      const row = {
        headers: { 'poll-interval': 0 },
        status: 200,
        data: {
          id: 1,
          text: 'Hello world',
          icon: { name: EXTENSION_ICONS.failed },
          badge: { text: 'Closed' },
          link: { href: 'https://gitlab.com', text: 'GitLab.com' },
          actions: [{ text: 'Full report', href: 'https://gitlab.com', target: '_blank' }],
        },
      };

      return [row];
    },
  },
};
export const fullReportExtension = {
...workingExtension(),
computed: {

View File

@ -39,6 +39,8 @@ describe('IssuableItem', () => {
const originalUrl = gon.gitlab_url;
let wrapper;
const findTimestampWrapper = () => wrapper.find('[data-testid="issuable-timestamp"]');
beforeEach(() => {
gon.gitlab_url = MOCK_GITLAB_URL;
});
@ -150,12 +152,37 @@ describe('IssuableItem', () => {
});
});
describe('updatedAt', () => {
it('returns string containing timeago string based on `issuable.updatedAt`', () => {
describe('timestamp', () => {
it('returns string containing date and time based on `issuable.updatedAt` when the issue is open', () => {
wrapper = createComponent();
expect(wrapper.vm.updatedAt).toContain('updated');
expect(wrapper.vm.updatedAt).toContain('ago');
expect(findTimestampWrapper().attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
});
it('returns string containing timeago string based on `issuable.closedAt` when the issue is closed', () => {
wrapper = createComponent({
issuable: { ...mockIssuable, closedAt: '2020-06-18T11:30:00Z', state: 'closed' },
});
expect(findTimestampWrapper().attributes('title')).toBe('Jun 18, 2020 11:30am UTC');
});
});
describe('formattedTimestamp', () => {
it('returns string containing timeago string based on `issuable.updatedAt` when the issue is open', () => {
wrapper = createComponent();
expect(findTimestampWrapper().text()).toContain('updated');
expect(findTimestampWrapper().text()).toContain('ago');
});
it('returns string containing timeago string based on `issuable.closedAt` when the issue is closed', () => {
wrapper = createComponent({
issuable: { ...mockIssuable, closedAt: '2020-06-18T11:30:00Z', state: 'closed' },
});
expect(findTimestampWrapper().text()).toContain('closed');
expect(findTimestampWrapper().text()).toContain('ago');
});
});
@ -456,18 +483,31 @@ describe('IssuableItem', () => {
it('renders issuable updatedAt info', () => {
wrapper = createComponent();
const updatedAtEl = wrapper.find('[data-testid="issuable-updated-at"]');
const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
expect(updatedAtEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
expect(updatedAtEl.text()).toBe(wrapper.vm.updatedAt);
expect(timestampEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC');
expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
});
describe('when issuable is closed', () => {
it('renders issuable card with a closed style', () => {
wrapper = createComponent({ issuable: { ...mockIssuable, closedAt: '2020-12-10' } });
wrapper = createComponent({
issuable: { ...mockIssuable, closedAt: '2020-12-10', state: 'closed' },
});
expect(wrapper.classes()).toContain('closed');
});
it('renders issuable closedAt info and does not render updatedAt info', () => {
wrapper = createComponent({
issuable: { ...mockIssuable, closedAt: '2022-06-18T11:30:00Z', state: 'closed' },
});
const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
expect(timestampEl.attributes('title')).toBe('Jun 18, 2022 11:30am UTC');
expect(timestampEl.text()).toBe(wrapper.vm.formattedTimestamp);
});
});
describe('when issuable was created within the past 24 hours', () => {

View File

@ -0,0 +1,104 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Exercises the batched background migration that backfills
# issue_search_data rows for issues imported without one.
RSpec.describe Gitlab::BackgroundMigration::BackfillImportedIssueSearchData, :migration, schema: 20220621040800 do
# Minimal DB fixtures built through migration-safe `table(...)` helpers.
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:issue_search_data_table) { table(:issue_search_data) }
let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) }
let!(:project) do
table(:projects)
.create!(
namespace_id: namespace.id,
creator_id: user.id,
name: 'projecty',
path: 'path',
project_namespace_id: namespace.id)
end
let!(:issue) do
table(:issues).create!(
project_id: project.id,
title: 'Patterson',
description: FFaker::HipsterIpsum.paragraph
)
end
# Batch bounds (1..30) comfortably cover all fixture issue IDs.
let(:migration) do
described_class.new(start_id: 1,
end_id: 30,
batch_table: :issues,
batch_column: :id,
sub_batch_size: 2,
pause_ms: 0,
connection: ApplicationRecord.connection)
end
let(:perform_migration) { migration.perform }
context 'when issue has search data record' do
let!(:issue_search_data) { issue_search_data_table.create!(project_id: project.id, issue_id: issue.id) }
# Existing rows must be left untouched — the backfill is insert-only.
it 'does not create or update any search data records' do
expect { perform_migration }
.to not_change { issue_search_data_table.count }
.and not_change { issue_search_data }
expect(issue_search_data_table.count).to eq(1)
end
end
context 'when issue has no search data record' do
# Expected tsvector lexeme for the issue title ('A' = highest weight).
let(:title_node) { "'#{issue.title.downcase}':1A" }
it 'creates search data records' do
expect { perform_migration }
.to change { issue_search_data_table.count }.from(0).to(1)
expect(issue_search_data_table.find_by(project_id: project.id).issue_id)
.to eq(issue.id)
expect(issue_search_data_table.find_by(project_id: project.id).search_vector)
.to include(title_node)
end
end
context 'error handling' do
let!(:issue2) do
table(:issues).create!(
project_id: project.id,
title: 'Chatterton',
description: FFaker::HipsterIpsum.paragraph
)
end
before do
# Inflate the description past PostgreSQL's tsvector size limit so the
# first issue triggers the "string is too long for tsvector" error.
issue.update!(description: Array.new(30_000) { SecureRandom.hex }.join(' '))
end
let(:title_node2) { "'#{issue2.title.downcase}':1A" }
it 'skips insertion for that issue but continues with migration' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
expect(logger)
.to receive(:error)
.with(a_hash_including(message: /string is too long for tsvector/, model_id: issue.id))
end
# Only the valid issue gets a search-data row; the oversized one is logged and skipped.
expect { perform_migration }.to change { issue_search_data_table.count }.from(0).to(1)
expect(issue_search_data_table.find_by(issue_id: issue.id)).to eq(nil)
expect(issue_search_data_table.find_by(issue_id: issue2.id).search_vector)
.to include(title_node2)
end
# Any error other than the handled tsvector overflow must propagate.
it 're-raises exceptions' do
allow(migration)
.to receive(:update_search_data_individually)
.and_raise(ActiveRecord::StatementTimeout)
expect { perform_migration }.to raise_error(ActiveRecord::StatementTimeout)
end
end
end

View File

@ -45,10 +45,30 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi
end
end
context 'when job_class is provided with a batching_scope' do
let(:job_class) do
Class.new(described_class) do
def self.batching_scope(relation, job_arguments:)
min_id = job_arguments.first
relation.where.not(type: 'Project').where('id >= ?', min_id)
end
end
end
it 'applies the batching scope' do
expect(job_class).to receive(:batching_scope).and_call_original
batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: [1], job_class: job_class)
expect(batch_bounds).to eq([namespace4.id, namespace4.id])
end
end
context 'additional filters' do
let(:strategy_with_filters) do
Class.new(described_class) do
def apply_additional_filters(relation, job_arguments:)
def apply_additional_filters(relation, job_arguments:, job_class: nil)
min_id = job_arguments.first
relation.where.not(type: 'Project').where('id >= ?', min_id)

View File

@ -77,7 +77,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
context 'without tags' do
it 'extracts an empty tag list' do
expect(CommitStatus)
expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:bulk_insert_tags!)
.with([job])
.and_call_original
@ -95,7 +95,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
end
it 'bulk inserts tags' do
expect(CommitStatus)
expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:bulk_insert_tags!)
.with([job])
.and_call_original

View File

@ -18,7 +18,7 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
let(:error_message) do
<<~MESSAGE
A mechanism depending on internals of 'act-as-taggable-on` has been designed
to bulk insert tags for Ci::Build records.
to bulk insert tags for Ci::Build/Ci::Runner records.
Please review the code carefully before updating the gem version
https://gitlab.com/gitlab-org/gitlab/-/issues/350053
MESSAGE
@ -27,6 +27,21 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
it { expect(ActsAsTaggableOn::VERSION).to eq(acceptable_version), error_message }
end
describe '.bulk_insert_tags!' do
let(:inserter) { instance_double(described_class) }
it 'delegates to bulk insert class' do
expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:new)
.with(statuses)
.and_return(inserter)
expect(inserter).to receive(:insert!)
described_class.bulk_insert_tags!(statuses)
end
end
describe '#insert!' do
context 'without tags' do
it { expect(service.insert!).to be_falsey }
@ -45,6 +60,17 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
it 'persists taggings' do
service.insert!
expect(job.taggings.size).to eq(2)
expect(other_job.taggings.size).to eq(3)
expect(Ci::Build.tagged_with('tag1')).to include(job)
expect(Ci::Build.tagged_with('tag2')).to include(job, other_job)
expect(Ci::Build.tagged_with('tag3')).to include(other_job)
end
context 'when batching inserts for tags' do
before do
stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
@ -83,6 +109,15 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_job.reload.tag_list).to be_empty
end
it 'persists taggings' do
service.insert!
expect(job.taggings.size).to eq(2)
expect(Ci::Build.tagged_with('tag1')).to include(job)
expect(Ci::Build.tagged_with('tag2')).to include(job)
end
end
end
end

View File

@ -322,6 +322,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#retry_failed_jobs!' do
let(:batched_migration) { create(:batched_background_migration, status: 'failed') }
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
subject(:retry_failed_jobs) { batched_migration.retry_failed_jobs! }
@ -335,7 +336,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
anything,
batch_min_value: 6,
batch_size: 5,
job_arguments: batched_migration.job_arguments
job_arguments: batched_migration.job_arguments,
job_class: job_class
).and_return([6, 10])
end
end

View File

@ -0,0 +1,72 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Covers the post-deploy migration that finalizes the
# BackfillProjectMemberNamespaceId batched background migration.
RSpec.describe FinaliseProjectNamespaceMembers, :migration do
let(:batched_migrations) { table(:batched_background_migrations) }
let_it_be(:migration) { described_class::MIGRATION }
describe '#up' do
shared_examples 'finalizes the migration' do
it 'finalizes the migration' do
# NOTE(review): this example stubs the runner and sets the :finalize
# expectation, but never calls `migrate!`, so the expectation may
# never be exercised — confirm whether `migrate!` is missing here.
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
expect(runner).to receive(:finalize).with('BackfillProjectMemberNamespaceId', :members, :id, [])
end
end
end
context 'when migration is missing' do
# With no matching batched migration row, the migration only warns.
it 'warns migration not found' do
expect(Gitlab::AppLogger)
.to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
migrate!
end
end
context 'with migration present' do
let!(:project_member_namespace_id_backfill) do
batched_migrations.create!(
job_class_name: 'BackfillProjectMemberNamespaceId',
table_name: :members,
column_name: :id,
job_arguments: [],
interval: 2.minutes,
min_value: 1,
max_value: 2,
batch_size: 1000,
sub_batch_size: 200,
gitlab_schema: :gitlab_main,
status: 3 # finished
)
end
context 'when migration finished successfully' do
# Already-finished migrations need no finalization and must not raise.
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
end
context 'with different migration statuses' do
using RSpec::Parameterized::TableSyntax
# Numeric statuses mirror the BatchedMigration state enum.
where(:status, :description) do
0 | 'paused'
1 | 'active'
4 | 'failed'
5 | 'finalizing'
end
with_them do
before do
project_member_namespace_id_backfill.update!(status: status)
end
# Every non-finished state must be driven to completion.
it_behaves_like 'finalizes the migration'
end
end
end
end
end

View File

@ -0,0 +1,56 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the BackfillImportedIssueSearchData migration schedules its
# batched background migration, resuming from the max_value of any earlier
# BackfillIssueSearchData run instead of re-processing from scratch.
RSpec.describe BackfillImportedIssueSearchData do
let_it_be(:batched_migration) { described_class::MIGRATION }
context 'when BackfillIssueSearchData.max_value is nil' do
# No prior run: schedule starting from the class-level default minimum.
it 'schedules a new batched migration with a default max_value' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :issues,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_min_value: described_class::BATCH_MIN_VALUE
)
}
end
end
end
context 'when BackfillIssueSearchData.max_value exists' do
before do
# Simulate a previous BackfillIssueSearchData run that stopped at id 200.
Gitlab::Database::BackgroundMigration::BatchedMigration
.create!(
max_value: 200,
batch_size: 200,
sub_batch_size: 20,
interval: 120,
job_class_name: 'BackfillIssueSearchData',
table_name: 'issues',
column_name: 'id',
gitlab_schema: 'glschema'
)
end
# The new migration must pick up where the previous one left off.
it 'schedules a new batched migration with a custom max_value' do
reversible_migration do |migration|
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :issues,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_min_value: 200
)
}
end
end
end
end

View File

@ -2083,6 +2083,27 @@ RSpec.describe Ci::Build do
end
end
# Tags save normally by default, but are suppressed inside
# Ci::BulkInsertableTags.with_bulk_insert_tags (bulk insert handles them).
describe '#save_tags' do
let(:build) { create(:ci_build, tag_list: ['tag']) }
it 'saves tags' do
build.save!
expect(build.tags.count).to eq(1)
expect(build.tags.first.name).to eq('tag')
end
context 'with BulkInsertableTags.with_bulk_insert_tags' do
it 'does not save_tags' do
Ci::BulkInsertableTags.with_bulk_insert_tags do
build.save!
end
# Per-record tag persistence is skipped inside the bulk-insert block.
expect(build.tags).to be_empty
end
end
end
describe '#has_tags?' do
context 'when build has tags' do
subject { create(:ci_build, tag_list: ['tag']) }

View File

@ -1193,6 +1193,40 @@ RSpec.describe Ci::Runner do
end
end
# Runner tags save normally by default, but are suppressed inside
# Ci::BulkInsertableTags.with_bulk_insert_tags; the tag-count validation
# still applies even when per-record saving is skipped.
describe '#save_tags' do
let(:runner) { build(:ci_runner, tag_list: ['tag']) }
it 'saves tags' do
runner.save!
expect(runner.tags.count).to eq(1)
expect(runner.tags.first.name).to eq('tag')
end
context 'with BulkInsertableTags.with_bulk_insert_tags' do
it 'does not save_tags' do
Ci::BulkInsertableTags.with_bulk_insert_tags do
runner.save!
end
# Per-record tag persistence is skipped inside the bulk-insert block.
expect(runner.tags).to be_empty
end
context 'over TAG_LIST_MAX_LENGTH' do
# One more tag than the model-level limit allows.
let(:tag_list) { (1..described_class::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" } }
let(:runner) { build(:ci_runner, tag_list: tag_list) }
it 'fails validation if over tag limit' do
Ci::BulkInsertableTags.with_bulk_insert_tags do
expect { runner.save! }.to raise_error(ActiveRecord::RecordInvalid)
end
expect(runner.tags).to be_empty
end
end
end
end
describe '#has_tags?' do
context 'when runner has tags' do
subject { create(:ci_runner, tag_list: ['tag']) }

View File

@ -984,22 +984,6 @@ RSpec.describe CommitStatus do
end
end
describe '.bulk_insert_tags!' do
let(:statuses) { double('statuses') }
let(:inserter) { double('inserter') }
it 'delegates to bulk insert class' do
expect(Gitlab::Ci::Tags::BulkInsert)
.to receive(:new)
.with(statuses)
.and_return(inserter)
expect(inserter).to receive(:insert!)
described_class.bulk_insert_tags!(statuses)
end
end
describe '#expire_etag_cache!' do
it 'expires the etag cache' do
expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|

View File

@ -0,0 +1,66 @@
# frozen_string_literal: true
require 'spec_helper'
# Covers Ci::BulkInsertableTags: a prepend-able module that suppresses
# per-record #save_tags while inside .with_bulk_insert_tags, using a
# thread-local flag so behavior is isolated per thread.
RSpec.describe Ci::BulkInsertableTags do
# Minimal taggable stand-in: records whether super's #save_tags ran.
let(:taggable_class) do
Class.new do
prepend Ci::BulkInsertableTags
attr_reader :tags_saved
def save_tags
@tags_saved = true
end
end
end
let(:record) { taggable_class.new }
describe '.with_bulk_insert_tags' do
# The thread-local key is true only inside the block, and is restored
# (to nil) afterwards.
it 'changes the thread key to true' do
expect(Thread.current['ci_bulk_insert_tags']).to be_nil
described_class.with_bulk_insert_tags do
expect(Thread.current['ci_bulk_insert_tags']).to eq(true)
end
expect(Thread.current['ci_bulk_insert_tags']).to be_nil
end
end
describe '#save_tags' do
it 'calls super' do
record.save_tags
expect(record.tags_saved).to eq(true)
end
it 'does not call super with BulkInsertableTags.with_bulk_insert_tags' do
described_class.with_bulk_insert_tags do
record.save_tags
end
expect(record.tags_saved).to be_nil
end
# The flag is thread-local: enabling bulk insert in one thread must not
# suppress tag saving in another.
it 'isolates bulk insert behavior between threads' do
record2 = taggable_class.new
t1 = Thread.new do
described_class.with_bulk_insert_tags do
record.save_tags
end
end
t2 = Thread.new do
record2.save_tags
end
[t1, t2].each(&:join)
expect(record.tags_saved).to be_nil
expect(record2.tags_saved).to eq(true)
end
end
end

View File

@ -4903,7 +4903,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
.to delegate_method(:builds_with_coverage)
.to(:head_pipeline)
.with_prefix
.with_arguments(allow_nil: true)
.allow_nil
end
end

View File

@ -337,16 +337,13 @@ RSpec.describe Namespace do
end
describe 'delegate' do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:avatar_url).to(:owner).with_arguments(allow_nil: true) }
it do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy)
.to(:namespace_settings).with_arguments(allow_nil: true)
end
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
it do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=)
.to(:namespace_settings).with_arguments(allow_nil: true)
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)
.with_arguments(:args).allow_nil
end
end

View File

@ -826,26 +826,33 @@ RSpec.describe Project, factory_default: :keep do
end
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).allow_nil }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).allow_nil }
it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).allow_nil }
it { is_expected.to delegate_method(:last_pipeline).to(:commit).allow_nil }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
describe 'project settings' do
describe 'read project settings' do
%i(
show_default_award_emojis
show_default_award_emojis=
show_default_award_emojis?
warn_about_potentially_unwanted_characters
warn_about_potentially_unwanted_characters=
warn_about_potentially_unwanted_characters?
enforce_auth_checks_on_uploads
enforce_auth_checks_on_uploads=
enforce_auth_checks_on_uploads?
).each do |method|
it { is_expected.to delegate_method(method).to(:project_setting).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(method).to(:project_setting).allow_nil }
end
end
describe 'write project settings' do
%i(
show_default_award_emojis=
warn_about_potentially_unwanted_characters=
enforce_auth_checks_on_uploads=
).each do |method|
it { is_expected.to delegate_method(method).to(:project_setting).with_arguments(:args).allow_nil }
end
end

View File

@ -106,7 +106,7 @@ RSpec.describe BlobPresenter do
end
describe '#find_file_path' do
it { expect(presenter.find_file_path).to eq("/#{project.full_path}/-/find_file/HEAD/files/ruby/regex.rb") }
it { expect(presenter.find_file_path).to eq("/#{project.full_path}/-/find_file/HEAD") }
end
describe '#blame_path' do

View File

@ -97,6 +97,7 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
describe 'POST #retry' do
let(:migration) { create(:batched_background_migration, :failed) }
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
before do
create(:batched_background_migration_job, :failed, batched_migration: migration, batch_size: 10, min_value: 6, max_value: 15, attempts: 3)
@ -107,7 +108,8 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
anything,
batch_min_value: 6,
batch_size: 5,
job_arguments: migration.job_arguments
job_arguments: migration.job_arguments,
job_class: job_class
).and_return([6, 10])
end
end

View File

@ -13,7 +13,7 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
end
subject { described_class.new.execute(token, args) }
subject(:runner) { described_class.new.execute(token, args) }
context 'when no token is provided' do
let(:token) { '' }
@ -83,6 +83,9 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
expect(subject.platform).to eq args[:platform]
expect(subject.architecture).to eq args[:architecture]
expect(subject.ip_address).to eq args[:ip_address]
expect(Ci::Runner.tagged_with('tag1')).to include(subject)
expect(Ci::Runner.tagged_with('tag2')).to include(subject)
end
end
@ -230,5 +233,41 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
end
end
context 'when tags are provided' do
let(:token) { registration_token }
let(:args) do
{ tag_list: %w(tag1 tag2) }
end
it 'creates runner with tags' do
expect(runner).to be_persisted
expect(runner.tags).to contain_exactly(
an_object_having_attributes(name: 'tag1'),
an_object_having_attributes(name: 'tag2')
)
end
it 'creates tags in bulk' do
expect(Gitlab::Ci::Tags::BulkInsert).to receive(:bulk_insert_tags!).and_call_original
expect(runner).to be_persisted
end
context 'and tag list exceeds limit' do
let(:args) do
{ tag_list: (1..Ci::Runner::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" } }
end
it 'does not create any tags' do
expect(Gitlab::Ci::Tags::BulkInsert).not_to receive(:bulk_insert_tags!)
expect(runner).not_to be_persisted
expect(runner.tags).to be_empty
end
end
end
end
end

View File

@ -43,6 +43,16 @@ RSpec.describe Projects::UpdatePagesService do
expect(project.pages_deployed?).to be_truthy
end
it 'publishes a PageDeployedEvent event with project id and namespace id' do
expected_data = {
project_id: project.id,
namespace_id: project.namespace_id,
root_namespace_id: project.root_namespace.id
}
expect { subject.execute }.to publish_event(Pages::PageDeployedEvent).with(expected_data)
end
it 'creates pages_deployment and saves it in the metadata' do
expect do
expect(execute).to eq(:success)

View File

@ -74,6 +74,13 @@ RSpec::Matchers.define :have_scheduled_batched_migration do |gitlab_schema: :git
.for_configuration(gitlab_schema, migration, table_name, column_name, job_arguments)
expect(batched_migrations.count).to be(1)
# the :batch_min_value & :batch_max_value attribute argument values get applied to the
# :min_value & :max_value columns on the database. Here we change the attribute names
# for the rspec have_attributes matcher used below to pass
attributes[:min_value] = attributes.delete :batch_min_value if attributes.include?(:batch_min_value)
attributes[:max_value] = attributes.delete :batch_max_value if attributes.include?(:batch_max_value)
expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
end

View File

@ -217,19 +217,18 @@ func (u *upstream) pollGeoProxyAPI() {
func (u *upstream) callGeoProxyAPI() {
geoProxyData, err := u.APIClient.GetGeoProxyData()
if err != nil {
log.WithError(err).WithFields(log.Fields{"geoProxyBackend": u.geoProxyBackend}).Error("Geo Proxy: Unable to determine Geo Proxy URL. Fallback on cached value.")
// Unable to determine Geo Proxy URL. Fallback on cached value.
return
}
hasProxyDataChanged := false
if u.geoProxyBackend.String() != geoProxyData.GeoProxyURL.String() {
log.WithFields(log.Fields{"oldGeoProxyURL": u.geoProxyBackend, "newGeoProxyURL": geoProxyData.GeoProxyURL}).Info("Geo Proxy: URL changed")
// URL changed
hasProxyDataChanged = true
}
if u.geoProxyExtraData != geoProxyData.GeoProxyExtraData {
// extra data is usually a JWT, thus not explicitly logging it
log.Info("Geo Proxy: signed data changed")
// Signed data changed
hasProxyDataChanged = true
}