Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-12-10 15:10:24 +00:00
parent e838c62efb
commit ecc11e5d60
154 changed files with 1861 additions and 511 deletions

View File

@ -21,20 +21,16 @@ Rails/IncludeUrlHelper:
- app/models/integrations/webex_teams.rb
- app/models/integrations/youtrack.rb
- app/presenters/alert_management/alert_presenter.rb
- app/presenters/ci/pipeline_presenter.rb
- app/presenters/environment_presenter.rb
- app/presenters/gitlab/blame_presenter.rb
- app/presenters/merge_request_presenter.rb
- app/presenters/project_presenter.rb
- app/presenters/prometheus_alert_presenter.rb
- app/presenters/release_presenter.rb
- app/presenters/releases/evidence_presenter.rb
- ee/app/helpers/license_helper.rb
- ee/app/models/integrations/github.rb
- ee/app/presenters/merge_request_approver_presenter.rb
- ee/spec/helpers/ee/projects/security/configuration_helper_spec.rb
- ee/spec/lib/banzai/filter/cross_project_issuable_information_filter_spec.rb
- lib/gitlab/ci/badge/metadata.rb
- spec/helpers/merge_requests_helper_spec.rb
- spec/helpers/nav/top_nav_helper_spec.rb
- spec/helpers/notify_helper_spec.rb

View File

@ -1 +1 @@
4ba8618078d9107d52c0d735f76286ab0b113a8a
1de88e4247d4b940f843003781cb2bf75582b826

View File

@ -4,8 +4,7 @@ import $ from 'jquery';
import IssuableForm from 'ee_else_ce/issuable/issuable_form';
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import GLForm from '~/gl_form';
import initSuggestions from '~/issues/suggestions';
import initIssuableTypeSelector from '~/issues/type_selector';
import { initTitleSuggestions, initTypePopover } from '~/issues/new';
import LabelsSelect from '~/labels/labels_select';
import MilestoneSelect from '~/milestones/milestone_select';
import IssuableTemplateSelectors from '~/issuable/issuable_template_selectors';
@ -20,6 +19,6 @@ export default () => {
warnTemplateOverride: true,
});
initSuggestions();
initIssuableTypeSelector();
initTitleSuggestions();
initTypePopover();
};

View File

@ -2,12 +2,12 @@
import { GlTooltipDirective, GlIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import query from '../queries/issues.query.graphql';
import Suggestion from './item.vue';
import TitleSuggestionsItem from './title_suggestions_item.vue';
export default {
components: {
Suggestion,
GlIcon,
TitleSuggestionsItem,
},
directives: {
GlTooltip: GlTooltipDirective,
@ -66,7 +66,7 @@ export default {
</script>
<template>
<div v-show="showSuggestions" class="form-group row issuable-suggestions">
<div v-show="showSuggestions" class="form-group row">
<div v-once class="col-form-label col-sm-2 pt-0">
{{ __('Similar issues') }}
<gl-icon
@ -86,7 +86,7 @@ export default {
'gl-mb-3': index !== issues.length - 1,
}"
>
<suggestion :suggestion="suggestion" />
<title-suggestions-item :suggestion="suggestion" />
</li>
</ul>
</div>

View File

@ -19,9 +19,9 @@ export default {
<template>
<span id="popovercontainer">
<gl-icon id="issuable-type-info" name="question-o" class="gl-ml-5 gl-text-gray-500" />
<gl-icon id="issue-type-info" name="question-o" class="gl-ml-5 gl-text-gray-500" />
<gl-popover
target="issuable-type-info"
target="issue-type-info"
container="popovercontainer"
:title="$options.i18n.issueTypes"
triggers="focus hover"

View File

@ -1,14 +1,19 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import App from './components/app.vue';
import TitleSuggestions from './components/title_suggestions.vue';
import TypePopover from './components/type_popover.vue';
Vue.use(VueApollo);
export function initTitleSuggestions() {
Vue.use(VueApollo);
export default function initIssuableSuggestions() {
const el = document.getElementById('js-suggestions');
const issueTitle = document.getElementById('issue_title');
const { projectPath } = el.dataset;
if (!el) {
return undefined;
}
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
});
@ -26,13 +31,26 @@ export default function initIssuableSuggestions() {
this.search = issueTitle.value;
});
},
render(h) {
return h(App, {
render(createElement) {
return createElement(TitleSuggestions, {
props: {
projectPath,
projectPath: el.dataset.projectPath,
search: this.search,
},
});
},
});
}
export function initTypePopover() {
const el = document.getElementById('js-type-popover');
if (!el) {
return undefined;
}
return new Vue({
el,
render: (createElement) => createElement(TypePopover),
});
}

View File

@ -1,16 +0,0 @@
import Vue from 'vue';
import InfoPopover from './components/info_popover.vue';
export default function initIssuableTypeSelector() {
const el = document.getElementById('js-type-popover');
return new Vue({
el,
components: {
InfoPopover,
},
render(h) {
return h(InfoPopover);
},
});
}

View File

@ -735,3 +735,14 @@ export const isFeatureFlagEnabled = (flag) => window.gon.features?.[flag];
export const convertArrayToCamelCase = (array) => array.map((i) => convertToCamelCase(i));
export const isLoggedIn = () => Boolean(window.gon?.current_user_id);
/**
* This method takes in an array of objects with snake_case
* property names and returns a new array of objects with
* camelCase property names
*
* @param {Array[Object]} array - Array to be converted
* @returns {Array[Object]} Converted array
*/
export const convertArrayOfObjectsToCamelCase = (array) =>
array.map((o) => convertObjectPropsToCamelCase(o));

View File

@ -2,8 +2,9 @@
import { GlTable, GlTooltipDirective, GlSkeletonLoader } from '@gitlab/ui';
import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate/tooltip_on_truncate.vue';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { __, s__ } from '~/locale';
import { formatNumber, __, s__ } from '~/locale';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { RUNNER_JOB_COUNT_LIMIT } from '../constants';
import RunnerActionsCell from './cells/runner_actions_cell.vue';
import RunnerSummaryCell from './cells/runner_summary_cell.vue';
import RunnerStatusCell from './cells/runner_status_cell.vue';
@ -52,6 +53,12 @@ export default {
},
},
methods: {
formatJobCount(jobCount) {
if (jobCount > RUNNER_JOB_COUNT_LIMIT) {
return `${formatNumber(RUNNER_JOB_COUNT_LIMIT)}+`;
}
return formatNumber(jobCount);
},
runnerTrAttr(runner) {
if (runner) {
return {
@ -66,6 +73,7 @@ export default {
tableField({ key: 'summary', label: s__('Runners|Runner ID'), thClasses: ['gl-lg-w-25p'] }),
tableField({ key: 'version', label: __('Version') }),
tableField({ key: 'ipAddress', label: __('IP Address') }),
tableField({ key: 'jobCount', label: __('Jobs') }),
tableField({ key: 'tagList', label: __('Tags'), thClasses: ['gl-lg-w-25p'] }),
tableField({ key: 'contactedAt', label: __('Last contact') }),
tableField({ key: 'actions', label: '' }),
@ -112,6 +120,10 @@ export default {
</tooltip-on-truncate>
</template>
<template #cell(jobCount)="{ item: { jobCount } }">
{{ formatJobCount(jobCount) }}
</template>
<template #cell(tagList)="{ item: { tagList } }">
<runner-tags :tag-list="tagList" size="sm" />
</template>

View File

@ -1,6 +1,7 @@
import { s__ } from '~/locale';
export const RUNNER_PAGE_SIZE = 20;
export const RUNNER_JOB_COUNT_LIMIT = 1000;
export const GROUP_RUNNER_COUNT_LIMIT = 1000;
export const I18N_FETCH_ERROR = s__('Runners|Something went wrong while fetching runner data.');

View File

@ -8,6 +8,7 @@ fragment RunnerNode on CiRunner {
ipAddress
active
locked
jobCount
tagList
contactedAt
status(legacyMode: null)

View File

@ -869,10 +869,6 @@
}
}
.issuable-suggestions svg {
vertical-align: sub;
}
.suggestion-footer {
font-size: 12px;
line-height: 15px;

View File

@ -23,6 +23,7 @@ class ApplicationController < ActionController::Base
include Gitlab::Utils::StrongMemoize
include ::Gitlab::EndpointAttributes
include FlocOptOut
include CheckRateLimit
before_action :authenticate_user!, except: [:route_not_found]
before_action :enforce_terms!, if: :should_enforce_terms?

View File

@ -5,19 +5,27 @@
# Controller concern that checks whether a given action is rate limited by calling the
# Gitlab::ApplicationRateLimiter class. If the action is throttled for the given scope, the request
# will be logged and an error message will be rendered with a Too Many Requests response status.
# See lib/api/helpers/rate_limiter.rb for the API version
module CheckRateLimit
def check_rate_limit(key)
return unless rate_limiter.throttled?(key, scope: current_user, users_allowlist: rate_limit_users_allowlist)
def check_rate_limit!(key, scope:, redirect_back: false, **options)
return unless rate_limiter.throttled?(key, scope: scope, **options)
rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user)
render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
return yield if block_given?
message = _('This endpoint has been requested too many times. Try again later.')
if redirect_back
redirect_back_or_default(options: { alert: message })
else
render plain: message, status: :too_many_requests
end
end
private
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def rate_limit_users_allowlist
Gitlab::CurrentSettings.current_application_settings.notes_create_limit_allowlist
end
end
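
For illustration, a minimal sketch of a controller adopting the reworked concern (ApplicationController now includes it); the controller name, actions, and :widgets_* keys are hypothetical, while the call patterns mirror the controller diffs later in this commit.

class WidgetsController < ApplicationController
  # Default behaviour when throttled: the request is logged and the default
  # "too many times" message is rendered as plain text with a 429 status.
  before_action -> { check_rate_limit!(:widgets_create, scope: [@project, current_user]) }, only: [:create]

  # With redirect_back: true the same message is shown as a flash alert after
  # redirecting back, instead of a plain-text response.
  before_action -> { check_rate_limit!(:widgets_export, scope: current_user, redirect_back: true) }, only: [:export]

  # A block replaces the default response entirely; the throttled request is
  # still logged before the block runs.
  before_action -> {
    check_rate_limit!(:widgets_import, scope: current_user) do
      redirect_to root_path, alert: _('This endpoint has been requested too many times. Try again later.')
    end
  }, only: [:import]
end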

View File

@ -32,16 +32,4 @@ module Integrations::HooksExecution
flash[:alert] = "Hook execution failed: #{message}"
end
end
def create_rate_limit(key, scope)
if rate_limiter.throttled?(key, scope: [scope, current_user])
rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user)
render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
end

View File

@ -3,7 +3,6 @@
module NotesActions
include RendersNotes
include Gitlab::Utils::StrongMemoize
include CheckRateLimit
extend ActiveSupport::Concern
# last_fetched_at is an integer number of microseconds, which is the same
@ -16,7 +15,11 @@ module NotesActions
before_action :require_noteable!, only: [:index, :create]
before_action :authorize_admin_note!, only: [:update, :destroy]
before_action :note_project, only: [:create]
before_action -> { check_rate_limit(:notes_create) }, only: [:create]
before_action -> {
check_rate_limit!(:notes_create,
scope: current_user,
users_allowlist: Gitlab::CurrentSettings.current_application_settings.notes_create_limit_allowlist)
}, only: [:create]
end
def index

View File

@ -37,7 +37,7 @@ class GroupsController < Groups::ApplicationController
push_frontend_feature_flag(:iteration_cadences, @group, default_enabled: :yaml)
end
before_action :export_rate_limit, only: [:export, :download_export]
before_action :check_export_rate_limit!, only: [:export, :download_export]
helper_method :captcha_required?
@ -314,16 +314,12 @@ class GroupsController < Groups::ApplicationController
url_for(safe_params)
end
def export_rate_limit
def check_export_rate_limit!
prefixed_action = "group_#{params[:action]}".to_sym
scope = params[:action] == :download_export ? @group : nil
if Gitlab::ApplicationRateLimiter.throttled?(prefixed_action, scope: [current_user, scope].compact)
Gitlab::ApplicationRateLimiter.log_request(request, "#{prefixed_action}_request_limit".to_sym, current_user)
render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
end
check_rate_limit!(prefixed_action, scope: [current_user, scope].compact)
end
def ensure_export_enabled

View File

@ -3,7 +3,7 @@
class Import::BaseController < ApplicationController
include ActionView::Helpers::SanitizeHelper
before_action :import_rate_limit, only: [:create]
before_action -> { check_rate_limit!(:project_import, scope: [current_user, :project_import], redirect_back: true) }, only: [:create]
feature_category :importers
def status
@ -98,18 +98,4 @@ class Import::BaseController < ApplicationController
def project_save_error(project)
project.errors.full_messages.join(', ')
end
def import_rate_limit
key = "project_import".to_sym
if rate_limiter.throttled?(key, scope: [current_user, key])
rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user)
redirect_back_or_default(options: { alert: _('This endpoint has been requested too many times. Try again later.') })
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
end

View File

@ -4,7 +4,7 @@ class Import::GitlabGroupsController < ApplicationController
include WorkhorseAuthorization
before_action :ensure_group_import_enabled
before_action :import_rate_limit, only: %i[create]
before_action :check_import_rate_limit!, only: %i[create]
feature_category :importers
@ -55,12 +55,9 @@ class Import::GitlabGroupsController < ApplicationController
render_404 unless Feature.enabled?(:group_import_export, @group, default_enabled: true)
end
def import_rate_limit
if Gitlab::ApplicationRateLimiter.throttled?(:group_import, scope: current_user)
Gitlab::ApplicationRateLimiter.log_request(request, :group_import_request_limit, current_user)
flash[:alert] = _('This endpoint has been requested too many times. Try again later.')
redirect_to new_group_path
def check_import_rate_limit!
check_rate_limit!(:group_import, scope: current_user) do
redirect_to new_group_path, alert: _('This endpoint has been requested too many times. Try again later.')
end
end

View File

@ -2,8 +2,10 @@
class Profiles::EmailsController < Profiles::ApplicationController
before_action :find_email, only: [:destroy, :resend_confirmation_instructions]
before_action -> { rate_limit!(:profile_add_new_email) }, only: [:create]
before_action -> { rate_limit!(:profile_resend_email_confirmation) }, only: [:resend_confirmation_instructions]
before_action -> { check_rate_limit!(:profile_add_new_email, scope: current_user, redirect_back: true) },
only: [:create]
before_action -> { check_rate_limit!(:profile_resend_email_confirmation, scope: current_user, redirect_back: true) },
only: [:resend_confirmation_instructions]
feature_category :users
@ -42,16 +44,6 @@ class Profiles::EmailsController < Profiles::ApplicationController
private
def rate_limit!(action)
rate_limiter = ::Gitlab::ApplicationRateLimiter
if rate_limiter.throttled?(action, scope: current_user)
rate_limiter.log_request(request, action, current_user)
redirect_back_or_default(options: { alert: _('This action has been performed too many times. Try again later.') })
end
end
def email_params
params.require(:email).permit(:email)
end

View File

@ -6,7 +6,7 @@ class Projects::HooksController < Projects::ApplicationController
# Authorize
before_action :authorize_admin_project!
before_action :hook_logs, only: :edit
before_action -> { create_rate_limit(:project_testing_hook, @project) }, only: :test
before_action -> { check_rate_limit!(:project_testing_hook, scope: [@project, current_user]) }, only: :test
respond_to :html

View File

@ -37,7 +37,9 @@ class Projects::IssuesController < Projects::ApplicationController
before_action :authorize_download_code!, only: [:related_branches]
# Limit the amount of issues created per minute
before_action :create_rate_limit, only: [:create], if: -> { Feature.disabled?('rate_limited_service_issues_create', project, default_enabled: :yaml) }
before_action -> { check_rate_limit!(:issues_create, scope: [@project, @current_user])},
only: [:create],
if: -> { Feature.disabled?('rate_limited_service_issues_create', project, default_enabled: :yaml) }
before_action do
push_frontend_feature_flag(:tribute_autocomplete, @project)
@ -363,20 +365,6 @@ class Projects::IssuesController < Projects::ApplicationController
project_compare_path(project, from: project.default_branch, to: branch[:name])
end
def create_rate_limit
key = :issues_create
if rate_limiter.throttled?(key, scope: [@project, @current_user])
rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user)
render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def service_desk?
action_name == 'service_desk'
end

View File

@ -3,7 +3,7 @@
class Projects::PipelineSchedulesController < Projects::ApplicationController
before_action :schedule, except: [:index, :new, :create]
before_action :play_rate_limit, only: [:play]
before_action :check_play_rate_limit!, only: [:play]
before_action :authorize_play_pipeline_schedule!, only: [:play]
before_action :authorize_read_pipeline_schedule!
before_action :authorize_create_pipeline_schedule!, only: [:new, :create]
@ -81,19 +81,15 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
private
def play_rate_limit
def check_play_rate_limit!
return unless current_user
if rate_limiter.throttled?(:play_pipeline_schedule, scope: [current_user, schedule])
check_rate_limit!(:play_pipeline_schedule, scope: [current_user, schedule]) do
flash[:alert] = _('You cannot play this scheduled pipeline at the moment. Please wait a minute.')
redirect_to pipeline_schedules_path(@project)
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def schedule
@schedule ||= project.pipeline_schedules.find(params[:id])
end

View File

@ -13,7 +13,7 @@ class Projects::RawController < Projects::ApplicationController
before_action :set_ref_and_path
before_action :require_non_empty_project
before_action :authorize_download_code!
before_action :show_rate_limit, only: [:show], unless: :external_storage_request?
before_action :check_show_rate_limit!, only: [:show], unless: :external_storage_request?
before_action :redirect_to_external_storage, only: :show, if: :static_objects_external_storage_enabled?
feature_category :source_code_management
@ -33,23 +33,11 @@ class Projects::RawController < Projects::ApplicationController
@ref, @path = extract_ref(get_id)
end
def show_rate_limit
if rate_limiter.throttled?(:show_raw_controller, scope: [@project, @path], threshold: raw_blob_request_limit)
rate_limiter.log_request(request, :raw_blob_request_limit, current_user)
def check_show_rate_limit!
check_rate_limit!(:raw_blob, scope: [@project, @path]) do
render plain: _('You cannot access the raw file. Please wait a minute.'), status: :too_many_requests
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def raw_blob_request_limit
Gitlab::CurrentSettings
.current_application_settings
.raw_blob_request_limit
end
end
Projects::RawController.prepend_mod

View File

@ -3,16 +3,16 @@
class Projects::RepositoriesController < Projects::ApplicationController
include ExtractsPath
include StaticObjectExternalStorage
include Gitlab::RateLimitHelpers
include HotlinkInterceptor
include Gitlab::RepositoryArchiveRateLimiter
prepend_before_action(only: [:archive]) { authenticate_sessionless_user!(:archive) }
skip_before_action :default_cache_headers, only: :archive
# Authorize
before_action :check_archive_rate_limiting!, only: :archive
before_action :require_non_empty_project, except: :create
before_action :archive_rate_limit!, only: :archive
before_action :intercept_hotlinking!, only: :archive
before_action :assign_archive_vars, only: :archive
before_action :assign_append_sha, only: :archive
@ -42,12 +42,6 @@ class Projects::RepositoriesController < Projects::ApplicationController
private
def archive_rate_limit!
if archive_rate_limit_reached?(current_user, @project)
render plain: ::Gitlab::RateLimitHelpers::ARCHIVE_RATE_LIMIT_REACHED_MESSAGE, status: :too_many_requests
end
end
def repo_params
@repo_params ||= { ref: @ref, path: params[:path], format: params[:format], append_sha: @append_sha }
end
@ -125,6 +119,12 @@ class Projects::RepositoriesController < Projects::ApplicationController
[path, nil]
end
end
def check_archive_rate_limiting!
check_archive_rate_limit!(current_user, @project) do
render(plain: _('This archive has been requested too many times. Try again later.'), status: :too_many_requests)
end
end
end
Projects::RepositoriesController.prepend_mod_with('Projects::RepositoriesController')

View File

@ -9,6 +9,7 @@ module Projects
layout 'project_settings'
before_action :authorize_admin_pipeline!
before_action :check_builds_available!
before_action :define_variables
before_action do
push_frontend_feature_flag(:ajax_new_deploy_token, @project)

View File

@ -30,7 +30,7 @@ class ProjectsController < Projects::ApplicationController
before_action :event_filter, only: [:show, :activity]
# Project Export Rate Limit
before_action :export_rate_limit, only: [:export, :download_export, :generate_new_export]
before_action :check_export_rate_limit!, only: [:export, :download_export, :generate_new_export]
before_action do
push_frontend_feature_flag(:lazy_load_commits, @project, default_enabled: :yaml)
@ -544,20 +544,12 @@ class ProjectsController < Projects::ApplicationController
@project = @project.present(current_user: current_user)
end
def export_rate_limit
def check_export_rate_limit!
prefixed_action = "project_#{params[:action]}".to_sym
project_scope = params[:action] == 'download_export' ? @project : nil
if rate_limiter.throttled?(prefixed_action, scope: [current_user, project_scope].compact)
rate_limiter.log_request(request, "#{prefixed_action}_request_limit".to_sym, current_user)
render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
end
end
def rate_limiter
::Gitlab::ApplicationRateLimiter
check_rate_limit!(prefixed_action, scope: [current_user, project_scope].compact)
end
def render_edit

View File

@ -4,6 +4,34 @@ module Ci
# This model represents a record in a shadow table of the main database's namespaces table.
# It allows us to navigate the namespace hierarchy on the ci database without resorting to a JOIN.
class NamespaceMirror < ApplicationRecord
# Will be filled by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75517
belongs_to :namespace
scope :contains_namespace, -> (id) do
where('traversal_ids @> ARRAY[?]::int[]', id)
end
class << self
def sync!(event)
namespace = event.namespace
traversal_ids = namespace.self_and_ancestor_ids(hierarchy_order: :desc)
upsert({ namespace_id: event.namespace_id, traversal_ids: traversal_ids },
unique_by: :namespace_id)
# It won't be necessary once we remove `sync_traversal_ids`.
# More info: https://gitlab.com/gitlab-org/gitlab/-/issues/347541
sync_children_namespaces!(event.namespace_id, traversal_ids)
end
private
def sync_children_namespaces!(namespace_id, traversal_ids)
contains_namespace(namespace_id)
.where.not(namespace_id: namespace_id)
.update_all(
"traversal_ids = ARRAY[#{sanitize_sql(traversal_ids.join(','))}]::int[] || traversal_ids[array_position(traversal_ids, #{sanitize_sql(namespace_id)}) + 1:]"
)
end
end
end
end
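
A worked example of the traversal_ids rewrite performed by sync_children_namespaces! above; the IDs are purely illustrative.

# Namespace 3 is re-parented so that its own traversal_ids become [1, 4, 3].
# A descendant's mirror row previously stored [1, 2, 3, 5]; the UPDATE keeps the
# part after namespace 3 and prepends the new ancestor chain:
#   ARRAY[1,4,3]::int[] || traversal_ids[array_position(traversal_ids, 3) + 1:]
old_ids    = [1, 2, 3, 5]
new_prefix = [1, 4, 3]
suffix     = old_ids[(old_ids.index(3) + 1)..] # => [5]
new_prefix + suffix                            # => [1, 4, 3, 5]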

View File

@ -4,6 +4,13 @@ module Ci
# This model represents a shadow table of the main database's projects table.
# It allows us to navigate the project and namespace hierarchy on the ci database.
class ProjectMirror < ApplicationRecord
# Will be filled by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75517
belongs_to :project
class << self
def sync!(event)
upsert({ project_id: event.project_id, namespace_id: event.project.namespace_id },
unique_by: :project_id)
end
end
end
end

View File

@ -145,9 +145,14 @@ class ContainerRepository < ApplicationRecord
name: path.repository_name)
end
def self.create_from_path!(path)
safe_find_or_create_by!(project: path.repository_project,
name: path.repository_name)
def self.find_or_create_from_path(path)
repository = safe_find_or_create_by(
project: path.repository_project,
name: path.repository_name
)
return repository if repository.persisted?
find_by_path!(path)
end
def self.build_root_repository(project)
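
A brief note on the pattern above: the fallback to find_by_path! covers the case where a concurrent request created the repository first, in which case safe_find_or_create_by presumably returns an unpersisted record. A minimal sketch of the call site, matching the registry auth service change later in this commit:

# `path` stands for a registry path object exposing repository_project and
# repository_name, as used above; either branch yields a persisted record.
repository = ContainerRepository.find_or_create_from_path(path)
repository.persisted? # => true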

View File

@ -3,6 +3,9 @@
class ErrorTracking::ErrorEvent < ApplicationRecord
belongs_to :error, counter_cache: :events_count
# Scrub null bytes
attribute :payload, Gitlab::Database::Type::JsonPgSafe.new
validates :payload, json_schema: { filename: 'error_tracking_event_payload' }
validates :error, presence: true

View File

@ -1,15 +1,45 @@
# frozen_string_literal: true
class LooseForeignKeys::DeletedRecord < ApplicationRecord
PARTITION_DURATION = 1.day
include PartitionedTable
self.primary_key = :id
self.ignored_columns = %i[partition]
partitioned_by :partition, strategy: :sliding_list,
next_partition_if: -> (active_partition) do
return false if Feature.disabled?(:lfk_automatic_partition_creation, default_enabled: :yaml)
oldest_record_in_partition = LooseForeignKeys::DeletedRecord
.select(:id, :created_at)
.for_partition(active_partition)
.order(:id)
.limit(1)
.take
oldest_record_in_partition.present? && oldest_record_in_partition.created_at < PARTITION_DURATION.ago
end,
detach_partition_if: -> (partition) do
return false if Feature.disabled?(:lfk_automatic_partition_dropping, default_enabled: :yaml)
!LooseForeignKeys::DeletedRecord
.for_partition(partition)
.status_pending
.exists?
end
scope :for_table, -> (table) { where(fully_qualified_table_name: table) }
scope :for_partition, -> (partition) { where(partition: partition) }
scope :consume_order, -> { order(:partition, :consume_after, :id) }
enum status: { pending: 1, processed: 2 }, _prefix: :status
def self.load_batch_for_table(table, batch_size)
for_table(table)
# selecting partition as partition_number to work around the sliding-partition column being ignored
select(arel_table[Arel.star], arel_table[:partition].as('partition_number'))
.for_table(table)
.status_pending
.consume_order
.limit(batch_size)
@ -20,9 +50,9 @@ class LooseForeignKeys::DeletedRecord < ApplicationRecord
# Run a query for each partition to optimize the row lookup by primary key (partition, id)
update_count = 0
all_records.group_by(&:partition).each do |partition, records_within_partition|
all_records.group_by(&:partition_number).each do |partition, records_within_partition|
update_count += status_pending
.where(partition: partition)
.for_partition(partition)
.where(id: records_within_partition.pluck(:id))
.update_all(status: :processed)
end

View File

@ -64,6 +64,9 @@ class Namespace < ApplicationRecord
has_one :admin_note, inverse_of: :namespace
accepts_nested_attributes_for :admin_note, update_only: true
has_one :ci_namespace_mirror, class_name: 'Ci::NamespaceMirror'
has_many :sync_events, class_name: 'Namespaces::SyncEvent'
validates :owner, presence: true, if: ->(n) { n.owner_required? }
validates :name,
presence: true,
@ -104,6 +107,8 @@ class Namespace < ApplicationRecord
delegate :name, to: :owner, allow_nil: true, prefix: true
delegate :avatar_url, to: :owner, allow_nil: true
after_save :schedule_sync_event_worker, if: -> { saved_change_to_id? || saved_change_to_parent_id? }
after_commit :refresh_access_of_projects_invited_groups, on: :update, if: -> { previous_changes.key?('share_with_group_lock') }
before_create :sync_share_with_group_lock_with_parent
@ -609,6 +614,13 @@ class Namespace < ApplicationRecord
def enforce_minimum_path_length?
path_changed? && !project_namespace?
end
# SyncEvents are created by PG triggers (with the function `insert_namespaces_sync_event`)
def schedule_sync_event_worker
run_after_commit do
Namespaces::SyncEvent.enqueue_worker
end
end
end
Namespace.prepend_mod_with('Namespace')

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# This model serves to keep track of changes to the namespaces table in the main database, allowing these
# changes to be safely replicated to other databases.
class Namespaces::SyncEvent < ApplicationRecord
self.table_name = 'namespaces_sync_events'
belongs_to :namespace
scope :preload_synced_relation, -> { preload(:namespace) }
scope :order_by_id_asc, -> { order(id: :asc) }
def self.enqueue_worker
::Namespaces::ProcessSyncEventsWorker.perform_async # rubocop:disable CodeReuse/Worker
end
end

View File

@ -102,6 +102,8 @@ class Project < ApplicationRecord
after_save :update_project_statistics, if: :saved_change_to_namespace_id?
after_save :schedule_sync_event_worker, if: -> { saved_change_to_id? || saved_change_to_namespace_id? }
after_save :create_import_state, if: ->(project) { project.import? && project.import_state.nil? }
after_save :save_topics
@ -394,6 +396,9 @@ class Project < ApplicationRecord
has_many :timelogs
has_one :ci_project_mirror, class_name: 'Ci::ProjectMirror'
has_many :sync_events, class_name: 'Projects::SyncEvent'
accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :project_feature, update_only: true
accepts_nested_attributes_for :project_setting, update_only: true
@ -2938,6 +2943,13 @@ class Project < ApplicationRecord
project_namespace.shared_runners_enabled = shared_runners_enabled
project_namespace.visibility_level = visibility_level
end
# SyncEvents are created by PG triggers (with the function `insert_projects_sync_event`)
def schedule_sync_event_worker
run_after_commit do
Projects::SyncEvent.enqueue_worker
end
end
end
Project.prepend_mod_with('Project')

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# This model serves to keep track of changes to the namespaces table in the main database as they relate to projects,
# allowing these changes to be safely replicated to other databases.
class Projects::SyncEvent < ApplicationRecord
self.table_name = 'projects_sync_events'
belongs_to :project
scope :preload_synced_relation, -> { preload(:project) }
scope :order_by_id_asc, -> { order(id: :asc) }
def self.enqueue_worker
::Projects::ProcessSyncEventsWorker.perform_async # rubocop:disable CodeReuse/Worker
end
end

View File

@ -4,7 +4,6 @@ module Namespaces
class UserNamespacePolicy < ::NamespacePolicy
rule { anonymous }.prevent_all
condition(:personal_project, scope: :subject) { @subject.kind == 'user' }
condition(:can_create_personal_project, scope: :user) { @user.can_create_project? }
condition(:owner) { @subject.owner == @user }
@ -19,7 +18,7 @@ module Namespaces
enable :read_package_settings
end
rule { personal_project & ~can_create_personal_project }.prevent :create_projects
rule { ~can_create_personal_project }.prevent :create_projects
rule { (owner | admin) & can?(:create_projects) }.enable :transfer_projects
end

View File

@ -3,7 +3,6 @@
module Ci
class PipelinePresenter < Gitlab::View::Presenter::Delegated
include Gitlab::Utils::StrongMemoize
include ActionView::Helpers::UrlHelper
delegator_override_with Gitlab::Utils::StrongMemoize # TODO: Remove `Gitlab::Utils::StrongMemoize` inclusion as it's duplicate
delegator_override_with ActionView::Helpers::TagHelper # TODO: Remove `ActionView::Helpers::UrlHelper` inclusion as it overrides `Ci::Pipeline#tag`
@ -108,7 +107,7 @@ module Ci
end
def link_to_pipeline_ref
link_to(pipeline.ref,
ApplicationController.helpers.link_to(pipeline.ref,
project_commits_path(pipeline.project, pipeline.ref),
class: "ref-name")
end
@ -116,7 +115,7 @@ module Ci
def link_to_merge_request
return unless merge_request_presenter
link_to(merge_request_presenter.to_reference,
ApplicationController.helpers.link_to(merge_request_presenter.to_reference,
project_merge_request_path(merge_request_presenter.project, merge_request_presenter),
class: 'mr-iid')
end
@ -143,7 +142,7 @@ module Ci
private
def plain_ref_name
content_tag(:span, pipeline.ref, class: 'ref-name')
ApplicationController.helpers.content_tag(:span, pipeline.ref, class: 'ref-name')
end
def merge_request_presenter
@ -160,7 +159,7 @@ module Ci
all_related_merge_requests.first(limit).map do |merge_request|
mr_path = project_merge_request_path(merge_request.project, merge_request)
link_to "#{merge_request.to_reference} #{merge_request.title}", mr_path, class: 'mr-iid'
ApplicationController.helpers.link_to "#{merge_request.to_reference} #{merge_request.title}", mr_path, class: 'mr-iid'
end
end

View File

@ -1,8 +1,6 @@
# frozen_string_literal: true
class PrometheusAlertPresenter < Gitlab::View::Presenter::Delegated
include ActionView::Helpers::UrlHelper
presents ::PrometheusAlert, as: :prometheus_alert
def humanized_text

View File

@ -156,7 +156,7 @@ module Auth
return if path.has_repository?
return unless actions.include?('push')
ContainerRepository.create_from_path!(path)
ContainerRepository.find_or_create_from_path(path)
end
# Overridden in EE

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
module Ci
class ProcessSyncEventsService
include Gitlab::Utils::StrongMemoize
include ExclusiveLeaseGuard
BATCH_SIZE = 1000
def initialize(sync_event_class, sync_class)
@sync_event_class = sync_event_class
@sync_class = sync_class
end
def execute
return unless ::Feature.enabled?(:ci_namespace_project_mirrors, default_enabled: :yaml)
# preventing parallel processing over the same event table
try_obtain_lease { process_events }
enqueue_worker_if_there_still_event
end
private
def process_events
events = @sync_event_class.preload_synced_relation.first(BATCH_SIZE)
return if events.empty?
first = events.first
last_processed = nil
begin
events.each do |event|
@sync_class.sync!(event)
last_processed = event
end
ensure
# remove events up to and including the one that was last successfully processed
@sync_event_class.id_in(first.id..last_processed.id).delete_all if last_processed
end
end
def enqueue_worker_if_there_still_event
@sync_event_class.enqueue_worker if @sync_event_class.exists?
end
def lease_key
"#{super}::#{@sync_event_class}"
end
def lease_timeout
1.minute
end
end
end
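
To tie the sharding pieces of this commit together, a condensed sketch of the intended flow, assembled only from the triggers, models, and workers elsewhere in this diff:

# 1. The new PG triggers (insert_namespaces_sync_event / insert_projects_sync_event)
#    append a row to namespaces_sync_events / projects_sync_events whenever a
#    namespace or project is inserted or re-parented.
# 2. Namespace#schedule_sync_event_worker and Project#schedule_sync_event_worker
#    enqueue the matching ProcessSyncEventsWorker after commit.
# 3. Each worker invokes this service with its event/mirror pair:
Ci::ProcessSyncEventsService.new(::Namespaces::SyncEvent, ::Ci::NamespaceMirror).execute
Ci::ProcessSyncEventsService.new(::Projects::SyncEvent, ::Ci::ProjectMirror).execute
# 4. Under an exclusive lease, the service loads up to BATCH_SIZE events, calls
#    the mirror class's sync!(event) for each, deletes the processed events, and
#    re-enqueues the worker if any remain.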

View File

@ -7,7 +7,7 @@ module Ci
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
description tag_list protected needs_attributes
resource_group scheduling_type].freeze
job_variables_attributes resource_group scheduling_type].freeze
end
def self.extra_accessors
@ -68,13 +68,7 @@ module Ci
end
def build_attributes(build)
clone_attributes = if ::Feature.enabled?(:clone_job_variables_at_job_retry, build.project, default_enabled: :yaml)
self.class.clone_accessors + [:job_variables_attributes]
else
self.class.clone_accessors
end
attributes = clone_attributes.to_h do |attribute|
attributes = self.class.clone_accessors.to_h do |attribute|
[attribute, build.public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
end

View File

@ -80,7 +80,7 @@ class SearchService
def abuse_messages
return [] unless params.abusive?
params.abuse_detection.errors.messages
params.abuse_detection.errors.full_messages
end
def valid_request?

View File

@ -79,7 +79,7 @@
%span= milestone.issues_visible_to_user(current_user).count
.title.hide-collapsed
= s_('MilestoneSidebar|Issues')
%span.badge.badge-muted.badge-pill.gl-badge.sm= milestone.issues_visible_to_user(current_user).count
= gl_badge_tag milestone.issues_visible_to_user(current_user).count, variant: :muted, size: :sm
- if show_new_issue_link?(project)
= link_to new_project_issue_path(project, issue: { milestone_id: milestone.id }), class: "float-right", title: s_('MilestoneSidebar|New Issue') do
= s_('MilestoneSidebar|New issue')
@ -111,7 +111,7 @@
%span= milestone.merge_requests.count
.title.hide-collapsed
= s_('MilestoneSidebar|Merge requests')
%span.badge.badge-muted.badge-pill.gl-badge.sm= milestone.merge_requests.count
= gl_badge_tag milestone.merge_requests.count, variant: :muted, size: :sm
.value.hide-collapsed.bold
- if !project || can?(current_user, :read_merge_request, project)
%span.milestone-stat

View File

@ -2492,6 +2492,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: namespaces_process_sync_events
:worker_name: Namespaces::ProcessSyncEventsWorker
:feature_category: :sharding
:has_external_dependencies:
:urgency: :high
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: new_issue
:worker_name: NewIssueWorker
:feature_category: :team_planning
@ -2663,6 +2672,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: projects_process_sync_events
:worker_name: Projects::ProcessSyncEventsWorker
:feature_category: :sharding
:has_external_dependencies:
:urgency: :high
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: projects_schedule_bulk_repository_shard_moves
:worker_name: Projects::ScheduleBulkRepositoryShardMovesWorker
:feature_category: :gitaly

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Namespaces
# This worker can be called multiple times at the same time but only one of them can
# process events at a time. This is ensured by `try_obtain_lease` in `Ci::ProcessSyncEventsService`.
# `until_executing` here is to reduce redundant worker enqueuing.
class ProcessSyncEventsWorker
include ApplicationWorker
data_consistency :always
feature_category :sharding
urgency :high
idempotent!
deduplicate :until_executing
def perform
::Ci::ProcessSyncEventsService.new(::Namespaces::SyncEvent, ::Ci::NamespaceMirror).execute
end
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Projects
# This worker can be called multiple times at the same time but only one of them can
# process events at a time. This is ensured by `try_obtain_lease` in `Ci::ProcessSyncEventsService`.
# `until_executing` here is to reduce redundant worker enqueuing.
class ProcessSyncEventsWorker
include ApplicationWorker
data_consistency :always
feature_category :sharding
urgency :high
idempotent!
deduplicate :until_executing
def perform
::Ci::ProcessSyncEventsService.new(::Projects::SyncEvent, ::Ci::ProjectMirror).execute
end
end
end

View File

@ -1,8 +0,0 @@
---
name: api_v3_commits_skip_diff_files
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67647
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344617
milestone: '14.5'
type: development
group: group::integrations
default_enabled: true

View File

@ -1,8 +1,8 @@
---
name: clone_job_variables_at_job_retry
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75720
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347156
name: ci_namespace_project_mirrors
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75517
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/346786
milestone: '14.6'
type: development
group: group::pipeline authoring
group: group::sharding
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: lfk_automatic_partition_creation
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/346907
milestone: '14.6'
type: development
group: group::sharding
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: lfk_automatic_partition_dropping
introduced_by_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/346908
milestone: '14.6'
type: development
group: group::sharding
default_enabled: false

View File

@ -2,7 +2,8 @@
Gitlab::Database::Partitioning.register_models([
AuditEvent,
WebHookLog
WebHookLog,
LooseForeignKeys::DeletedRecord
])
if Gitlab.ee?

View File

@ -273,6 +273,8 @@
- 1
- - namespaces_onboarding_user_added
- 1
- - namespaces_process_sync_events
- 1
- - namespaces_sync_namespace_name
- 1
- - new_epic
@ -339,6 +341,8 @@
- 1
- - projects_post_creation
- 1
- - projects_process_sync_events
- 1
- - projects_schedule_bulk_repository_shard_moves
- 1
- - projects_update_repository_storage

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
class Gitlab::Seeder::Crm
attr_reader :group, :organizations_per_group, :contacts_per_group
def initialize(group, organizations_per_group: 10, contacts_per_group: 40)
@group = group
@organizations_per_group = organizations_per_group
@contacts_per_group = contacts_per_group
end
def seed!
organization_ids = []
organizations_per_group.times do
organization_ids << ::CustomerRelations::Organization.create!(
group_id: group.id,
name: FFaker::Company.name
).id
print '.'
end
contacts_per_group.times do |index|
first_name = FFaker::Name.first_name
last_name = FFaker::Name.last_name
organization_id = index % 3 == 0 ? organization_ids.sample : nil
::CustomerRelations::Contact.create!(
group_id: group.id,
first_name: first_name,
last_name: last_name,
email: "#{first_name}.#{last_name}@example.org",
organization_id: organization_id
)
print '.'
end
end
end
Gitlab::Seeder.quiet do
puts "\nGenerating group crm organizations and contacts"
Group.all.find_each do |group|
Gitlab::Seeder::Crm.new(group).seed!
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class CreateNamespacesSyncEvents < Gitlab::Database::Migration[1.0]
def change
create_table :namespaces_sync_events do |t|
t.references :namespace, null: false, index: true, foreign_key: { on_delete: :cascade }
end
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class CreateProjectsSyncEvents < Gitlab::Database::Migration[1.0]
def change
create_table :projects_sync_events do |t|
t.references :project, null: false, index: true, foreign_key: { on_delete: :cascade }
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
class CreateNamespacesSyncTrigger < Gitlab::Database::Migration[1.0]
include Gitlab::Database::SchemaHelpers
enable_lock_retries!
TABLE_NAME = 'namespaces'
EVENT_TABLE_NAME = 'namespaces_sync_events'
FUNCTION_NAME = 'insert_namespaces_sync_event'
TRIGGER_ON_INSERT = 'trigger_namespaces_parent_id_on_insert'
TRIGGER_ON_UPDATE = 'trigger_namespaces_parent_id_on_update'
def up
create_trigger_function(FUNCTION_NAME) do
<<~SQL
INSERT INTO #{EVENT_TABLE_NAME} (namespace_id)
VALUES(COALESCE(NEW.id, OLD.id));
RETURN NULL;
SQL
end
create_trigger(TABLE_NAME, TRIGGER_ON_INSERT, FUNCTION_NAME, fires: 'AFTER INSERT')
create_trigger(TABLE_NAME, TRIGGER_ON_UPDATE, FUNCTION_NAME, fires: 'AFTER UPDATE') do
<<~SQL
WHEN (OLD.parent_id IS DISTINCT FROM NEW.parent_id)
SQL
end
end
def down
drop_trigger(TABLE_NAME, TRIGGER_ON_INSERT)
drop_trigger(TABLE_NAME, TRIGGER_ON_UPDATE)
drop_function(FUNCTION_NAME)
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
class CreateProjectsSyncTrigger < Gitlab::Database::Migration[1.0]
include Gitlab::Database::SchemaHelpers
enable_lock_retries!
TABLE_NAME = 'projects'
EVENT_TABLE_NAME = 'projects_sync_events'
FUNCTION_NAME = 'insert_projects_sync_event'
TRIGGER_ON_INSERT = 'trigger_projects_parent_id_on_insert'
TRIGGER_ON_UPDATE = 'trigger_projects_parent_id_on_update'
def up
create_trigger_function(FUNCTION_NAME) do
<<~SQL
INSERT INTO #{EVENT_TABLE_NAME} (project_id)
VALUES(COALESCE(NEW.id, OLD.id));
RETURN NULL;
SQL
end
create_trigger(TABLE_NAME, TRIGGER_ON_INSERT, FUNCTION_NAME, fires: 'AFTER INSERT')
create_trigger(TABLE_NAME, TRIGGER_ON_UPDATE, FUNCTION_NAME, fires: 'AFTER UPDATE') do
<<~SQL
WHEN (OLD.namespace_id IS DISTINCT FROM NEW.namespace_id)
SQL
end
end
def down
drop_trigger(TABLE_NAME, TRIGGER_ON_INSERT)
drop_trigger(TABLE_NAME, TRIGGER_ON_UPDATE)
drop_function(FUNCTION_NAME)
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class MoveLooseFkDeletedRecordsToDynamicSchema < Gitlab::Database::Migration[1.0]
enable_lock_retries!
def up
if table_exists?('gitlab_partitions_static.loose_foreign_keys_deleted_records_1')
execute 'ALTER TABLE gitlab_partitions_static.loose_foreign_keys_deleted_records_1 SET SCHEMA gitlab_partitions_dynamic'
end
end
def down
if table_exists?('gitlab_partitions_dynamic.loose_foreign_keys_deleted_records_1')
execute 'ALTER TABLE gitlab_partitions_dynamic.loose_foreign_keys_deleted_records_1 SET SCHEMA gitlab_partitions_static'
end
end
end

View File

@ -0,0 +1 @@
0209db1e7be48bcbf0e52b451d37da0ef2ecadd567cdfa47907fc5032c258a27

View File

@ -0,0 +1 @@
bc0ae055b331801fbe020c12a66e4e6ae790780121bfd66fd161093c94c7a84a

View File

@ -0,0 +1 @@
9fd4977cdb57df827fe1a01f55a305d832ee4240d40af9396e093e3b4dbd1e33

View File

@ -0,0 +1 @@
b3ce6aa41c70cdcf8637a94c3d4d4e97730899221530f5507c4581aaf2fc3a6c

View File

@ -0,0 +1 @@
2bca61880005c9303b2ff71747cde64d3418b6ef8ad2a9f114d584f4149e386b

View File

@ -34,6 +34,28 @@ BEGIN
END
$$;
CREATE FUNCTION insert_namespaces_sync_event() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO namespaces_sync_events (namespace_id)
VALUES(COALESCE(NEW.id, OLD.id));
RETURN NULL;
END
$$;
CREATE FUNCTION insert_projects_sync_event() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
INSERT INTO projects_sync_events (project_id)
VALUES(COALESCE(NEW.id, OLD.id));
RETURN NULL;
END
$$;
CREATE FUNCTION integrations_set_type_new() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -125,6 +147,18 @@ CREATE TABLE incident_management_pending_issue_escalations (
)
PARTITION BY RANGE (process_at);
CREATE TABLE loose_foreign_keys_deleted_records (
id bigint NOT NULL,
partition bigint DEFAULT 1 NOT NULL,
primary_key_value bigint NOT NULL,
status smallint DEFAULT 1 NOT NULL,
created_at timestamp with time zone DEFAULT now() NOT NULL,
fully_qualified_table_name text NOT NULL,
consume_after timestamp with time zone DEFAULT now(),
CONSTRAINT check_1a541f3235 CHECK ((char_length(fully_qualified_table_name) <= 150))
)
PARTITION BY LIST (partition);
CREATE TABLE verification_codes (
created_at timestamp with time zone DEFAULT now() NOT NULL,
visitor_id_code text NOT NULL,
@ -1013,39 +1047,6 @@ CREATE TABLE gitlab_partitions_static.analytics_cycle_analytics_merge_request_st
);
ALTER TABLE ONLY analytics_cycle_analytics_merge_request_stage_events ATTACH PARTITION gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_31 FOR VALUES WITH (modulus 32, remainder 31);
CREATE TABLE loose_foreign_keys_deleted_records (
id bigint NOT NULL,
partition bigint DEFAULT 1 NOT NULL,
primary_key_value bigint NOT NULL,
status smallint DEFAULT 1 NOT NULL,
created_at timestamp with time zone DEFAULT now() NOT NULL,
fully_qualified_table_name text NOT NULL,
consume_after timestamp with time zone DEFAULT now(),
CONSTRAINT check_1a541f3235 CHECK ((char_length(fully_qualified_table_name) <= 150))
)
PARTITION BY LIST (partition);
CREATE SEQUENCE loose_foreign_keys_deleted_records_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE loose_foreign_keys_deleted_records_id_seq OWNED BY loose_foreign_keys_deleted_records.id;
CREATE TABLE gitlab_partitions_static.loose_foreign_keys_deleted_records_1 (
id bigint DEFAULT nextval('loose_foreign_keys_deleted_records_id_seq'::regclass) NOT NULL,
partition bigint DEFAULT 1 NOT NULL,
primary_key_value bigint NOT NULL,
status smallint DEFAULT 1 NOT NULL,
created_at timestamp with time zone DEFAULT now() NOT NULL,
fully_qualified_table_name text NOT NULL,
consume_after timestamp with time zone DEFAULT now(),
CONSTRAINT check_1a541f3235 CHECK ((char_length(fully_qualified_table_name) <= 150))
);
ALTER TABLE ONLY loose_foreign_keys_deleted_records ATTACH PARTITION gitlab_partitions_static.loose_foreign_keys_deleted_records_1 FOR VALUES IN ('1');
CREATE TABLE product_analytics_events_experimental (
id bigint NOT NULL,
project_id integer NOT NULL,
@ -15867,6 +15868,15 @@ CREATE SEQUENCE lists_id_seq
ALTER SEQUENCE lists_id_seq OWNED BY lists.id;
CREATE SEQUENCE loose_foreign_keys_deleted_records_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE loose_foreign_keys_deleted_records_id_seq OWNED BY loose_foreign_keys_deleted_records.id;
CREATE TABLE member_tasks (
id bigint NOT NULL,
member_id bigint NOT NULL,
@ -16492,6 +16502,20 @@ CREATE SEQUENCE namespaces_id_seq
ALTER SEQUENCE namespaces_id_seq OWNED BY namespaces.id;
CREATE TABLE namespaces_sync_events (
id bigint NOT NULL,
namespace_id bigint NOT NULL
);
CREATE SEQUENCE namespaces_sync_events_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE namespaces_sync_events_id_seq OWNED BY namespaces_sync_events.id;
CREATE TABLE note_diff_files (
id integer NOT NULL,
diff_note_id integer NOT NULL,
@ -18546,6 +18570,20 @@ CREATE SEQUENCE projects_id_seq
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
CREATE TABLE projects_sync_events (
id bigint NOT NULL,
project_id bigint NOT NULL
);
CREATE SEQUENCE projects_sync_events_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE projects_sync_events_id_seq OWNED BY projects_sync_events.id;
CREATE TABLE prometheus_alert_events (
id bigint NOT NULL,
project_id integer NOT NULL,
@ -21811,6 +21849,8 @@ ALTER TABLE ONLY namespace_statistics ALTER COLUMN id SET DEFAULT nextval('names
ALTER TABLE ONLY namespaces ALTER COLUMN id SET DEFAULT nextval('namespaces_id_seq'::regclass);
ALTER TABLE ONLY namespaces_sync_events ALTER COLUMN id SET DEFAULT nextval('namespaces_sync_events_id_seq'::regclass);
ALTER TABLE ONLY note_diff_files ALTER COLUMN id SET DEFAULT nextval('note_diff_files_id_seq'::regclass);
ALTER TABLE ONLY notes ALTER COLUMN id SET DEFAULT nextval('notes_id_seq'::regclass);
@ -21961,6 +22001,8 @@ ALTER TABLE ONLY project_tracing_settings ALTER COLUMN id SET DEFAULT nextval('p
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
ALTER TABLE ONLY projects_sync_events ALTER COLUMN id SET DEFAULT nextval('projects_sync_events_id_seq'::regclass);
ALTER TABLE ONLY prometheus_alert_events ALTER COLUMN id SET DEFAULT nextval('prometheus_alert_events_id_seq'::regclass);
ALTER TABLE ONLY prometheus_alerts ALTER COLUMN id SET DEFAULT nextval('prometheus_alerts_id_seq'::regclass);
@ -22397,12 +22439,6 @@ ALTER TABLE ONLY gitlab_partitions_static.analytics_cycle_analytics_merge_reques
ALTER TABLE ONLY gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_31
ADD CONSTRAINT analytics_cycle_analytics_merge_request_stage_events_31_pkey PRIMARY KEY (stage_event_hash_id, merge_request_id);
ALTER TABLE ONLY loose_foreign_keys_deleted_records
ADD CONSTRAINT loose_foreign_keys_deleted_records_pkey PRIMARY KEY (partition, id);
ALTER TABLE ONLY gitlab_partitions_static.loose_foreign_keys_deleted_records_1
ADD CONSTRAINT loose_foreign_keys_deleted_records_1_pkey PRIMARY KEY (partition, id);
ALTER TABLE ONLY product_analytics_events_experimental
ADD CONSTRAINT product_analytics_events_experimental_pkey PRIMARY KEY (id, project_id);
@ -23492,6 +23528,9 @@ ALTER TABLE ONLY list_user_preferences
ALTER TABLE ONLY lists
ADD CONSTRAINT lists_pkey PRIMARY KEY (id);
ALTER TABLE ONLY loose_foreign_keys_deleted_records
ADD CONSTRAINT loose_foreign_keys_deleted_records_pkey PRIMARY KEY (partition, id);
ALTER TABLE ONLY member_tasks
ADD CONSTRAINT member_tasks_pkey PRIMARY KEY (id);
@ -23582,6 +23621,9 @@ ALTER TABLE ONLY namespace_statistics
ALTER TABLE ONLY namespaces
ADD CONSTRAINT namespaces_pkey PRIMARY KEY (id);
ALTER TABLE ONLY namespaces_sync_events
ADD CONSTRAINT namespaces_sync_events_pkey PRIMARY KEY (id);
ALTER TABLE ONLY note_diff_files
ADD CONSTRAINT note_diff_files_pkey PRIMARY KEY (id);
@ -23852,6 +23894,9 @@ ALTER TABLE ONLY project_tracing_settings
ALTER TABLE ONLY projects
ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY projects_sync_events
ADD CONSTRAINT projects_sync_events_pkey PRIMARY KEY (id);
ALTER TABLE ONLY prometheus_alert_events
ADD CONSTRAINT prometheus_alert_events_pkey PRIMARY KEY (id);
@ -24256,10 +24301,6 @@ CREATE INDEX index_merge_request_stage_events_project_duration ON ONLY analytics
CREATE INDEX index_006f943df6 ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_16 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
CREATE INDEX index_loose_foreign_keys_deleted_records_for_partitioned_query ON ONLY loose_foreign_keys_deleted_records USING btree (partition, fully_qualified_table_name, consume_after, id) WHERE (status = 1);
CREATE INDEX index_01e3390fac ON gitlab_partitions_static.loose_foreign_keys_deleted_records_1 USING btree (partition, fully_qualified_table_name, consume_after, id) WHERE (status = 1);
CREATE INDEX index_02749b504c ON gitlab_partitions_static.analytics_cycle_analytics_merge_request_stage_events_11 USING btree (stage_event_hash_id, project_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
CREATE INDEX index_merge_request_stage_events_group_duration ON ONLY analytics_cycle_analytics_merge_request_stage_events USING btree (stage_event_hash_id, group_id, end_event_timestamp, merge_request_id, start_event_timestamp) WHERE (end_event_timestamp IS NOT NULL);
@ -26514,6 +26555,8 @@ CREATE INDEX index_lists_on_milestone_id ON lists USING btree (milestone_id);
CREATE INDEX index_lists_on_user_id ON lists USING btree (user_id);
CREATE INDEX index_loose_foreign_keys_deleted_records_for_partitioned_query ON ONLY loose_foreign_keys_deleted_records USING btree (partition, fully_qualified_table_name, consume_after, id) WHERE (status = 1);
CREATE INDEX index_member_tasks_on_member_id ON member_tasks USING btree (member_id);
CREATE UNIQUE INDEX index_member_tasks_on_member_id_and_project_id ON member_tasks USING btree (member_id, project_id);
@ -26744,6 +26787,8 @@ CREATE INDEX index_namespaces_on_type_and_id ON namespaces USING btree (type, id
CREATE INDEX index_namespaces_public_groups_name_id ON namespaces USING btree (name, id) WHERE (((type)::text = 'Group'::text) AND (visibility_level = 20));
CREATE INDEX index_namespaces_sync_events_on_namespace_id ON namespaces_sync_events USING btree (namespace_id);
CREATE INDEX index_non_requested_project_members_on_source_id_and_type ON members USING btree (source_id, source_type) WHERE ((requested_at IS NULL) AND ((type)::text = 'ProjectMember'::text));
CREATE UNIQUE INDEX index_note_diff_files_on_diff_note_id ON note_diff_files USING btree (diff_note_id);
@ -27204,6 +27249,8 @@ CREATE INDEX index_projects_on_star_count ON projects USING btree (star_count);
CREATE INDEX index_projects_on_updated_at_and_id ON projects USING btree (updated_at, id);
CREATE INDEX index_projects_sync_events_on_project_id ON projects_sync_events USING btree (project_id);
CREATE UNIQUE INDEX index_prometheus_alert_event_scoped_payload_key ON prometheus_alert_events USING btree (prometheus_alert_id, payload_key);
CREATE INDEX index_prometheus_alert_events_on_project_id_and_status ON prometheus_alert_events USING btree (project_id, status);
@ -28164,8 +28211,6 @@ ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_pa
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_006f943df6;
ALTER INDEX index_loose_foreign_keys_deleted_records_for_partitioned_query ATTACH PARTITION gitlab_partitions_static.index_01e3390fac;
ALTER INDEX index_merge_request_stage_events_project_duration ATTACH PARTITION gitlab_partitions_static.index_02749b504c;
ALTER INDEX index_merge_request_stage_events_group_duration ATTACH PARTITION gitlab_partitions_static.index_0287f5ba09;
@ -28674,8 +28719,6 @@ ALTER INDEX index_issue_stage_events_project_duration ATTACH PARTITION gitlab_pa
ALTER INDEX index_issue_stage_events_group_in_progress_duration ATTACH PARTITION gitlab_partitions_static.index_ff8741d8d7;
ALTER INDEX loose_foreign_keys_deleted_records_pkey ATTACH PARTITION gitlab_partitions_static.loose_foreign_keys_deleted_records_1_pkey;
ALTER INDEX index_product_analytics_events_experimental_project_and_time ATTACH PARTITION gitlab_partitions_static.product_analytics_events_expe_project_id_collector_tstamp_idx10;
ALTER INDEX index_product_analytics_events_experimental_project_and_time ATTACH PARTITION gitlab_partitions_static.product_analytics_events_expe_project_id_collector_tstamp_idx11;
@ -28960,6 +29003,14 @@ CREATE TRIGGER trigger_has_external_wiki_on_type_new_updated AFTER UPDATE OF typ
CREATE TRIGGER trigger_has_external_wiki_on_update AFTER UPDATE ON integrations FOR EACH ROW WHEN (((new.type_new = 'Integrations::ExternalWiki'::text) AND (old.active <> new.active) AND (new.project_id IS NOT NULL))) EXECUTE FUNCTION set_has_external_wiki();
CREATE TRIGGER trigger_namespaces_parent_id_on_insert AFTER INSERT ON namespaces FOR EACH ROW EXECUTE FUNCTION insert_namespaces_sync_event();
CREATE TRIGGER trigger_namespaces_parent_id_on_update AFTER UPDATE ON namespaces FOR EACH ROW WHEN ((old.parent_id IS DISTINCT FROM new.parent_id)) EXECUTE FUNCTION insert_namespaces_sync_event();
CREATE TRIGGER trigger_projects_parent_id_on_insert AFTER INSERT ON projects FOR EACH ROW EXECUTE FUNCTION insert_projects_sync_event();
CREATE TRIGGER trigger_projects_parent_id_on_update AFTER UPDATE ON projects FOR EACH ROW WHEN ((old.namespace_id IS DISTINCT FROM new.namespace_id)) EXECUTE FUNCTION insert_projects_sync_event();
CREATE TRIGGER trigger_type_new_on_insert AFTER INSERT ON integrations FOR EACH ROW EXECUTE FUNCTION integrations_set_type_new();
ALTER TABLE ONLY chat_names
@ -30870,6 +30921,9 @@ ALTER TABLE ONLY gpg_keys
ALTER TABLE ONLY analytics_language_trend_repository_languages
ADD CONSTRAINT fk_rails_9d851d566c FOREIGN KEY (programming_language_id) REFERENCES programming_languages(id) ON DELETE CASCADE;
ALTER TABLE ONLY namespaces_sync_events
ADD CONSTRAINT fk_rails_9da32a0431 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY badges
ADD CONSTRAINT fk_rails_9df4a56538 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -31044,6 +31098,9 @@ ALTER TABLE ONLY security_findings
ALTER TABLE ONLY packages_debian_project_component_files
ADD CONSTRAINT fk_rails_bbe9ebfbd9 FOREIGN KEY (component_id) REFERENCES packages_debian_project_components(id) ON DELETE RESTRICT;
ALTER TABLE ONLY projects_sync_events
ADD CONSTRAINT fk_rails_bbf0eef59f FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY approval_merge_request_rules_users
ADD CONSTRAINT fk_rails_bc8972fa55 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;


@ -259,7 +259,7 @@ replication job is scheduled only if there are no other replication jobs pending
repository.
The reconciliation frequency can be changed via the configuration. The value can be any valid
[Go duration value](https://golang.org/pkg/time/#ParseDuration). Values below 0 disable the feature.
[Go duration value](https://pkg.go.dev/time#ParseDuration). Values below 0 disable the feature.
Examples:


@ -799,7 +799,7 @@ Incorrect configuration of these values may result in intermittent
or persistent errors, or the Pages Daemon serving old content.
NOTE:
Expiry, interval and timeout flags use [Golang's duration formatting](https://golang.org/pkg/time/#ParseDuration).
Expiry, interval and timeout flags use [Golang's duration formatting](https://pkg.go.dev/time#ParseDuration).
A duration string is a possibly signed sequence of decimal numbers,
each with optional fraction and a unit suffix, such as `300ms`, `1.5h` or `2h45m`.
Valid time units are `ns`, `us` (or `µs`), `ms`, `s`, `m`, `h`.
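
These flags are parsed with Go's `time.ParseDuration`. A minimal, standalone sketch (illustrative only, not Pages code) showing how such values are interpreted:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Sample values in the same format the expiry, interval and timeout
	// flags accept.
	for _, v := range []string{"300ms", "1.5h", "2h45m"} {
		d, err := time.ParseDuration(v)
		if err != nil {
			fmt.Printf("invalid duration %q: %v\n", v, err)
			continue
		}
		fmt.Printf("%q parses to %v\n", v, d)
	}
}
```
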
@ -1286,7 +1286,7 @@ in all of your GitLab Pages instances.
### 500 error with `securecookie: failed to generate random iv` and `Failed to save the session`
This problem most likely results from an [out-dated operating system](../package_information/supported_os.md#os-versions-that-are-no-longer-supported).
The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [`crypto/rand` in Go](https://golang.org/pkg/crypto/rand/#pkg-variables).
The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [`crypto/rand` in Go](https://pkg.go.dev/crypto/rand#pkg-variables).
This requires the `getrandom` system call or `/dev/urandom` to be available on the host OS.
Upgrading to an [officially supported operating system](https://about.gitlab.com/install/) is recommended.
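
For illustration only (this is not the daemon's code), the standard-library call involved is `crypto/rand.Read`, which fails when neither `getrandom` nor `/dev/urandom` is usable on the host:

```go
package main

import (
	"crypto/rand"
	"fmt"
	"log"
)

func main() {
	// crypto/rand reads from the kernel CSPRNG (getrandom or /dev/urandom).
	iv := make([]byte, 16)
	if _, err := rand.Read(iv); err != nil {
		// On hosts without a usable entropy source, this is the failure
		// that surfaces as "failed to generate random iv".
		log.Fatalf("failed to generate random iv: %v", err)
	}
	fmt.Printf("generated %d random bytes\n", len(iv))
}
```
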


@ -3758,7 +3758,7 @@ Input type: `ProjectSetComplianceFrameworkInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationprojectsetcomplianceframeworkclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationprojectsetcomplianceframeworkcomplianceframeworkid"></a>`complianceFrameworkId` | [`ComplianceManagementFrameworkID`](#compliancemanagementframeworkid) | ID of the compliance framework to assign to the project. |
| <a id="mutationprojectsetcomplianceframeworkcomplianceframeworkid"></a>`complianceFrameworkId` | [`ComplianceManagementFrameworkID`](#compliancemanagementframeworkid) | ID of the compliance framework to assign to the project. Set to `null` to unset. |
| <a id="mutationprojectsetcomplianceframeworkprojectid"></a>`projectId` | [`ProjectID!`](#projectid) | ID of the project to change the compliance framework of. |
#### Fields


@ -161,7 +161,7 @@ curl --request PUT \
--data "name=topic1" \
--header "PRIVATE-TOKEN: <your_access_token>" \
"https://gitlab.example.com/api/v4/topics/1"
```
Example response:


@ -28,7 +28,7 @@ might be useful to understand why it is important, and what is the roadmap.
## How GitLab Pages Works
GitLab Pages is a daemon designed to serve static content, written in
[Go](https://golang.org/).
[Go](https://go.dev/).
Initially, GitLab Pages has been designed to store static content on a local
shared block storage (NFS) in a hierarchical group > project directory


@ -18,7 +18,7 @@ For GitLab.com and for GitLab customers, the Container Registry is a critical co
## Current Architecture
The Container Registry is a single [Go](https://golang.org/) application. Its only dependency is the storage backend on which images and metadata are stored.
The Container Registry is a single [Go](https://go.dev/) application. Its only dependency is the storage backend on which images and metadata are stored.
```mermaid
graph LR
@ -146,7 +146,7 @@ The interaction between the registry and its clients, including GitLab Rails and
### Database
Following the GitLab [Go standards and style guidelines](../../../development/go_guide), no ORM is used to manage the database, only the [`database/sql`](https://golang.org/pkg/database/sql/) package from the Go standard library, a PostgreSQL driver ([`lib/pq`](https://pkg.go.dev/github.com/lib/pq?tab=doc)) and raw SQL queries, over a TCP connection pool.
Following the GitLab [Go standards and style guidelines](../../../development/go_guide), no ORM is used to manage the database, only the [`database/sql`](https://pkg.go.dev/database/sql) package from the Go standard library, a PostgreSQL driver ([`lib/pq`](https://pkg.go.dev/github.com/lib/pq?tab=doc)) and raw SQL queries, over a TCP connection pool.
The design and development of the registry database adhere to the GitLab [database guidelines](../../../development/database/). Being a Go application, the required tooling to support the database will have to be developed, such as for running database migrations.
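
A minimal sketch of that pattern (illustrative only, not registry code): `database/sql` plus the `lib/pq` driver and a raw SQL query over the pooled TCP connection. The connection string and table name are placeholders.

```go
package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/lib/pq" // registers the "postgres" driver
)

func main() {
	// Placeholder DSN for illustration only.
	db, err := sql.Open("postgres", "postgres://registry:secret@localhost/registry_db?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var count int
	// Raw SQL, no ORM.
	err = db.QueryRowContext(context.Background(), "SELECT count(*) FROM repositories").Scan(&count)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("repositories: %d", count)
}
```
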


@ -11,7 +11,7 @@ type: index, howto
INFO:
Get external repo access and more by upgrading to GitLab Ultimate.
[Try a free 30-day trial now](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
[Try a free 30-day trial now](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
GitLab CI/CD can be used with [GitHub](github_integration.md), [Bitbucket Cloud](bitbucket_integration.md), or any other
Git server.


@ -13,7 +13,7 @@ last_update: 2019-07-03
INFO:
Get merge trains and more in GitLab Ultimate.
[Try a free 30-day trial now](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
[Try a free 30-day trial now](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
For more information about why you might want to use merge trains, read [How merge trains keep your master green](https://about.gitlab.com/blog/2020/01/30/all-aboard-merge-trains/).


@ -12,7 +12,7 @@ last_update: 2019-07-03
INFO:
Get these pipelines and more in GitLab Ultimate.
[Try a free 30-day trial now](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
[Try a free 30-day trial now](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-ci-cd-external-docs).
When you submit a merge request, you are requesting to merge changes from a
source branch into a target branch. By default, the CI pipeline runs jobs


@ -13,7 +13,7 @@ type: reference
INFO:
Create test cases in GitLab Ultimate.
[Try it free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-test-cases-docs).
[Try it free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-test-cases-docs).
Test cases in GitLab can help your teams create testing scenarios in their existing development platform.


@ -32,7 +32,7 @@ Individual Golang projects need to support multiple Go versions because:
- We must support the [official Omnibus GitLab Go version](#updating-go-version), which may be behind the latest minor release.
- When Omnibus switches Go version, we still may need to support the old one for security backports.
These 3 requirements may easily be satisfied by keeping support for the [3 latest minor versions of Go](https://golang.org/dl/).
These 3 requirements may easily be satisfied by keeping support for the [3 latest minor versions of Go](https://go.dev/dl/).
It is ok to drop support for the oldest Go version and support only the 2 latest releases,
if this is enough to support backports to the last 3 minor GitLab releases.
@ -52,12 +52,12 @@ in case of a critical security release.
We should always:
- Use the same Go version for Omnibus GitLab and Cloud Native GitLab.
- Use a [supported version](https://golang.org/doc/devel/release#policy).
- Use a [supported version](https://go.dev/doc/devel/release#policy).
- Use the most recent patch-level for that version to keep up with security fixes.
Changing the version affects every project being compiled, so it's important to
ensure that all projects have been updated to test against the new Go version
before changing the package builders to use it. Despite [Go's compatibility promise](https://golang.org/doc/go1compat),
before changing the package builders to use it. Despite [Go's compatibility promise](https://go.dev/doc/go1compat),
changes between minor versions can expose bugs or cause problems in our projects.
### Upgrade process


@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Go standards and style guidelines
This document describes various guidelines and best practices for GitLab
projects using the [Go language](https://golang.org).
projects using the [Go language](https://go.dev/).
## Overview
@ -103,7 +103,7 @@ projects:
- Use `goimports` before committing.
[`goimports`](https://pkg.go.dev/golang.org/x/tools/cmd/goimports)
is a tool that automatically formats Go source code using
[`Gofmt`](https://golang.org/cmd/gofmt/), in addition to formatting import lines,
[`Gofmt`](https://pkg.go.dev/cmd/gofmt), in addition to formatting import lines,
adding missing ones and removing unreferenced ones.
Most editors/IDEs allow you to run commands before/after saving a file, you can set it
@ -196,7 +196,7 @@ deploy a new pod, migrating the data automatically.
### Testing frameworks
We should not use any specific library or framework for testing, as the
[standard library](https://golang.org/pkg/) provides already everything to get
[standard library](https://pkg.go.dev/std) provides already everything to get
started. If there is a need for more sophisticated testing tools, the following
external dependencies might be worth considering in case we decide to use a specific
library or framework:
@ -279,7 +279,7 @@ to make the test output easily readable.
### Benchmarks
Programs handling a lot of IO or complex operations should always include
[benchmarks](https://golang.org/pkg/testing/#hdr-Benchmarks), to ensure
[benchmarks](https://pkg.go.dev/testing#hdr-Benchmarks), to ensure
performance consistency over time.
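
A minimal example of the kind of benchmark meant here, using only the standard `testing` package (the function name and workload are illustrative):

```go
package parser_test

import (
	"strings"
	"testing"
)

// BenchmarkSplit measures a simple string operation; real benchmarks
// would exercise the IO-heavy or complex code paths described above.
func BenchmarkSplit(b *testing.B) {
	input := strings.Repeat("a,b,c,", 1000)
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_ = strings.Split(input, ",")
	}
}
```

Run it with `go test -bench=. -benchmem` to include allocation statistics in the output.
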
## Error handling
@ -435,7 +435,7 @@ The following are some style guidelines that are specific to the Secure Team.
Use `goimports -local gitlab.com/gitlab-org` before committing.
[`goimports`](https://pkg.go.dev/golang.org/x/tools/cmd/goimports)
is a tool that automatically formats Go source code using
[`Gofmt`](https://golang.org/cmd/gofmt/), in addition to formatting import lines,
[`Gofmt`](https://pkg.go.dev/cmd/gofmt), in addition to formatting import lines,
adding missing ones and removing unreferenced ones.
By using the `-local gitlab.com/gitlab-org` option, `goimports` groups locally referenced
packages separately from external ones. See


@ -298,7 +298,7 @@ this makes it possible to debug the problem without having to change the log lev
#### common `logutil` package
If you are using [go](https://golang.org/) and
If you are using [go](https://go.dev/) and
[common](https://gitlab.com/gitlab-org/security-products/analyzers/common),
then it is suggested that you use [Logrus](https://github.com/Sirupsen/logrus)
and [common's `logutil` package](https://gitlab.com/gitlab-org/security-products/analyzers/common/-/tree/master/logutil)


@ -188,7 +188,7 @@ and [possessive quantifiers](https://www.regular-expressions.info/possessive.htm
#### Go
Go's [`regexp`](https://golang.org/pkg/regexp/) package uses `re2` and isn't vulnerable to backtracking issues.
Go's [`regexp`](https://pkg.go.dev/regexp) package uses `re2` and isn't vulnerable to backtracking issues.
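
To illustrate the point, the pattern below is a classic trigger for catastrophic backtracking in PCRE-style engines, yet matches in linear time under `regexp`/`re2` (standalone sketch):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// An "evil" pattern for backtracking engines; harmless under re2.
	re := regexp.MustCompile(`^(a+)+$`)
	input := strings.Repeat("a", 50000) + "!"
	// Matching cost is linear in the input size, so this returns quickly.
	fmt.Println(re.MatchString(input))
}
```
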
## Further Links


@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
GitLab consists of many various services and sub-projects. The majority of
their backend code is written in [Ruby](https://www.ruby-lang.org) and
[Go](https://golang.org). However, some of them use shell scripts for
[Go](https://go.dev/). However, some of them use shell scripts for
automation of routine system administration tasks like deployment,
installation, etc. It's being done either for historical reasons or as an effort
to minimize the dependencies, for instance, for Docker images.


@ -244,13 +244,13 @@ sudo make install
GitLab has several daemons written in Go. To install
GitLab we need a Go compiler. The instructions below assume you use 64-bit
Linux. You can find downloads for other platforms at the [Go download
page](https://golang.org/dl).
page](https://go.dev/dl).
```shell
# Remove former Go installation folder
sudo rm -rf /usr/local/go
curl --remote-name --progress-bar "https://golang.org/dl/go1.16.10.linux-amd64.tar.gz"
curl --remote-name --progress-bar "https://go.dev/dl/go1.16.10.linux-amd64.tar.gz"
echo '414cd18ce1d193769b9e97d2401ad718755ab47816e13b2a1cde203d263b55cf go1.16.10.linux-amd64.tar.gz' | shasum -a256 -c - && \
sudo tar -C /usr/local -xzf go1.16.10.linux-amd64.tar.gz
sudo ln -sf /usr/local/go/bin/{go,gofmt} /usr/local/bin/


@ -16,7 +16,7 @@ enforcing a special format for commit messages.
INFO:
Get access to push rules and more with a
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-push-rules-docs).
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-push-rules-docs).
Push rules are [pre-receive Git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) you
can enable in a user-friendly interface. They are defined either:


@ -9,7 +9,7 @@ type: index, reference
INFO:
Get advanced search and more with
[a trial of GitLab Ultimate](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-subscription-docs).
[a trial of GitLab Ultimate](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-subscription-docs).
Free for 30 days.
GitLab offers tiers of features. Your subscription determines which tier you


@ -111,7 +111,7 @@ Download and install Go (for Linux, 64-bit):
# Remove former Go installation folder
sudo rm -rf /usr/local/go
curl --remote-name --progress-bar "https://golang.org/dl/go1.16.10.linux-amd64.tar.gz"
curl --remote-name --progress-bar "https://go.dev/dl/go1.16.10.linux-amd64.tar.gz"
echo '414cd18ce1d193769b9e97d2401ad718755ab47816e13b2a1cde203d263b55cf go1.16.10.linux-amd64.tar.gz' | shasum -a256 -c - && \
sudo tar -C /usr/local -xzf go1.16.10.linux-amd64.tar.gz
sudo ln -sf /usr/local/go/bin/{go,gofmt} /usr/local/bin/


@ -14,7 +14,7 @@ miss.
INFO:
Try fuzz testing in GitLab Ultimate.
[It's free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-api-fuzzing-docs).
[It's free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-api-fuzzing-docs).
We recommend that you use fuzz testing in addition to [GitLab Secure](../index.md)'s
other security scanners and your own test processes. If you're using [GitLab CI/CD](../../../ci/index.md),


@ -11,7 +11,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
INFO:
Want to try out container scanning?
[Get a free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-container-scanning-docs).
[Get a free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-container-scanning-docs).
Your application's Docker image may itself be based on Docker images that contain known
vulnerabilities. By including an extra job in your pipeline that scans for those vulnerabilities and


@ -61,14 +61,14 @@ The browser-based crawler can be configured using CI/CD variables.
| `DAST_BROWSER_NUMBER_OF_BROWSERS` | number | `3` | The maximum number of concurrent browser instances to use. For shared runners on GitLab.com, we recommended a maximum of three. Private runners with more resources may benefit from a higher number, but are likely to produce little benefit after five to seven instances. |
| `DAST_BROWSER_COOKIES` | dictionary | `abtesting_group:3,region:locked` | A cookie name and value to be added to every request. |
| `DAST_BROWSER_LOG` | List of strings | `brows:debug,auth:debug` | A list of modules and their intended log level. |
| `DAST_BROWSER_NAVIGATION_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `15s` | The maximum amount of time to wait for a browser to navigate from one page to another. |
| `DAST_BROWSER_ACTION_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to complete an action. |
| `DAST_BROWSER_STABILITY_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis. |
| `DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after a navigation completes. |
| `DAST_BROWSER_ACTION_STABILITY_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `800ms` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after completing an action. |
| `DAST_BROWSER_SEARCH_ELEMENT_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `3s` | The maximum amount of time to allow the browser to search for new elements or navigations. |
| `DAST_BROWSER_EXTRACT_ELEMENT_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `5s` | The maximum amount of time to allow the browser to extract newly found elements or navigations. |
| `DAST_BROWSER_ELEMENT_TIMEOUT` | [Duration string](https://golang.org/pkg/time/#ParseDuration) | `600ms` | The maximum amount of time to wait for an element before determining it is ready for analysis. |
| `DAST_BROWSER_NAVIGATION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `15s` | The maximum amount of time to wait for a browser to navigate from one page to another. |
| `DAST_BROWSER_ACTION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to complete an action. |
| `DAST_BROWSER_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis. |
| `DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after a navigation completes. |
| `DAST_BROWSER_ACTION_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `800ms` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after completing an action. |
| `DAST_BROWSER_SEARCH_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `3s` | The maximum amount of time to allow the browser to search for new elements or navigations. |
| `DAST_BROWSER_EXTRACT_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `5s` | The maximum amount of time to allow the browser to extract newly found elements or navigations. |
| `DAST_BROWSER_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `600ms` | The maximum amount of time to wait for an element before determining it is ready for analysis. |
| `DAST_BROWSER_PAGE_READY_SELECTOR` | selector | `css:#page-is-ready` | Selector that when detected as visible on the page, indicates to the analyzer that the page has finished loading and the scan can continue. Note: When this selector is set, but the element is not found, the scanner waits for the period defined in `DAST_BROWSER_STABILITY_TIMEOUT` before continuing the scan. This can significantly increase scanning time if the element is not present on multiple pages within the site. |
The [DAST variables](index.md#available-cicd-variables) `SECURE_ANALYZERS_PREFIX`, `DAST_FULL_SCAN_ENABLED`, `DAST_AUTO_UPDATE_ADDONS`, `DAST_EXCLUDE_RULES`, `DAST_REQUEST_HEADERS`, `DAST_HTML_REPORT`, `DAST_MARKDOWN_REPORT`, `DAST_XML_REPORT`,
@ -100,7 +100,7 @@ You can manage the trade-off between coverage and scan time with the following m
Due to poor network conditions or heavy application load, the default timeouts may not be applicable to your application.
Browser-based scans offer the ability to adjust various timeouts to ensure it continues smoothly as it transitions from one page to the next. These values are configured using a [Duration string](https://golang.org/pkg/time/#ParseDuration), which allow you to configure durations with a prefix: `m` for minutes, `s` for seconds, and `ms` for milliseconds.
Browser-based scans offer the ability to adjust various timeouts to ensure it continues smoothly as it transitions from one page to the next. These values are configured using a [Duration string](https://pkg.go.dev/time#ParseDuration), which allow you to configure durations with a prefix: `m` for minutes, `s` for seconds, and `ms` for milliseconds.
Navigations, or the act of loading a new page, usually require the most amount of time because they are
loading multiple new resources such as JavaScript or CSS files. Depending on the size of these resources, or the speed at which they are returned, the default `DAST_BROWSER_NAVIGATION_TIMEOUT` may not be sufficient.
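
As a quick sanity check, candidate override values can be validated against the same duration syntax locally. A standalone sketch (not part of the analyzer; the values are examples only):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Candidate overrides for DAST_BROWSER_NAVIGATION_TIMEOUT and related variables.
	candidates := []string{"45s", "1m30s", "800ms", "15"}
	for _, c := range candidates {
		if _, err := time.ParseDuration(c); err != nil {
			// "15" fails here because the unit suffix is missing.
			fmt.Printf("%q is not a valid duration string: %v\n", c, err)
			continue
		}
		fmt.Printf("%q is valid\n", c)
	}
}
```
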


@ -18,7 +18,7 @@ tool [OWASP Zed Attack Proxy](https://www.zaproxy.org/) for analysis.
INFO:
Want to try out security scanning?
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-dast-docs).
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-dast-docs).
After DAST creates its report, GitLab evaluates it for discovered
vulnerabilities between the source and target branches. Relevant


@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
INFO:
Try out Dependency Scanning in GitLab Ultimate.
[It's free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-dependency-scanning-docs).
[It's free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-dependency-scanning-docs).
The Dependency Scanning feature can automatically find security vulnerabilities in your
dependencies while you're developing and testing your applications. For example, dependency scanning
@ -146,7 +146,7 @@ table.supported-languages ul {
<tr>
<td>Go</td>
<td>N/A</td>
<td><a href="https://golang.org/">Go</a></td>
<td><a href="https://go.dev/">Go</a></td>
<td><code>go.sum</code></td>
<td><a href="https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium">Gemnasium</a></td>
<td>Y</td>


@ -18,7 +18,7 @@ actionable information _before_ changes are merged enables you to be proactive.
INFO:
Want to try out security scanning?
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-application-security-docs).
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-application-security-docs).
GitLab also provides high-level statistics of vulnerabilities across projects and groups:


@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
INFO:
Want to try out security scanning?
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-security-dashboard-docs).
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-security-dashboard-docs).
GitLab provides a comprehensive set of features for viewing and managing vulnerabilities:


@ -20,7 +20,7 @@ The Agent is installed into the cluster through code, providing you with a fast,
INFO:
Get Network Security Alerts in GitLab by upgrading to Ultimate.
[Try a free 30-day trial now](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-cluster-agent-docs).
[Try a free 30-day trial now](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-cluster-agent-docs).
With GitOps, you can manage containerized clusters and applications from a Git repository that:


@ -16,7 +16,7 @@ is incompatible with yours, then you can deny the use of that license.
INFO:
Try License Compliance scanning to search project dependencies in GitLab Ultimate.
[It's free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=u-compliance-docs).
[It's free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=u-compliance-docs).
You can take advantage of License Compliance by either:
@ -545,24 +545,24 @@ configured to use this as the default `CA_CERT_PATH`.
### Configuring Go projects
To configure [Go modules](https://github.com/golang/go/wiki/Modules)
based projects, specify [CI/CD variables](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
based projects, specify [CI/CD variables](https://pkg.go.dev/cmd/go#hdr-Environment_variables)
in the `license_scanning` job's [variables](#available-cicd-variables) section in `.gitlab-ci.yml`.
If a project has [vendored](https://golang.org/pkg/cmd/go/#hdr-Vendor_Directories) its modules,
If a project has [vendored](https://pkg.go.dev/cmd/go#hdr-Vendor_Directories) its modules,
then the combination of the `vendor` directory and `mod.sum` file are used to detect the software
licenses associated with the Go module dependencies.
#### Using private Go registries
You can use the [`GOPRIVATE`](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
and [`GOPROXY`](https://golang.org/pkg/cmd/go/#hdr-Environment_variables)
You can use the [`GOPRIVATE`](https://pkg.go.dev/cmd/go#hdr-Environment_variables)
and [`GOPROXY`](https://pkg.go.dev/cmd/go#hdr-Environment_variables)
environment variables to control where modules are sourced from. Alternatively, you can use
[`go mod vendor`](https://golang.org/ref/mod#tmp_28) to vendor a project's modules.
[`go mod vendor`](https://go.dev/ref/mod#tmp_28) to vendor a project's modules.
#### Custom root certificates for Go
You can specify the [`-insecure`](https://golang.org/pkg/cmd/go/internal/get/) flag by exporting the
[`GOFLAGS`](https://golang.org/cmd/go/#hdr-Environment_variables)
You can specify the [`-insecure`](https://pkg.go.dev/cmd/go/internal/get) flag by exporting the
[`GOFLAGS`](https://pkg.go.dev/cmd/go#hdr-Environment_variables)
environment variable. For example:
```yaml


@ -11,7 +11,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
INFO:
Try epic boards and more with a
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-epics-boards-docs).
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-epics-boards-docs).
Epic boards build on the existing [epic tracking functionality](index.md) and
[labels](../../project/labels.md). Your epics appear as cards in vertical lists, organized by their assigned


@ -12,7 +12,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
INFO:
Check out [multi-level child epics](manage_epics.md#multi-level-child-epics) with a
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-epics-docs).
[free 30-day trial of GitLab Ultimate](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-epics-docs).
When [issues](../../project/issues/index.md) share a theme across projects and milestones,
you can manage them by using epics.


@ -16,7 +16,7 @@ SAML on GitLab.com allows users to sign in through their SAML identity provider.
INFO:
Use your own SAML authentication to log in to [GitLab.com](http://gitlab.com/).
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial?glm_source=docs.gitlab.com&glm_content=p-saml-sso-docs).
[Try GitLab Ultimate free for 30 days](https://about.gitlab.com/free-trial/index.html?glm_source=docs.gitlab.com&glm_content=p-saml-sso-docs).
User synchronization of SAML SSO groups is supported through [SCIM](scim_setup.md). SCIM supports adding and removing users from the GitLab group automatically.
For example, if you remove a user from the SCIM app, SCIM removes that same user from the GitLab group.


@ -144,8 +144,8 @@ If you're unfamiliar with managing dependencies in Go, or Go in general, review
the following documentation:
- [Dependency Management in Go](../../../development/go_guide/dependencies.md)
- [Go Modules Reference](https://golang.org/ref/mod)
- [Documentation (`golang.org`)](https://golang.org/doc/)
- [Go Modules Reference](https://go.dev/ref/mod)
- [Documentation (`golang.org`)](https://go.dev/doc/)
- [Learn (`go.dev/learn`)](https://go.dev/learn/)
### Set environment variables


@ -539,7 +539,7 @@ server that has Python 3 installed, and may not work on other operating systems
or with other versions of Python.
1. Install Certbot by running the
[`certbot-auto` wrapper script](https://certbot.eff.org/docs/install.html#certbot-auto).
[`certbot-auto` wrapper script](https://eff-certbot.readthedocs.io/install.html#certbot-auto).
On the command line of your server, run the following commands:
```shell

Some files were not shown because too many files have changed in this diff.