Add latest changes from gitlab-org/gitlab@master

parent 707c0eca50
commit 4a3ba3e5f2
@@ -435,4 +435,7 @@ Rails/TimeZone:
     - 'ee/app/services/**/*'
     - 'ee/spec/controllers/**/*'
     - 'ee/spec/services/**/*'
+    - 'app/models/**/*'
+    - 'spec/models/**/*'
+    - 'ee/app/models/**/*'
     - 'ee/spec/models/**/*'

@@ -14,6 +14,7 @@ import Editor from '../lib/editor';
 import FileTemplatesBar from './file_templates/bar.vue';
 import { __ } from '~/locale';
 import { extractMarkdownImagesFromEntries } from '../stores/utils';
+import { addFinalNewline } from '../utils';

 export default {
   components: {

@@ -31,6 +32,7 @@ export default {
     return {
       content: '',
       images: {},
+      addFinalNewline: true,
    };
  },
  computed: {

@@ -247,13 +249,14 @@ export default {

      this.model.onChange(model => {
        const { file } = model;
+       if (!file.active) return;

-       if (file.active) {
-         this.changeFileContent({
-           path: file.path,
-           content: model.getModel().getValue(),
-         });
-       }
+       const monacoModel = model.getModel();
+       const content = monacoModel.getValue();
+       this.changeFileContent({
+         path: file.path,
+         content: this.addFinalNewline ? addFinalNewline(content, monacoModel.getEOL()) : content,
+       });
      });

      // Handle Cursor Position

@@ -4,7 +4,7 @@ import eventHub from '../../eventhub';
 import service from '../../services';
 import * as types from '../mutation_types';
 import router from '../../ide_router';
-import { addFinalNewlineIfNeeded, setPageTitleForFile } from '../utils';
+import { setPageTitleForFile } from '../utils';
 import { viewerTypes, stageKeys } from '../../constants';

 export const closeFile = ({ commit, state, dispatch }, file) => {

@@ -152,7 +152,7 @@ export const changeFileContent = ({ commit, state, getters }, { path, content })
  const file = state.entries[path];
  commit(types.UPDATE_FILE_CONTENT, {
    path,
-   content: addFinalNewlineIfNeeded(content),
+   content,
  });

  const indexOfChangedFile = state.changedFiles.findIndex(f => f.path === path);

@@ -272,10 +272,6 @@ export const pathsAreEqual = (a, b) => {
  return cleanA === cleanB;
};

-// if the contents of a file dont end with a newline, this function adds a newline
-export const addFinalNewlineIfNeeded = content =>
-  content.charAt(content.length - 1) !== '\n' ? `${content}\n` : content;
-
export function extractMarkdownImagesFromEntries(mdFile, entries) {
  /**
   * Regex to identify an image tag in markdown, like:

@@ -76,3 +76,21 @@ export function registerLanguages(def, ...defs) {
}

export const otherSide = side => (side === SIDE_RIGHT ? SIDE_LEFT : SIDE_RIGHT);
+
+export function addFinalNewline(content, eol = '\n') {
+  return content.slice(-eol.length) !== eol ? `${content}${eol}` : content;
+}
+
+export function getPathParents(path) {
+  const pathComponents = path.split('/');
+  const paths = [];
+  while (pathComponents.length) {
+    pathComponents.pop();
+
+    let parentPath = pathComponents.join('/');
+    if (parentPath.startsWith('/')) parentPath = parentPath.slice(1);
+    if (parentPath) paths.push(parentPath);
+  }
+
+  return paths;
+}

@@ -899,7 +899,7 @@ $ide-commit-header-height: 48px;
  @include ide-trace-view();

  svg {
-   --svg-status-bg: var(--ide-background, $white);
+   --svg-status-bg: var(--ide-background, #{$white});
  }

  .empty-state {

@@ -113,7 +113,7 @@ class Projects::ArtifactsController < Projects::ApplicationController

  def build
    @build ||= begin
-     build = build_from_id || build_from_ref
+     build = build_from_id || build_from_sha || build_from_ref
      build&.present(current_user: current_user)
    end
  end

@@ -127,7 +127,8 @@ class Projects::ArtifactsController < Projects::ApplicationController
    project.builds.find_by_id(params[:job_id]) if params[:job_id]
  end

- def build_from_ref
+ def build_from_sha
    return if params[:job].blank?
    return unless @ref_name

    commit = project.commit(@ref_name)

@@ -136,6 +137,13 @@ class Projects::ArtifactsController < Projects::ApplicationController
    project.latest_successful_build_for_sha(params[:job], commit.id)
  end

+ def build_from_ref
+   return if params[:job].blank?
+   return unless @ref_name
+
+   project.latest_successful_build_for_ref(params[:job], @ref_name)
+ end
+
  def artifacts_file
    @artifacts_file ||= build&.artifacts_file_for_type(params[:file_type] || :archive)
  end
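
A note on the fix above: the lookup now tries an explicit job ID, then the commit the ref currently points at, and only then the ref-name lookup, which is what previously 404'd. A minimal sketch of the two model-level lookups the controller relies on, with a hypothetical job name and ref:

```ruby
# Assumes a project with a job named 'build-job' on 'master' (hypothetical names)
commit = project.commit('master')
project.latest_successful_build_for_sha('build-job', commit.id)  # new: by commit SHA
project.latest_successful_build_for_ref('build-job', 'master')   # fallback: by ref name
```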

@@ -261,6 +261,8 @@ module ApplicationSettingsHelper
      :sourcegraph_enabled,
      :sourcegraph_url,
      :sourcegraph_public_only,
+     :spam_check_endpoint_enabled,
+     :spam_check_endpoint_url,
      :terminal_max_session_time,
      :terms,
      :throttle_authenticated_api_enabled,

@@ -301,6 +301,13 @@ class ApplicationSetting < ApplicationRecord
            numericality: { greater_than: 0, less_than_or_equal_to: 10 },
            if: :external_authorization_service_enabled

+ validates :spam_check_endpoint_url,
+           addressable_url: true, allow_blank: true
+
+ validates :spam_check_endpoint_url,
+           presence: true,
+           if: :spam_check_endpoint_enabled
+
  validates :external_auth_client_key,
            presence: true,
            if: -> (setting) { setting.external_auth_client_cert.present? }

@@ -115,6 +115,8 @@ module ApplicationSettingImplementation
      sourcegraph_enabled: false,
      sourcegraph_url: nil,
      sourcegraph_public_only: true,
+     spam_check_endpoint_enabled: false,
+     spam_check_endpoint_url: nil,
      minimum_password_length: DEFAULT_MINIMUM_PASSWORD_LENGTH,
      namespace_storage_size_limit: 0,
      terminal_max_session_time: 0,

@@ -151,7 +153,7 @@ module ApplicationSettingImplementation
      snowplow_app_id: nil,
      snowplow_iglu_registry_url: nil,
      custom_http_clone_url_root: nil,
-     productivity_analytics_start_date: Time.now,
+     productivity_analytics_start_date: Time.current,
      snippet_size_limit: 50.megabytes
    }
  end
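
The many `Time.now` → `Time.current` swaps in this commit are behavioral, not cosmetic: `Time.current` (an ActiveSupport helper) honors the configured Rails time zone, while `Time.now` uses the server's system zone. A quick sketch, assuming an application zone of `'UTC'` on a server whose system zone differs:

```ruby
Time.zone = 'UTC'    # the Rails application zone

Time.now.class       # => Time (system zone, e.g. EST on a US-east server)
Time.current.class   # => ActiveSupport::TimeWithZone
Time.current.zone    # => "UTC", regardless of the system zone
```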

@@ -14,7 +14,7 @@ class BoardGroupRecentVisit < ApplicationRecord

  def self.visited!(user, board)
    visit = find_or_create_by(user: user, group: board.group, board: board)
-   visit.touch if visit.updated_at < Time.now
+   visit.touch if visit.updated_at < Time.current
  rescue ActiveRecord::RecordNotUnique
    retry
  end

@@ -14,7 +14,7 @@ class BoardProjectRecentVisit < ApplicationRecord

  def self.visited!(user, board)
    visit = find_or_create_by(user: user, project: board.project, board: board)
-   visit.touch if visit.updated_at < Time.now
+   visit.touch if visit.updated_at < Time.current
  rescue ActiveRecord::RecordNotUnique
    retry
  end

@@ -137,8 +137,8 @@ module Ci
        .includes(:metadata, :job_artifacts_metadata)
    end

-   scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
-   scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.now) }
+   scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
+   scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
    scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
    scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
    scope :scheduled_actions, ->() { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }

@@ -259,7 +259,7 @@ module Ci
    end

    before_transition any => :waiting_for_resource do |build|
-     build.waiting_for_resource_at = Time.now
+     build.waiting_for_resource_at = Time.current
    end

    before_transition on: :enqueue_waiting_for_resource do |build|

@@ -713,7 +713,7 @@ module Ci
    end

    def needs_touch?
-     Time.now - updated_at > 15.minutes.to_i
+     Time.current - updated_at > 15.minutes.to_i
    end

    def valid_token?(token)

@@ -776,11 +776,11 @@ module Ci
    end

    def artifacts_expired?
-     artifacts_expire_at && artifacts_expire_at < Time.now
+     artifacts_expire_at && artifacts_expire_at < Time.current
    end

    def artifacts_expire_in
-     artifacts_expire_at - Time.now if artifacts_expire_at
+     artifacts_expire_at - Time.current if artifacts_expire_at
    end

    def artifacts_expire_in=(value)

@@ -993,7 +993,7 @@ module Ci
    end

    def update_erased!(user = nil)
-     self.update(erased_by: user, erased_at: Time.now, artifacts_expire_at: nil)
+     self.update(erased_by: user, erased_at: Time.current, artifacts_expire_at: nil)
    end

    def unscoped_project

@@ -1026,7 +1026,7 @@ module Ci
    end

    def has_expiring_artifacts?
-     artifacts_expire_at.present? && artifacts_expire_at > Time.now
+     artifacts_expire_at.present? && artifacts_expire_at > Time.current
    end

    def job_jwt_variables

@@ -148,7 +148,7 @@ module Ci
      where(file_type: types)
    end

-   scope :expired, -> (limit) { where('expire_at < ?', Time.now).limit(limit) }
+   scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
    scope :locked, -> { where(locked: true) }
    scope :unlocked, -> { where(locked: [false, nil]) }

@@ -244,7 +244,7 @@ module Ci
    end

    def expire_in
-     expire_at - Time.now if expire_at
+     expire_at - Time.current if expire_at
    end

    def expire_in=(value)

@@ -163,11 +163,11 @@ module Ci
      # Create a separate worker for each new operation

      before_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
-       pipeline.started_at = Time.now
+       pipeline.started_at = Time.current
      end

      before_transition any => [:success, :failed, :canceled] do |pipeline|
-       pipeline.finished_at = Time.now
+       pipeline.finished_at = Time.current
        pipeline.update_duration
      end

@@ -273,7 +273,7 @@ module Ci

    def update_cached_info(values)
      values = values&.slice(:version, :revision, :platform, :architecture, :ip_address) || {}
-     values[:contacted_at] = Time.now
+     values[:contacted_at] = Time.current

      cache_attributes(values)

@@ -309,7 +309,7 @@ module Ci

      real_contacted_at = read_attribute(:contacted_at)
      real_contacted_at.nil? ||
-       (Time.now - real_contacted_at) >= contacted_at_max_age
+       (Time.current - real_contacted_at) >= contacted_at_max_age
    end

    def tag_constraints

@@ -37,7 +37,7 @@ module Clusters
      end

      after_transition any => :updating do |application|
-       application.update(last_update_started_at: Time.now)
+       application.update(last_update_started_at: Time.current)
      end
    end

@@ -136,15 +136,15 @@ class CommitStatus < ApplicationRecord
    end

    before_transition [:created, :waiting_for_resource, :preparing, :skipped, :manual, :scheduled] => :pending do |commit_status|
-     commit_status.queued_at = Time.now
+     commit_status.queued_at = Time.current
    end

    before_transition [:created, :preparing, :pending] => :running do |commit_status|
-     commit_status.started_at = Time.now
+     commit_status.started_at = Time.current
    end

    before_transition any => [:success, :failed, :canceled] do |commit_status|
-     commit_status.finished_at = Time.now
+     commit_status.finished_at = Time.current
    end

    before_transition any => :failed do |commit_status, transition|

@@ -17,7 +17,7 @@ module EachBatch
  # Example:
  #
  # User.each_batch do |relation|
- #   relation.update_all(updated_at: Time.now)
+ #   relation.update_all(updated_at: Time.current)
  # end
  #
  # The supplied block is also passed an optional batch index:

@@ -160,7 +160,7 @@ module HasStatus
      if started_at && finished_at
        finished_at - started_at
      elsif started_at
-       Time.now - started_at
+       Time.current - started_at
      end
    end
  end

@@ -24,7 +24,7 @@ module Noteable
  # The timestamp of the note (e.g. the :created_at or :updated_at attribute if provided via
  # API call)
  def system_note_timestamp
-   @system_note_timestamp || Time.now # rubocop:disable Gitlab/ModuleWithInstanceVariables
+   @system_note_timestamp || Time.current # rubocop:disable Gitlab/ModuleWithInstanceVariables
  end

  attr_writer :system_note_timestamp

@@ -44,7 +44,7 @@ module PrometheusAdapter
      {
        success: true,
        data: data,
-       last_update: Time.now.utc
+       last_update: Time.current.utc
      }
    rescue Gitlab::PrometheusClient::Error => err
      { success: false, result: err.message }

@@ -23,7 +23,7 @@ module ResolvableNote
  class_methods do
    # This method must be kept in sync with `#resolve!`
    def resolve!(current_user)
-     unresolved.update_all(resolved_at: Time.now, resolved_by_id: current_user.id)
+     unresolved.update_all(resolved_at: Time.current, resolved_by_id: current_user.id)
    end

    # This method must be kept in sync with `#unresolve!`

@@ -57,7 +57,7 @@ module ResolvableNote
    return false unless resolvable?
    return false if resolved?

-   self.resolved_at = Time.now
+   self.resolved_at = Time.current
    self.resolved_by = current_user
    self.resolved_by_push = resolved_by_push

@@ -64,7 +64,7 @@ class Deployment < ApplicationRecord
  end

  before_transition any => [:success, :failed, :canceled] do |deployment|
-   deployment.finished_at = Time.now
+   deployment.finished_at = Time.current
  end

  after_transition any => :success do |deployment|

@@ -339,7 +339,7 @@ class Environment < ApplicationRecord
  end

  def auto_stop_in
-   auto_stop_at - Time.now if auto_stop_at
+   auto_stop_at - Time.current if auto_stop_at
  end

  def auto_stop_in=(value)

@@ -11,11 +11,11 @@ class Issue::Metrics < ApplicationRecord

  def record!
    if issue.milestone_id.present? && self.first_associated_with_milestone_at.blank?
-     self.first_associated_with_milestone_at = Time.now
+     self.first_associated_with_milestone_at = Time.current
    end

    if issue_assigned_to_list_label? && self.first_added_to_board_at.blank?
-     self.first_added_to_board_at = Time.now
+     self.first_added_to_board_at = Time.current
    end

    self.save

@@ -47,7 +47,7 @@ class JiraImportState < ApplicationRecord
    after_transition initial: :scheduled do |state, _|
      state.run_after_commit do
        job_id = Gitlab::JiraImport::Stage::StartImportWorker.perform_async(project.id)
-       state.update(jid: job_id, scheduled_at: Time.now) if job_id
+       state.update(jid: job_id, scheduled_at: Time.current) if job_id
      end
    end

@@ -39,7 +39,7 @@ class LicenseTemplate
  end

  # Populate placeholders in the LicenseTemplate content
- def resolve!(project_name: nil, fullname: nil, year: Time.now.year.to_s)
+ def resolve!(project_name: nil, fullname: nil, year: Time.current.year.to_s)
    # Ensure the string isn't shared with any other instance of LicenseTemplate
    new_content = content.dup
    new_content.gsub!(YEAR_TEMPLATE_REGEX, year) if year.present?

@@ -320,7 +320,7 @@ class Member < ApplicationRecord
    return false unless invite?

    self.invite_token = nil
-   self.invite_accepted_at = Time.now.utc
+   self.invite_accepted_at = Time.current.utc

    self.user = new_user

@@ -277,7 +277,7 @@ class Namespace < ApplicationRecord
  end

  def has_parent?
-   parent.present?
+   parent_id.present? || parent.present?
  end

  def root_ancestor
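
The `has_parent?` change reads the foreign key first, so a persisted namespace no longer loads its parent record just to answer the question; `parent.present?` remains as a fallback for in-memory associations not yet reflected in `parent_id`. A rough sketch (the lookup id is hypothetical):

```ruby
namespace = Namespace.find(some_id)
namespace.parent_id.present?   # plain attribute read, no extra query
namespace.parent.present?      # may trigger a query to load the parent

# A record built in memory can carry the association without an id:
child = Namespace.new(parent: Group.new(name: 'g'))
child.has_parent?              # => true, via the parent.present? fallback
```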

@@ -49,11 +49,11 @@ class PagesDomain < ApplicationRecord
  after_update :update_daemon, if: :saved_change_to_pages_config?
  after_destroy :update_daemon

- scope :enabled, -> { where('enabled_until >= ?', Time.now ) }
+ scope :enabled, -> { where('enabled_until >= ?', Time.current ) }
  scope :needs_verification, -> do
    verified_at = arel_table[:verified_at]
    enabled_until = arel_table[:enabled_until]
-   threshold = Time.now + VERIFICATION_THRESHOLD
+   threshold = Time.current + VERIFICATION_THRESHOLD

    where(verified_at.eq(nil).or(enabled_until.eq(nil).or(enabled_until.lt(threshold))))
  end

@@ -69,7 +69,7 @@ class PagesDomain < ApplicationRecord
    from_union([user_provided, certificate_not_valid, certificate_expiring])
  end

- scope :for_removal, -> { where("remove_at < ?", Time.now) }
+ scope :for_removal, -> { where("remove_at < ?", Time.current) }

  scope :with_logging_info, -> { includes(project: [:namespace, :route]) }

@@ -141,7 +141,7 @@ class PagesDomain < ApplicationRecord
  def expired?
    return false unless x509

-   current = Time.new
+   current = Time.current
    current < x509.not_before || x509.not_after < current
  end

@@ -3,7 +3,7 @@
class PagesDomainAcmeOrder < ApplicationRecord
  belongs_to :pages_domain

- scope :expired, -> { where("expires_at < ?", Time.now) }
+ scope :expired, -> { where("expires_at < ?", Time.current) }

  validates :pages_domain, presence: true
  validates :expires_at, presence: true

@@ -506,6 +506,10 @@ class Project < ApplicationRecord
    left_outer_joins(:pages_metadatum)
      .where(project_pages_metadata: { project_id: nil })
  end
+ scope :with_api_entity_associations, -> {
+   preload(:project_feature, :route, :tags,
+           group: :ip_restrictions, namespace: [:route, :owner])
+ }

  enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }

@@ -1036,7 +1040,7 @@ class Project < ApplicationRecord
    remote_mirrors.stuck.update_all(
      update_status: :failed,
      last_error: _('The remote mirror took to long to complete.'),
-     last_update_at: Time.now
+     last_update_at: Time.current
    )
  end

@@ -34,10 +34,4 @@ class PrometheusAlertEvent < ApplicationRecord
  def self.status_value_for(name)
    state_machines[:status].states[name].value
  end
-
- def self.payload_key_for(gitlab_alert_id, started_at)
-   plain = [gitlab_alert_id, started_at].join('/')
-
-   Digest::SHA1.hexdigest(plain)
- end
end

@@ -68,13 +68,13 @@ class RemoteMirror < ApplicationRecord
  after_transition any => :started do |remote_mirror, _|
    Gitlab::Metrics.add_event(:remote_mirrors_running)

-   remote_mirror.update(last_update_started_at: Time.now)
+   remote_mirror.update(last_update_started_at: Time.current)
  end

  after_transition started: :finished do |remote_mirror, _|
    Gitlab::Metrics.add_event(:remote_mirrors_finished)

-   timestamp = Time.now
+   timestamp = Time.current
    remote_mirror.update!(
      last_update_at: timestamp,
      last_successful_update_at: timestamp,

@@ -86,7 +86,7 @@ class RemoteMirror < ApplicationRecord
  after_transition started: :failed do |remote_mirror|
    Gitlab::Metrics.add_event(:remote_mirrors_failed)

-   remote_mirror.update(last_update_at: Time.now)
+   remote_mirror.update(last_update_at: Time.current)

    remote_mirror.run_after_commit do
      RemoteMirrorNotificationWorker.perform_async(remote_mirror.id)

@@ -144,9 +144,9 @@ class RemoteMirror < ApplicationRecord
    return unless sync?

    if recently_scheduled?
-     RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.now)
+     RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.current)
    else
-     RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.now)
+     RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.current)
    end
  end

@@ -261,7 +261,7 @@ class RemoteMirror < ApplicationRecord
  def recently_scheduled?
    return false unless self.last_update_started_at

-   self.last_update_started_at >= Time.now - backoff_delay
+   self.last_update_started_at >= Time.current - backoff_delay
  end

  def reset_fields

@@ -1171,7 +1171,7 @@ class Repository
      if target
        target.committed_date
      else
-       Time.now
+       Time.current
      end
    end
  end

@@ -42,7 +42,7 @@ class Route < ApplicationRecord
      old_path = route.path

      # Callbacks must be run manually
-     route.update_columns(attributes.merge(updated_at: Time.now))
+     route.update_columns(attributes.merge(updated_at: Time.current))

      # We are not calling route.delete_conflicting_redirects here, in hopes
      # of avoiding deadlocks. The parent (self, in this method) already

@@ -15,10 +15,4 @@ class SelfManagedPrometheusAlertEvent < ApplicationRecord
      yield event if block_given?
    end
  end
-
- def self.payload_key_for(started_at, alert_name, query_expression)
-   plain = [started_at, alert_name, query_expression].join('/')
-
-   Digest::SHA1.hexdigest(plain)
- end
end

@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class SnippetInputAction
+  include ActiveModel::Validations
+
+  ACTIONS = %w[create update delete move].freeze
+
+  ACTIONS.each do |action_const|
+    define_method "#{action_const}_action?" do
+      action == action_const
+    end
+  end
+
+  attr_reader :action, :previous_path, :file_path, :content
+
+  validates :action, inclusion: { in: ACTIONS, message: "%{value} is not a valid action" }
+  validates :previous_path, presence: true, if: :move_action?
+  validates :file_path, presence: true
+  validates :content, presence: true, if: :create_action?
+
+  def initialize(action: nil, previous_path: nil, file_path: nil, content: nil)
+    @action = action
+    @previous_path = previous_path
+    @file_path = file_path
+    @content = content
+  end
+
+  def to_commit_action
+    {
+      action: action&.to_sym,
+      previous_path: previous_path,
+      file_path: file_path,
+      content: content
+    }
+  end
+end

@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class SnippetInputActionCollection
+  include Gitlab::Utils::StrongMemoize
+
+  attr_reader :actions
+
+  delegate :empty?, to: :actions
+
+  def initialize(actions = [])
+    @actions = actions.map { |action| SnippetInputAction.new(action) }
+  end
+
+  def to_commit_actions
+    strong_memoize(:commit_actions) do
+      actions.map { |action| action.to_commit_action }
+    end
+  end
+
+  def valid?
+    strong_memoize(:valid) do
+      actions.all?(&:valid?)
+    end
+  end
+end
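
A short sketch of how the two new classes above fit together, with hypothetical input (the hash keys mirror `SnippetInputAction#initialize`):

```ruby
collection = SnippetInputActionCollection.new([
  { action: 'create', file_path: 'hello.rb', content: "puts 'hi'" },
  { action: 'move', previous_path: 'old.rb', file_path: 'new.rb' }
])

collection.valid?             # runs the ActiveModel validations on every action
collection.to_commit_actions  # => [{ action: :create, previous_path: nil, ... }, ...]
```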

@@ -110,7 +110,7 @@ class Todo < ApplicationRecord
    base = where.not(state: new_state).except(:order)
    ids = base.pluck(:id)

-   base.update_all(state: new_state, updated_at: Time.now)
+   base.update_all(state: new_state, updated_at: Time.current)

    ids
  end

@@ -688,7 +688,7 @@ class User < ApplicationRecord
    @reset_token, enc = Devise.token_generator.generate(self.class, :reset_password_token)

    self.reset_password_token = enc
-   self.reset_password_sent_at = Time.now.utc
+   self.reset_password_sent_at = Time.current.utc

    @reset_token
  end

@@ -1126,7 +1126,7 @@ class User < ApplicationRecord
    if !Gitlab.config.ldap.enabled
      false
    elsif ldap_user?
-     !last_credential_check_at || (last_credential_check_at + ldap_sync_time) < Time.now
+     !last_credential_check_at || (last_credential_check_at + ldap_sync_time) < Time.current
    else
      false
    end

@@ -1373,7 +1373,7 @@ class User < ApplicationRecord
  def contributed_projects
    events = Event.select(:project_id)
      .contributions.where(author_id: self)
-     .where("created_at > ?", Time.now - 1.year)
+     .where("created_at > ?", Time.current - 1.year)
      .distinct
      .reorder(nil)

@@ -1646,7 +1646,7 @@ class User < ApplicationRecord
  end

  def password_expired?
-   !!(password_expires_at && password_expires_at < Time.now)
+   !!(password_expires_at && password_expires_at < Time.current)
  end

  def can_be_deactivated?

@@ -120,7 +120,7 @@ class WikiPage
  end

  def insert_slugs(strings, is_new, canonical_slug)
-   creation = Time.now.utc
+   creation = Time.current.utc

    slug_attrs = strings.map do |slug|
      {

@@ -16,11 +16,11 @@ class WikiPage
    scope :canonical, -> { where(canonical: true) }

    def update_columns(attrs = {})
-     super(attrs.reverse_merge(updated_at: Time.now.utc))
+     super(attrs.reverse_merge(updated_at: Time.current.utc))
    end

    def self.update_all(attrs = {})
-     super(attrs.reverse_merge(updated_at: Time.now.utc))
+     super(attrs.reverse_merge(updated_at: Time.current.utc))
    end
  end
end

@@ -45,6 +45,7 @@ module Groups
      raise_transfer_error(:invalid_policies) unless valid_policies?
      raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path?
      raise_transfer_error(:group_contains_images) if group_projects_contain_registry_images?
+     raise_transfer_error(:cannot_transfer_to_subgroup) if transfer_to_subgroup?
    end

    def group_is_already_root?

@@ -55,6 +56,11 @@ module Groups
      @new_parent_group && @new_parent_group.id == @group.parent_id
    end

+   def transfer_to_subgroup?
+     @new_parent_group && \
+       @group.self_and_descendants.pluck_primary_key.include?(@new_parent_group.id)
+   end
+
    def valid_policies?
      return false unless can?(current_user, :admin_group, @group)

@@ -125,7 +131,8 @@ module Groups
        group_is_already_root: s_('TransferGroup|Group is already a root group.'),
        same_parent_as_current: s_('TransferGroup|Group is already associated to the parent group.'),
        invalid_policies: s_("TransferGroup|You don't have enough permissions."),
-       group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.')
+       group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.'),
+       cannot_transfer_to_subgroup: s_('TransferGroup|Cannot transfer group to one of its subgroup.')
      }.freeze
    end
  end
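
The new guard rejects a transfer whenever the proposed parent already sits inside the group's own subtree, which would otherwise create a cycle. With a hypothetical hierarchy:

```ruby
root = Group.find_by(path: 'root')  # root has a nested subgroup
sub  = root.children.first

# Transferring `root` under its own subgroup `sub` must fail:
root.self_and_descendants.pluck_primary_key.include?(sub.id)  # => true -> error
```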

@@ -40,17 +40,13 @@ module Projects

    def create_managed_prometheus_alert_event(parsed_alert)
      alert = find_alert(parsed_alert.metric_id)
-     payload_key = PrometheusAlertEvent.payload_key_for(parsed_alert.metric_id, parsed_alert.starts_at_raw)

-     event = PrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, alert, payload_key)
+     event = PrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, alert, parsed_alert.gitlab_fingerprint)

      set_status(parsed_alert, event)
    end

    def create_self_managed_prometheus_alert_event(parsed_alert)
-     payload_key = SelfManagedPrometheusAlertEvent.payload_key_for(parsed_alert.starts_at_raw, parsed_alert.title, parsed_alert.full_query)
-
-     event = SelfManagedPrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, payload_key) do |event|
+     event = SelfManagedPrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, parsed_alert.gitlab_fingerprint) do |event|
        event.environment = parsed_alert.environment
        event.title = parsed_alert.title
        event.query_expression = parsed_alert.full_query

@@ -6,12 +6,13 @@ module Snippets

    CreateRepositoryError = Class.new(StandardError)

-   attr_reader :uploaded_files
+   attr_reader :uploaded_assets, :snippet_files

    def initialize(project, user = nil, params = {})
      super

-     @uploaded_files = Array(@params.delete(:files).presence)
+     @uploaded_assets = Array(@params.delete(:files).presence)
+     @snippet_files = SnippetInputActionCollection.new(Array(@params.delete(:snippet_files).presence))

      filter_spam_check_params
    end

@@ -22,12 +23,30 @@ module Snippets
      Gitlab::VisibilityLevel.allowed_for?(current_user, visibility_level)
    end

-   def error_forbidden_visibility(snippet)
+   def forbidden_visibility_error(snippet)
      deny_visibility_level(snippet)

      snippet_error_response(snippet, 403)
    end

+   def valid_params?
+     return true if snippet_files.empty?
+
+     (params.keys & [:content, :file_name]).none? && snippet_files.valid?
+   end
+
+   def invalid_params_error(snippet)
+     if snippet_files.valid?
+       [:content, :file_name].each do |key|
+         snippet.errors.add(key, 'and snippet files cannot be used together') if params.key?(key)
+       end
+     else
+       snippet.errors.add(:snippet_files, 'have invalid data')
+     end
+
+     snippet_error_response(snippet, 403)
+   end
+
    def snippet_error_response(snippet, http_status)
      ServiceResponse.error(
        message: snippet.errors.full_messages.to_sentence,

@@ -52,5 +71,13 @@ module Snippets

      message
    end
+
+   def files_to_commit
+     snippet_files.to_commit_actions.presence || build_actions_from_params
+   end
+
+   def build_actions_from_params
+     raise NotImplementedError
+   end
  end
end

@@ -5,8 +5,10 @@ module Snippets
    def execute
      @snippet = build_from_params

+     return invalid_params_error(@snippet) unless valid_params?
+
      unless visibility_allowed?(@snippet, @snippet.visibility_level)
-       return error_forbidden_visibility(@snippet)
+       return forbidden_visibility_error(@snippet)
      end

      @snippet.author = current_user

@@ -29,12 +31,23 @@ module Snippets

    def build_from_params
      if project
-       project.snippets.build(params)
+       project.snippets.build(create_params)
      else
-       PersonalSnippet.new(params)
+       PersonalSnippet.new(create_params)
      end
    end

+   # If the snippet_files param is present
+   # we need to fill content and file_name from
+   # the model
+   def create_params
+     return params if snippet_files.empty?
+
+     first_file = snippet_files.actions.first
+
+     params.merge(content: first_file.content, file_name: first_file.file_path)
+   end
+
    def save_and_commit
      snippet_saved = @snippet.save

@@ -75,19 +88,19 @@ module Snippets
        message: 'Initial commit'
      }

-     @snippet.snippet_repository.multi_files_action(current_user, snippet_files, commit_attrs)
-   end
-
-   def snippet_files
-     [{ file_path: params[:file_name], content: params[:content] }]
+     @snippet.snippet_repository.multi_files_action(current_user, files_to_commit, commit_attrs)
    end

    def move_temporary_files
      return unless @snippet.is_a?(PersonalSnippet)

-     uploaded_files.each do |file|
+     uploaded_assets.each do |file|
        FileMover.new(file, from_model: current_user, to_model: @snippet).execute
      end
    end
+
+   def build_actions_from_params
+     [{ file_path: params[:file_name], content: params[:content] }]
+   end
  end
end

@@ -8,7 +8,7 @@ module Snippets

    def execute(snippet)
      if visibility_changed?(snippet) && !visibility_allowed?(snippet, visibility_level)
-       return error_forbidden_visibility(snippet)
+       return forbidden_visibility_error(snippet)
      end

      snippet.assign_attributes(params)

@@ -2,8 +2,24 @@

module Spam
  module SpamConstants
-   REQUIRE_RECAPTCHA = :recaptcha
-   DISALLOW = :disallow
-   ALLOW = :allow
+   REQUIRE_RECAPTCHA = "recaptcha"
+   DISALLOW = "disallow"
+   ALLOW = "allow"
+   BLOCK_USER = "block"
+
+   SUPPORTED_VERDICTS = {
+     BLOCK_USER => {
+       priority: 1
+     },
+     DISALLOW => {
+       priority: 2
+     },
+     REQUIRE_RECAPTCHA => {
+       priority: 3
+     },
+     ALLOW => {
+       priority: 4
+     }
+   }.freeze
  end
end
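
The constants change from symbols to strings so they can round-trip through JSON from the external check, and the priority map gives a total order for combining verdicts. A minimal sketch assuming the module above:

```ruby
include Spam::SpamConstants

results = [ALLOW, DISALLOW]  # e.g. external service verdict vs Akismet verdict
results.min_by { |verdict| SUPPORTED_VERDICTS[verdict][:priority] }
# => "disallow" -- priority 2 beats ALLOW's 4, so the stricter verdict wins
```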

@@ -5,13 +5,31 @@ module Spam
    include AkismetMethods
    include SpamConstants

-   def initialize(target:, request:, options:)
+   def initialize(target:, request:, options:, verdict_params: {})
      @target = target
      @request = request
      @options = options
+     @verdict_params = assemble_verdict_params(verdict_params)
    end

    def execute
+     external_spam_check_result = spam_verdict
+     akismet_result = akismet_verdict
+
+     # filter out anything we don't recognise, including nils.
+     valid_results = [external_spam_check_result, akismet_result].compact.select { |r| SUPPORTED_VERDICTS.key?(r) }
+     # Treat nils - such as service unavailable - as ALLOW
+     return ALLOW unless valid_results.any?
+
+     # Favour the most restrictive result.
+     valid_results.min_by { |v| SUPPORTED_VERDICTS[v][:priority] }
+   end
+
+   private
+
+   attr_reader :target, :request, :options, :verdict_params
+
+   def akismet_verdict
      if akismet.spam?
        Gitlab::Recaptcha.enabled? ? REQUIRE_RECAPTCHA : DISALLOW
      else

@@ -19,8 +37,41 @@ module Spam
      end
    end

-   private
+   def spam_verdict
+     return unless Gitlab::CurrentSettings.spam_check_endpoint_enabled
+     return if endpoint_url.blank?

-   attr_reader :target, :request, :options
+     result = Gitlab::HTTP.try_get(endpoint_url, verdict_params)
+     return unless result
+
+     begin
+       json_result = Gitlab::Json.parse(result).with_indifferent_access
+       # @TODO metrics/logging
+       # Expecting:
+       # error: (string or nil)
+       # result: (string or nil)
+       verdict = json_result[:verdict]
+       return unless SUPPORTED_VERDICTS.include?(verdict)
+
+       # @TODO log if json_result[:error]
+
+       json_result[:verdict]
+     rescue
+       # @TODO log
+       ALLOW
+     end
+   end
+
+   def assemble_verdict_params(params)
+     return {} unless endpoint_url
+
+     params.merge({
+       user_id: target.author_id
+     })
+   end
+
+   def endpoint_url
+     @endpoint_url ||= Gitlab::CurrentSettings.current_application_settings.spam_check_endpoint_url
+   end
  end
end
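
From the parsing code above, the external endpoint is expected to answer with JSON carrying a `verdict` key (and optionally `error`); anything unrecognized is discarded. A hedged sketch of a response the service would accept, using plain `JSON` for illustration rather than `Gitlab::Json`:

```ruby
require 'json'

body = '{"verdict":"disallow","error":null}'  # hypothetical endpoint response
verdict = JSON.parse(body)['verdict']
Spam::SpamConstants::SUPPORTED_VERDICTS.key?(verdict)  # => true, so it is used
```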

@@ -62,4 +62,13 @@
      .form-text.text-muted
        How many seconds an IP will be counted towards the limit

+   .form-group
+     .form-check
+       = f.check_box :spam_check_endpoint_enabled, class: 'form-check-input'
+       = f.label :spam_check_endpoint_enabled, _('Enable Spam Check via external API endpoint'), class: 'form-check-label'
+       .form-text.text-muted= _('Define custom rules for what constitutes spam, independent of Akismet')
+   .form-group
+     = f.label :spam_check_endpoint_url, _('URL of the external Spam Check endpoint'), class: 'label-bold'
+     = f.text_field :spam_check_endpoint_url, class: 'form-control'
+
    = f.submit 'Save changes', class: "btn btn-success"

@@ -41,23 +41,11 @@ module IncidentManagement
    end

    def find_gitlab_managed_event(alert)
-     payload_key = payload_key_for_alert(alert)
-
-     PrometheusAlertEvent.find_by_payload_key(payload_key)
+     PrometheusAlertEvent.find_by_payload_key(alert.gitlab_fingerprint)
    end

    def find_self_managed_event(alert)
-     payload_key = payload_key_for_alert(alert)
-
-     SelfManagedPrometheusAlertEvent.find_by_payload_key(payload_key)
-   end
-
-   def payload_key_for_alert(alert)
-     if alert.gitlab_managed?
-       PrometheusAlertEvent.payload_key_for(alert.metric_id, alert.starts_at_raw)
-     else
-       SelfManagedPrometheusAlertEvent.payload_key_for(alert.starts_at_raw, alert.title, alert.full_query)
-     end
+     SelfManagedPrometheusAlertEvent.find_by_payload_key(alert.gitlab_fingerprint)
    end

    def create_issue(project, alert)
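
The call sites above no longer compute the SHA1 payload key inline; they rely on a precomputed `alert.gitlab_fingerprint`. For reference, the removed computation was essentially the following (the example values are hypothetical):

```ruby
require 'digest'

# GitLab-managed alerts were keyed on metric id + raw start time:
Digest::SHA1.hexdigest([42, '2020-05-27T11:46:08Z'].join('/'))

# Self-managed alerts were keyed on start time + title + query expression:
Digest::SHA1.hexdigest(['2020-05-27T11:46:08Z', 'High CPU', 'rate(cpu[5m])'].join('/'))
```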

@@ -0,0 +1,5 @@
+---
+title: SpamVerdictService can call external spam check endpoint
+merge_request: 31449
+author:
+type: added

@@ -0,0 +1,5 @@
+---
+title: Exclude extra.server fields from exceptions_json.log
+merge_request: 32770
+author:
+type: changed

@@ -0,0 +1,5 @@
+---
+title: Fix group transfer service to deny moving group to its subgroup
+merge_request: 31495
+author: Abhisek Datta
+type: fixed

@@ -0,0 +1,5 @@
+---
+title: Allow the snippet create service to accept an array of files
+merge_request: 32649
+author:
+type: changed

@@ -0,0 +1,5 @@
+---
+title: Fix 404s downloading build artifacts
+merge_request: 32741
+author:
+type: fixed

@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class AddSpamCheckEndpointToApplicationSettings < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  def up
+    unless column_exists?(:application_settings, :spam_check_endpoint_url)
+      add_column :application_settings, :spam_check_endpoint_url, :text
+    end
+
+    add_text_limit :application_settings, :spam_check_endpoint_url, 255
+
+    unless column_exists?(:application_settings, :spam_check_endpoint_enabled)
+      add_column :application_settings, :spam_check_endpoint_enabled, :boolean, null: false, default: false
+    end
+  end
+
+  def down
+    remove_column_if_exists :spam_check_endpoint_url
+    remove_column_if_exists :spam_check_endpoint_enabled
+  end
+
+  private
+
+  def remove_column_if_exists(column)
+    return unless column_exists?(:application_settings, column)
+
+    remove_column :application_settings, column
+  end
+end

@@ -441,7 +441,10 @@ CREATE TABLE public.application_settings (
    container_registry_vendor text DEFAULT ''::text NOT NULL,
    container_registry_version text DEFAULT ''::text NOT NULL,
    container_registry_features text[] DEFAULT '{}'::text[] NOT NULL,
+   spam_check_endpoint_url text,
+   spam_check_endpoint_enabled boolean DEFAULT false NOT NULL,
    CONSTRAINT check_d03919528d CHECK ((char_length(container_registry_vendor) <= 255)),
+   CONSTRAINT check_d820146492 CHECK ((char_length(spam_check_endpoint_url) <= 255)),
    CONSTRAINT check_e5aba18f02 CHECK ((char_length(container_registry_version) <= 255))
);

@@ -13880,6 +13883,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200506125731
20200506154421
+20200507221434
20200508050301
20200508091106
20200511080113
20200511083541

@@ -728,17 +728,6 @@ Each line contains a JSON line that can be ingested by Elasticsearch. For exampl
  "severity": "ERROR",
  "time": "2019-12-17T11:49:29.485Z",
  "correlation_id": "AbDVUrrTvM1",
- "extra.server": {
-   "os": {
-     "name": "Darwin",
-     "version": "Darwin Kernel Version 19.2.0",
-     "build": "19.2.0",
-   },
-   "runtime": {
-     "name": "ruby",
-     "version": "ruby 2.6.5p114 (2019-10-01 revision 67812) [x86_64-darwin18]"
-   }
- },
  "extra.project_id": 55,
  "extra.relation_key": "milestones",
  "extra.relation_index": 1,

@@ -335,6 +335,8 @@ are listed in the descriptions of the relevant settings.
| `sourcegraph_enabled` | boolean | no | Enables Sourcegraph integration. Default is `false`. **If enabled, requires** `sourcegraph_url`. |
| `sourcegraph_url` | string | required by: `sourcegraph_enabled` | The Sourcegraph instance URL for integration. |
| `sourcegraph_public_only` | boolean | no | Blocks Sourcegraph from being loaded on private and internal projects. Default is `true`. |
+| `spam_check_endpoint_enabled` | boolean | no | Enables Spam Check via external API endpoint. Default is `false`. |
+| `spam_check_endpoint_url` | string | no | URL of the external Spam Check service endpoint. |
| `terminal_max_session_time` | integer | no | Maximum time for web terminal websocket connection (in seconds). Set to `0` for unlimited time. |
| `terms` | text | required by: `enforce_terms` | (**Required by:** `enforce_terms`) Markdown content for the ToS. |
| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |

@@ -215,14 +215,6 @@ Test:
    - ./**/*test-result.xml
```

-## Limitations
-
-Currently, the following tools might not work because their XML formats are unsupported in GitLab.
-
-|Case|Tool|Issue|
-|---|---|---|
-|`<testcase>` does not have `classname` attribute|ESlint, sass-lint|<https://gitlab.com/gitlab-org/gitlab-foss/-/issues/50964>|
-
## Viewing JUnit test reports on GitLab

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5.

@@ -114,6 +114,16 @@ For exact parameters accepted by
for [`git clean`](https://git-scm.com/docs/git-clean). The available parameters
are dependent on Git version.

+## Git fetch extra flags
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4142) in GitLab Runner 13.1.
+
+[`GIT_FETCH_EXTRA_FLAGS`](../yaml/README.md#git-fetch-extra-flags) allows you
+to modify `git fetch` behavior by passing extra flags.
+
+See the [`GIT_FETCH_EXTRA_FLAGS` documentation](../yaml/README.md#git-fetch-extra-flags)
+for more information.
+
## Fork-based workflow

> Introduced in GitLab Runner 11.10.

@@ -3459,6 +3459,43 @@ script:
  - ls -al cache/
```

+### Git fetch extra flags
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4142) in GitLab Runner 13.1.
+
+The `GIT_FETCH_EXTRA_FLAGS` variable is used to control the behavior of
+`git fetch`. You can set it globally or per-job in the [`variables`](#variables) section.
+
+`GIT_FETCH_EXTRA_FLAGS` accepts all options of the [`git fetch`](https://git-scm.com/docs/git-fetch) command. Note that the `GIT_FETCH_EXTRA_FLAGS` flags are appended after the default flags, which can't be modified.
+
+The default flags are:
+
+- [GIT_DEPTH](#shallow-cloning).
+- The list of [refspecs](https://git-scm.com/book/en/v2/Git-Internals-The-Refspec).
+- A remote called `origin`.
+
+If `GIT_FETCH_EXTRA_FLAGS` is:
+
+- Not specified, `git fetch` flags default to `--prune --quiet` along with the default flags.
+- Given the value `none`, `git fetch` is executed only with the default flags.
+
+For example, the default flags are `--prune --quiet`, so you can make `git fetch` more verbose by overriding this with just `--prune`:
+
+```yaml
+variables:
+  GIT_FETCH_EXTRA_FLAGS: --prune
+script:
+  - ls -al cache/
+```
+
+The configuration above results in `git fetch` being called this way:
+
+```shell
+git fetch origin $REFSPECS --depth 50 --prune
+```
+
+Where `$REFSPECS` is a value provided to the Runner internally by GitLab.
+
### Job stages attempts

> Introduced in GitLab, it requires GitLab Runner v1.9+.

@@ -113,50 +113,7 @@ Complementary reads:

## Database guides

-### Tooling
-
-- [Understanding EXPLAIN plans](understanding_explain_plans.md)
-- [explain.depesz.com](https://explain.depesz.com/) for visualizing the output
-  of `EXPLAIN`
-- [pgFormatter](http://sqlformat.darold.net/) a PostgreSQL SQL syntax beautifier
-
-### Migrations
-
-- [What requires downtime?](what_requires_downtime.md)
-- [SQL guidelines](sql.md) for working with SQL queries
-- [Migrations style guide](migration_style_guide.md) for creating safe SQL migrations
-- [Testing Rails migrations](testing_guide/testing_migrations_guide.md) guide
-- [Post deployment migrations](post_deployment_migrations.md)
-- [Background migrations](background_migrations.md)
-- [Swapping tables](swapping_tables.md)
-- [Deleting migrations](deleting_migrations.md)
-
-### Debugging
-
-- Tracing the source of an SQL query using query comments with [Marginalia](database_query_comments.md)
-- Tracing the source of an SQL query in Rails console using [Verbose Query Logs](https://guides.rubyonrails.org/debugging_rails_applications.html#verbose-query-logs)
-
-### Best practices
-
-- [Adding database indexes](adding_database_indexes.md)
-- [Foreign keys & associations](foreign_keys.md)
-- [Single table inheritance](single_table_inheritance.md)
-- [Polymorphic associations](polymorphic_associations.md)
-- [Serializing data](serializing_data.md)
-- [Hash indexes](hash_indexes.md)
-- [Storing SHA1 hashes as binary](sha1_as_binary.md)
-- [Iterating tables in batches](iterating_tables_in_batches.md)
-- [Insert into tables in batches](insert_into_tables_in_batches.md)
-- [Ordering table columns](ordering_table_columns.md)
-- [Verifying database capabilities](verifying_database_capabilities.md)
-- [Database Debugging and Troubleshooting](database_debugging.md)
-- [Query Count Limits](query_count_limits.md)
-- [Creating enums](creating_enums.md)
-
-### Case studies
-
-- [Database case study: Filtering by label](filtering_by_label.md)
-- [Database case study: Namespaces storage statistics](namespaces_storage_statistics.md)
+See [database guidelines](database/index.md).

## Integration guides

@@ -0,0 +1,48 @@
+# Database guides
+
+## Tooling
+
+- [Understanding EXPLAIN plans](../understanding_explain_plans.md)
+- [explain.depesz.com](https://explain.depesz.com/) for visualizing the output
+  of `EXPLAIN`
+- [pgFormatter](http://sqlformat.darold.net/) a PostgreSQL SQL syntax beautifier
+
+## Migrations
+
+- [What requires downtime?](../what_requires_downtime.md)
+- [SQL guidelines](../sql.md) for working with SQL queries
+- [Migrations style guide](../migration_style_guide.md) for creating safe SQL migrations
+- [Testing Rails migrations](../testing_guide/testing_migrations_guide.md) guide
+- [Post deployment migrations](../post_deployment_migrations.md)
+- [Background migrations](../background_migrations.md)
+- [Swapping tables](../swapping_tables.md)
+- [Deleting migrations](../deleting_migrations.md)
+
+## Debugging
+
+- Tracing the source of an SQL query using query comments with [Marginalia](../database_query_comments.md)
+- Tracing the source of an SQL query in Rails console using [Verbose Query Logs](https://guides.rubyonrails.org/debugging_rails_applications.html#verbose-query-logs)
+
+## Best practices
+
+- [Adding database indexes](../adding_database_indexes.md)
+- [Foreign keys & associations](../foreign_keys.md)
+- [Adding a foreign key constraint to an existing column](add_foreign_key_to_existing_column.md)
+- [Strings and the Text data type](strings_and_the_text_data_type.md)
+- [Single table inheritance](../single_table_inheritance.md)
+- [Polymorphic associations](../polymorphic_associations.md)
+- [Serializing data](../serializing_data.md)
+- [Hash indexes](../hash_indexes.md)
+- [Storing SHA1 hashes as binary](../sha1_as_binary.md)
+- [Iterating tables in batches](../iterating_tables_in_batches.md)
+- [Insert into tables in batches](../insert_into_tables_in_batches.md)
+- [Ordering table columns](../ordering_table_columns.md)
+- [Verifying database capabilities](../verifying_database_capabilities.md)
+- [Database Debugging and Troubleshooting](../database_debugging.md)
+- [Query Count Limits](../query_count_limits.md)
+- [Creating enums](../creating_enums.md)
+
+## Case studies
+
+- [Database case study: Filtering by label](../filtering_by_label.md)
+- [Database case study: Namespaces storage statistics](../namespaces_storage_statistics.md)
@ -0,0 +1,288 @@
|
|||
# Strings and the Text data type
|
||||
|
||||
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30453) in GitLab 13.0.
|
||||
|
||||
When adding new columns that will be used to store strings or other textual information:
|
||||
|
||||
1. We always use the `text` data type instead of the `string` data type.
|
||||
1. `text` columns should always have a limit set by using the `add_text_limit` migration helper.
|
||||
|
||||
The `text` data type can not be defined with a limit, so `add_text_limit` is enforcing that by
|
||||
adding a [check constraint](https://www.postgresql.org/docs/11/ddl-constraints.html) on the
|
||||
column and then validating it at a followup step.
|
||||
|
||||
## Background info
|
||||
|
||||
The reason we always want to use `text` instead of `string` is that `string` columns have the
|
||||
disadvantage that if you want to update their limit, you have to run an `ALTER TABLE ...` command.
|
||||
|
||||
While a limit is added, the `ALTER TABLE ...` command requires an `EXCLUSIVE LOCK` on the table, which
|
||||
is held throughout the process of updating the column and while validating all existing records, a
|
||||
process that can take a while for large tables.
|
||||
|
||||
On the other hand, texts are [more or less equivalent to strings](https://www.depesz.com/2010/03/02/charx-vs-varcharx-vs-varchar-vs-text/) in PostgreSQL,
|
||||
while having the additional advantage that adding a limit on an existing column or updating their
|
||||
limit does not require the very costly `EXCLUSIVE LOCK` to be held throughout the validation phase.
|
||||
We can start by updating the constraint with the valid option off, which requires an `EXCLUSIVE LOCK`
|
||||
but only for updating the declaration of the columns. We can then validate it at a later step using
|
||||
`VALIDATE CONSTRAINT`, which requires only a `SHARE UPDATE EXCLUSIVE LOCK` (only conflicts with other
|
||||
validations and index creation while it allows reads and writes).
|
||||
|
||||
## Create a new table with text columns
|
||||
|
||||
When adding a new table, the limits for all text columns should be added in the same migration as
|
||||
the table creation.
|
||||
|
||||
For example, consider a migration that creates a table with two text columns,
|
||||
**db/migrate/20200401000001_create_db_guides.rb**:
|
||||
|
||||
```ruby
|
||||
class CreateDbGuides < ActiveRecord::Migration[6.0]
|
||||
DOWNTIME = false
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
unless table_exists?(:db_guides)
|
||||
create_table :db_guides do |t|
|
||||
t.bigint :stars, default: 0, null: false
|
||||
t.text :title
|
||||
t.text :notes
|
||||
end
|
||||
end
|
||||
|
||||
# The following add the constraints and validate them immediately (no data in the table)
|
||||
add_text_limit :db_guides, :title, 128
|
||||
add_text_limit :db_guides, :notes, 1024
|
||||
end
|
||||
|
||||
def down
|
||||
# No need to drop the constraints, drop_table takes care of everything
|
||||
drop_table :db_guides
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
Adding a check constraint requires an exclusive lock while the `ALTER TABLE` that adds is running.
|
||||
As we don't want the exclusive lock to be held for the duration of a transaction, `add_text_limit`
|
||||
must always run in a migration with `disable_ddl_transaction!`.
|
||||
|
||||
Also, note that we have to add a check that the table exists so that the migration can be repeated
|
||||
in case of a failure.
|
||||
|
||||
## Add a text column to an existing table
|
||||
|
||||
Adding a column to an existing table requires an exclusive lock for that table. Even though that lock
|
||||
is held for a brief amount of time, the time `add_column` needs to complete its execution can vary
|
||||
depending on how frequently the table is accessed. For example, acquiring an exclusive lock for a very
|
||||
frequently accessed table may take minutes in GitLab.com and requires the use of `with_lock_retries`.
|
||||
|
||||
For these reasons, it is advised to add the text limit in a separate migration from the `add_column` one.

For example, consider a migration that adds a new text column `extended_title` to table `sprints`,
**db/migrate/20200501000001_add_extended_title_to_sprints.rb**:

```ruby
class AddExtendedTitleToSprints < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  # rubocop:disable Migration/AddLimitToTextColumns
  # limit is added in 20200501000002_add_text_limit_to_sprints_extended_title
  def change
    add_column :sprints, :extended_title, :text
  end
  # rubocop:enable Migration/AddLimitToTextColumns
end
```

A second migration should follow the first one with a limit added to `extended_title`,
**db/migrate/20200501000002_add_text_limit_to_sprints_extended_title.rb**:

```ruby
class AddTextLimitToSprintsExtendedTitle < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_text_limit :sprints, :extended_title, 512
  end

  def down
    # Down is required as `add_text_limit` is not reversible
    remove_text_limit :sprints, :extended_title
  end
end
```

## Add a text limit constraint to an existing column

Adding text limits to existing database columns requires multiple steps split into at least two different releases:

1. Release `N.M` (current release)

   - Add a post-deployment migration to add the limit to the text column with `validate: false`.
   - Add a post-deployment migration to fix the existing records.

     NOTE: **Note:**
     Depending on the size of the table, a background migration for cleanup could be required in the next release.
     See [text limit constraints on large tables](strings_and_the_text_data_type.md#text-limit-constraints-on-large-tables) for more information.

   - Create an issue for the next milestone to validate the text limit.

1. Release `N.M+1` (next release)

   - Validate the text limit using a post-deployment migration.

### Example

Let's assume we want to add a `1024` limit to `issues.title_html` for a given release milestone,
such as 13.0.

`issues` is a pretty busy and large table, with more than 25 million rows, so we don't want to lock all
other processes that try to access it while running the update.

Also, after checking our production database, we know that there are `issues` with more characters in
their title than the 1024 character limit, so we cannot add and validate the constraint in one step.

NOTE: **Note:**
Even if we did not have any record with a title larger than the provided limit, another
instance of GitLab could have such records, so we would follow the same process either way.

#### Prevent new invalid records (current release)

We first add the limit as a `NOT VALID` check constraint to the table, which enforces consistency when
new records are inserted or current records are updated.

In the example above, the existing issues with more than 1024 characters in their title are not
affected, and you can still update records in the `issues` table. However, if you try to update
`title_html` with a title that has more than 1024 characters, the constraint causes a database error.

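For instance, once the constraint is in place, an over-limit write fails at the database level. A sketch of what this could look like from a Rails console (the generated constraint name is shown only as an illustration):

```ruby
issue = Issue.last
issue.update!(title_html: 'a' * 2000)
# => ActiveRecord::StatementInvalid (PG::CheckViolation):
#      ERROR:  new row for relation "issues" violates check constraint "check_..."
```
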
Adding or removing a constraint on an existing attribute requires that any application changes are
deployed _first_, [otherwise servers still running the old version of the application may try to
update the attribute with invalid values](../multi_version_compatibility.md#ci-artifact-uploads-were-failing).
For these reasons, `add_text_limit` should run in a post-deployment migration.

Still in our example, for the 13.0 milestone (current), consider that the following validation
has been added to model `Issue`:

```ruby
validates :title_html, length: { maximum: 1024 }
```

We can also update the database in the same milestone by adding the text limit with `validate: false`
in a post-deployment migration,
**db/post_migrate/20200501000001_add_text_limit_migration.rb**:

```ruby
class AddTextLimitMigration < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    # This will add the constraint WITHOUT validating it
    add_text_limit :issues, :title_html, 1024, validate: false
  end

  def down
    # Down is required as `add_text_limit` is not reversible
    remove_text_limit :issues, :title_html
  end
end
```

#### Data migration to fix existing records (current release)

The approach here depends on the data volume and the cleanup strategy. The number of records that must
be fixed on GitLab.com is a good indicator to help us decide whether to use a post-deployment
migration or a background data migration:

- If the data volume is less than `1,000` records, then the data migration can be executed within the post-migration (see the sketch below).
- If the data volume is higher than `1,000` records, it's advised to create a background migration.

When unsure about which option to use, please contact the Database team for advice.

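For the small-volume case, a hedged sketch of what such a post-deployment data fix could look like (the class name and the truncation strategy are illustrative, not prescribed):

```ruby
class CapTitleLengthOnIssuesSync < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def up
    # Fewer than ~1,000 affected rows: fix them directly in the post-migration.
    execute(<<~SQL)
      UPDATE issues
      SET title_html = left(title_html, 1024)
      WHERE char_length(title_html) > 1024
    SQL
  end

  def down
    # no-op : the truncated part of title_html cannot be restored
  end
end
```
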
Back to our example: the `issues` table is considerably large and frequently accessed, so we are going
to add a background migration for the 13.0 milestone (current),
**db/post_migrate/20200501000002_schedule_cap_title_length_on_issues.rb**:

```ruby
class ScheduleCapTitleLengthOnIssues < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  # Info on how many records will be affected on GitLab.com,
  # the time each batch needs to run on average, etc ...
  BATCH_SIZE = 5000
  DELAY_INTERVAL = 2.minutes.to_i

  # Background migration will update issues whose title is longer than the 1024 limit
  ISSUES_BACKGROUND_MIGRATION = 'CapTitleLengthOnIssues'.freeze

  disable_ddl_transaction!

  class Issue < ActiveRecord::Base
    include EachBatch

    self.table_name = 'issues'
  end

  def up
    queue_background_migration_jobs_by_range_at_intervals(
      Issue.where('char_length(title_html) > 1024'),
      ISSUES_BACKGROUND_MIGRATION,
      DELAY_INTERVAL,
      batch_size: BATCH_SIZE
    )
  end

  def down
    # no-op : the part of the title_html after the limit is lost forever
  end
end
```

To keep this guide short, we skipped the definition of the background migration and only
provided a high-level example of the post-deployment migration that is used to schedule the batches.
You can find more information in the guide about [background migrations](../background_migrations.md).

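For orientation, a hedged sketch of what that background migration class could look like, assuming truncation as the cleanup strategy (the class name matches the one scheduled above; the body is illustrative only and the real class would live under `lib/gitlab/background_migration/`):

```ruby
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Illustrative only: truncates titles that exceed the limit so the
    # constraint can be validated in the next release.
    class CapTitleLengthOnIssues
      class Issue < ActiveRecord::Base
        self.table_name = 'issues'
      end

      def perform(start_id, stop_id)
        Issue.where(id: start_id..stop_id)
          .where('char_length(title_html) > 1024')
          .update_all('title_html = left(title_html, 1024)')
      end
    end
  end
end
```
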
#### Validate the text limit (next release)

Validating the text limit will scan the whole table and make sure that each record is correct.

Still in our example, for the 13.1 milestone (next), we run the `validate_text_limit` migration
helper in a final post-deployment migration,
**db/post_migrate/20200601000001_validate_text_limit_migration.rb**:

```ruby
class ValidateTextLimitMigration < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    validate_text_limit :issues, :title_html
  end

  def down
    # no-op
  end
end
```

## Text limit constraints on large tables

If you have to clean up a text column for a really [large table](https://gitlab.com/gitlab-org/gitlab/-/blob/master/rubocop/migration_helpers.rb#L12)
(for example, `artifacts` in `ci_builds`), your background migration will go on for a while and
it will need an additional [background migration cleanup](../background_migrations.md#cleaning-up)
in the release after adding the data migration.

In that rare case you will need 3 releases end-to-end:

1. Release `N.M` - Add the text limit and the background migration to fix the existing records.
1. Release `N.M+1` - Clean up the background migration (see the sketch below).
1. Release `N.M+2` - Validate the text limit.
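
For the cleanup in release `N.M+1`, a hedged sketch of such a migration, reusing the background migration name from our earlier example (`Gitlab::BackgroundMigration.steal` forces any jobs that are still queued to run inline):

```ruby
class CleanupCapTitleLengthOnIssues < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    # Finish any batches from the previous release that have not run yet.
    Gitlab::BackgroundMigration.steal('CapTitleLengthOnIssues')
  end

  def down
    # no-op
  end
end
```
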
@ -727,6 +727,12 @@ Rails migration example:

add_column(:projects, :foo, :integer, default: 10, limit: 8)
```

## Strings and the Text data type

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30453) in GitLab 13.0.

See the [text data type](database/strings_and_the_text_data_type.md) style guide for more information.

## Timestamp column type

By default, Rails uses the `timestamp` data type that stores timestamp data

@ -126,7 +126,7 @@ available when needed.

Our [Memory Team](https://about.gitlab.com/handbook/engineering/development/enablement/memory/) is actively working to reduce the memory requirement.

NOTE: **Note:** The 25 workers of Sidekiq will show up as separate processes in your process overview (such as `top` or `htop`) but they share the same RAM allocation since Sidekiq is a multithreaded application. Please see the section below about Unicorn workers for information about how many you need for those.
NOTE: **Note:** The 25 workers of Sidekiq will show up as separate processes in your process overview (such as `top` or `htop`). However, they share the same RAM allocation, as Sidekiq is a multi-threaded application. See the section below about Unicorn workers for information about how many you need for those.

## Database

@ -76,7 +76,7 @@ NOTE: **Note:** We will flag any significant differences between Redcarpet and C

If you have a large volume of Markdown files, it can be tedious to determine
if they will display correctly or not. You can use the
[diff_redcarpet_cmark](https://gitlab.com/digitalmoksha/diff_redcarpet_cmark)
tool (not an officially supported product) to generate a list of files, and the
tool (not an officially supported product) to generate a list of files and the
differences between how RedCarpet and CommonMark render the files. It can give
an indication if anything needs to be changed - often nothing will need
to change.

@ -253,7 +253,7 @@ Consult the [Emoji Cheat Sheet](https://www.webfx.com/tools/emoji-cheat-sheet/)

> **Note:** The emoji example above uses hard-coded images for this documentation. The emoji,
when rendered within GitLab, may appear different depending on the OS and browser used.

Most emoji are natively supported on macOS, Windows, iOS, Android and will fallback to image-based emoji where there is lack of support.
Most emoji are natively supported on macOS, Windows, iOS, Android, and will fall back on image-based emoji where there is no support.

NOTE: **Note:** On Linux, you can download [Noto Color Emoji](https://www.google.com/get/noto/help/emoji/)
to get full native emoji support. Ubuntu 18.04 (like many modern Linux distributions) has

@ -272,7 +272,7 @@ in a box at the top of the document, before the rendered HTML content. To view a
you can toggle between the source and rendered version of a [GitLab documentation file](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/README.md).

In GitLab, front matter is only used in Markdown files and wiki pages, not the other
places where Markdown formatting is supported. It must be at the very top of the document,
places where Markdown formatting is supported. It must be at the very top of the document
and must be between delimiters, as explained below.

The following delimiters are supported:

@ -405,7 +405,7 @@ GFM recognizes special GitLab related references. For example, you can easily re
an issue, a commit, a team member, or even the whole team within a project. GFM will turn
that reference into a link so you can navigate between them easily.

Additionally, GFM recognizes certain cross-project references, and also has a shorthand
Additionally, GFM recognizes certain cross-project references and also has a shorthand
version to reference other projects from the same namespace.

GFM will recognize the following:

@ -147,8 +147,8 @@ snippet was created using the GitLab web interface the original line ending is W

> Introduced in GitLab 10.8.

Public snippets can not only be shared, but also embedded on any website. This
allows us to reuse a GitLab snippet in multiple places and any change to the source
Public snippets can not only be shared, but also embedded on any website. With
this, you can reuse a GitLab snippet in multiple places and any change to the source
is automatically reflected in the embedded snippet.

To embed a snippet, first make sure that:

@ -172,6 +172,6 @@ Here's how an embedded snippet looks like:

<script src="https://gitlab.com/gitlab-org/gitlab-foss/snippets/1717978.js"></script>

Embedded snippets are displayed with a header that shows the file name is defined,
Embedded snippets are displayed with a header that shows the file name if it's defined,
the snippet size, a link to GitLab, and the actual snippet content. Actions in
the header allow users to see the snippet in raw format and download it.

@ -21,7 +21,8 @@ module API
      }.freeze

      SCOPE_PRELOAD_METHOD = {
        merge_requests: :with_api_entity_associations
        merge_requests: :with_api_entity_associations,
        projects: :with_api_entity_associations
      }.freeze

      def search(additional_params = {})

@ -132,6 +132,10 @@ module API
        given sourcegraph_enabled: ->(val) { val } do
          requires :sourcegraph_url, type: String, desc: 'The configured Sourcegraph instance URL'
        end
        optional :spam_check_endpoint_enabled, type: Boolean, desc: 'Enable Spam Check via external API endpoint'
        given spam_check_endpoint_enabled: ->(val) { val } do
          requires :spam_check_endpoint_url, type: String, desc: 'The URL of the external Spam Check service endpoint'
        end
        optional :terminal_max_session_time, type: Integer, desc: 'Maximum time for web terminal websocket connection (in seconds). Set to 0 for unlimited time.'
        optional :usage_ping_enabled, type: Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
        optional :instance_statistics_visibility_private, type: Boolean, desc: 'When set to `true` Instance statistics will only be available to admins'

@ -121,9 +121,9 @@ module Gitlab

    def plain_gitlab_fingerprint
      if gitlab_managed?
        [metric_id, starts_at].join('/')
        [metric_id, starts_at_raw].join('/')
      else # self managed
        [starts_at, title, full_query].join('/')
        [starts_at_raw, title, full_query].join('/')
      end
    end

@ -12,6 +12,8 @@ module Gitlab
        'exception.message' => exception.message
      )

      payload.delete('extra.server')

      if exception.backtrace
        payload['exception.backtrace'] = Gitlab::BacktraceCleaner.clean_backtrace(exception.backtrace)
      end

@ -6933,6 +6933,9 @@ msgstr ""
msgid "Define a custom pattern with cron syntax"
msgstr ""

msgid "Define custom rules for what constitutes spam, independent of Akismet"
msgstr ""

msgid "Define environments in the deploy stage(s) in <code>.gitlab-ci.yml</code> to track deployments here."
msgstr ""

@ -8055,6 +8058,9 @@ msgstr ""
msgid "Enable Seat Link"
msgstr ""

msgid "Enable Spam Check via external API endpoint"
msgstr ""

msgid "Enable access to Grafana"
msgstr ""

@ -14080,6 +14086,9 @@ msgstr ""
msgid "Network"
msgstr ""

msgid "NetworkPolicies|Enabled"
msgstr ""

msgid "NetworkPolicies|Environment does not have deployment platform"
msgstr ""

@ -14089,6 +14098,18 @@ msgstr ""
msgid "NetworkPolicies|Kubernetes error: %{error}"
msgstr ""

msgid "NetworkPolicies|Last modified"
msgstr ""

msgid "NetworkPolicies|Name"
msgstr ""

msgid "NetworkPolicies|No policies detected"
msgstr ""

msgid "NetworkPolicies|Policies are a specification of how groups of pods are allowed to communicate with each other network endpoints."
msgstr ""

msgid "NetworkPolicies|Policy %{policyName} was successfully changed"
msgstr ""

@ -14098,6 +14119,9 @@ msgstr ""
msgid "NetworkPolicies|Something went wrong, unable to fetch policies"
msgstr ""

msgid "NetworkPolicies|Status"
msgstr ""

msgid "Never"
msgstr ""

@ -22347,9 +22371,15 @@ msgstr ""
msgid "ThreatMonitoring|Operations Per Second"
msgstr ""

msgid "ThreatMonitoring|Overview"
msgstr ""

msgid "ThreatMonitoring|Packet Activity"
msgstr ""

msgid "ThreatMonitoring|Policies"
msgstr ""

msgid "ThreatMonitoring|Requests"
msgstr ""

@ -22885,6 +22915,9 @@ msgstr ""
msgid "Transfer project"
msgstr ""

msgid "TransferGroup|Cannot transfer group to one of its subgroup."
msgstr ""

msgid "TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again."
msgstr ""

@ -23062,6 +23095,9 @@ msgstr ""
msgid "URL must start with %{codeStart}http://%{codeEnd}, %{codeStart}https://%{codeEnd}, or %{codeStart}ftp://%{codeEnd}"
msgstr ""

msgid "URL of the external Spam Check endpoint"
msgstr ""

msgid "URL of the external storage that will serve the repository static objects (e.g. archives, blobs, ...)."
msgstr ""

@ -3,6 +3,8 @@

require 'spec_helper'

describe Projects::ArtifactsController do
  include RepoHelpers

  let(:user) { project.owner }
  let_it_be(:project) { create(:project, :repository, :public) }

@ -481,6 +483,22 @@ describe Projects::ArtifactsController do
          expect(response).to redirect_to(path)
        end
      end

      context 'with a failed pipeline on an updated master' do
        before do
          create_file_in_repo(project, 'master', 'master', 'test.txt', 'This is test')

          create(:ci_pipeline,
            project: project,
            sha: project.commit.sha,
            ref: project.default_branch,
            status: 'failed')

          get :latest_succeeded, params: params_from_ref(project.default_branch)
        end

        it_behaves_like 'redirect to the job'
      end
    end
  end
end

@ -8,6 +8,13 @@ FactoryBot.define do
    title { FFaker::Lorem.sentence }
    started_at { Time.current }

    trait :with_validation_errors do
      after(:create) do |alert|
        too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
        alert.update_columns(hosts: too_many_hosts)
      end
    end

    trait :with_issue do
      issue
    end

@ -282,11 +282,13 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
      visit reporting_admin_application_settings_path

      page.within('.as-spam') do
        check 'Enable reCAPTCHA'
        check 'Enable reCAPTCHA for login'
        fill_in 'reCAPTCHA Site Key', with: 'key'
        fill_in 'reCAPTCHA Private Key', with: 'key'
        check 'Enable reCAPTCHA'
        check 'Enable reCAPTCHA for login'
        fill_in 'IPs per user', with: 15
        check 'Enable Spam Check via external API endpoint'
        fill_in 'URL of the external Spam Check endpoint', with: 'https://www.example.com/spamcheck'
        click_button 'Save changes'
      end

@ -294,6 +296,8 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
      expect(current_settings.recaptcha_enabled).to be true
      expect(current_settings.login_recaptcha_protection_enabled).to be true
      expect(current_settings.unique_ips_limit_per_user).to eq(15)
      expect(current_settings.spam_check_endpoint_enabled).to be true
      expect(current_settings.spam_check_endpoint_url).to eq 'https://www.example.com/spamcheck'
    end
  end

@ -32,5 +32,11 @@ describe "User downloads artifacts" do

      it_behaves_like "downloading"
    end

    context "via SHA" do
      let(:url) { latest_succeeded_project_artifacts_path(project, "#{pipeline.sha}/download", job: job.name) }

      it_behaves_like "downloading"
    end
  end
end

@ -283,15 +283,25 @@ describe('RepoEditor', () => {
      expect(vm.model.events.size).toBe(2);
    });

    it('updates state when model content changed', done => {
      vm.model.setValue('testing 123\n');
    it.each`
      insertFinalNewline | input              | eol       | output
      ${true}            | ${'testing 123\n'} | ${'\n'}   | ${'testing 123\n'}
      ${true}            | ${'testing 123'}   | ${'\n'}   | ${'testing 123\n'}
      ${false}           | ${'testing 123'}   | ${'\n'}   | ${'testing 123'}
      ${true}            | ${'testing 123'}   | ${'\r\n'} | ${'testing 123\r\n'}
      ${false}           | ${'testing 123'}   | ${'\r\n'} | ${'testing 123'}
    `(
      'updates state with "$output" if `this.insertFinalNewline` is $insertFinalNewline',
      ({ insertFinalNewline, input, eol, output }) => {
        jest.spyOn(vm.model.getModel(), 'getEOL').mockReturnValue(eol);

      setImmediate(() => {
        expect(vm.file.content).toBe('testing 123\n');
        vm.addFinalNewline = insertFinalNewline;

        done();
      });
    });
        vm.model.setValue(input);

        expect(vm.file.content).toBe(output);
      },
    );

    it('sets head model as staged file', () => {
      jest.spyOn(vm.editor, 'createModel');

@ -661,31 +661,6 @@ describe('Multi-file store utils', () => {
    });
  });

  describe('addFinalNewlineIfNeeded', () => {
    it('adds a newline if it doesnt already exist', () => {
      [
        {
          input: 'some text',
          output: 'some text\n',
        },
        {
          input: 'some text\n',
          output: 'some text\n',
        },
        {
          input: 'some text\n\n',
          output: 'some text\n\n',
        },
        {
          input: 'some\n text',
          output: 'some\n text\n',
        },
      ].forEach(({ input, output }) => {
        expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output);
      });
    });
  });

  describe('extractMarkdownImagesFromEntries', () => {
    let mdFile;
    let entries;

@ -1,4 +1,10 @@
import { isTextFile, registerLanguages, trimPathComponents } from '~/ide/utils';
import {
  isTextFile,
  registerLanguages,
  trimPathComponents,
  addFinalNewline,
  getPathParents,
} from '~/ide/utils';
import { languages } from 'monaco-editor';

describe('WebIDE utils', () => {

@ -148,4 +154,39 @@ describe('WebIDE utils', () => {
      ]);
    });
  });

  describe('addFinalNewline', () => {
    it.each`
      input              | output
      ${'some text'}     | ${'some text\n'}
      ${'some text\n'}   | ${'some text\n'}
      ${'some text\n\n'} | ${'some text\n\n'}
      ${'some\n text'}   | ${'some\n text\n'}
    `('adds a newline if it doesnt already exist for input: $input', ({ input, output }) => {
      expect(addFinalNewline(input)).toEqual(output);
    });

    it.each`
      input                  | output
      ${'some text'}         | ${'some text\r\n'}
      ${'some text\r\n'}     | ${'some text\r\n'}
      ${'some text\n'}       | ${'some text\n\r\n'}
      ${'some text\r\n\r\n'} | ${'some text\r\n\r\n'}
      ${'some\r\n text'}     | ${'some\r\n text\r\n'}
    `('works with CRLF newline style; input: $input', ({ input, output }) => {
      expect(addFinalNewline(input, '\r\n')).toEqual(output);
    });
  });

  describe('getPathParents', () => {
    it.each`
      path                                  | parents
      ${'foo/bar/baz/index.md'}             | ${['foo/bar/baz', 'foo/bar', 'foo']}
      ${'foo/bar/baz'}                      | ${['foo/bar', 'foo']}
      ${'index.md'}                         | ${[]}
      ${'path with/spaces to/something.md'} | ${['path with/spaces to', 'path with']}
    `('gets all parent directory names for path: $path', ({ path, parents }) => {
      expect(getPathParents(path)).toEqual(parents);
    });
  });
});

@ -253,7 +253,7 @@ describe Gitlab::Alerting::Alert do
      include_context 'gitlab alert'

      it 'returns a fingerprint' do
        plain_fingerprint = [alert.metric_id, alert.starts_at].join('/')
        plain_fingerprint = [alert.metric_id, alert.starts_at_raw].join('/')

        is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
      end

@ -263,7 +263,7 @@ describe Gitlab::Alerting::Alert do
      include_context 'full query'

      it 'returns a fingerprint' do
        plain_fingerprint = [alert.starts_at, alert.title, alert.full_query].join('/')
        plain_fingerprint = [alert.starts_at_raw, alert.title, alert.full_query].join('/')

        is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
      end

@ -0,0 +1,45 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Cleanup::OrphanLfsFileReferences do
  let(:null_logger) { Logger.new('/dev/null') }
  let(:project) { create(:project, :repository, lfs_enabled: true) }
  let(:lfs_object) { create(:lfs_object) }

  let!(:invalid_reference) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }

  before do
    allow(null_logger).to receive(:info)

    allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)

    # Create a valid reference
    oid = project.repository.gitaly_blob_client.get_all_lfs_pointers.first.lfs_oid
    lfs_object2 = create(:lfs_object, oid: oid)
    create(:lfs_objects_project, project: project, lfs_object: lfs_object2)
  end

  context 'dry run' do
    it 'prints messages and does not delete references' do
      expect(null_logger).to receive(:info).with("[DRY RUN] Looking for orphan LFS files for project #{project.name_with_namespace}")
      expect(null_logger).to receive(:info).with("[DRY RUN] Found invalid references: 1")

      expect { described_class.new(project, logger: null_logger).run! }
        .not_to change { project.lfs_objects.count }
    end
  end

  context 'regular run' do
    it 'prints messages and deletes invalid reference' do
      expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
      expect(null_logger).to receive(:info).with("Removed invalid references: 1")
      expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])

      expect { described_class.new(project, logger: null_logger, dry_run: false).run! }
        .to change { project.lfs_objects.count }.from(2).to(1)

      expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
    end
  end
end

@ -152,6 +152,30 @@ describe ApplicationSetting do
    end
  end

  describe 'spam_check_endpoint' do
    context 'when spam_check_endpoint is enabled' do
      before do
        setting.spam_check_endpoint_enabled = true
      end

      it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
      it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
      it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) }
      it { is_expected.not_to allow_value('').for(:spam_check_endpoint_url) }
    end

    context 'when spam_check_endpoint is NOT enabled' do
      before do
        setting.spam_check_endpoint_enabled = false
      end

      it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
      it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
      it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) }
      it { is_expected.to allow_value('').for(:spam_check_endpoint_url) }
    end
  end

  context 'when snowplow is enabled' do
    before do
      setting.snowplow_enabled = true

@ -66,7 +66,7 @@ describe BroadcastMessage do
    end

    it 'expires the value if a broadcast message has ended', :request_store do
      message = create(:broadcast_message, broadcast_type: broadcast_type, ends_at: Time.now.utc + 1.day)
      message = create(:broadcast_message, broadcast_type: broadcast_type, ends_at: Time.current.utc + 1.day)

      expect(subject.call).to match_array([message])
      expect(described_class.cache).to receive(:expire).and_call_original

@ -87,8 +87,8 @@ describe BroadcastMessage do

      future = create(
        :broadcast_message,
        starts_at: Time.now + 10.minutes,
        ends_at: Time.now + 20.minutes,
        starts_at: Time.current + 10.minutes,
        ends_at: Time.current + 20.minutes,
        broadcast_type: broadcast_type
      )

@ -626,7 +626,7 @@ describe Ci::Build do

    context 'is expired' do
      before do
        build.update(artifacts_expire_at: Time.now - 7.days)
        build.update(artifacts_expire_at: Time.current - 7.days)
      end

      it { is_expected.to be_truthy }

@ -634,7 +634,7 @@ describe Ci::Build do

    context 'is not expired' do
      before do
        build.update(artifacts_expire_at: Time.now + 7.days)
        build.update(artifacts_expire_at: Time.current + 7.days)
      end

      it { is_expected.to be_falsey }

@ -661,13 +661,13 @@ describe Ci::Build do
    it { is_expected.to be_nil }

    context 'when artifacts_expire_at is specified' do
      let(:expire_at) { Time.now + 7.days }
      let(:expire_at) { Time.current + 7.days }

      before do
        build.artifacts_expire_at = expire_at
      end

      it { is_expected.to be_within(5).of(expire_at - Time.now) }
      it { is_expected.to be_within(5).of(expire_at - Time.current) }
    end
  end

@ -1795,7 +1795,7 @@ describe Ci::Build do
  end

  describe '#keep_artifacts!' do
    let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
    let(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days) }

    subject { build.keep_artifacts! }

@ -363,13 +363,13 @@ describe Ci::JobArtifact do
    it { is_expected.to be_nil }

    context 'when expire_at is specified' do
      let(:expire_at) { Time.now + 7.days }
      let(:expire_at) { Time.current + 7.days }

      before do
        artifact.expire_at = expire_at
      end

      it { is_expected.to be_within(5).of(expire_at - Time.now) }
      it { is_expected.to be_within(5).of(expire_at - Time.current) }
    end
  end

@ -118,7 +118,7 @@ describe Ci::PipelineSchedule do
    let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }

    it "updates next_run_at to the sidekiq worker's execution time" do
      Timecop.freeze(Time.parse("2019-06-01 12:18:00+0000")) do
      Timecop.freeze(Time.zone.parse("2019-06-01 12:18:00+0000")) do
        expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
      end
    end

@ -1079,7 +1079,7 @@ describe Ci::Pipeline, :mailer do
  end

  describe 'state machine' do
    let(:current) { Time.now.change(usec: 0) }
    let(:current) { Time.current.change(usec: 0) }
    let(:build) { create_build('build1', queued_at: 0) }
    let(:build_b) { create_build('build2', queued_at: 0) }
    let(:build_c) { create_build('build3', queued_at: 0) }

@ -605,7 +605,7 @@ describe Ci::Runner do

    context 'when database was updated recently' do
      before do
        runner.contacted_at = Time.now
        runner.contacted_at = Time.current
      end

      it 'updates cache' do

@ -47,7 +47,7 @@ describe Clusters::Applications::Prometheus do

    it 'sets last_update_started_at to now' do
      Timecop.freeze do
        expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.now)
        expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.current)
      end
    end
  end

@ -347,14 +347,14 @@ describe Clusters::Applications::Prometheus do
  describe '#updated_since?' do
    let(:cluster) { create(:cluster) }
    let(:prometheus_app) { build(:clusters_applications_prometheus, cluster: cluster) }
    let(:timestamp) { Time.now - 5.minutes }
    let(:timestamp) { Time.current - 5.minutes }

    around do |example|
      Timecop.freeze { example.run }
    end

    before do
      prometheus_app.last_update_started_at = Time.now
      prometheus_app.last_update_started_at = Time.current
    end

    context 'when app does not have status failed' do

@ -363,7 +363,7 @@ describe Clusters::Applications::Prometheus do
    end

    it 'returns false when last update started before the timestamp' do
      expect(prometheus_app.updated_since?(Time.now + 5.minutes)).to be false
      expect(prometheus_app.updated_since?(Time.current + 5.minutes)).to be false
    end
  end

@ -235,7 +235,7 @@ describe CommitStatus do

  context 'if the building process has started' do
    before do
      commit_status.started_at = Time.now - 1.minute
      commit_status.started_at = Time.current - 1.minute
      commit_status.finished_at = nil
    end

@ -708,7 +708,7 @@ describe CommitStatus do
  end

  describe '#enqueue' do
    let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
    let!(:current_time) { Time.zone.local(2018, 4, 5, 14, 0, 0) }

    before do
      allow(Time).to receive(:now).and_return(current_time)

@ -44,7 +44,7 @@ describe EachBatch do
    end

    it 'allows updating of the yielded relations' do
      time = Time.now
      time = Time.current

      model.each_batch do |relation|
        relation.update_all(updated_at: time)

@ -422,7 +422,7 @@ describe Issuable do

    context 'total_time_spent is updated' do
      before do
        issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.now)
        issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
        issue.save
        expect(Gitlab::HookData::IssuableBuilder)
          .to receive(:new).with(issue).and_return(builder)

@ -572,8 +572,8 @@ describe Issuable do
      second_priority = create(:label, project: project, priority: 2)
      no_priority = create(:label, project: project)

      first_milestone = create(:milestone, project: project, due_date: Time.now)
      second_milestone = create(:milestone, project: project, due_date: Time.now + 1.month)
      first_milestone = create(:milestone, project: project, due_date: Time.current)
      second_milestone = create(:milestone, project: project, due_date: Time.current + 1.month)
      third_milestone = create(:milestone, project: project)

      # The issues here are ordered by label priority, to ensure that we don't

@ -290,13 +290,13 @@ describe Milestone, 'Milestoneish' do
    end

    it 'shows 0 if start_date is a future' do
      milestone = build_stubbed(:milestone, start_date: Time.now + 2.days)
      milestone = build_stubbed(:milestone, start_date: Time.current + 2.days)

      expect(milestone.elapsed_days).to eq(0)
    end

    it 'shows correct amount of days' do
      milestone = build_stubbed(:milestone, start_date: Time.now - 2.days)
      milestone = build_stubbed(:milestone, start_date: Time.current - 2.days)

      expect(milestone.elapsed_days).to eq(2)
    end

@ -536,7 +536,7 @@ describe Discussion, ResolvableDiscussion do

  describe "#last_resolved_note" do
    let(:current_user) { create(:user) }
    let(:time) { Time.now.utc }
    let(:time) { Time.current.utc }

    before do
      Timecop.freeze(time - 1.second) do

@ -91,7 +91,7 @@ describe Sortable do
        Group.all.order_by(order).map(&:name)
      end

      let!(:ref_time) { Time.parse('2018-05-01 00:00:00') }
      let!(:ref_time) { Time.zone.parse('2018-05-01 00:00:00') }
      let!(:group1) { create(:group, name: 'aa', id: 1, created_at: ref_time - 15.seconds, updated_at: ref_time) }
      let!(:group2) { create(:group, name: 'AAA', id: 2, created_at: ref_time - 10.seconds, updated_at: ref_time - 5.seconds) }
      let!(:group3) { create(:group, name: 'BB', id: 3, created_at: ref_time - 5.seconds, updated_at: ref_time - 10.seconds) }