Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-12-03 15:09:46 +00:00
parent f145ef4be7
commit bbd9e2c915
64 changed files with 774 additions and 498 deletions

View file

@ -3,6 +3,8 @@
module Types
module Terraform
class StateVersionType < BaseObject
include ::API::Helpers::RelatedResourcesHelpers
graphql_name 'TerraformStateVersion'
authorize :read_terraform_state
@ -16,11 +18,20 @@ module Types
authorize: :read_user,
description: 'The user that created this version'
field :download_path, GraphQL::STRING_TYPE,
null: true,
description: "URL for downloading the version's JSON file"
field :job, Types::Ci::JobType,
null: true,
authorize: :read_build,
description: 'The job that created this version'
field :serial, GraphQL::INT_TYPE,
null: true,
description: 'Serial number of the version',
method: :version
field :created_at, Types::TimeType,
null: false,
description: 'Timestamp the version was created'
@ -33,6 +44,14 @@ module Types
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.created_by_user_id).find
end
def download_path
expose_path api_v4_projects_terraform_state_versions_path(
id: object.project_id,
name: object.terraform_state.name,
serial: object.version
)
end
def job
Gitlab::Graphql::Loaders::BatchModelLoader.new(::Ci::Build, object.ci_build_id).find
end
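The `download_path` field above is resolved by composing a REST path from the project ID, state name, and version serial. Roughly, and only as an illustration (the literal path segments are an assumption about the v4 Terraform state API rather than something shown in this diff):

```ruby
# Illustrative values; the route helper comes from API::Helpers::RelatedResourcesHelpers.
api_v4_projects_terraform_state_versions_path(id: 1, name: 'production', serial: 3)
# => "/api/v4/projects/1/terraform/state/production/versions/3" (assumed shape)
```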

View file

@ -0,0 +1,164 @@
# frozen_string_literal: true
module HasWikiPageMetaAttributes
extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
CanonicalSlugConflictError = Class.new(ActiveRecord::RecordInvalid)
WikiPageInvalid = Class.new(ArgumentError)
included do
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
validates :title, length: { maximum: 255 }, allow_nil: false
validate :no_two_metarecords_in_same_container_can_have_same_canonical_slug
scope :with_canonical_slug, ->(slug) do
slug_table_name = klass.reflect_on_association(:slugs).table_name
joins(:slugs).where(slug_table_name => { canonical: true, slug: slug })
end
end
class_methods do
# Return the (updated) WikiPage::Meta record for a given wiki page
#
# If none is found, then a new record is created, and its fields are set
# to reflect the wiki_page passed.
#
# @param [String] last_known_slug
# @param [WikiPage] wiki_page
#
# This method raises errors on validation issues.
def find_or_create(last_known_slug, wiki_page)
raise WikiPageInvalid unless wiki_page.valid?
container = wiki_page.wiki.container
known_slugs = [last_known_slug, wiki_page.slug].compact.uniq
raise 'No slugs found! This should not be possible.' if known_slugs.empty?
transaction do
updates = wiki_page_updates(wiki_page)
found = find_by_canonical_slug(known_slugs, container)
meta = found || create!(updates.merge(container_attrs(container)))
meta.update_state(found.nil?, known_slugs, wiki_page, updates)
# We don't need to run validations here, since find_by_canonical_slug
# guarantees that there is no conflict in canonical_slug, and DB
# constraints on title and project_id/group_id enforce our other invariants
# This saves us a query.
meta
end
end
def find_by_canonical_slug(canonical_slug, container)
meta, conflict = with_canonical_slug(canonical_slug)
.where(container_attrs(container))
.limit(2)
if conflict.present?
meta.errors.add(:canonical_slug, 'Duplicate value found')
raise CanonicalSlugConflictError.new(meta)
end
meta
end
private
def wiki_page_updates(wiki_page)
last_commit_date = wiki_page.version_commit_timestamp || Time.now.utc
{
title: wiki_page.title,
created_at: last_commit_date,
updated_at: last_commit_date
}
end
def container_key
raise NotImplementedError
end
def container_attrs(container)
{ container_key => container.id }
end
end
def canonical_slug
strong_memoize(:canonical_slug) { slugs.canonical.take&.slug }
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def canonical_slug=(slug)
return if @canonical_slug == slug
if persisted?
transaction do
slugs.canonical.update_all(canonical: false)
page_slug = slugs.create_with(canonical: true).find_or_create_by(slug: slug)
page_slug.update_columns(canonical: true) unless page_slug.canonical?
end
else
slugs.new(slug: slug, canonical: true)
end
@canonical_slug = slug
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
def update_state(created, known_slugs, wiki_page, updates)
update_wiki_page_attributes(updates)
insert_slugs(known_slugs, created, wiki_page.slug)
self.canonical_slug = wiki_page.slug
end
private
def update_wiki_page_attributes(updates)
# Remove all unnecessary updates:
updates.delete(:updated_at) if updated_at == updates[:updated_at]
updates.delete(:created_at) if created_at <= updates[:created_at]
updates.delete(:title) if title == updates[:title]
update_columns(updates) unless updates.empty?
end
def insert_slugs(strings, is_new, canonical_slug)
creation = Time.current.utc
slug_attrs = strings.map do |slug|
slug_attributes(slug, canonical_slug, is_new, creation)
end
slugs.insert_all(slug_attrs) unless !is_new && slug_attrs.size == 1
@canonical_slug = canonical_slug if is_new || strings.size == 1 # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def slug_attributes(slug, canonical_slug, is_new, creation)
{
slug: slug,
canonical: (is_new && slug == canonical_slug),
created_at: creation,
updated_at: creation
}.merge(slug_meta_attributes)
end
def slug_meta_attributes
{ self.association(:slugs).reflection.foreign_key => id }
end
def no_two_metarecords_in_same_container_can_have_same_canonical_slug
container_id = attributes[self.class.container_key.to_s]
return unless container_id.present? && canonical_slug.present?
offending = self.class.with_canonical_slug(canonical_slug).where(self.class.container_key => container_id)
offending = offending.where.not(id: id) if persisted?
if offending.exists?
errors.add(:canonical_slug, 'each page in a wiki must have a distinct canonical slug')
end
end
end
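For context, a rough sketch of what an including class is expected to provide: the `slugs` association, a container association, and `container_key`. The class below is purely illustrative; the real project-level implementation, `WikiPage::Meta`, is refactored onto this concern later in this commit:

```ruby
# Illustrative only -- not a class that exists in this commit.
class ExampleWikiMeta < ApplicationRecord
  include HasWikiPageMetaAttributes

  self.table_name = 'example_wiki_meta'

  belongs_to :group
  has_many :slugs, class_name: 'ExampleWikiSlug', foreign_key: 'example_wiki_meta_id', inverse_of: :example_wiki_meta

  def self.container_key
    :group_id
  end
end
```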

View file

@ -0,0 +1,25 @@
# frozen_string_literal: true
module HasWikiPageSlugAttributes
extend ActiveSupport::Concern
included do
validates :slug, uniqueness: { scope: meta_foreign_key }
validates :slug, length: { maximum: 2048 }, allow_nil: false
validates :canonical, uniqueness: {
scope: meta_foreign_key,
if: :canonical?,
message: 'Only one slug can be canonical per wiki metadata record'
}
scope :canonical, -> { where(canonical: true) }
def update_columns(attrs = {})
super(attrs.reverse_merge(updated_at: Time.current.utc))
end
end
def self.update_all(attrs = {})
super(attrs.reverse_merge(updated_at: Time.current.utc))
end
end
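Because the `included do` block above references `meta_foreign_key` while defining the validations, an including class must define that class method before the `include`; the `WikiPage::Slug` refactor later in this commit follows exactly that order. A minimal sketch with illustrative names:

```ruby
# Illustrative only -- mirrors the ordering used by WikiPage::Slug below.
class ExampleWikiSlug < ApplicationRecord
  def self.meta_foreign_key
    :example_wiki_meta_id
  end

  include HasWikiPageSlugAttributes

  belongs_to :example_wiki_meta, class_name: 'ExampleWikiMeta', inverse_of: :slugs
end
```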

View file

@ -3,6 +3,8 @@
class GroupImportState < ApplicationRecord
self.primary_key = :group_id
MAX_ERROR_LENGTH = 255
belongs_to :group, inverse_of: :import_state
belongs_to :user, optional: false
@ -30,7 +32,7 @@ class GroupImportState < ApplicationRecord
after_transition any => :failed do |state, transition|
last_error = transition.args.first
state.update_column(:last_error, last_error) if last_error
state.update_column(:last_error, last_error.truncate(MAX_ERROR_LENGTH)) if last_error
end
end
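In short, a failure message longer than `MAX_ERROR_LENGTH` is now truncated instead of breaking the transition. A brief sketch mirroring the spec added later in this commit (the error string is made up):

```ruby
group_import_state = build(:group_import_state, :started)
group_import_state.fail_op('e' * 300)

group_import_state.last_error.length # => 255, capped at MAX_ERROR_LENGTH
```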

View file

@ -2,149 +2,20 @@
class WikiPage
class Meta < ApplicationRecord
include Gitlab::Utils::StrongMemoize
CanonicalSlugConflictError = Class.new(ActiveRecord::RecordInvalid)
WikiPageInvalid = Class.new(ArgumentError)
include HasWikiPageMetaAttributes
self.table_name = 'wiki_page_meta'
belongs_to :project
has_many :slugs, class_name: 'WikiPage::Slug', foreign_key: 'wiki_page_meta_id', inverse_of: :wiki_page_meta
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
validates :title, presence: true
validates :project_id, presence: true
validate :no_two_metarecords_in_same_project_can_have_same_canonical_slug
scope :with_canonical_slug, ->(slug) do
joins(:slugs).where(wiki_page_slugs: { canonical: true, slug: slug })
end
alias_method :resource_parent, :project
class << self
# Return the (updated) WikiPage::Meta record for a given wiki page
#
# If none is found, then a new record is created, and its fields are set
# to reflect the wiki_page passed.
#
# @param [String] last_known_slug
# @param [WikiPage] wiki_page
#
# This method raises errors on validation issues.
def find_or_create(last_known_slug, wiki_page)
raise WikiPageInvalid unless wiki_page.valid?
project = wiki_page.wiki.project
known_slugs = [last_known_slug, wiki_page.slug].compact.uniq
raise 'No slugs found! This should not be possible.' if known_slugs.empty?
transaction do
updates = wiki_page_updates(wiki_page)
found = find_by_canonical_slug(known_slugs, project)
meta = found || create!(updates.merge(project_id: project.id))
meta.update_state(found.nil?, known_slugs, wiki_page, updates)
# We don't need to run validations here, since find_by_canonical_slug
# guarantees that there is no conflict in canonical_slug, and DB
# constraints on title and project_id enforce our other invariants
# This saves us a query.
meta
end
end
def find_by_canonical_slug(canonical_slug, project)
meta, conflict = with_canonical_slug(canonical_slug)
.where(project_id: project.id)
.limit(2)
if conflict.present?
meta.errors.add(:canonical_slug, 'Duplicate value found')
raise CanonicalSlugConflictError.new(meta)
end
meta
end
private
def wiki_page_updates(wiki_page)
last_commit_date = wiki_page.version_commit_timestamp || Time.now.utc
{
title: wiki_page.title,
created_at: last_commit_date,
updated_at: last_commit_date
}
end
end
def canonical_slug
strong_memoize(:canonical_slug) { slugs.canonical.first&.slug }
end
def canonical_slug=(slug)
return if @canonical_slug == slug
if persisted?
transaction do
slugs.canonical.update_all(canonical: false)
page_slug = slugs.create_with(canonical: true).find_or_create_by(slug: slug)
page_slug.update_columns(canonical: true) unless page_slug.canonical?
end
else
slugs.new(slug: slug, canonical: true)
end
@canonical_slug = slug
end
def update_state(created, known_slugs, wiki_page, updates)
update_wiki_page_attributes(updates)
insert_slugs(known_slugs, created, wiki_page.slug)
self.canonical_slug = wiki_page.slug
end
private
def update_wiki_page_attributes(updates)
# Remove all unnecessary updates:
updates.delete(:updated_at) if updated_at == updates[:updated_at]
updates.delete(:created_at) if created_at <= updates[:created_at]
updates.delete(:title) if title == updates[:title]
update_columns(updates) unless updates.empty?
end
def insert_slugs(strings, is_new, canonical_slug)
creation = Time.current.utc
slug_attrs = strings.map do |slug|
{
wiki_page_meta_id: id,
slug: slug,
canonical: (is_new && slug == canonical_slug),
created_at: creation,
updated_at: creation
}
end
slugs.insert_all(slug_attrs) unless !is_new && slug_attrs.size == 1
@canonical_slug = canonical_slug if is_new || strings.size == 1
end
def no_two_metarecords_in_same_project_can_have_same_canonical_slug
return unless project_id.present? && canonical_slug.present?
offending = self.class.with_canonical_slug(canonical_slug).where(project_id: project_id)
offending = offending.where.not(id: id) if persisted?
if offending.exists?
errors.add(:canonical_slug, 'each page in a wiki must have a distinct canonical slug')
end
def self.container_key
:project_id
end
end
end

View file

@ -2,25 +2,14 @@
class WikiPage
class Slug < ApplicationRecord
def self.meta_foreign_key
:wiki_page_meta_id
end
include HasWikiPageSlugAttributes
self.table_name = 'wiki_page_slugs'
belongs_to :wiki_page_meta, class_name: 'WikiPage::Meta', inverse_of: :slugs
validates :slug, presence: true, uniqueness: { scope: :wiki_page_meta_id }
validates :canonical, uniqueness: {
scope: :wiki_page_meta_id,
if: :canonical?,
message: 'Only one slug can be canonical per wiki metadata record'
}
scope :canonical, -> { where(canonical: true) }
def update_columns(attrs = {})
super(attrs.reverse_merge(updated_at: Time.current.utc))
end
def self.update_all(attrs = {})
super(attrs.reverse_merge(updated_at: Time.current.utc))
end
end
end

View file

@ -0,0 +1,5 @@
{
"type": "object",
"description": "The schema for vulnerability finding details",
"additionalProperties": false
}

View file

@ -0,0 +1,5 @@
---
title: Add details column to vulnerability findings table
merge_request: 49005
author:
type: added

View file

@ -0,0 +1,5 @@
---
title: Avoid branch name checking when creating a new snippet
merge_request: 48995
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Add additional fields to GraphQl terraform state version
merge_request: 48411
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Add Attributes cleaner to Group Migration
merge_request: 48374
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Fix failed group imports getting stuck by long error messages
merge_request: 48989
author:
type: fixed

View file

@ -0,0 +1,8 @@
---
name: postgres_hll_batch_counting
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48233
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/285485
milestone: '13.7'
type: development
group: group::product analytics
default_enabled: false

View file

@ -2,7 +2,6 @@
unless Gitlab::Runtime.sidekiq?
Rails.application.reloader.to_prepare do
filename = File.join(Rails.root, 'log', "#{Rails.env}_json.log")
db_counter = Gitlab::Metrics::Subscribers::ActiveRecord
Rails.application.configure do
config.lograge.enabled = true
@ -17,7 +16,6 @@ unless Gitlab::Runtime.sidekiq?
data[:db_duration_s] = Gitlab::Utils.ms_to_round_sec(data.delete(:db)) if data[:db]
data[:view_duration_s] = Gitlab::Utils.ms_to_round_sec(data.delete(:view)) if data[:view]
data[:duration_s] = Gitlab::Utils.ms_to_round_sec(data.delete(:duration)) if data[:duration]
data.merge!(db_counter.db_counter_payload)
# Remove empty hashes to prevent type mismatches
# These are set to empty hashes in Lograge's ActionCable subscriber

View file

@ -150,12 +150,6 @@ if Gitlab::Metrics.enabled? && !Rails.env.test? && !(Rails.env.development? && d
config.middleware.use(Gitlab::Metrics::ElasticsearchRackMiddleware)
end
Sidekiq.configure_server do |config|
config.server_middleware do |chain|
chain.add Gitlab::Metrics::SidekiqMiddleware
end
end
# This instruments all methods residing in app/models that (appear to) use any
# of the ActiveRecord methods. This has to take place _after_ initializing as
# for some unknown reason calling eager_load! earlier breaks Devise.

View file

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddDetailsToVulnerabilityFindings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
add_column :vulnerability_occurrences, :details, :jsonb, default: {}, null: false
end
end
def down
with_lock_retries do
remove_column :vulnerability_occurrences, :details
end
end
end

View file

@ -0,0 +1 @@
af9d8c7cda142e2a96a289ebd7afef73367bd544a60794c9e0414c7b82bef8a2

View file

@ -17539,7 +17539,8 @@ CREATE TABLE vulnerability_occurrences (
name character varying NOT NULL,
metadata_version character varying NOT NULL,
raw_metadata text NOT NULL,
vulnerability_id bigint
vulnerability_id bigint,
details jsonb DEFAULT '{}'::jsonb NOT NULL
);
CREATE SEQUENCE vulnerability_occurrences_id_seq

View file

@ -21,10 +21,9 @@ GitLab has several features based on receiving incoming emails:
## Requirements
NOTE: **Note:**
It is **not** recommended to use an email address that receives or will receive any
It is **not** recommended to use an email address that receives any
messages not intended for the GitLab instance. Any incoming emails not intended
for GitLab will receive a reject notice.
for GitLab receive a reject notice.
Handling incoming emails requires an [IMAP](https://en.wikipedia.org/wiki/Internet_Message_Access_Protocol)-enabled
email account. GitLab requires one of the following three strategies:
@ -38,7 +37,7 @@ Let's walk through each of these options.
### Email sub-addressing
[Sub-addressing](https://en.wikipedia.org/wiki/Email_address#Sub-addressing) is
a mail server feature where any email to `user+arbitrary_tag@example.com` will end up
a mail server feature where any email to `user+arbitrary_tag@example.com` ends up
in the mailbox for `user@example.com` . It is supported by providers such as
Gmail, Google Apps, Yahoo! Mail, Outlook.com, and iCloud, as well as the
[Postfix mail server](reply_by_email_postfix_setup.md), which you can run on-premises.
@ -117,9 +116,9 @@ CAUTION: **Caution:**
Use a mail server that has been configured to reduce
spam.
A Postfix mail server that is running on a default configuration, for example,
can result in abuse. All messages received on the configured mailbox will be processed
and messages that are not intended for the GitLab instance will receive a reject notice.
If the sender's address is spoofed, the reject notice will be delivered to the spoofed
can result in abuse. All messages received on the configured mailbox are processed
and messages that are not intended for the GitLab instance receive a reject notice.
If the sender's address is spoofed, the reject notice is delivered to the spoofed
`FROM` address, which can cause the mail server's IP or domain to appear on a block
list.
@ -453,8 +452,8 @@ at the organization level in Office 365. This allows all mailboxes in the organi
to receive sub-addressed mail:
NOTE: **Note:**
This series of commands will enable sub-addressing at the organization
level in Office 365. This will allow all mailboxes in the organization
This series of commands enables sub-addressing at the organization
level in Office 365. This allows all mailboxes in the organization
to receive sub-addressed mail.
```powershell

View file

@ -15,34 +15,40 @@ replying to notification emails.
Make sure [incoming email](incoming_email.md) is set up.
## How it works?
## How it works
### 1. GitLab sends a notification email
Replying by email happens in three steps:
1. GitLab sends a notification email.
1. You reply to the notification email.
1. GitLab receives your reply to the notification email.
### GitLab sends a notification email
When GitLab sends a notification and Reply by email is enabled, the `Reply-To`
header is set to the address defined in your GitLab configuration, with the
`%{key}` placeholder (if present) replaced by a specific "reply key". In
addition, this "reply key" is also added to the `References` header.
### 2. You reply to the notification email
### You reply to the notification email
When you reply to the notification email, your email client will:
When you reply to the notification email, your email client:
- send the email to the `Reply-To` address it got from the notification email
- set the `In-Reply-To` header to the value of the `Message-ID` header from the
- sends the email to the `Reply-To` address it got from the notification email
- sets the `In-Reply-To` header to the value of the `Message-ID` header from the
notification email
- set the `References` header to the value of the `Message-ID` plus the value of
- sets the `References` header to the value of the `Message-ID` plus the value of
the notification email's `References` header.
### 3. GitLab receives your reply to the notification email
### GitLab receives your reply to the notification email
When GitLab receives your reply, it will look for the "reply key" in the
When GitLab receives your reply, it looks for the "reply key" in the
following headers, in this order:
1. the `To` header
1. the `References` header
If it finds a reply key, it will be able to leave your reply as a comment on
If it finds a reply key, it leaves your reply as a comment on
the entity the notification was about (issue, merge request, commit...).
For more details about the `Message-ID`, `In-Reply-To`, and `References headers`,
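A minimal sketch of the lookup order described above, assuming a `Mail::Message`-like object and a simplified 32-character hex reply-key format (this is not GitLab's actual implementation):

```ruby
def find_reply_key(mail)
  # Check the To header first, then References, as described above.
  [Array(mail.to), Array(mail.references)].flatten.compact.each do |value|
    key = value.to_s[/\b\h{32}\b/] # assumed key format
    return key if key
  end

  nil
end
```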

View file

@ -1016,6 +1016,22 @@ This will also refresh the cached usage ping displayed in the admin area
Gitlab::UsageData.to_json(force_refresh: true)
```
#### Generate and print
Generates usage ping data in JSON format.
```shell
rake gitlab:usage_data:generate
```
#### Generate and send usage ping
Generates usage ping data and sends it to the Versions Application, printing the result.
```shell
rake gitlab:usage_data:generate_and_send
```
## Elasticsearch
### Configuration attributes

View file

@ -1230,12 +1230,12 @@ Represents a project or group board
"""
type Board {
"""
The board assignee.
The board assignee
"""
assignee: User
"""
Epics associated with board issues.
Epics associated with board issues
"""
epics(
"""
@ -1265,12 +1265,12 @@ type Board {
): BoardEpicConnection
"""
Whether or not backlog list is hidden.
Whether or not backlog list is hidden
"""
hideBacklogList: Boolean
"""
Whether or not closed list is hidden.
Whether or not closed list is hidden
"""
hideClosedList: Boolean
@ -1340,7 +1340,7 @@ type Board {
): BoardListConnection
"""
The board milestone.
The board milestone
"""
milestone: Milestone
@ -1350,7 +1350,7 @@ type Board {
name: String
"""
Weight of the board.
Weight of the board
"""
weight: Int
}
@ -21691,6 +21691,11 @@ type TerraformStateVersion {
"""
createdByUser: User
"""
URL for downloading the version's JSON file
"""
downloadPath: String
"""
ID of the Terraform state version
"""
@ -21701,6 +21706,11 @@ type TerraformStateVersion {
"""
job: CiJob
"""
Serial number of the version
"""
serial: Int
"""
Timestamp the version was updated
"""
@ -22648,12 +22658,12 @@ input UpdateBoardInput {
clientMutationId: String
"""
Whether or not backlog list is hidden.
Whether or not backlog list is hidden
"""
hideBacklogList: Boolean
"""
Whether or not closed list is hidden.
Whether or not closed list is hidden
"""
hideClosedList: Boolean

View file

@ -3305,7 +3305,7 @@
"fields": [
{
"name": "assignee",
"description": "The board assignee.",
"description": "The board assignee",
"args": [
],
@ -3319,7 +3319,7 @@
},
{
"name": "epics",
"description": "Epics associated with board issues.",
"description": "Epics associated with board issues",
"args": [
{
"name": "issueFilters",
@ -3382,7 +3382,7 @@
},
{
"name": "hideBacklogList",
"description": "Whether or not backlog list is hidden.",
"description": "Whether or not backlog list is hidden",
"args": [
],
@ -3396,7 +3396,7 @@
},
{
"name": "hideClosedList",
"description": "Whether or not closed list is hidden.",
"description": "Whether or not closed list is hidden",
"args": [
],
@ -3554,7 +3554,7 @@
},
{
"name": "milestone",
"description": "The board milestone.",
"description": "The board milestone",
"args": [
],
@ -3582,7 +3582,7 @@
},
{
"name": "weight",
"description": "Weight of the board.",
"description": "Weight of the board",
"args": [
],
@ -63178,6 +63178,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "downloadPath",
"description": "URL for downloading the version's JSON file",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "ID of the Terraform state version",
@ -63210,6 +63224,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "serial",
"description": "Serial number of the version",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedAt",
"description": "Timestamp the version was updated",
@ -66098,7 +66126,7 @@
},
{
"name": "hideBacklogList",
"description": "Whether or not backlog list is hidden.",
"description": "Whether or not backlog list is hidden",
"type": {
"kind": "SCALAR",
"name": "Boolean",
@ -66108,7 +66136,7 @@
},
{
"name": "hideClosedList",
"description": "Whether or not closed list is hidden.",
"description": "Whether or not closed list is hidden",
"type": {
"kind": "SCALAR",
"name": "Boolean",

View file

@ -236,16 +236,16 @@ Represents a project or group board.
| Field | Type | Description |
| ----- | ---- | ----------- |
| `assignee` | User | The board assignee. |
| `epics` | BoardEpicConnection | Epics associated with board issues. |
| `hideBacklogList` | Boolean | Whether or not backlog list is hidden. |
| `hideClosedList` | Boolean | Whether or not closed list is hidden. |
| `assignee` | User | The board assignee |
| `epics` | BoardEpicConnection | Epics associated with board issues |
| `hideBacklogList` | Boolean | Whether or not backlog list is hidden |
| `hideClosedList` | Boolean | Whether or not closed list is hidden |
| `id` | ID! | ID (global ID) of the board |
| `labels` | LabelConnection | Labels of the board |
| `lists` | BoardListConnection | Lists of the board |
| `milestone` | Milestone | The board milestone. |
| `milestone` | Milestone | The board milestone |
| `name` | String | Name of the board |
| `weight` | Int | Weight of the board. |
| `weight` | Int | Weight of the board |
### BoardEpic
@ -3222,8 +3222,10 @@ Autogenerated return type of TerraformStateUnlock.
| ----- | ---- | ----------- |
| `createdAt` | Time! | Timestamp the version was created |
| `createdByUser` | User | The user that created this version |
| `downloadPath` | String | URL for downloading the version's JSON file |
| `id` | ID! | ID of the Terraform state version |
| `job` | CiJob | The job that created this version |
| `serial` | Int | Serial number of the version |
| `updatedAt` | Time! | Timestamp the version was updated |
### TerraformStateVersionRegistry

View file

@ -9,7 +9,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Every API call to group boards must be authenticated.
If a user is not a member of a group and the group is private, a `GET`
request will result in `404` status code.
request results in `404` status code.
## List all group issue boards in a group
@ -76,7 +76,7 @@ Example response:
]
```
Users on GitLab [Premium, Silver, or higher](https://about.gitlab.com/pricing/) will see
Users on GitLab [Premium, Silver, or higher](https://about.gitlab.com/pricing/) see
different parameters, due to the ability to have multiple group boards.
Example response:
@ -192,8 +192,8 @@ Example response:
}
```
Users on GitLab [Premium, Silver, or higher](https://about.gitlab.com/pricing/) will see
different parameters, due to the ability to have multiple group issue boards.s
Users on GitLab [Premium, Silver, or higher](https://about.gitlab.com/pricing/) see
different parameters, due to the ability to have multiple group issue boards.
Example response:

View file

@ -692,7 +692,7 @@ to include the file.
sub_path = "config.json"
```
### Option 3: Use `DOCKER_ATUH_CONFIG`
### Option 3: Use `DOCKER_AUTH_CONFIG`
If you already have
[`DOCKER_AUTH_CONFIG`](using_docker_images.md#determining-your-docker_auth_config-data)

View file

@ -1715,6 +1715,13 @@ the blockquote to use a bulleted list:
> - Enabled by default in GitLab 11.4.
```
If a feature is moved to another tier:
```markdown
> - [Moved](<link-to-issue>) from [GitLab Premium](https://about.gitlab.com/pricing/) to [GitLab Starter](https://about.gitlab.com/pricing/) in 11.8.
> - [Moved](<link-to-issue>) from [GitLab Starter](https://about.gitlab.com/pricing/) to GitLab Core in 12.0.
```
If a feature is deprecated, include a link to a replacement (when available):
```markdown

View file

@ -116,7 +116,7 @@ that of Issues and Merge Requests) based on following parameters:
![epics search](img/epics_search.png)
To search, go to the list of epics and select the field **Search or filter results**.
It will display a dropdown menu, from which you can add an author. You can also enter plain
It displays a dropdown menu, from which you can add an author. You can also enter plain
text to search by epic title or description. When done, press <kbd>Enter</kbd> on your
keyboard to filter the list.
@ -197,7 +197,7 @@ To create an issue from an epic:
### Remove an issue from an epic
You can remove issues from an epic when you're on the epic's details page.
After you remove an issue from an epic, the issue will no longer be associated with this epic.
After you remove an issue from an epic, the issue is no longer associated with this epic.
To remove an issue from an epic:
@ -239,8 +239,8 @@ To move an issue to another epic:
If you have the necessary [permissions](../../permissions.md) to close an issue and create an
epic in the immediate parent group, you can promote an issue to an epic with the `/promote`
[quick action](../../project/quick_actions.md#quick-actions-for-issues-merge-requests-and-epics).
Only issues from projects that are in groups can be promoted. When attempting to promote a confidential
issue, a warning will display. Promoting a confidential issue to an epic will make all information
Only issues from projects that are in groups can be promoted. When you attempt to promote a confidential
issue, a warning is displayed. Promoting a confidential issue to an epic makes all information
related to the issue public as epics are public to group members.
When the quick action is executed:
@ -248,7 +248,7 @@ When the quick action is executed:
- An epic is created in the same group as the project of the issue.
- Subscribers of the issue are notified that the epic was created.
The following issue metadata will be copied to the epic:
The following issue metadata is copied to the epic:
- Title, description, activity/comment thread.
- Upvotes/downvotes.

View file

@ -35,6 +35,11 @@ is automatically merged. When the merge request is updated with
new commits, the automatic merge is canceled to allow the new
changes to be reviewed.
By default, all threads must be resolved before you see the **Merge when
pipeline succeeds** button. If someone adds a new comment after
the button is selected, but before the jobs in the CI pipeline are
complete, the merge is blocked until you resolve all existing threads.
## Only allow merge requests to be merged if the pipeline succeeds
You can prevent merge requests from being merged if their pipeline did not succeed

View file

@ -0,0 +1,39 @@
# frozen_string_literal: true
module BulkImports
module Common
module Transformers
class ProhibitedAttributesTransformer
PROHIBITED_REFERENCES = Regexp.union(
/\Acached_markdown_version\Z/,
/\Aid\Z/,
/_id\Z/,
/_ids\Z/,
/_html\Z/,
/attributes/,
/\Aremote_\w+_(url|urls|request_header)\Z/ # carrierwave automatically creates these attribute methods for uploads
).freeze
def initialize(options = {})
@options = options
end
def transform(context, data)
data.each_with_object({}) do |(key, value), result|
prohibited = prohibited_key?(key)
unless prohibited
result[key] = value.is_a?(Hash) ? transform(context, value) : value
end
end
end
private
def prohibited_key?(key)
key.to_s =~ PROHIBITED_REFERENCES
end
end
end
end
end
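A quick illustration of the filter (the hash is made up; a fuller set of cases is covered by the spec added later in this commit):

```ruby
transformer = BulkImports::Common::Transformers::ProhibitedAttributesTransformer.new

transformer.transform(nil, { 'id' => 1, 'title' => 'My group', 'custom_attributes' => 'x' })
# => { 'title' => 'My group' }
#    'id' matches /\Aid\Z/ and 'custom_attributes' matches /attributes/, so both are dropped
```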

View file

@ -12,6 +12,7 @@ module BulkImports
transformer Common::Transformers::HashKeyDigger, key_path: %w[data group]
transformer Common::Transformers::UnderscorifyKeysTransformer
transformer Common::Transformers::ProhibitedAttributesTransformer
transformer Groups::Transformers::GroupAttributesTransformer
loader Groups::Loaders::GroupLoader

View file

@ -7,6 +7,7 @@ module BulkImports
include Pipeline
extractor BulkImports::Groups::Extractors::SubgroupsExtractor
transformer Common::Transformers::ProhibitedAttributesTransformer
transformer BulkImports::Groups::Transformers::SubgroupToEntityTransformer
loader BulkImports::Common::Loaders::EntityLoader
end

View file

@ -10,12 +10,13 @@ module Gitlab
ATTRIBUTES = %i[oldrev newrev ref branch_name tag_name logger].freeze
attr_reader(*ATTRIBUTES)
def initialize(change, default_branch:, logger:)
def initialize(change, default_branch:, root_ref:, logger:)
@oldrev, @newrev, @ref = change.values_at(:oldrev, :newrev, :ref)
@branch_name = Gitlab::Git.branch_name(@ref)
@tag_name = Gitlab::Git.tag_name(@ref)
@default_branch = default_branch
@root_ref = root_ref
@logger = logger
@logger.append_message("Running checks for ref: #{@branch_name || @tag_name}")
end
@ -34,8 +35,13 @@ module Gitlab
private
# If `root_ref` is not present, this is the first commit to the
# repository and the default branch is about to be created.
# We allow that first branch creation regardless of its name because
# it can even be an imported snippet from an instance with a different
# default branch.
def creation?
@branch_name != @default_branch && super
super && @root_ref && (@branch_name != @default_branch)
end
end
end

View file

@ -28,13 +28,14 @@ module Gitlab
# for the given implementation no higher error value was reported (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45673#accuracy-estimation) than 5.3%;
# in most cases this value is lower. However, if the exact value is necessary, other tools have to be used.
class PostgresHllBatchDistinctCounter
ERROR_RATE = 4.9 # max encountered empirical error rate, used in tests
FALLBACK = -1
MIN_REQUIRED_BATCH_SIZE = 1_250
MAX_ALLOWED_LOOPS = 10_000
MIN_REQUIRED_BATCH_SIZE = 750
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
MAX_DATA_VOLUME = 4_000_000_000
# Each query should take < 500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
DEFAULT_BATCH_SIZE = 100_000
DEFAULT_BATCH_SIZE = 10_000
BIT_31_MASK = "B'0#{'1' * 31}'"
BIT_9_MASK = "B'#{'0' * 23}#{'1' * 9}'"
@ -49,7 +50,7 @@ module Gitlab
SELECT (attr_hash_32_bits & #{BIT_9_MASK})::int AS bucket_num,
(31 - floor(log(2, min((attr_hash_32_bits & #{BIT_31_MASK})::int))))::int as bucket_hash
FROM hashed_attributes
GROUP BY 1 ORDER BY 1
GROUP BY 1
SQL
TOTAL_BUCKETS_NUMBER = 512
@ -61,7 +62,7 @@ module Gitlab
def unwanted_configuration?(finish, batch_size, start)
batch_size <= MIN_REQUIRED_BATCH_SIZE ||
(finish - start) / batch_size >= MAX_ALLOWED_LOOPS ||
(finish - start) >= MAX_DATA_VOLUME ||
start > finish
end
@ -101,7 +102,7 @@ module Gitlab
num_uniques = (
((TOTAL_BUCKETS_NUMBER**2) * (0.7213 / (1 + 1.079 / TOTAL_BUCKETS_NUMBER))) /
(num_zero_buckets + hll_blob.values.sum { |bucket_hash, _| 2**(-1 * bucket_hash)} )
(num_zero_buckets + hll_blob.values.sum { |bucket_hash| 2**(-1 * bucket_hash)} )
).to_i
if num_zero_buckets > 0 && num_uniques < 2.5 * TOTAL_BUCKETS_NUMBER
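A hedged usage sketch of the counter, using the model and column from the spec further down; the result is an estimate, empirically within about `ERROR_RATE` (4.9%) of the exact value:

```ruby
Gitlab::Database::PostgresHllBatchDistinctCounter
  .new(Issue, :author_id)
  .estimate_distinct_count(batch_size: 10_000)
# => Integer estimate of distinct author_id values,
#    or FALLBACK (-1) for an unwanted configuration such as start > finish
```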

View file

@ -83,6 +83,9 @@ module Gitlab
},
remove_known_trial_form_fields: {
tracking_category: 'Growth::Conversion::Experiment::RemoveKnownTrialFormFields'
},
trimmed_skip_trial_copy: {
tracking_category: 'Growth::Conversion::Experiment::TrimmedSkipTrialCopy'
}
}.freeze

View file

@ -114,7 +114,7 @@ module Gitlab
override :check_single_change_access
def check_single_change_access(change, _skip_lfs_integrity_check: false)
Checks::SnippetCheck.new(change, default_branch: snippet.default_branch, logger: logger).validate!
Checks::SnippetCheck.new(change, default_branch: snippet.default_branch, root_ref: snippet.repository.root_ref, logger: logger).validate!
Checks::PushFileCountCheck.new(change, repository: repository, limit: Snippet.max_file_limit, logger: logger).validate!
rescue Checks::TimedLogger::TimeoutError
raise TimeoutError, logger.full_message

View file

@ -13,7 +13,8 @@ module Gitlab
:rugged_duration_s,
:elasticsearch_calls,
:elasticsearch_duration_s,
*::Gitlab::Instrumentation::Redis.known_payload_keys]
*::Gitlab::Instrumentation::Redis.known_payload_keys,
*::Gitlab::Metrics::Subscribers::ActiveRecord::DB_COUNTERS]
end
def add_instrumentation_data(payload)
@ -22,6 +23,7 @@ module Gitlab
instrument_redis(payload)
instrument_elasticsearch(payload)
instrument_throttle(payload)
instrument_active_record(payload)
end
def instrument_gitaly(payload)
@ -62,6 +64,12 @@ module Gitlab
payload[:throttle_safelist] = safelist if safelist.present?
end
def instrument_active_record(payload)
db_counters = ::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload
payload.merge!(db_counters)
end
# Returns the queuing duration for a Sidekiq job in seconds, as a float, if the
# `enqueued_at` field or `created_at` field is available.
#
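Roughly, the effect of the new `instrument_active_record` step on a job payload; the key names come from `DB_COUNTERS`, and the zero values match the 'adds only DB counts by default' spec later in this commit:

```ruby
payload = {}
Gitlab::InstrumentationHelper.add_instrumentation_data(payload)

payload # => { db_count: 0, db_write_count: 0, db_cached_count: 0 } when nothing else was instrumented
```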

View file

@ -1,16 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Metrics
class BackgroundTransaction < Transaction
def initialize(worker_class)
super()
@worker_class = worker_class
end
def labels
{ controller: @worker_class.name, action: 'perform', feature_category: @worker_class.try(:get_feature_category).to_s }
end
end
end
end

View file

@ -1,35 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Metrics
# Sidekiq middleware for tracking jobs.
#
# This middleware is intended to be used as a server-side middleware.
class SidekiqMiddleware
def call(worker, payload, queue)
trans = BackgroundTransaction.new(worker.class)
begin
# Old gitlad-shell messages don't provide enqueued_at/created_at attributes
enqueued_at = payload['enqueued_at'] || payload['created_at'] || 0
trans.set(:gitlab_transaction_sidekiq_queue_duration_total, Time.current.to_f - enqueued_at) do
multiprocess_mode :livesum
end
trans.run { yield }
rescue Exception => error # rubocop: disable Lint/RescueException
trans.add_event(:sidekiq_exception)
raise error
ensure
add_info_to_payload(payload, trans)
end
end
private
def add_info_to_payload(payload, trans)
payload.merge!(::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload)
end
end
end
end

View file

@ -16,16 +16,14 @@ module Gitlab
# using a connection.
Thread.current[:uses_db_connection] = true
return unless current_transaction
payload = event.payload
return if payload[:name] == 'SCHEMA' || IGNORABLE_SQL.include?(payload[:sql])
current_transaction.observe(:gitlab_sql_duration_seconds, event.duration / 1000.0) do
increment_db_counters(payload)
current_transaction&.observe(:gitlab_sql_duration_seconds, event.duration / 1000.0) do
buckets [0.05, 0.1, 0.25]
end
increment_db_counters(payload)
end
def self.db_counter_payload
@ -53,7 +51,7 @@ module Gitlab
end
def increment(counter)
current_transaction.increment("gitlab_transaction_#{counter}_total".to_sym, 1)
current_transaction&.increment("gitlab_transaction_#{counter}_total".to_sym, 1)
if Gitlab::SafeRequestStore.active?
Gitlab::SafeRequestStore[counter] = Gitlab::SafeRequestStore[counter].to_i + 1

View file

@ -25,6 +25,13 @@ module Gitlab
relation.select(relation.all.table[column].sum).to_sql
end
# For the estimated distinct count, use the exact query instead of the HLL
# buckets query, because the buckets query can't produce an estimate without
# the supplementary Ruby code present in Gitlab::Database::PostgresHllBatchDistinctCounter
def estimate_batch_distinct_count(relation, column = nil, *rest)
raw_sql(relation, column, :distinct)
end
private
def raw_sql(relation, column, distinct = nil)
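In other words, the SQL-dump variant returns the exact `COUNT(DISTINCT ...)` query as a string rather than running the HLL estimation. A rough sketch, where the relation, column, and exact SQL text are assumptions:

```ruby
Gitlab::UsageDataQueries.estimate_batch_distinct_count(Issue, :author_id)
# => something like "SELECT COUNT(DISTINCT \"issues\".\"author_id\") FROM \"issues\""
```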

View file

@ -38,6 +38,7 @@ module Gitlab
extend self
FALLBACK = -1
DISTRIBUTED_HLL_FALLBACK = -2
def count(relation, column = nil, batch: true, batch_size: nil, start: nil, finish: nil)
if batch
@ -59,6 +60,17 @@ module Gitlab
FALLBACK
end
def estimate_batch_distinct_count(relation, column = nil, batch_size: nil, start: nil, finish: nil)
Gitlab::Database::PostgresHllBatchDistinctCounter.new(relation, column).estimate_distinct_count(batch_size: batch_size, start: start, finish: finish)
rescue ActiveRecord::StatementInvalid
FALLBACK
# catch all rescue should be removed as a part of feature flag rollout issue
# https://gitlab.com/gitlab-org/gitlab/-/issues/285485
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
DISTRIBUTED_HLL_FALLBACK
end
def sum(relation, column, batch_size: nil, start: nil, finish: nil)
Gitlab::Database::BatchCount.batch_sum(relation, column, batch_size: batch_size, start: start, finish: finish)
rescue ActiveRecord::StatementInvalid

View file

@ -9,5 +9,16 @@ namespace :gitlab do
task dump_sql_in_json: :environment do
puts Gitlab::Json.pretty_generate(Gitlab::UsageDataQueries.uncached_data)
end
desc 'GitLab | UsageData | Generate usage ping in JSON'
task generate: :environment do
puts Gitlab::UsageData.to_json(force_refresh: true)
end
desc 'GitLab | UsageData | Generate usage ping and send it to Versions Application'
task generate_and_send: :environment do
result = SubmitUsagePingService.new.execute
puts result.inspect
end
end
end

View file

@ -28903,6 +28903,9 @@ msgstr ""
msgid "Trials|Go back to GitLab"
msgstr ""
msgid "Trials|Skip Trial"
msgstr ""
msgid "Trials|Skip Trial (Continue with Free Account)"
msgstr ""

View file

@ -43,7 +43,7 @@
"@babel/preset-env": "^7.10.1",
"@gitlab/at.js": "1.5.5",
"@gitlab/svgs": "1.175.0",
"@gitlab/ui": "24.3.2",
"@gitlab/ui": "24.4.0",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3-3",
"@rails/ujs": "^6.0.3-2",

View file

@ -7,13 +7,15 @@ RSpec.describe GitlabSchema.types['TerraformStateVersion'] do
it { expect(described_class).to require_graphql_authorizations(:read_terraform_state) }
describe 'fields' do
let(:fields) { %i[id created_by_user job created_at updated_at] }
let(:fields) { %i[id created_by_user job download_path serial created_at updated_at] }
it { expect(described_class).to have_graphql_fields(fields) }
it { expect(described_class.fields['id'].type).to be_non_null }
it { expect(described_class.fields['createdByUser'].type).not_to be_non_null }
it { expect(described_class.fields['job'].type).not_to be_non_null }
it { expect(described_class.fields['downloadPath'].type).not_to be_non_null }
it { expect(described_class.fields['serial'].type).not_to be_non_null }
it { expect(described_class.fields['createdAt'].type).to be_non_null }
it { expect(described_class.fields['updatedAt'].type).to be_non_null }
end

View file

@ -153,32 +153,22 @@ RSpec.describe 'lograge', type: :request do
end
end
context 'with transaction' do
let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
before do
allow(Gitlab::Metrics::Transaction).to receive(:current).and_return(transaction)
end
context 'with db payload' do
context 'when RequestStore is enabled', :request_store do
context 'with db payload' do
it 'includes db counters', :request_store do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
subscriber.process_action(event)
it 'includes db counters' do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
subscriber.process_action(event)
expect(log_data).to include("db_count" => 1, "db_write_count" => 0, "db_cached_count" => 0)
end
expect(log_data).to include("db_count" => a_value >= 1, "db_write_count" => 0, "db_cached_count" => 0)
end
end
context 'when RequestStore is disabled' do
context 'with db payload' do
it 'does not include db counters' do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
subscriber.process_action(event)
it 'does not include db counters' do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
subscriber.process_action(event)
expect(log_data).not_to include("db_count" => 1, "db_write_count" => 0, "db_cached_count" => 0)
end
expect(log_data).not_to include("db_count", "db_write_count", "db_cached_count")
end
end
end

View file

@ -0,0 +1,72 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Common::Transformers::ProhibitedAttributesTransformer do
describe '#transform' do
let_it_be(:hash) do
{
'id' => 101,
'service_id' => 99,
'moved_to_id' => 99,
'namespace_id' => 99,
'ci_id' => 99,
'random_project_id' => 99,
'random_id' => 99,
'milestone_id' => 99,
'project_id' => 99,
'user_id' => 99,
'random_id_in_the_middle' => 99,
'notid' => 99,
'import_source' => 'test',
'import_type' => 'test',
'non_existent_attr' => 'test',
'some_html' => '<p>dodgy html</p>',
'legit_html' => '<p>legit html</p>',
'_html' => '<p>perfectly ordinary html</p>',
'cached_markdown_version' => 12345,
'custom_attributes' => 'test',
'some_attributes_metadata' => 'test',
'group_id' => 99,
'commit_id' => 99,
'issue_ids' => [1, 2, 3],
'merge_request_ids' => [1, 2, 3],
'note_ids' => [1, 2, 3],
'remote_attachment_url' => 'http://something.dodgy',
'remote_attachment_request_header' => 'bad value',
'remote_attachment_urls' => %w(http://something.dodgy http://something.okay),
'attributes' => {
'issue_ids' => [1, 2, 3],
'merge_request_ids' => [1, 2, 3],
'note_ids' => [1, 2, 3]
},
'variables_attributes' => {
'id' => 1
},
'attr_with_nested_attrs' => {
'nested_id' => 1,
'nested_attr' => 2
}
}
end
let(:expected_hash) do
{
'random_id_in_the_middle' => 99,
'notid' => 99,
'import_source' => 'test',
'import_type' => 'test',
'non_existent_attr' => 'test',
'attr_with_nested_attrs' => {
'nested_attr' => 2
}
}
end
it 'removes prohibited attributes' do
transformed_hash = subject.transform(nil, hash)
expect(transformed_hash).to eq(expected_hash)
end
end
end

View file

@ -91,6 +91,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::HashKeyDigger, options: { key_path: %w[data group] } },
{ klass: BulkImports::Common::Transformers::UnderscorifyKeysTransformer, options: nil },
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Groups::Transformers::GroupAttributesTransformer, options: nil }
)
end

View file

@ -66,8 +66,8 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
it 'has transformers' do
expect(described_class.transformers).to contain_exactly(
klass: BulkImports::Groups::Transformers::SubgroupToEntityTransformer,
options: nil
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Groups::Transformers::SubgroupToEntityTransformer, options: nil }
)
end

View file

@ -13,7 +13,7 @@ RSpec.describe Gitlab::Checks::SnippetCheck do
let(:creation) { false }
let(:deletion) { false }
subject { Gitlab::Checks::SnippetCheck.new(changes, default_branch: default_branch, logger: logger) }
subject { Gitlab::Checks::SnippetCheck.new(changes, default_branch: default_branch, root_ref: snippet.repository.root_ref, logger: logger) }
describe '#validate!' do
it 'does not raise any error' do
@ -45,10 +45,18 @@ RSpec.describe Gitlab::Checks::SnippetCheck do
let(:branch_name) { 'feature' }
end
context "when branch is 'master'" do
let(:ref) { 'refs/heads/master' }
context 'when branch is the same as the default branch' do
let(:ref) { "refs/heads/#{default_branch}" }
it "allows the operation" do
it 'allows the operation' do
expect { subject.validate! }.not_to raise_error
end
end
context 'when snippet has an empty repo' do
let_it_be(:snippet) { create(:personal_snippet, :empty_repo) }
it 'allows the operation' do
expect { subject.validate! }.not_to raise_error
end
end

View file

@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresHllBatchDistinctCounter do
let_it_be(:error_rate) { 4.9 } # HyperLogLog is a probabilistic algorithm, which provides estimated data, with given error margin
let_it_be(:error_rate) { described_class::ERROR_RATE } # HyperLogLog is a probabilistic algorithm, which provides estimated data, with given error margin
let_it_be(:fallback) { ::Gitlab::Database::BatchCounter::FALLBACK }
let_it_be(:small_batch_size) { calculate_batch_size(::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE) }
let_it_be(:small_batch_size) { calculate_batch_size(described_class::MIN_REQUIRED_BATCH_SIZE) }
let(:model) { Issue }
let(:column) { :author_id }
@ -118,8 +118,8 @@ RSpec.describe Gitlab::Database::PostgresHllBatchDistinctCounter do
expect(described_class.new(model, column).estimate_distinct_count(start: 1, finish: 0)).to eq(fallback)
end
it 'returns fallback if loops more than allowed' do
large_finish = Gitlab::Database::PostgresHllBatchDistinctCounter::MAX_ALLOWED_LOOPS * default_batch_size + 1
it 'returns fallback if data volume exceeds upper limit' do
large_finish = Gitlab::Database::PostgresHllBatchDistinctCounter::MAX_DATA_VOLUME + 1
expect(described_class.new(model, column).estimate_distinct_count(start: 1, finish: large_finish)).to eq(fallback)
end

View file

@ -34,7 +34,10 @@ RSpec.describe Gitlab::InstrumentationHelper do
:redis_shared_state_calls,
:redis_shared_state_duration_s,
:redis_shared_state_read_bytes,
:redis_shared_state_write_bytes
:redis_shared_state_write_bytes,
:db_count,
:db_write_count,
:db_cached_count
]
expect(described_class.keys).to eq(expected_keys)
@ -46,10 +49,10 @@ RSpec.describe Gitlab::InstrumentationHelper do
subject { described_class.add_instrumentation_data(payload) }
it 'adds nothing' do
it 'adds only DB counts by default' do
subject
expect(payload).to eq({})
expect(payload).to eq(db_count: 0, db_cached_count: 0, db_write_count: 0)
end
context 'when Gitaly calls are made' do

View file

@ -1,56 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::BackgroundTransaction do
let(:test_worker_class) { double(:class, name: 'TestWorker') }
let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) }
before do
allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
end
subject { described_class.new(test_worker_class) }
RSpec.shared_examples 'metric with worker labels' do |metric_method|
it 'measures with correct labels and value' do
value = 1
expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestWorker', action: 'perform', feature_category: '' }, value)
subject.send(metric_method, :bau, value)
end
end
describe '#label' do
it 'returns labels based on class name' do
expect(subject.labels).to eq(controller: 'TestWorker', action: 'perform', feature_category: '')
end
it 'contains only the labels defined for metrics' do
expect(subject.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS)
end
it 'includes the feature category if there is one' do
expect(test_worker_class).to receive(:get_feature_category).and_return('source_code_management')
expect(subject.labels).to include(feature_category: 'source_code_management')
end
end
describe '#increment' do
let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
it_behaves_like 'metric with worker labels', :increment
end
describe '#set' do
let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) }
it_behaves_like 'metric with worker labels', :set
end
describe '#observe' do
let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) }
it_behaves_like 'metric with worker labels', :observe
end
end

View file

@ -1,66 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Metrics::SidekiqMiddleware do
let(:middleware) { described_class.new }
let(:message) { { 'args' => ['test'], 'enqueued_at' => Time.new(2016, 6, 23, 6, 59).to_f } }
describe '#call' do
it 'tracks the transaction' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |transaction|
expect(transaction).to receive(:set).with(:gitlab_transaction_sidekiq_queue_duration_total, instance_of(Float))
expect(transaction).to receive(:increment).with(:gitlab_transaction_db_count_total, 1)
end
middleware.call(worker, message, :test) do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
end
end
it 'prevents database counters from leaking to the next transaction' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
2.times do
Gitlab::WithRequestStore.with_request_store do
middleware.call(worker, message, :test) do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
end
end
end
expect(message).to include(db_count: 1, db_write_count: 0, db_cached_count: 0)
end
it 'tracks the transaction (for messages without `enqueued_at`)', :aggregate_failures do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
.with(worker.class)
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
.with(:gitlab_transaction_sidekiq_queue_duration_total, instance_of(Float))
middleware.call(worker, {}, :test) { nil }
end
it 'tracks any raised exceptions', :aggregate_failures, :request_store do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect_any_instance_of(Gitlab::Metrics::Transaction)
.to receive(:add_event).with(:sidekiq_exception)
expect do
middleware.call(worker, message, :test) do
ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
raise RuntimeError
end
end.to raise_error(RuntimeError)
expect(message).to include(db_count: 1, db_write_count: 0, db_cached_count: 0)
end
end
end

View file

@ -18,59 +18,73 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
describe '#sql' do
describe 'without a current transaction' do
it 'simply returns' do
expect_any_instance_of(Gitlab::Metrics::Transaction)
.not_to receive(:increment)
shared_examples 'track query in metrics' do
before do
allow(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
end
it 'increments only db count value' do
described_class::DB_COUNTERS.each do |counter|
prometheus_counter = "gitlab_transaction_#{counter}_total".to_sym
if expected_counters[counter] > 0
expect(transaction).to receive(:increment).with(prometheus_counter, 1)
else
expect(transaction).not_to receive(:increment).with(prometheus_counter, 1)
end
end
subscriber.sql(event)
end
end
describe 'with a current transaction' do
shared_examples 'track executed query' do
before do
allow(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
end
it 'increments only db count value' do
described_class::DB_COUNTERS.each do |counter|
prometheus_counter = "gitlab_transaction_#{counter}_total".to_sym
if expected_counters[counter] > 0
expect(transaction).to receive(:increment).with(prometheus_counter, 1)
else
expect(transaction).not_to receive(:increment).with(prometheus_counter, 1)
end
end
shared_examples 'track query in RequestStore' do
context 'when RequestStore is enabled' do
it 'caches db count value', :request_store, :aggregate_failures do
subscriber.sql(event)
described_class::DB_COUNTERS.each do |counter|
expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
end
end
context 'when RequestStore is enabled' do
it 'caches db count value', :request_store, :aggregate_failures do
subscriber.sql(event)
it 'prevents db counters from leaking to the next transaction' do
2.times do
Gitlab::WithRequestStore.with_request_store do
subscriber.sql(event)
described_class::DB_COUNTERS.each do |counter|
expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
end
end
it 'prevents db counters from leaking to the next transaction' do
2.times do
Gitlab::WithRequestStore.with_request_store do
subscriber.sql(event)
described_class::DB_COUNTERS.each do |counter|
expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
end
described_class::DB_COUNTERS.each do |counter|
expect(Gitlab::SafeRequestStore[counter].to_i).to eq expected_counters[counter]
end
end
end
end
end
end
describe 'without a current transaction' do
it 'does not track any metrics' do
expect_any_instance_of(Gitlab::Metrics::Transaction)
.not_to receive(:increment)
subscriber.sql(event)
end
context 'with read query' do
let(:expected_counters) do
{
db_count: 1,
db_write_count: 0,
db_cached_count: 0
}
end
it_behaves_like 'track query in RequestStore'
end
end
describe 'with a current transaction' do
it 'observes sql_duration metric' do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
@ -96,12 +110,14 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
}
end
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
context 'with only select' do
let(:payload) { { sql: 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
end
@ -117,33 +133,38 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
context 'with select for update sql event' do
let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10 FOR UPDATE' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
context 'with common table expression' do
context 'with insert' do
let(:payload) { { sql: 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
end
context 'with delete sql event' do
let(:payload) { { sql: 'DELETE FROM users where id = 10' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
context 'with insert sql event' do
let(:payload) { { sql: 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
context 'with update sql event' do
let(:payload) { { sql: 'UPDATE users SET admin = true WHERE id = 10' } }
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
end
@ -164,18 +185,20 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
}
end
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
context 'with cached payload name' do
let(:payload) do
{
sql: 'SELECT * FROM users WHERE id = 10',
name: 'CACHE'
sql: 'SELECT * FROM users WHERE id = 10',
name: 'CACHE'
}
end
it_behaves_like 'track executed query'
it_behaves_like 'track query in metrics'
it_behaves_like 'track query in RequestStore'
end
end
@ -227,8 +250,8 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
it 'skips schema/begin/commit sql commands' do
allow(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
.at_least(:once)
.and_return(transaction)
expect(transaction).not_to receive(:increment)

View file

@ -37,6 +37,36 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
describe '#estimate_batch_distinct_count' do
let(:relation) { double(:relation) }
it 'delegates counting to counter class instance' do
expect_next_instance_of(Gitlab::Database::PostgresHllBatchDistinctCounter, relation, 'column') do |instance|
expect(instance).to receive(:estimate_distinct_count)
.with(batch_size: nil, start: nil, finish: nil)
.and_return(5)
end
expect(described_class.estimate_batch_distinct_count(relation, 'column')).to eq(5)
end
it 'returns default fallback value when counting fails due to database error' do
stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
allow(Gitlab::Database::PostgresHllBatchDistinctCounter).to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
expect(described_class.estimate_batch_distinct_count(relation)).to eq(15)
end
it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do
error = StandardError.new('')
stub_const("Gitlab::Utils::UsageData::DISTRIBUTED_HLL_FALLBACK", 15)
allow(Gitlab::Database::PostgresHllBatchDistinctCounter).to receive(:new).and_raise(error)
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
expect(described_class.estimate_batch_distinct_count(relation)).to eq(15)
end
end
describe '#sum' do
let(:relation) { double(:relation) }

View file

@ -70,4 +70,24 @@ RSpec.describe GroupImportState do
end
end
end
context 'when import failed' do
context 'when error message is present' do
it 'truncates error message' do
group_import_state = build(:group_import_state, :started)
group_import_state.fail_op('e' * 300)
expect(group_import_state.last_error.length).to eq(255)
end
end
context 'when error message is missing' do
it 'has no error message' do
group_import_state = build(:group_import_state, :started)
group_import_state.fail_op
expect(group_import_state.last_error).to be_nil
end
end
end
end

View file

@ -245,7 +245,7 @@ RSpec.describe Service do
context 'with a previous existing service (MockCiService) and a new service (Asana)' do
before do
Service.insert(type: 'MockCiService', instance: true)
Service.insert({ type: 'MockCiService', instance: true })
Service.delete_by(type: 'AsanaService', instance: true)
end
@ -291,7 +291,7 @@ RSpec.describe Service do
context 'with a previous existing service (Previous) and a new service (Asana)' do
before do
Service.insert(type: 'PreviousService', template: true)
Service.insert({ type: 'PreviousService', template: true })
Service.delete_by(type: 'AsanaService', template: true)
end

View file

@ -25,13 +25,8 @@ RSpec.describe WikiPage::Meta do
end
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:title) }
it 'is forbidden to add extremely long titles' do
expect do
create(:wiki_page_meta, project: project, title: FFaker::Lorem.characters(300))
end.to raise_error(ActiveRecord::ValueTooLong)
end
it { is_expected.to validate_length_of(:title).is_at_most(255) }
it { is_expected.not_to allow_value(nil).for(:title) }
it 'is forbidden to have two records for the same project with the same canonical_slug' do
the_slug = generate(:sluggified_title)

View file

@ -48,8 +48,9 @@ RSpec.describe WikiPage::Slug do
build(:wiki_page_slug, canonical: canonical, wiki_page_meta: meta)
end
it { is_expected.to validate_presence_of(:slug) }
it { is_expected.to validate_uniqueness_of(:slug).scoped_to(:wiki_page_meta_id) }
it { is_expected.to validate_length_of(:slug).is_at_most(2048) }
it { is_expected.not_to allow_value(nil).for(:slug) }
describe 'only_one_slug_can_be_canonical_per_meta_record' do
context 'there are no other slugs' do

View file

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'query terraform states' do
include GraphqlHelpers
include ::API::Helpers::RelatedResourcesHelpers
let_it_be(:project) { create(:project) }
let_it_be(:terraform_state) { create(:terraform_state, :with_version, :locked, project: project) }
@ -23,6 +24,8 @@ RSpec.describe 'query terraform states' do
latestVersion {
id
downloadPath
serial
createdAt
updatedAt
@ -62,6 +65,16 @@ RSpec.describe 'query terraform states' do
expect(state.dig('lockedByUser', 'id')).to eq(terraform_state.locked_by_user.to_global_id.to_s)
expect(version['id']).to eq(latest_version.to_global_id.to_s)
expect(version['serial']).to eq(latest_version.version)
expect(version['downloadPath']).to eq(
expose_path(
api_v4_projects_terraform_state_versions_path(
id: project.id,
name: terraform_state.name,
serial: latest_version.version
)
)
)
expect(version['createdAt']).to eq(latest_version.created_at.iso8601)
expect(version['updatedAt']).to eq(latest_version.updated_at.iso8601)
expect(version.dig('createdByUser', 'id')).to eq(latest_version.created_by_user.to_global_id.to_s)

View file

@ -332,6 +332,13 @@ RSpec.configure do |config|
Gitlab::WithRequestStore.with_request_store { example.run }
end
config.before(:example, :request_store) do
# Clear request store before actually starting the spec (the
# `around` above will have the request store enabled for all
# `before` blocks)
RequestStore.clear!
end
config.around do |example|
# Wrap each example in its own context to make sure the contexts don't
# leak

View file

@ -866,10 +866,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.175.0.tgz#734f341784af1cd1d62d160a17bcdfb61ff7b04d"
integrity sha512-gXpc87TGSXIzfAr4QER1Qw1v3P47pBO6BXkma52blgwXVmcFNe3nhQzqsqt66wKNzrIrk3lAcB4GUyPHbPVXpg==
"@gitlab/ui@24.3.2":
version "24.3.2"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-24.3.2.tgz#cba9a1efd7a05a0110de3b23e7cd97917023d500"
integrity sha512-p7lXZlMfK7Eovrle7FZ14wgfb2SKt2uYJj71XXFMUA+GUFpjkteU8UD/K+moTmSk16FB7nd6W6w8TJYAX8TzmA==
"@gitlab/ui@24.4.0":
version "24.4.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-24.4.0.tgz#5c66c0582a4818edf596757dd56f6606f32a3604"
integrity sha512-M96BZBEv7t+Q9V7B0obCJJsew3rDJ+2xGcJZUrVVyBpwz2PUqFsSk86+SxUFDxuTlcQ3ox0O2Y1aCKGG8vZlyA==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"