Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-02-12 18:09:21 +00:00
parent 2c89e16976
commit 43e3dc2f95
37 changed files with 903 additions and 126 deletions

View File

@@ -380,13 +380,13 @@ rspec-ee integration pg10:
extends:
- .rspec-ee-base-pg10
- .rails:rules:master-refs-code-backstage
parallel: 3
parallel: 4
rspec-ee system pg10:
extends:
- .rspec-ee-base-pg10
- .rails:rules:master-refs-code-backstage
parallel: 5
parallel: 6
# ee + master-only jobs #
#########################

View File

@@ -8,11 +8,11 @@ module Types
description 'Represents the snippet blob'
present_using SnippetBlobPresenter
field :highlighted_data, GraphQL::STRING_TYPE,
field :rich_data, GraphQL::STRING_TYPE,
description: 'Blob highlighted data',
null: true
field :plain_highlighted_data, GraphQL::STRING_TYPE,
field :plain_data, GraphQL::STRING_TYPE,
description: 'Blob plain highlighted data',
null: true

View File

@@ -119,6 +119,17 @@ module ApplicationSettingsHelper
options_for_select(options, selected)
end
def repository_storages_options_json
options = Gitlab.config.repositories.storages.map do |name, storage|
{
label: "#{name} - #{storage['gitaly_address']}",
value: name
}
end
options.to_json
end
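# Illustrative result (assuming a single storage named "default" served by a
# local Gitaly socket; real values come from the instance configuration):
#
#   repository_storages_options_json
#   # => '[{"label":"default - unix:/var/opt/gitlab/gitaly.socket","value":"default"}]'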
def external_authorization_description
_("If enabled, access to projects will be validated on an external service"\
" using their classification label.")

View File

@@ -65,8 +65,6 @@ class Repository
xcode_config: :xcode_project?
}.freeze
MERGED_BRANCH_NAMES_CACHE_DURATION = 10.minutes
def initialize(full_path, container, disk_path: nil, repo_type: Gitlab::GlRepository::PROJECT)
@full_path = full_path
@disk_path = disk_path || full_path
@@ -914,38 +912,28 @@ class Repository
@root_ref_sha ||= commit(root_ref).sha
end
# If this method is not provided a set of branch names to check merge status,
# it fetches all branches.
def merged_branch_names(branch_names = [])
# Currently we should skip caching if requesting all branch names
# This is only used in a few places, notably app/services/branches/delete_merged_service.rb,
# and it could potentially result in a very large cache and cause performance
# issues with the current implementation.
skip_cache = branch_names.empty? || Feature.disabled?(:merged_branch_names_redis_caching)
skip_cache = branch_names.empty? || Feature.disabled?(:merged_branch_names_redis_caching, default_enabled: true)
return raw_repository.merged_branch_names(branch_names) if skip_cache
cached_branch_names = cache.read(:merged_branch_names)
merged_branch_names_hash = cached_branch_names || {}
missing_branch_names = branch_names.select { |bn| !merged_branch_names_hash.key?(bn) }
cache = redis_hash_cache
# Track some metrics here whilst the feature flag is enabled
if cached_branch_names.present?
counter = Gitlab::Metrics.counter(
:gitlab_repository_merged_branch_names_cache_hit,
"Count of cache hits for Repository#merged_branch_names"
)
counter.increment(full_hit: missing_branch_names.empty?)
end
if missing_branch_names.any?
merged_branch_names_hash = cache.fetch_and_add_missing(:merged_branch_names, branch_names) do |missing_branch_names, hash|
merged = raw_repository.merged_branch_names(missing_branch_names)
missing_branch_names.each do |bn|
merged_branch_names_hash[bn] = merged.include?(bn)
# Redis only stores strings in hset keys, use a fancy encoder
hash[bn] = Gitlab::Redis::Boolean.new(merged.include?(bn))
end
cache.write(:merged_branch_names, merged_branch_names_hash, expires_in: MERGED_BRANCH_NAMES_CACHE_DURATION)
end
Set.new(merged_branch_names_hash.select { |_, v| v }.keys)
Set.new(merged_branch_names_hash.select { |_, v| Gitlab::Redis::Boolean.true?(v) }.keys)
end
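# A minimal usage sketch (illustrative, not part of this change): callers pass
# candidate branch names and receive the subset that is already merged.
#
#   repository.merged_branch_names(%w(feature-a feature-b))
#   # => #<Set: {"feature-a"}>  (assuming only feature-a has been merged)
#
# Calling it with no arguments skips the cache and asks the raw repository
# about all branches.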
def merge_base(*commits_or_ids)
@@ -1168,6 +1156,10 @@ class Repository
@redis_set_cache ||= Gitlab::RepositorySetCache.new(self)
end
def redis_hash_cache
@redis_hash_cache ||= Gitlab::RepositoryHashCache.new(self)
end
def request_store_cache
@request_store_cache ||= Gitlab::RepositoryCache.new(self, backend: Gitlab::SafeRequestStore)
end

View File

@@ -1,16 +1,20 @@
# frozen_string_literal: true
class SnippetBlobPresenter < BlobPresenter
def highlighted_data
def rich_data
return if blob.binary?
highlight(plain: false)
if markup?
blob.rendered_markup
else
highlight(plain: false)
end
end
def plain_highlighted_data
def plain_data
return if blob.binary?
highlight(plain: true)
highlight(plain: !markup?)
end
def raw_path
@@ -23,6 +27,10 @@ class SnippetBlobPresenter < BlobPresenter
private
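# True when the blob is rendered by a rich markup viewer (for example,
# Markdown); in that case #rich_data returns the rendered markup rather
# than syntax-highlighted source.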
def markup?
blob.rich_viewer&.partial_name == 'markup'
end
def snippet
blob.snippet
end

View File

@@ -56,12 +56,10 @@ module Users
MigrateToGhostUserService.new(user).execute unless options[:hard_delete]
if Feature.enabled?(:destroy_user_associations_in_batches)
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
user.destroy_dependent_associations_in_batches
end
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
user.destroy_dependent_associations_in_batches
# Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
user_data = user.destroy

View File

@@ -25,6 +25,10 @@ class AvatarUploader < GitlabUploader
self.class.absolute_path(upload)
end
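# Fall back to 'avatar' so the uploader always reports a valid mount point
# even when none was set, per the accompanying changelog entry "Ensure a
# valid mount_point is used by the AvatarUploader".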
def mounted_as
super || 'avatar'
end
private
def dynamic_segment

View File

@@ -958,7 +958,7 @@
:resource_boundary: :unknown
:weight: 1
- :name: project_export
:feature_category: :source_code_management
:feature_category: :importers
:has_external_dependencies:
:latency_sensitive:
:resource_boundary: :memory

View File

@@ -5,7 +5,7 @@ class ProjectExportWorker
include ExceptionBacktrace
sidekiq_options retry: 3
feature_category :source_code_management
feature_category :importers
worker_resource_boundary :memory
def perform(current_user_id, project_id, after_export_strategy = {}, params = {})

View File

@@ -0,0 +1,5 @@
---
title: Destroy user associations in batches like we do with projects
merge_request: 24641
author:
type: performance

View File

@@ -0,0 +1,5 @@
---
title: Fix bug rendering BlobType markdown data
merge_request: 24960
author:
type: fixed

View File

@@ -0,0 +1,5 @@
---
title: Improvement to merged_branch_names cache
merge_request: 24504
author:
type: performance

View File

@@ -0,0 +1,5 @@
---
title: Ensure a valid mount_point is used by the AvatarUploader
merge_request: 24800
author:
type: fixed

View File

@@ -0,0 +1,5 @@
---
title: Cache repository merged branch names by default
merge_request: 24986
author:
type: performance

View File

@@ -6757,11 +6757,6 @@ type SnippetBlob {
"""
binary: Boolean!
"""
Blob highlighted data
"""
highlightedData: String
"""
Blob mode
"""
@@ -6780,13 +6775,18 @@ type SnippetBlob {
"""
Blob plain highlighted data
"""
plainHighlightedData: String
plainData: String
"""
Blob raw content endpoint path
"""
rawPath: String!
"""
Blob highlighted data
"""
richData: String
"""
Blob content rich viewer
"""

View File

@@ -7556,20 +7556,6 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "highlightedData",
"description": "Blob highlighted data",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "mode",
"description": "Blob mode",
@@ -7613,7 +7599,7 @@
"deprecationReason": null
},
{
"name": "plainHighlightedData",
"name": "plainData",
"description": "Blob plain highlighted data",
"args": [
@@ -7644,6 +7630,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "richData",
"description": "Blob highlighted data",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "richViewer",
"description": "Blob content rich viewer",

View File

@@ -1067,12 +1067,12 @@ Represents the snippet blob
| Name | Type | Description |
| --- | ---- | ---------- |
| `binary` | Boolean! | Shows whether the blob is binary |
| `highlightedData` | String | Blob highlighted data |
| `mode` | String | Blob mode |
| `name` | String | Blob name |
| `path` | String | Blob path |
| `plainHighlightedData` | String | Blob plain highlighted data |
| `plainData` | String | Blob plain highlighted data |
| `rawPath` | String! | Blob raw content endpoint path |
| `richData` | String | Blob highlighted data |
| `richViewer` | SnippetBlobViewer | Blob content rich viewer |
| `simpleViewer` | SnippetBlobViewer! | Blob content simple viewer |
| `size` | Int! | Blob size |

View File

@@ -7,6 +7,8 @@ are very appreciative of the work done by translators and proofreaders!
- Albanian
- Proofreaders needed.
- Amharic
- Tsegaselassie Tadesse - [GitLab](https://gitlab.com/tsega), [Crowdin](https://crowdin.com/profile/tsegaselassi/activity)
- Arabic
- Proofreaders needed.
- Bulgarian

View File

@@ -65,13 +65,13 @@ you can search the content of your logs via a search bar.
The search is passed on to Elasticsearch using the [simple_query_string](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html)
Elasticsearch function, which supports the following operators:
```
+ signifies AND operation
| signifies OR operation
- negates a single token
" wraps a number of tokens to signify a phrase for searching
* at the end of a term signifies a prefix query
( and ) signify precedence
~N after a word signifies edit distance (fuzziness)
~N after a phrase signifies slop amount
```
| Operator | Description |
|----------------------------|------------------------------------------------------------|
| `\|` | An OR operation. |
| `-` | Negates a single token. |
| `+` | An AND operation. |
| `"` | Wraps a number of tokens to signify a phrase for searching. |
| `*` (at the end of a term) | A prefix query. |
| `(` and `)` | Precedence. |
| `~N` (after a word) | Edit distance (fuzziness). |
| `~N` (after a phrase) | Slop amount. |
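For example, the following (illustrative, not from the GitLab docs) query
matches lines containing the phrase "connection refused" AND any term
starting with `fail`, while excluding lines that mention `debug`:

```
"connection refused" +fail* -debug
```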

lib/gitlab/redis/boolean.rb Normal file
View File

@@ -0,0 +1,109 @@
# frozen_string_literal: true
# A serializer for boolean values being stored in Redis.
#
# This is to ensure that booleans are stored in a consistent and
# testable way when being stored as strings in Redis.
#
# Examples:
#
# bool = Gitlab::Redis::Boolean.new(true)
# bool.to_s == "_b:1"
#
# Gitlab::Redis::Boolean.encode(true)
# => "_b:1"
#
# Gitlab::Redis::Boolean.decode("_b:1")
# => true
#
# Gitlab::Redis::Boolean.true?("_b:1")
# => true
#
# Gitlab::Redis::Boolean.true?("_b:0")
# => false
module Gitlab
module Redis
class Boolean
LABEL = "_b"
DELIMITER = ":"
TRUE_STR = "1"
FALSE_STR = "0"
BooleanError = Class.new(StandardError)
NotABooleanError = Class.new(BooleanError)
NotAnEncodedBooleanStringError = Class.new(BooleanError)
def initialize(value)
@value = value
end
# @return [String] the encoded boolean
def to_s
self.class.encode(@value)
end
class << self
# Turn a boolean into a string for storage in Redis
#
# @param value [Boolean] true or false
# @return [String] the encoded boolean
# @raise [NotABooleanError] if the value isn't true or false
def encode(value)
raise NotABooleanError.new(value) unless bool?(value)
[LABEL, to_string(value)].join(DELIMITER)
end
# Decode a boolean string
#
# @param value [String] the stored boolean string
# @return [Boolean] true or false
# @raise [NotAnEncodedBooleanStringError] if the provided value isn't an encoded boolean
def decode(value)
raise NotAnEncodedBooleanStringError.new(value.class) unless value.is_a?(String)
label, bool_str = *value.split(DELIMITER, 2)
raise NotAnEncodedBooleanStringError.new(label) unless label == LABEL
from_string(bool_str)
end
# Decode a boolean string, then test if it's true
#
# @param encoded_value [String] the stored boolean string
# @return [Boolean] is the value true?
# @raise [NotAnEncodedBooleanStringError] if the provided value isn't an encoded boolean
def true?(encoded_value)
decode(encoded_value)
end
# Decode a boolean string, then test if it's false
#
# @param encoded_value [String] the stored boolean string
# @return [Boolean] is the value false?
# @raise [NotAnEncodedBooleanStringError] if the provided value isn't an encoded boolean
def false?(encoded_value)
!true?(encoded_value)
end
private
def bool?(value)
[true, false].include?(value)
end
def to_string(bool)
bool ? TRUE_STR : FALSE_STR
end
def from_string(str)
raise NotAnEncodedBooleanStringError.new(str) unless [TRUE_STR, FALSE_STR].include?(str)
str == TRUE_STR
end
end
end
end
end

View File

@@ -132,6 +132,11 @@ module Gitlab
raise NotImplementedError
end
# RepositoryHashCache to be used. Should be overridden by the including class
def redis_hash_cache
raise NotImplementedError
end
# List of cached methods. Should be overridden by the including class
def cached_methods
raise NotImplementedError
@@ -215,6 +220,7 @@ module Gitlab
end
expire_redis_set_method_caches(methods)
expire_redis_hash_method_caches(methods)
expire_request_store_method_caches(methods)
end
@@ -234,6 +240,10 @@ module Gitlab
methods.each { |name| redis_set_cache.expire(name) }
end
def expire_redis_hash_method_caches(methods)
methods.each { |name| redis_hash_cache.delete(name) }
end
# All cached repository methods depend on the existence of a Git repository,
# so if the repository doesn't exist, we already know not to call it.
def fallback_early?(method_name)

View File

@@ -0,0 +1,176 @@
# frozen_string_literal: true
# Interface to the Redis-backed cache store for keys that use a Redis HSET.
# This is currently used as an incremental cache by the `Repository` model
# for `#merged_branch_names`. It works slightly differently to the other
# repository cache classes in that it is intended to work with partial
# caches which can be updated with new data, using the Redis hash system.
module Gitlab
class RepositoryHashCache
attr_reader :repository, :namespace, :expires_in
RepositoryHashCacheError = Class.new(StandardError)
InvalidKeysProvidedError = Class.new(RepositoryHashCacheError)
InvalidHashProvidedError = Class.new(RepositoryHashCacheError)
# @param repository [Repository]
# @param extra_namespace [String]
# @param expires_in [Integer] expiry time for hash store keys
def initialize(repository, extra_namespace: nil, expires_in: 1.day)
@repository = repository
@namespace = "#{repository.full_path}"
@namespace += ":#{repository.project.id}" if repository.project
@namespace = "#{@namespace}:#{extra_namespace}" if extra_namespace
@expires_in = expires_in
end
# @param type [String]
# @return [String]
def cache_key(type)
"#{type}:#{namespace}:hash"
end
# @param key [String]
# @return [Integer] 0 or 1 depending on success
def delete(key)
with { |redis| redis.del(cache_key(key)) }
end
# Check if the provided hash key exists in the hash.
#
# @param key [String]
# @param h_key [String] the key to check presence in Redis
# @return [True, False]
def key?(key, h_key)
with { |redis| redis.hexists(cache_key(key), h_key) }
end
# Read the values of a set of keys from the hash store, and return them
# as a hash of those keys and their values.
#
# @param key [String]
# @param hash_keys [Array<String>] an array of keys to retrieve from the store
# @return [Hash] a Ruby hash of the provided keys and their values from the store
def read_members(key, hash_keys = [])
raise InvalidKeysProvidedError unless hash_keys.is_a?(Array) && hash_keys.any?
with do |redis|
# Fetch an array of values for the supplied keys
values = redis.hmget(cache_key(key), hash_keys)
# Turn it back into a hash
hash_keys.zip(values).to_h
end
end
# Write a hash to the store. All keys and values will be strings when stored.
#
# @param key [String]
# @param hash [Hash] the hash to be written to Redis
# @return [Boolean] whether all operations were successful or not
def write(key, hash)
raise InvalidHashProvidedError unless hash.is_a?(Hash) && hash.any?
full_key = cache_key(key)
with do |redis|
results = redis.pipelined do
# Set each hash key to the provided value
hash.each do |h_key, h_value|
redis.hset(full_key, h_key, h_value)
end
# Update the expiry time for this hset
redis.expire(full_key, expires_in)
end
results.all?
end
end
# A variation on the `fetch` pattern of other cache stores. This method
# allows you to attempt to fetch a group of keys from the hash store, and
# for any keys that are missing values a block is then called to provide
# those values, which are then written back into Redis. Both sets of data
# are then combined and returned as one hash.
#
# @param key [String]
# @param h_keys [Array<String>] the keys to fetch or add to the cache
# @yieldparam missing_keys [Array<String>] the keys missing from the cache
# @yieldparam new_values [Hash] the hash to be populated by the block
# @return [Hash] the amalgamated hash of cached and uncached values
def fetch_and_add_missing(key, h_keys, &block)
# Check the cache for all supplied keys
cache_values = read_members(key, h_keys)
# Find the results which returned nil (meaning they're not in the cache)
missing = cache_values.select { |_, v| v.nil? }.keys
if missing.any?
new_values = {}
# Run the block, which updates the new_values hash
yield(missing, new_values)
# Convert all values to strings, so that merging the hashes below returns
# standardised data.
new_values = standardize_hash(new_values)
# Write the new values to the hset
write(key, new_values)
# Merge the two sets of values to return a complete hash
cache_values.merge!(new_values)
end
record_metrics(key, cache_values, missing)
cache_values
end
private
def with(&blk)
Gitlab::Redis::Cache.with(&blk) # rubocop:disable CodeReuse/ActiveRecord
end
# Take a hash and convert both keys and values to strings, for insertion into Redis.
#
# @param hash [Hash]
# @return [Hash] the stringified hash
def standardize_hash(hash)
hash.map { |k, v| [k.to_s, v.to_s] }.to_h
end
# Record metrics in Prometheus.
#
# @param key [String] the basic key, e.g. :merged_branch_names. Not record-specific.
# @param cache_values [Hash] the hash response from the cache read
# @param missing_keys [Array<String>] the array of missing keys from the cache read
def record_metrics(key, cache_values, missing_keys)
cache_hits = cache_values.delete_if { |_, v| v.nil? }
# Increment the counter if we have hits
metrics_hit_counter.increment(full_hit: missing_keys.empty?, store_type: key) if cache_hits.any?
# Track the number of hits we got
metrics_hit_histogram.observe({ type: "hits", store_type: key }, cache_hits.size)
metrics_hit_histogram.observe({ type: "misses", store_type: key }, missing_keys.size)
end
def metrics_hit_counter
@counter ||= Gitlab::Metrics.counter(
:gitlab_repository_hash_cache_hit,
"Count of cache hits in Redis HSET"
)
end
def metrics_hit_histogram
@histogram ||= Gitlab::Metrics.histogram(
:gitlab_repository_hash_cache_size,
"Number of records in the HSET"
)
end
end
end
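# A minimal usage sketch (illustrative assumptions: a `repository` object and
# a reachable Redis; not part of this change):
#
#   cache = Gitlab::RepositoryHashCache.new(repository)
#
#   cache.fetch_and_add_missing(:merged_branch_names, %w(main feature)) do |missing, hash|
#     # Compute values only for the keys Redis did not already have
#     missing.each { |branch| hash[branch] = Gitlab::Redis::Boolean.new(false) }
#   end
#   # => { "main" => "_b:0", "feature" => "_b:0" }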

View File

@@ -33,6 +33,7 @@ module Quality
serializers
services
sidekiq
support_specs
tasks
uploaders
validators

View File

@@ -3578,6 +3578,9 @@ msgstr ""
msgid "Choose which repositories you want to connect and run CI/CD pipelines."
msgstr ""
msgid "Choose which shards you wish to synchronize to this secondary node"
msgstr ""
msgid "Choose which shards you wish to synchronize to this secondary node."
msgstr ""
@@ -12902,6 +12905,9 @@ msgstr ""
msgid "Notes|This comment has changed since you started editing, please review the %{open_link}updated comment%{close_link} to ensure information is not lost"
msgstr ""
msgid "Nothing found…"
msgstr ""
msgid "Nothing to preview."
msgstr ""
@@ -17025,6 +17031,9 @@ msgstr ""
msgid "Select projects you want to import."
msgstr ""
msgid "Select shards to replicate"
msgstr ""
msgid "Select source branch"
msgstr ""
@@ -17055,6 +17064,9 @@ msgstr ""
msgid "Selecting a GitLab user will add a link to the GitLab user in the descriptions of issues and comments (e.g. \"By <a href=\"#\">@johnsmith</a>\"). It will also associate and/or assign these issues and comments with the selected user."
msgstr ""
msgid "Selective synchronization"
msgstr ""
msgid "Self monitoring project does not exist"
msgstr ""
@@ -17385,6 +17397,12 @@ msgstr ""
msgid "Severity: %{severity}"
msgstr ""
msgid "Shards selected: %{count}"
msgstr ""
msgid "Shards to synchronize"
msgstr ""
msgid "Share"
msgstr ""

View File

@@ -52,10 +52,10 @@ gitlab:
resources:
requests:
cpu: 650m
memory: 970M
memory: 1018M
limits:
cpu: 975m
memory: 1450M
memory: 1527M
hpa:
targetAverageValue: 650m
task-runner:
@@ -69,11 +69,11 @@ gitlab:
unicorn:
resources:
requests:
cpu: 500m
memory: 1630M
cpu: 525m
memory: 1711M
limits:
cpu: 750m
memory: 2450M
cpu: 787m
memory: 2566M
deployment:
readinessProbe:
initialDelaySeconds: 5 # Default is 0

View File

@@ -4,7 +4,7 @@ require 'spec_helper'
describe GitlabSchema.types['SnippetBlob'] do
it 'has the correct fields' do
expected_fields = [:highlighted_data, :plain_highlighted_data,
expected_fields = [:rich_data, :plain_data,
:raw_path, :size, :binary, :name, :path,
:simple_viewer, :rich_viewer, :mode]

View File

@@ -0,0 +1,150 @@
# frozen_string_literal: true
require "spec_helper"
describe Gitlab::Redis::Boolean do
subject(:redis_boolean) { described_class.new(bool) }
let(:bool) { true }
let(:label_section) { "#{described_class::LABEL}#{described_class::DELIMITER}" }
describe "#to_s" do
subject { redis_boolean.to_s }
context "true" do
let(:bool) { true }
it { is_expected.to eq("#{label_section}#{described_class::TRUE_STR}") }
end
context "false" do
let(:bool) { false }
it { is_expected.to eq("#{label_section}#{described_class::FALSE_STR}") }
end
end
describe ".encode" do
subject { redis_boolean.class.encode(bool) }
context "true" do
let(:bool) { true }
it { is_expected.to eq("#{label_section}#{described_class::TRUE_STR}") }
end
context "false" do
let(:bool) { false }
it { is_expected.to eq("#{label_section}#{described_class::FALSE_STR}") }
end
end
describe ".decode" do
subject { redis_boolean.class.decode(str) }
context "valid encoded bool" do
let(:str) { "#{label_section}#{bool_str}" }
context "true" do
let(:bool_str) { described_class::TRUE_STR }
it { is_expected.to be(true) }
end
context "false" do
let(:bool_str) { described_class::FALSE_STR }
it { is_expected.to be(false) }
end
end
context "partially invalid bool" do
let(:str) { "#{label_section}whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
context "invalid encoded bool" do
let(:str) { "whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
end
describe ".true?" do
subject { redis_boolean.class.true?(str) }
context "valid encoded bool" do
let(:str) { "#{label_section}#{bool_str}" }
context "true" do
let(:bool_str) { described_class::TRUE_STR }
it { is_expected.to be(true) }
end
context "false" do
let(:bool_str) { described_class::FALSE_STR }
it { is_expected.to be(false) }
end
end
context "partially invalid bool" do
let(:str) { "#{label_section}whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
context "invalid encoded bool" do
let(:str) { "whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
end
describe ".false?" do
subject { redis_boolean.class.false?(str) }
context "valid encoded bool" do
let(:str) { "#{label_section}#{bool_str}" }
context "true" do
let(:bool_str) { described_class::TRUE_STR }
it { is_expected.to be(false) }
end
context "false" do
let(:bool_str) { described_class::FALSE_STR }
it { is_expected.to be(true) }
end
end
context "partially invalid bool" do
let(:str) { "#{label_section}whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
context "invalid encoded bool" do
let(:str) { "whoops" }
it "raises an error" do
expect { subject }.to raise_error(described_class::NotAnEncodedBooleanStringError)
end
end
end
end

View File

@@ -7,6 +7,7 @@ describe Gitlab::RepositoryCacheAdapter do
let(:repository) { project.repository }
let(:cache) { repository.send(:cache) }
let(:redis_set_cache) { repository.send(:redis_set_cache) }
let(:redis_hash_cache) { repository.send(:redis_hash_cache) }
describe '#cache_method_output', :use_clean_rails_memory_store_caching do
let(:fallback) { 10 }
@@ -212,6 +213,8 @@ describe Gitlab::RepositoryCacheAdapter do
expect(cache).to receive(:expire).with(:branch_names)
expect(redis_set_cache).to receive(:expire).with(:rendered_readme)
expect(redis_set_cache).to receive(:expire).with(:branch_names)
expect(redis_hash_cache).to receive(:delete).with(:rendered_readme)
expect(redis_hash_cache).to receive(:delete).with(:branch_names)
repository.expire_method_caches(%i(rendered_readme branch_names))
end

View File

@@ -0,0 +1,184 @@
# frozen_string_literal: true
require "spec_helper"
describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
let(:test_hash) do
{ "test" => "value" }
end
describe "#cache_key" do
subject { cache.cache_key(:example) }
it "includes the namespace" do
is_expected.to eq("example:#{namespace}:hash")
end
context "with a given namespace" do
let(:extra_namespace) { "my:data" }
let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
it "includes the full namespace" do
is_expected.to eq("example:#{namespace}:#{extra_namespace}:hash")
end
end
end
describe "#delete" do
subject { cache.delete(:example) }
context "key exists" do
before do
cache.write(:example, test_hash)
end
it { is_expected.to eq(1) }
it "deletes the given key from the cache" do
subject
expect(cache.read_members(:example, ["test"])).to eq({ "test" => nil })
end
end
context "key doesn't exist" do
it { is_expected.to eq(0) }
end
end
describe "#key?" do
subject { cache.key?(:example, "test") }
context "key exists" do
before do
cache.write(:example, test_hash)
end
it { is_expected.to be(true) }
end
context "key doesn't exist" do
it { is_expected.to be(false) }
end
end
describe "#read_members" do
subject { cache.read_members(:example, keys) }
let(:keys) { %w(test missing) }
context "all data is cached" do
before do
cache.write(:example, test_hash.merge({ "missing" => false }))
end
it { is_expected.to eq({ "test" => "value", "missing" => "false" }) }
end
context "partial data is cached" do
before do
cache.write(:example, test_hash)
end
it { is_expected.to eq({ "test" => "value", "missing" => nil }) }
end
context "no data is cached" do
it { is_expected.to eq({ "test" => nil, "missing" => nil }) }
end
context "empty keys are passed for some reason" do
let(:keys) { [] }
it "raises an error" do
expect { subject }.to raise_error(Gitlab::RepositoryHashCache::InvalidKeysProvidedError)
end
end
end
describe "#write" do
subject { cache.write(:example, test_hash) }
it { is_expected.to be(true) }
it "actually writes stuff to Redis" do
subject
expect(cache.read_members(:example, ["test"])).to eq(test_hash)
end
end
describe "#fetch_and_add_missing" do
subject do
cache.fetch_and_add_missing(:example, keys) do |missing_keys, hash|
missing_keys.each do |key|
hash[key] = "was_missing"
end
end
end
let(:keys) { %w(test) }
it "records metrics" do
# Here we expect it to receive "test" as a missing key because we
# don't write to the cache before this test
expect(cache).to receive(:record_metrics).with(:example, { "test" => "was_missing" }, ["test"])
subject
end
context "fully cached" do
let(:keys) { %w(test another) }
before do
cache.write(:example, test_hash.merge({ "another" => "not_missing" }))
end
it "returns a hash" do
is_expected.to eq({ "test" => "value", "another" => "not_missing" })
end
it "doesn't write to the cache" do
expect(cache).not_to receive(:write)
subject
end
end
context "partially cached" do
let(:keys) { %w(test missing) }
before do
cache.write(:example, test_hash)
end
it "returns a hash" do
is_expected.to eq({ "test" => "value", "missing" => "was_missing" })
end
it "writes to the cache" do
expect(cache).to receive(:write).with(:example, { "missing" => "was_missing" })
subject
end
end
context "uncached" do
let(:keys) { %w(test missing) }
it "returns a hash" do
is_expected.to eq({ "test" => "was_missing", "missing" => "was_missing" })
end
it "writes to the cache" do
expect(cache).to receive(:write).with(:example, { "test" => "was_missing", "missing" => "was_missing" })
subject
end
end
end
end

View File

@@ -21,7 +21,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
.to eq("spec/{bin,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,tasks,uploaders,validators,views,workers,elastic_integration}{,/**/}*_spec.rb")
.to eq("spec/{bin,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,support_specs,tasks,uploaders,validators,views,workers,elastic_integration}{,/**/}*_spec.rb")
end
end
@@ -82,7 +82,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
.to eq(%r{spec/(bin|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|tasks|uploaders|validators|views|workers|elastic_integration)})
.to eq(%r{spec/(bin|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|support_specs|tasks|uploaders|validators|views|workers|elastic_integration)})
end
end

View File

@@ -500,45 +500,62 @@ describe Repository do
let(:branch_names) { %w(test beep boop definitely_merged) }
let(:already_merged) { Set.new(["definitely_merged"]) }
let(:merge_state_hash) do
let(:write_hash) do
{
"test" => false,
"beep" => false,
"boop" => false,
"definitely_merged" => true
"test" => Gitlab::Redis::Boolean.new(false).to_s,
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
}
end
let_it_be(:cache) do
caching_config_hash = Gitlab::Redis::Cache.params
ActiveSupport::Cache.lookup_store(:redis_cache_store, caching_config_hash)
let(:read_hash) do
{
"test" => Gitlab::Redis::Boolean.new(false).to_s,
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
}
end
let(:repository_cache) do
Gitlab::RepositoryCache.new(repository, backend: Rails.cache)
end
let(:cache_key) { repository_cache.cache_key(:merged_branch_names) }
let(:cache) { repository.send(:redis_hash_cache) }
let(:cache_key) { cache.cache_key(:merged_branch_names) }
before do
allow(Rails).to receive(:cache) { cache }
allow(repository).to receive(:cache) { repository_cache }
allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names).and_return(already_merged)
end
it { is_expected.to eq(already_merged) }
it { is_expected.to be_a(Set) }
describe "cache expiry" do
before do
allow(cache).to receive(:delete).with(anything)
end
it "is expired when the branches caches are expired" do
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
repository.send(:expire_branches_cache)
end
it "is expired when the repository caches are expired" do
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
repository.send(:expire_all_method_caches)
end
end
context "cache is empty" do
before do
cache.delete(cache_key)
cache.delete(:merged_branch_names)
end
it { is_expected.to eq(already_merged) }
describe "cache values" do
it "writes the values to redis" do
expect(cache).to receive(:write).with(cache_key, merge_state_hash, expires_in: Repository::MERGED_BRANCH_NAMES_CACHE_DURATION)
expect(cache).to receive(:write).with(:merged_branch_names, write_hash)
subject
end
@@ -546,14 +563,14 @@ describe Repository do
it "matches the supplied hash" do
subject
expect(cache.read(cache_key)).to eq(merge_state_hash)
expect(cache.read_members(:merged_branch_names, branch_names)).to eq(read_hash)
end
end
end
context "cache is not empty" do
before do
cache.write(cache_key, merge_state_hash)
cache.write(:merged_branch_names, write_hash)
end
it { is_expected.to eq(already_merged) }
@@ -568,8 +585,8 @@ describe Repository do
context "cache is partially complete" do
before do
allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
hash = merge_state_hash.except("boop")
cache.write(cache_key, hash)
hash = write_hash.except("boop")
cache.write(:merged_branch_names, hash)
end
it { is_expected.to eq(already_merged) }

View File

@@ -3,10 +3,10 @@
require 'spec_helper'
describe SnippetBlobPresenter do
describe '#highlighted_data' do
describe '#rich_data' do
let(:snippet) { build(:personal_snippet) }
subject { described_class.new(snippet.blob).highlighted_data }
subject { described_class.new(snippet.blob).rich_data }
it 'returns nil when the snippet blob is binary' do
allow(snippet.blob).to receive(:binary?).and_return(true)
@@ -18,7 +18,7 @@ describe SnippetBlobPresenter do
snippet.file_name = 'test.md'
snippet.content = '*foo*'
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>'
end
it 'returns syntax highlighted content' do
@@ -37,10 +37,10 @@ describe SnippetBlobPresenter do
end
end
describe '#plain_highlighted_data' do
describe '#plain_data' do
let(:snippet) { build(:personal_snippet) }
subject { described_class.new(snippet.blob).plain_highlighted_data }
subject { described_class.new(snippet.blob).plain_data }
it 'returns nil when the snippet blob is binary' do
allow(snippet.blob).to receive(:binary?).and_return(true)
@@ -52,7 +52,7 @@ describe SnippetBlobPresenter do
snippet.file_name = 'test.md'
snippet.content = '*foo*'
expect(subject).to eq '<span id="LC1" class="line" lang="">*foo*</span>'
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
end
it 'returns plain syntax content' do

View File

@@ -67,7 +67,8 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['highlightedData']).to match(content)
expect(mutation_response['snippet']['blob']['richData']).to match(content)
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['fileName']).to eq(file_name)
@@ -92,7 +93,8 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['highlightedData']).to match(content)
expect(mutation_response['snippet']['blob']['richData']).to match(content)
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['fileName']).to eq(file_name)

View File

@@ -56,7 +56,8 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['highlightedData']).to match(updated_content)
expect(mutation_response['snippet']['blob']['richData']).to match(updated_content)
expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
expect(mutation_response['snippet']['fileName']).to eq(updated_file_name)
@@ -77,7 +78,8 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['highlightedData']).to match(original_content)
expect(mutation_response['snippet']['blob']['richData']).to match(original_content)
expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
expect(mutation_response['snippet']['fileName']).to eq(original_file_name)

View File

@@ -26,16 +26,6 @@ describe Users::DestroyService do
service.execute(user)
end
context 'when :destroy_user_associations_in_batches flag is disabled' do
it 'does not delete user associations in batches' do
stub_feature_flags(destroy_user_associations_in_batches: false)
expect(user).not_to receive(:destroy_dependent_associations_in_batches)
service.execute(user)
end
end
it 'will delete the project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)

View File

@@ -2,12 +2,15 @@
module ActiveRecord
class QueryRecorder
attr_reader :log, :skip_cached, :cached
attr_reader :log, :skip_cached, :cached, :data
UNKNOWN = %w(unknown unknown).freeze
def initialize(skip_cached: true, &block)
def initialize(skip_cached: true, query_recorder_debug: false, &block)
@data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [] } }
@log = []
@cached = []
@skip_cached = skip_cached
@query_recorder_debug = query_recorder_debug
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
@@ -19,30 +22,62 @@ module ActiveRecord
Gitlab::BacktraceCleaner.clean_backtrace(caller).each { |line| Rails.logger.debug(" --> #{line}") }
end
def get_sql_source(sql)
matches = sql.match(/,line:(?<line>.*):in\s+`(?<method>.*)'\*\//)
matches ? [matches[:line], matches[:method]] : UNKNOWN
end
def store_sql_by_source(values: {}, backtrace: nil)
full_name = get_sql_source(values[:sql]).join(':')
@data[full_name][:count] += 1
@data[full_name][:occurrences] << values[:sql]
@data[full_name][:backtrace] << backtrace
end
def find_query(query_regexp, limit, first_only: false)
out = []
@data.each_pair do |k, v|
if v[:count] > limit && k.match(query_regexp)
out << [k, v[:count]]
break if first_only
end
end
out.flatten! if first_only
out
end
def occurrences_by_line_method
@occurrences_by_line_method ||= @data.sort_by { |_, v| v[:count] }
end
def callback(name, start, finish, message_id, values)
show_backtrace(values) if ENV['QUERY_RECORDER_DEBUG']
store_backtrace = ENV['QUERY_RECORDER_DEBUG'] || @query_recorder_debug
backtrace = store_backtrace ? show_backtrace(values) : nil
if values[:cached] && skip_cached
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
@log << values[:sql]
store_sql_by_source(values: values, backtrace: backtrace)
end
end
def count
@log.count
@count ||= @log.count
end
def cached_count
@cached.count
@cached_count ||= @cached.count
end
def log_message
@log.join("\n\n")
@log_message ||= @log.join("\n\n")
end
def occurrences
@log.group_by(&:to_s).transform_values(&:count)
@occurrences ||= @log.group_by(&:to_s).transform_values(&:count)
end
end
end

View File

@@ -0,0 +1,40 @@
# frozen_string_literal: true
require 'spec_helper'
describe ActiveRecord::QueryRecorder do
class TestQueries < ActiveRecord::Base
self.table_name = 'schema_migrations'
end
describe 'detecting the right number of calls and their origin' do
it 'detects two separate queries' do
control = ActiveRecord::QueryRecorder.new query_recorder_debug: true do
2.times { TestQueries.count }
TestQueries.first
end
# Test first_only flag works as expected
expect(control.find_query(/.*query_recorder_spec.rb.*/, 0, first_only: true))
.to eq(control.find_query(/.*query_recorder_spec.rb.*/, 0).first)
# Check #find_query
expect(control.find_query(/.*/, 0).size)
.to eq(control.data.keys.size)
# Ensure exactly 2 COUNT queries were detected
expect(control.occurrences_by_line_method.last[1][:occurrences]
.find_all {|i| i.match(/SELECT COUNT/) }.count).to eq(2)
# Ensure exactly 1 "LIMIT 1" query (from #first)
expect(control.occurrences_by_line_method.first[1][:occurrences]
.find_all { |i| i.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }.count).to eq(1)
# Ensure 3 DB calls overall were executed
expect(control.log.size).to eq(3)
# Ensure the memoized count matches the raw value above
expect(control.count).to eq(control.log.size)
# Ensure we have only two sources of queries
expect(control.data.keys.size).to eq(2)
# Ensure we detect only queries from this file
expect(control.data.keys.find_all { |i| i.match(/query_recorder_spec.rb/) }.count).to eq(2)
end
end
end