Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-01-10 15:07:47 +00:00
parent 96b0c1245c
commit 8b1228b0d4
56 changed files with 1307 additions and 319 deletions

View File

@ -61,8 +61,10 @@ export const getFileData = (
{ path, makeFileActive = true, openFile = makeFileActive },
) => {
const file = state.entries[path];
const fileDeletedAndReadded = getters.isFileDeletedAndReadded(path);
if (file.raw || (file.tempFile && !file.prevPath)) return Promise.resolve();
if (file.raw || (file.tempFile && !file.prevPath && !fileDeletedAndReadded))
return Promise.resolve();
commit(types.TOGGLE_LOADING, { entry: file });
@ -102,11 +104,16 @@ export const setFileMrChange = ({ commit }, { file, mrChange }) => {
export const getRawFileData = ({ state, commit, dispatch, getters }, { path }) => {
const file = state.entries[path];
const stagedFile = state.stagedFiles.find(f => f.path === path);
return new Promise((resolve, reject) => {
const fileDeletedAndReadded = getters.isFileDeletedAndReadded(path);
service
.getRawFileData(file)
.getRawFileData(fileDeletedAndReadded ? stagedFile : file)
.then(raw => {
if (!(file.tempFile && !file.prevPath)) commit(types.SET_FILE_RAW_DATA, { file, raw });
if (!(file.tempFile && !file.prevPath && !fileDeletedAndReadded))
commit(types.SET_FILE_RAW_DATA, { file, raw, fileDeletedAndReadded });
if (file.mrChange && file.mrChange.new_file === false) {
const baseSha =
(getters.currentMergeRequest && getters.currentMergeRequest.baseCommitSha) || '';
@ -151,7 +158,7 @@ export const changeFileContent = ({ commit, dispatch, state }, { path, content }
if (file.changed && indexOfChangedFile === -1) {
commit(types.ADD_FILE_TO_CHANGED, path);
} else if (!file.changed && indexOfChangedFile !== -1) {
} else if (!file.changed && !file.tempFile && indexOfChangedFile !== -1) {
commit(types.REMOVE_FILE_FROM_CHANGED, path);
}

View File

@ -54,27 +54,29 @@ export default {
}
});
},
[types.SET_FILE_RAW_DATA](state, { file, raw }) {
[types.SET_FILE_RAW_DATA](state, { file, raw, fileDeletedAndReadded = false }) {
const openPendingFile = state.openFiles.find(
f => f.path === file.path && f.pending && !(f.tempFile && !f.prevPath),
f =>
f.path === file.path && f.pending && !(f.tempFile && !f.prevPath && !fileDeletedAndReadded),
);
const stagedFile = state.stagedFiles.find(f => f.path === file.path);
if (file.tempFile && file.content === '') {
Object.assign(state.entries[file.path], {
content: raw,
});
if (file.tempFile && file.content === '' && !fileDeletedAndReadded) {
Object.assign(state.entries[file.path], { content: raw });
} else if (fileDeletedAndReadded) {
Object.assign(stagedFile, { raw });
} else {
Object.assign(state.entries[file.path], {
raw,
});
Object.assign(state.entries[file.path], { raw });
}
if (!openPendingFile) return;
if (!openPendingFile.tempFile) {
openPendingFile.raw = raw;
} else if (openPendingFile.tempFile) {
} else if (openPendingFile.tempFile && !fileDeletedAndReadded) {
openPendingFile.content = raw;
} else if (fileDeletedAndReadded) {
Object.assign(stagedFile, { raw });
}
},
[types.SET_FILE_BASE_RAW_DATA](state, { file, baseRaw }) {

View File

@ -1,21 +1,32 @@
const MARKDOWN_EXTENSIONS = ['mdown', 'mkd', 'mkdn', 'md', 'markdown'];
const ASCIIDOC_EXTENSIONS = ['adoc', 'ad', 'asciidoc'];
const OTHER_EXTENSIONS = ['textile', 'rdoc', 'org', 'creole', 'wiki', 'mediawiki', 'rst'];
const EXTENSIONS = [...MARKDOWN_EXTENSIONS, ...ASCIIDOC_EXTENSIONS, ...OTHER_EXTENSIONS];
const PLAIN_FILENAMES = ['readme', 'index'];
const FILE_REGEXP = new RegExp(
`^(${PLAIN_FILENAMES.join('|')})(.(${EXTENSIONS.join('|')}))?$`,
'i',
);
const PLAIN_FILE_REGEXP = new RegExp(`^(${PLAIN_FILENAMES.join('|')})`, 'i');
const EXTENSIONS_REGEXP = new RegExp(`.(${EXTENSIONS.join('|')})$`, 'i');
const FILENAMES = ['index', 'readme'];
const MARKUP_EXTENSIONS = [
'ad',
'adoc',
'asciidoc',
'creole',
'markdown',
'md',
'mdown',
'mediawiki',
'mkd',
'mkdn',
'org',
'rdoc',
'rst',
'textile',
'wiki',
];
const isRichReadme = file => {
const re = new RegExp(`^(${FILENAMES.join('|')})\\.(${MARKUP_EXTENSIONS.join('|')})$`, 'i');
return re.test(file.name);
};
const isPlainReadme = file => {
const re = new RegExp(`^(${FILENAMES.join('|')})$`, 'i');
return re.test(file.name);
};
// eslint-disable-next-line import/prefer-default-export
export const readmeFile = blobs => {
const readMeFiles = blobs.filter(f => f.name.search(FILE_REGEXP) !== -1);
const previewableReadme = readMeFiles.find(f => f.name.search(EXTENSIONS_REGEXP) !== -1);
const plainReadme = readMeFiles.find(f => f.name.search(PLAIN_FILE_REGEXP) !== -1);
return previewableReadme || plainReadme;
};
export const readmeFile = blobs => blobs.find(isRichReadme) || blobs.find(isPlainReadme);

View File

@ -4,7 +4,6 @@ class ProjectCiCdSetting < ApplicationRecord
include IgnorableColumns
# https://gitlab.com/gitlab-org/gitlab/issues/36651
ignore_column :merge_trains_enabled, remove_with: '12.7', remove_after: '2019-12-22'
belongs_to :project, inverse_of: :ci_cd_settings
# The version of the schema that first introduced this model/table.

View File

@ -1,14 +1,14 @@
# frozen_string_literal: true
# SpamCheckService
# SpamCheckMethods
#
# Provide helper methods for checking if a given spammable object has
# potential spam data.
#
# Dependencies:
# - params with :request
#
module SpamCheckService
module SpamCheckMethods
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def filter_spam_check_params
@request = params.delete(:request)

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
class CreateSnippetService < BaseService
include SpamCheckService
include SpamCheckMethods
def execute
filter_spam_check_params

View File

@ -2,7 +2,7 @@
module Issues
class CreateService < Issues::BaseService
include SpamCheckService
include SpamCheckMethods
include ResolveDiscussions
def execute

View File

@ -2,7 +2,7 @@
module Issues
class UpdateService < Issues::BaseService
include SpamCheckService
include SpamCheckMethods
def execute(issue)
handle_move_between_ids(issue)

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
class UpdateSnippetService < BaseService
include SpamCheckService
include SpamCheckMethods
attr_accessor :snippet

View File

@ -0,0 +1,5 @@
---
title: "Web IDE: Fix Incorrect diff of deletion and addition of the same file"
merge_request: 21680
author:
type: fixed

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class DropProjectCiCdSettingsMergeTrainsEnabled < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_column :project_ci_cd_settings, :merge_trains_enabled
end
def down
add_column_with_default :project_ci_cd_settings, :merge_trains_enabled, :boolean, default: false, allow_null: true
end
end

View File

@ -3098,7 +3098,6 @@ ActiveRecord::Schema.define(version: 2020_01_08_155731) do
t.integer "project_id", null: false
t.boolean "group_runners_enabled", default: true, null: false
t.boolean "merge_pipelines_enabled"
t.boolean "merge_trains_enabled", default: false, null: false
t.integer "default_git_depth"
t.index ["project_id"], name: "index_project_ci_cd_settings_on_project_id", unique: true
end

View File

@ -32,11 +32,11 @@ In the case of [custom domains](#custom-domains) (but not
ports `80` and/or `443`. For that reason, there is some flexibility in the way
which you can set it up:
1. Run the Pages daemon in the same server as GitLab, listening on a secondary IP.
1. Run the Pages daemon in a separate server. In that case, the
- Run the Pages daemon in the same server as GitLab, listening on a **secondary IP**.
- Run the Pages daemon in a [separate server](#running-gitlab-pages-on-a-separate-server). In that case, the
[Pages path](#change-storage-path) must also be present in the server that
the Pages daemon is installed, so you will have to share it via network.
1. Run the Pages daemon in the same server as GitLab, listening on the same IP
- Run the Pages daemon in the same server as GitLab, listening on the same IP
but on different ports. In that case, you will have to proxy the traffic with
a load balancer. If you choose that route, note that you should use TCP load
balancing for HTTPS. If you use TLS-termination (HTTPS-load balancing) the
@ -182,7 +182,7 @@ The [GitLab Pages README](https://gitlab.com/gitlab-org/gitlab-pages#caveats) ha
In addition to the wildcard domains, you can also have the option to configure
GitLab Pages to work with custom domains. Again, there are two options here:
support custom domains with and without TLS certificates. The easiest setup is
that without TLS certificates. In either case, you'll need a secondary IP. If
that without TLS certificates. In either case, you'll need a **secondary IP**. If
you have IPv6 as well as IPv4 addresses, you can use them both.
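For illustration only (not part of this change), a minimal `/etc/gitlab/gitlab.rb` sketch for the Omnibus package that binds the Pages daemon to a secondary IP on the same server; the domain and address below are placeholders:

# Illustrative sketch only; placeholder domain and secondary IP.
pages_external_url 'http://example.io'

gitlab_pages['enable'] = true
# Bind the Pages daemon to the secondary IP so custom domains can point at it.
# Without TLS certificates only the HTTP listener is needed.
gitlab_pages['external_http'] = ['192.0.2.2:80']

Run `sudo gitlab-ctl reconfigure` afterwards for the change to take effect.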
### Custom domains

View File

@ -426,7 +426,7 @@ Status: 200 OK
The link to the next page contains an additional filter `id_after=42` which excludes records we have retrieved already.
Note the type of filter depends on the `order_by` option used and we may have more than one additional filter.
The `Link` header is absent when the end of the collection has been reached and there are no additional records to retrieve.
When the end of the collection has been reached and there are no additional records to retrieve, the `Link` header is absent and the resulting array is empty.
We recommend using only the given link to retrieve the next page instead of building your own URL. Apart from the headers shown,
we don't expose additional pagination headers.
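For illustration, a minimal Ruby sketch of such a client (the host, token variable and query parameters are placeholders) that requests keyset pagination and follows the `Link` header until it disappears:

require 'json'
require 'net/http'
require 'uri'

# Placeholder host and token; request keyset pagination explicitly.
url = URI('https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=100&order_by=id&sort=asc')
projects = []

while url
  response = Net::HTTP.start(url.host, url.port, use_ssl: url.scheme == 'https') do |http|
    request = Net::HTTP::Get.new(url)
    request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']
    http.request(request)
  end

  projects.concat(JSON.parse(response.body))

  # Use the server-provided link verbatim; the header is absent on the last page.
  link_header = response['Link'] || response['Links']
  next_url = link_header&.match(/<([^>]+)>; rel="next"/) { |m| m[1] }
  url = next_url && URI(next_url)
end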

View File

@ -4,6 +4,7 @@ module API
module Helpers
include Gitlab::Utils
include Helpers::Pagination
include Helpers::PaginationStrategies
SUDO_HEADER = "HTTP_SUDO"
GITLAB_SHARED_SECRET_HEADER = "Gitlab-Shared-Secret"

View File

@ -3,34 +3,9 @@
module API
module Helpers
module Pagination
# This returns an ActiveRecord relation
def paginate(relation)
Gitlab::Pagination::OffsetPagination.new(self).paginate(relation)
end
# This applies pagination and executes the query
# It always returns an array instead of an ActiveRecord relation
def paginate_and_retrieve!(relation)
offset_or_keyset_pagination(relation).to_a
end
private
def offset_or_keyset_pagination(relation)
return paginate(relation) unless keyset_pagination_enabled?
request_context = Gitlab::Pagination::Keyset::RequestContext.new(self)
unless Gitlab::Pagination::Keyset.available?(request_context, relation)
return error!('Keyset pagination is not yet available for this type of request', 405)
end
Gitlab::Pagination::Keyset.paginate(request_context, relation)
end
def keyset_pagination_enabled?
params[:pagination] == 'keyset' && Feature.enabled?(:api_keyset_pagination, default_enabled: true)
end
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
module API
module Helpers
module PaginationStrategies
def paginate_with_strategies(relation)
paginator = paginator(relation)
yield(paginator.paginate(relation)).tap do |records, _|
paginator.finalize(records)
end
end
def paginator(relation)
return Gitlab::Pagination::OffsetPagination.new(self) unless keyset_pagination_enabled?
request_context = Gitlab::Pagination::Keyset::RequestContext.new(self)
unless Gitlab::Pagination::Keyset.available?(request_context, relation)
return error!('Keyset pagination is not yet available for this type of request', 405)
end
Gitlab::Pagination::Keyset::Pager.new(request_context)
end
private
def keyset_pagination_enabled?
params[:pagination] == 'keyset' && Feature.enabled?(:api_keyset_pagination, default_enabled: true)
end
end
end
end

View File

@ -90,18 +90,22 @@ module API
def present_projects(projects, options = {})
projects = reorder_projects(projects)
projects = apply_filters(projects)
projects = paginate(projects)
projects, options = with_custom_attributes(projects, options)
options = options.reverse_merge(
with: current_user ? Entities::ProjectWithAccess : Entities::BasicProjectDetails,
statistics: params[:statistics],
current_user: current_user,
license: false
)
options[:with] = Entities::BasicProjectDetails if params[:simple]
records, options = paginate_with_strategies(projects) do |projects|
projects, options = with_custom_attributes(projects, options)
present options[:with].prepare_relation(projects, options), options
options = options.reverse_merge(
with: current_user ? Entities::ProjectWithAccess : Entities::BasicProjectDetails,
statistics: params[:statistics],
current_user: current_user,
license: false
)
options[:with] = Entities::BasicProjectDetails if params[:simple]
[options[:with].prepare_relation(projects, options), options]
end
present records, options
end
def translate_params_for_compatibility(params)

View File

@ -17,7 +17,7 @@ module Gitlab
allow_failure type stage when start_in artifacts cache
dependencies before_script needs after_script variables
environment coverage retry parallel extends interruptible timeout
resource_group].freeze
resource_group release].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze
@ -151,14 +151,18 @@ module Gitlab
description: 'Coverage configuration for this job.',
inherit: false
entry :release, Entry::Release,
description: 'This job will produce a release.',
inherit: false
helpers :before_script, :script, :stage, :type, :after_script,
:cache, :image, :services, :only, :except, :variables,
:artifacts, :environment, :coverage, :retry, :rules,
:parallel, :needs, :interruptible
:parallel, :needs, :interruptible, :release
attributes :script, :tags, :allow_failure, :when, :dependencies,
:needs, :retry, :parallel, :extends, :start_in, :rules,
:interruptible, :timeout, :resource_group
:interruptible, :timeout, :resource_group, :release
def self.matching?(name, config)
!name.to_s.start_with?('.') &&
@ -243,6 +247,7 @@ module Gitlab
interruptible: interruptible_defined? ? interruptible_value : nil,
timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil,
artifacts: artifacts_value,
release: release_value,
after_script: after_script_value,
ignore: ignored?,
needs: needs_defined? ? needs_value : nil,

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a release configuration.
#
class Release < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[tag_name name description assets].freeze
attributes %i[tag_name name assets].freeze
# Attributable description conflicts with
# ::Gitlab::Config::Entry::Node.description
def has_description?
true
end
def description
config[:description]
end
entry :assets, Entry::Release::Assets, description: 'Release assets.'
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :tag_name, presence: true
validates :description, type: String, presence: true
end
helpers :assets
def value
@config[:assets] = assets_value if @config.key?(:assets)
@config
end
end
end
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release assets.
#
class Release
class Assets < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[links].freeze
attributes ALLOWED_KEYS
entry :links, Entry::Release::Assets::Links, description: 'Release assets:links.'
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :links, array_of_hashes: true, presence: true
end
helpers :links
def value
@config[:links] = links_value if @config.key?(:links)
@config
end
end
end
end
end
end
end

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release:assets:links.
#
class Release
class Assets
class Link < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[name url].freeze
attributes ALLOWED_KEYS
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :name, type: String, presence: true
validates :url, presence: true, addressable_url: true
end
end
end
end
end
end
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of release:assets:links.
#
class Release
class Assets
class Links < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Validatable
entry :link, Entry::Release::Assets::Link, description: 'Release assets:links:link.'
validations do
validates :config, type: Array, presence: true
end
def skip_config_hash_validation?
true
end
end
end
end
end
end
end
end

View File

@ -81,10 +81,15 @@ module Gitlab
instance: job[:instance],
start_in: job[:start_in],
trigger: job[:trigger],
bridge_needs: job.dig(:needs, :bridge)&.first
bridge_needs: job.dig(:needs, :bridge)&.first,
release: release(job)
}.compact }.compact
end
def release(job)
job[:release] if Feature.enabled?(:ci_release_generation, default_enabled: false)
end
def stage_builds_attributes(stage)
@jobs.values
.select { |job| job[:stage] == stage }
@ -133,7 +138,6 @@ module Gitlab
@jobs.each do |name, job|
# logical validation for job
validate_job_stage!(name, job)
validate_job_dependencies!(name, job)
validate_job_needs!(name, job)

View File

@ -10,7 +10,7 @@ module Gitlab
def attributes(*attributes)
attributes.flatten.each do |attribute|
if method_defined?(attribute)
raise ArgumentError, 'Method already defined!'
raise ArgumentError, "Method already defined: #{attribute}"
end
define_method(attribute) do

View File

@ -5,7 +5,7 @@ module Gitlab
module Entry
##
# This mixin is responsible for adding DSL, which purpose is to
# simplifly process of adding child nodes.
# simplify the process of adding child nodes.
#
# This can be used only if parent node is a configuration entry that
# holds a hash as a configuration value, for example:

View File

@ -8,9 +8,17 @@ module Gitlab
feature = Feature.get(feature_key)
return feature.enabled? if Feature.persisted?(feature)
# Disable Rugged auto-detect(can_use_disk?) when Puma threads>1
# https://gitlab.com/gitlab-org/gitlab/issues/119326
return false if running_puma_with_multiple_threads?
Gitlab::GitalyClient.can_use_disk?(repo.storage)
end
def running_puma_with_multiple_threads?
Gitlab::Runtime.puma? && ::Puma.cli_config.options[:max_threads] > 1
end
def execute_rugged_call(method_name, *args)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
start = Gitlab::Metrics::System.monotonic_time

View File

@ -3,6 +3,14 @@
module Gitlab
module Pagination
class Base
def paginate(relation)
raise NotImplementedError
end
def finalize(records)
# Optional: Called with the actual set of records
end
private
def per_page

View File

@ -3,10 +3,6 @@
module Gitlab
module Pagination
module Keyset
def self.paginate(request_context, relation)
Gitlab::Pagination::Keyset::Pager.new(request_context).paginate(relation)
end
def self.available?(request_context, relation)
order_by = request_context.page.order_by

View File

@ -11,14 +11,13 @@ module Gitlab
# Maximum number of records for a page
MAXIMUM_PAGE_SIZE = 100
attr_accessor :lower_bounds, :end_reached
attr_accessor :lower_bounds
attr_reader :order_by
def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE, end_reached: false)
def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE)
@order_by = order_by.symbolize_keys
@lower_bounds = lower_bounds&.symbolize_keys
@per_page = per_page
@end_reached = end_reached
end
# Number of records to return per page
@ -28,17 +27,11 @@ module Gitlab
[@per_page, MAXIMUM_PAGE_SIZE].min
end
# Determine whether this page indicates the end of the collection
def end_reached?
@end_reached
end
# Construct a Page for the next page
# Uses identical order_by/per_page information for the next page
def next(lower_bounds, end_reached)
def next(lower_bounds)
dup.tap do |next_page|
next_page.lower_bounds = lower_bounds&.symbolize_keys
next_page.end_reached = end_reached
end
end
end

View File

@ -14,27 +14,20 @@ module Gitlab
# Validate assumption: The last two columns must match the page order_by
validate_order!(relation)
# This performs the database query and retrieves records
# We retrieve one record more to check if we have data beyond this page
all_records = relation.limit(page.per_page + 1).to_a # rubocop: disable CodeReuse/ActiveRecord
relation.limit(page.per_page) # rubocop: disable CodeReuse/ActiveRecord
end
records_for_page = all_records.first(page.per_page)
# If we retrieved more records than belong on this page,
# we know there's a next page
there_is_more = all_records.size > records_for_page.size
apply_headers(records_for_page.last, there_is_more)
records_for_page
def finalize(records)
apply_headers(records.last)
end
private
def apply_headers(last_record_in_page, there_is_more)
end_reached = last_record_in_page.nil? || !there_is_more
lower_bounds = last_record_in_page&.slice(page.order_by.keys)
def apply_headers(last_record_in_page)
return unless last_record_in_page
next_page = page.next(lower_bounds, end_reached)
lower_bounds = last_record_in_page&.slice(page.order_by.keys)
next_page = page.next(lower_bounds)
request.apply_headers(next_page)
end

View File

@ -68,8 +68,6 @@ module Gitlab
end
def pagination_links(next_page)
return if next_page.end_reached?
%(<#{page_href(next_page)}>; rel="next")
end

View File

@ -21,6 +21,8 @@ module QA
delete delete_project_request.url
expect_status(202)
Page::Main::Menu.perform(&:sign_out_if_signed_in)
end
it 'user imports a GitHub repo' do

View File

@ -202,6 +202,53 @@ describe('IDE store file actions', () => {
};
});
describe('call to service', () => {
const callExpectation = serviceCalled => {
store.dispatch('getFileData', { path: localFile.path });
if (serviceCalled) {
expect(service.getFileData).toHaveBeenCalled();
} else {
expect(service.getFileData).not.toHaveBeenCalled();
}
};
beforeEach(() => {
service.getFileData.mockImplementation(() => new Promise(() => {}));
});
it("isn't called if file.raw exists", () => {
localFile.raw = 'raw data';
callExpectation(false);
});
it("isn't called if file is a tempFile", () => {
localFile.raw = '';
localFile.tempFile = true;
callExpectation(false);
});
it('is called if file is a tempFile but also renamed', () => {
localFile.raw = '';
localFile.tempFile = true;
localFile.prevPath = 'old_path';
callExpectation(true);
});
it('is called if tempFile but file was deleted and readded', () => {
localFile.raw = '';
localFile.tempFile = true;
localFile.prevPath = 'old_path';
store.state.stagedFiles = [{ ...localFile, deleted: true }];
callExpectation(true);
});
});
describe('success', () => {
beforeEach(() => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
@ -332,10 +379,10 @@ describe('IDE store file actions', () => {
mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
});
it('dispatches error action', done => {
it('dispatches error action', () => {
const dispatch = jest.fn();
actions
return actions
.getFileData(
{ state: store.state, commit() {}, dispatch, getters: store.getters },
{ path: localFile.path },
@ -350,10 +397,7 @@ describe('IDE store file actions', () => {
makeFileActive: true,
},
});
done();
})
.catch(done.fail);
});
});
});
});
@ -446,12 +490,14 @@ describe('IDE store file actions', () => {
mock.onGet(/(.*)/).networkError();
});
it('dispatches error action', done => {
it('dispatches error action', () => {
const dispatch = jest.fn();
actions
.getRawFileData({ state: store.state, commit() {}, dispatch }, { path: tmpFile.path })
.then(done.fail)
return actions
.getRawFileData(
{ state: store.state, commit() {}, dispatch, getters: store.getters },
{ path: tmpFile.path },
)
.catch(() => {
expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
text: 'An error occurred whilst loading the file content.',
@ -461,8 +507,6 @@ describe('IDE store file actions', () => {
path: tmpFile.path,
},
});
done();
});
});
});

View File

@ -11,7 +11,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => {
localStore = createStore();
localState = localStore.state;
localFile = { ...file(), type: 'blob' };
localFile = { ...file('file'), type: 'blob', content: 'original' };
localState.entries[localFile.path] = localFile;
});
@ -139,35 +139,68 @@ describe('IDE store file mutations', () => {
});
describe('SET_FILE_RAW_DATA', () => {
it('sets raw data', () => {
const callMutationForFile = f => {
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
file: f,
raw: 'testing',
fileDeletedAndReadded: localStore.getters.isFileDeletedAndReadded(localFile.path),
});
};
it('sets raw data', () => {
callMutationForFile(localFile);
expect(localFile.raw).toBe('testing');
});
it('sets raw data to stagedFile if file was deleted and readded', () => {
localState.stagedFiles = [{ ...localFile, deleted: true }];
localFile.tempFile = true;
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localState.stagedFiles[0].raw).toBe('testing');
});
it("sets raw data to a file's content if tempFile is empty", () => {
localFile.tempFile = true;
localFile.content = '';
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localFile.content).toBe('testing');
});
it('adds raw data to open pending file', () => {
localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
raw: 'testing',
});
callMutationForFile(localFile);
expect(localState.openFiles[0].raw).toBe('testing');
});
it('does not add raw data to open pending tempFile file', () => {
localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
it('sets raw to content of a renamed tempFile', () => {
localFile.tempFile = true;
localFile.prevPath = 'old_path';
localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
raw: 'testing',
});
callMutationForFile(localFile);
expect(localState.openFiles[0].raw).not.toBe('testing');
expect(localState.openFiles[0].content).toBe('testing');
});
it('adds raw data to a staged deleted file if unstaged change has a tempFile of the same name', () => {
localFile.tempFile = true;
localState.openFiles.push({ ...localFile, pending: true });
localState.stagedFiles = [{ ...localFile, deleted: true }];
callMutationForFile(localFile);
expect(localFile.raw).toBeFalsy();
expect(localState.stagedFiles[0].raw).toBe('testing');
});
});

View File

@ -1,33 +1,38 @@
import { readmeFile } from '~/repository/utils/readme';
describe('readmeFile', () => {
describe('markdown files', () => {
it('returns markdown file', () => {
expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
name: 'README.md',
});
expect(readmeFile([{ name: 'README' }, { name: 'index.md' }])).toEqual({
name: 'index.md',
});
it('prefers README with markup over plain text README', () => {
expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
name: 'README.md',
});
});
describe('plain files', () => {
it('returns plain file', () => {
expect(readmeFile([{ name: 'README' }, { name: 'TEST.md' }])).toEqual({
name: 'README',
});
expect(readmeFile([{ name: 'readme' }, { name: 'TEST.md' }])).toEqual({
name: 'readme',
});
it('is case insensitive', () => {
expect(readmeFile([{ name: 'README' }, { name: 'readme.rdoc' }])).toEqual({
name: 'readme.rdoc',
});
});
describe('non-previewable file', () => {
it('returns undefined', () => {
expect(readmeFile([{ name: 'index.js' }, { name: 'TEST.md' }])).toBe(undefined);
it('returns the first README found', () => {
expect(readmeFile([{ name: 'INDEX.adoc' }, { name: 'README.md' }])).toEqual({
name: 'INDEX.adoc',
});
});
it('expects extension to be separated by dot', () => {
expect(readmeFile([{ name: 'readmeXorg' }, { name: 'index.org' }])).toEqual({
name: 'index.org',
});
});
it('returns plain text README when there is no README with markup', () => {
expect(readmeFile([{ name: 'README' }, { name: 'NOT_README.md' }])).toEqual({
name: 'README',
});
});
it('returns undefined when there are no appropriate files', () => {
expect(readmeFile([{ name: 'index.js' }, { name: 'md.README' }])).toBe(undefined);
expect(readmeFile([])).toBe(undefined);
});
});

View File

@ -5,70 +5,14 @@ require 'spec_helper'
describe API::Helpers::Pagination do
subject { Class.new.include(described_class).new }
let(:expected_result) { double("result", to_a: double) }
let(:relation) { double("relation") }
let(:params) { {} }
let(:paginator) { double('paginator') }
let(:relation) { double('relation') }
let(:expected_result) { double('expected result') }
before do
allow(subject).to receive(:params).and_return(params)
end
it 'delegates to OffsetPagination' do
expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
expect(paginator).to receive(:paginate).with(relation).and_return(expected_result)
describe '#paginate' do
let(:offset_pagination) { double("offset pagination") }
it 'delegates to OffsetPagination' do
expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
result = subject.paginate(relation)
expect(result).to eq(expected_result)
end
end
describe '#paginate_and_retrieve!' do
context 'for offset pagination' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(false)
end
it 'delegates to paginate' do
expect(subject).to receive(:paginate).with(relation).and_return(expected_result)
result = subject.paginate_and_retrieve!(relation)
expect(result).to eq(expected_result.to_a)
end
end
context 'for keyset pagination' do
let(:params) { { pagination: 'keyset' } }
let(:request_context) { double('request context') }
before do
allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
end
context 'when keyset pagination is available' do
it 'delegates to KeysetPagination' do
expect(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
expect(Gitlab::Pagination::Keyset).to receive(:paginate).with(request_context, relation).and_return(expected_result)
result = subject.paginate_and_retrieve!(relation)
expect(result).to eq(expected_result.to_a)
end
end
context 'when keyset pagination is not available' do
it 'renders a 501 error if keyset pagination isnt available yet' do
expect(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
expect(Gitlab::Pagination::Keyset).not_to receive(:paginate)
expect(subject).to receive(:error!).with(/not yet available/, 405)
subject.paginate_and_retrieve!(relation)
end
end
end
expect(subject.paginate(relation)).to eq(expected_result)
end
end

View File

@ -0,0 +1,97 @@
# frozen_string_literal: true
require 'spec_helper'
describe API::Helpers::PaginationStrategies do
subject { Class.new.include(described_class).new }
let(:expected_result) { double("result") }
let(:relation) { double("relation") }
let(:params) { {} }
before do
allow(subject).to receive(:params).and_return(params)
end
describe '#paginate_with_strategies' do
let(:paginator) { double("paginator", paginate: expected_result, finalize: nil) }
before do
allow(subject).to receive(:paginator).with(relation).and_return(paginator)
end
it 'yields paginated relation' do
expect { |b| subject.paginate_with_strategies(relation, &b) }.to yield_with_args(expected_result)
end
it 'calls #finalize with first value returned from block' do
return_value = double
expect(paginator).to receive(:finalize).with(return_value)
subject.paginate_with_strategies(relation) do |records|
some_options = {}
[return_value, some_options]
end
end
it 'returns whatever the block returns' do
return_value = [double, double]
result = subject.paginate_with_strategies(relation) do |records|
return_value
end
expect(result).to eq(return_value)
end
end
describe '#paginator' do
context 'offset pagination' do
let(:paginator) { double("paginator") }
before do
allow(subject).to receive(:keyset_pagination_enabled?).and_return(false)
end
it 'delegates to OffsetPagination' do
expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
expect(subject.paginator(relation)).to eq(paginator)
end
end
context 'for keyset pagination' do
let(:params) { { pagination: 'keyset' } }
let(:request_context) { double('request context') }
let(:pager) { double('pager') }
before do
allow(subject).to receive(:keyset_pagination_enabled?).and_return(true)
allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
end
context 'when keyset pagination is available' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
allow(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
end
it 'delegates to Pager' do
expect(subject.paginator(relation)).to eq(pager)
end
end
context 'when keyset pagination is not available' do
before do
allow(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
end
it 'renders a 405 error' do
expect(subject).to receive(:error!).with(/not yet available/, 405)
subject.paginator(relation)
end
end
end
end
end

View File

@ -24,7 +24,7 @@ describe Gitlab::Ci::Config::Entry::Job do
let(:result) do
%i[before_script script stage type after_script cache
image services only except rules needs variables artifacts
environment coverage retry interruptible timeout tags]
environment coverage retry interruptible timeout release tags]
end
it { is_expected.to match_array result }
@ -122,6 +122,21 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
end
context 'when it is a release' do
let(:config) do
{
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt"
}
}
end
it { expect(entry).to be_valid }
end
end
end
@ -443,6 +458,25 @@ describe Gitlab::Ci::Config::Entry::Job do
expect(entry.timeout).to eq('1m 1s')
end
end
context 'when it is a release' do
context 'when `release:description` is missing' do
let(:config) do
{
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
name: "Release $CI_TAG_NAME"
}
}
end
it "returns error" do
expect(entry).not_to be_valid
expect(entry.errors).to include "release description can't be blank"
end
end
end
end
end

View File

@ -0,0 +1,79 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets::Link do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
}
end
describe '#value' do
it 'returns link configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when name is not a string' do
let(:config) { { name: 123, url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
it 'reports error' do
expect(entry.errors)
.to include 'link name should be a string'
end
end
context 'when name is not present' do
let(:config) { { url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }
it 'reports error' do
expect(entry.errors)
.to include "link name can't be blank"
end
end
context 'when url is not addressable' do
let(:config) { { name: "cool-app.zip", url: "xyz" } }
it 'reports error' do
expect(entry.errors)
.to include "link url is blocked: only allowed schemes are http, https"
end
end
context 'when url is not present' do
let(:config) { { name: "cool-app.zip" } }
it 'reports error' do
expect(entry.errors)
.to include "link url can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'link config contains unknown keys: test'
end
end
end
end
end
end

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets::Links do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
[
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
end
describe '#value' do
it 'returns links configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of link is invalid' do
let(:config) { { link: 'xyz' } }
it 'reports error' do
expect(entry.errors)
.to include 'links config should be a array'
end
end
context 'when value of links link is empty' do
let(:config) { { link: [] } }
it 'reports error' do
expect(entry.errors)
.to include "links config should be a array"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'links config should be a array'
end
end
end
end
end
end

View File

@ -0,0 +1,69 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release::Assets do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) do
{
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
end
describe '#value' do
it 'returns assets configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of assets is invalid' do
let(:config) { { links: 'xyz' } }
it 'reports error' do
expect(entry.errors)
.to include 'assets links should be an array of hashes'
end
end
context 'when value of assets:links is empty' do
let(:config) { { links: [] } }
it 'reports error' do
expect(entry.errors)
.to include "assets links can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'assets config contains unknown keys: test'
end
end
end
end
end
end

View File

@ -0,0 +1,114 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Release do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) { { tag_name: 'v0.06', description: "./release_changelog.txt" } }
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context "when value includes 'assets' keyword" do
let(:config) do
{
tag_name: 'v0.06',
description: "./release_changelog.txt",
assets: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
}
]
}
end
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context "when value includes 'name' keyword" do
let(:config) do
{
tag_name: 'v0.06',
description: "./release_changelog.txt",
name: "Release $CI_TAG_NAME"
}
end
describe '#value' do
it 'returns release configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of attribute is invalid' do
let(:config) { { description: 10 } }
it 'reports error' do
expect(entry.errors)
.to include 'release description should be a string'
end
end
context 'when release description is missing' do
let(:config) { { tag_name: 'v0.06' } }
it 'reports error' do
expect(entry.errors)
.to include "release description can't be blank"
end
end
context 'when release tag_name is missing' do
let(:config) { { description: "./release_changelog.txt" } }
it 'reports error' do
expect(entry.errors)
.to include "release tag name can't be blank"
end
end
context 'when there is an unknown key present' do
let(:config) { { test: 100 } }
it 'reports error' do
expect(entry.errors)
.to include 'release config contains unknown keys: test'
end
end
end
end
end
end

View File

@ -27,16 +27,29 @@ describe Gitlab::Ci::Config::Entry::Root do
context 'when configuration is valid' do
context 'when top-level entries are defined' do
let(:hash) do
{ before_script: %w(ls pwd),
{
before_script: %w(ls pwd),
image: 'ruby:2.2',
default: {},
services: ['postgres:9.1', 'mysql:5.5'],
variables: { VAR: 'value' },
after_script: ['make clean'],
stages: %w(build pages),
stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] },
rspec: { script: %w[rspec ls] },
spinach: { before_script: [], variables: {}, script: 'spinach' } }
spinach: { before_script: [], variables: {}, script: 'spinach' },
release: {
stage: 'release',
before_script: [],
after_script: [],
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: 'v0.06',
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt"
}
}
}
end
describe '#compose!' do
@ -87,7 +100,7 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#stages_value' do
context 'when stages key defined' do
it 'returns array of stages' do
expect(root.stages_value).to eq %w[build pages]
expect(root.stages_value).to eq %w[build pages release]
end
end
@ -105,8 +118,9 @@ describe Gitlab::Ci::Config::Entry::Root do
describe '#jobs_value' do
it 'returns jobs configuration' do
expect(root.jobs_value).to eq(
rspec: { name: :rspec,
expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
expect(root.jobs_value[:rspec]).to eq(
{ name: :rspec,
script: %w[rspec ls],
before_script: %w(ls pwd),
image: { name: 'ruby:2.2' },
@ -116,8 +130,10 @@ describe Gitlab::Ci::Config::Entry::Root do
variables: {},
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] } },
spinach: { name: :spinach,
only: { refs: %w[branches tags] } }
)
expect(root.jobs_value[:spinach]).to eq(
{ name: :spinach,
before_script: [],
script: %w[spinach],
image: { name: 'ruby:2.2' },
@ -129,6 +145,20 @@ describe Gitlab::Ci::Config::Entry::Root do
after_script: ['make clean'],
only: { refs: %w[branches tags] } }
)
expect(root.jobs_value[:release]).to eq(
{ name: :release,
stage: 'release',
before_script: [],
script: ["make changelog | tee release_changelog.txt"],
release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
image: { name: "ruby:2.2" },
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
only: { refs: %w(branches tags) },
variables: {},
after_script: [],
ignore: false }
)
end
end
end
@ -261,7 +291,7 @@ describe Gitlab::Ci::Config::Entry::Root do
# despite the fact, that key is present. See issue #18775 for more
# details.
#
context 'when entires specified but not defined' do
context 'when entries are specified but not defined' do
before do
root.compose!
end

View File

@ -1285,6 +1285,59 @@ module Gitlab
end
end
describe "release" do
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:config) do
{
stages: ["build", "test", "release"], # rubocop:disable Style/WordArray
release: {
stage: "release",
only: ["tags"],
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "$CI_COMMIT_TAG",
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt",
assets: {
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
name: "cool-app.exe",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
}
}
}
end
context 'with feature flag active' do
before do
stub_feature_flags(ci_release_generation: true)
end
it "returns release info" do
expect(processor.stage_builds_attributes('release').first[:options])
.to eq(config[:release].except(:stage, :only))
end
end
context 'with feature flag inactive' do
before do
stub_feature_flags(ci_release_generation: false)
end
it "returns release info" do
expect(processor.stage_builds_attributes('release').first[:options].include?(config[:release]))
.to be false
end
end
end
describe '#environment' do
let(:config) do
{

View File

@ -59,7 +59,7 @@ describe Gitlab::Config::Entry::Attributable do
end
end
expectation.to raise_error(ArgumentError, 'Method already defined!')
expectation.to raise_error(ArgumentError, 'Method already defined: length')
end
end
end

View File

@ -53,30 +53,46 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false)
end
it 'returns true when gitaly matches disk' do
expect(subject.use_rugged?(repository, feature_flag_name)).to be true
context 'when running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
end
it 'returns false' do
expect(subject.use_rugged?(repository, feature_flag_name)).to be false
end
end
it 'returns false when disk access fails' do
allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
context 'when not running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
end
expect(subject.use_rugged?(repository, feature_flag_name)).to be false
end
it 'returns true when gitaly matches disk' do
expect(subject.use_rugged?(repository, feature_flag_name)).to be true
end
it "returns false when gitaly doesn't match disk" do
allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
it 'returns false when disk access fails' do
allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
expect(subject.use_rugged?(repository, feature_flag_name)).to be false
end
File.delete(temp_gitaly_metadata_file)
end
it "returns false when gitaly doesn't match disk" do
allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
expect(subject.use_rugged?(repository, feature_flag_name)).to be true
expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
File.delete(temp_gitaly_metadata_file)
end
subject.use_rugged?(repository, feature_flag_name)
it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
expect(subject.use_rugged?(repository, feature_flag_name)).to be true
expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
subject.use_rugged?(repository, feature_flag_name)
end
end
end
@ -99,6 +115,37 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
describe '#running_puma_with_multiple_threads?' do
context 'when using Puma' do
before do
stub_const('::Puma', class_double('Puma'))
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
it 'returns false for single thread Puma' do
allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 1)
expect(subject.running_puma_with_multiple_threads?).to be false
end
it 'returns true for multi-threaded Puma' do
allow(::Puma).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
expect(subject.running_puma_with_multiple_threads?).to be true
end
end
context 'when not using Puma' do
before do
allow(Gitlab::Runtime).to receive(:puma?).and_return(false)
end
it 'returns false' do
expect(subject.running_puma_with_multiple_threads?).to be false
end
end
end
def create_temporary_gitaly_metadata_file
tmp = Tempfile.new('.gitaly-metadata')
gitaly_metadata = {

View File

@ -30,16 +30,14 @@ describe Gitlab::Pagination::Keyset::Page do
end
describe '#next' do
let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page, end_reached: end_reached) }
subject { page.next(new_lower_bounds, new_end_reached) }
let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page) }
subject { page.next(new_lower_bounds) }
let(:order_by) { { id: :desc } }
let(:lower_bounds) { { id: 42 } }
let(:per_page) { 10 }
let(:end_reached) { false }
let(:new_lower_bounds) { { id: 21 } }
let(:new_end_reached) { true }
it 'copies over order_by' do
expect(subject.order_by).to eq(page.order_by)
@ -57,10 +55,5 @@ describe Gitlab::Pagination::Keyset::Page do
expect(subject.lower_bounds).to eq(new_lower_bounds)
expect(page.lower_bounds).to eq(lower_bounds)
end
it 'sets end_reached only on new instance' do
expect(subject.end_reached?).to eq(new_end_reached)
expect(page.end_reached?).to eq(end_reached)
end
end
end

View File

@ -15,46 +15,18 @@ describe Gitlab::Pagination::Keyset::Pager do
describe '#paginate' do
subject { described_class.new(request).paginate(relation) }
it 'loads the result relation only once' do
it 'does not execute a query' do
expect do
subject
end.not_to exceed_query_limit(1)
end.not_to exceed_query_limit(0)
end
it 'passes information about next page to request' do
lower_bounds = relation.limit(page.per_page).last.slice(:id)
expect(page).to receive(:next).with(lower_bounds, false).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page)
subject
it 'applies a LIMIT' do
expect(subject.limit_value).to eq(page.per_page)
end
context 'when retrieving the last page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) }
it 'indicates this is the last page' do
expect(request).to receive(:apply_headers) do |next_page|
expect(next_page.end_reached?).to be_truthy
end
subject
end
end
context 'when retrieving an empty page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) }
it 'indicates this is the last page' do
expect(request).to receive(:apply_headers) do |next_page|
expect(next_page.end_reached?).to be_truthy
end
subject
end
end
it 'returns an array with the loaded records' do
expect(subject).to eq(relation.limit(page.per_page).to_a)
it 'returns the limited relation' do
expect(subject).to eq(relation.limit(page.per_page))
end
context 'validating the order clause' do
@ -65,4 +37,40 @@ describe Gitlab::Pagination::Keyset::Pager do
end
end
end
describe '#finalize' do
let(:records) { relation.limit(page.per_page).load }
subject { described_class.new(request).finalize(records) }
it 'passes information about next page to request' do
lower_bounds = records.last.slice(:id)
expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page)
subject
end
context 'when retrieving the last page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) }
it 'indicates there is another (likely empty) page' do
lower_bounds = records.last.slice(:id)
expect(page).to receive(:next).with(lower_bounds).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page)
subject
end
end
context 'when retrieving an empty page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) }
it 'indicates this is the last page' do
expect(request).not_to receive(:apply_headers)
subject
end
end
end
end

View File

@ -53,7 +53,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?foo=bar") }
let(:params) { { foo: 'bar' } }
let(:request_context) { double('request context', params: params, request: request) }
let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }, end_reached?: false) }
let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }) }
subject { described_class.new(request_context).apply_headers(next_page) }
@ -92,7 +92,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
end
context 'with descending order' do
let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }, end_reached?: false) }
let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }) }
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)

View File

@ -3,22 +3,6 @@
require 'spec_helper'
describe Gitlab::Pagination::Keyset do
describe '.paginate' do
subject { described_class.paginate(request_context, relation) }
let(:request_context) { double }
let(:relation) { double }
let(:pager) { double }
let(:result) { double }
it 'uses Pager to paginate the relation' do
expect(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
expect(pager).to receive(:paginate).with(relation).and_return(result)
expect(subject).to eq(result)
end
end
describe '.available?' do
subject { described_class }

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191128162854_drop_project_ci_cd_settings_merge_trains_enabled.rb')
describe DropProjectCiCdSettingsMergeTrainsEnabled, :migration do
let!(:project_ci_cd_setting) { table(:project_ci_cd_settings) }
it 'correctly migrates up and down' do
reversible_migration do |migration|
migration.before -> {
expect(project_ci_cd_setting.column_names).to include("merge_trains_enabled")
}
migration.after -> {
project_ci_cd_setting.reset_column_information
expect(project_ci_cd_setting.column_names).not_to include("merge_trains_enabled")
}
end
end
end

View File

@ -570,6 +570,102 @@ describe API::Projects do
let(:projects) { Project.all }
end
end
context 'with keyset pagination' do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3] }
context 'headers and records' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :asc, per_page: 1 } }
it 'includes a pagination header with link to the next page' do
get api('/projects', current_user), params: params
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_after=#{public_project.id}")
end
it 'contains only the first project with per_page = 1' do
get api('/projects', current_user), params: params
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
end
it 'still includes a link if the end has been reached and there is no more data after this page' do
get api('/projects', current_user), params: params.merge(id_after: project2.id)
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_after=#{project3.id}")
end
it 'does not include a next link when the page does not have any records' do
get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
expect(response.header).not_to include('Links')
end
it 'returns an empty array when the page does not have any records' do
get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id))
expect(response).to have_gitlab_http_status(200)
expect(json_response).to eq([])
end
it 'responds with 405 if order_by is different from id' do
get api('/projects', current_user), params: params.merge(order_by: :created_at)
expect(response).to have_gitlab_http_status(405)
end
end
context 'with descending sorting' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 1 } }
it 'includes a pagination header with link to the next page' do
get api('/projects', current_user), params: params
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_before=#{project3.id}")
end
it 'contains only the last project with per_page = 1' do
get api('/projects', current_user), params: params
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
end
end
context 'retrieving the full relation' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 2 } }
it 'returns all projects' do
url = '/projects'
requests = 0
ids = []
while url && requests <= 5 # circuit breaker
requests += 1
get api(url, current_user), params: params
links = response.header['Links']
url = links&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
match[1]
end
ids += JSON.parse(response.body).map { |p| p['id'] }
end
expect(ids).to contain_exactly(*projects.map(&:id))
end
end
end
end
describe 'POST /projects' do

View File

@ -34,7 +34,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy }
context 'when the head piipeline succeeded' do
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it { is_expected.to be_falsy }

View File

@ -940,7 +940,7 @@ describe Ci::CreatePipelineService do
expect(resource_group.resources.first.build).to eq(nil)
end
context 'when resourc group key includes predefined variables' do
context 'when resource group key includes predefined variables' do
let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
it 'interpolates the variables into the key correctly' do
@ -969,6 +969,70 @@ describe Ci::CreatePipelineService do
end
end
context 'with release' do
shared_examples_for 'a successful release pipeline' do
before do
stub_feature_flags(ci_release_generation: true)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
it 'is valid config' do
pipeline = execute_service
build = pipeline.builds.first
expect(pipeline).to be_kind_of(Ci::Pipeline)
expect(pipeline).to be_valid
expect(pipeline.yaml_errors).not_to be_present
expect(pipeline).to be_persisted
expect(build).to be_kind_of(Ci::Build)
expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
end
end
context 'simple example' do
it_behaves_like 'a successful release pipeline' do
let(:config) do
{
release: {
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "v0.06",
description: "./release_changelog.txt"
}
}
}
end
end
end
context 'example with all release metadata' do
it_behaves_like 'a successful release pipeline' do
let(:config) do
{
release: {
script: ["make changelog | tee release_changelog.txt"],
release: {
name: "Release $CI_TAG_NAME",
tag_name: "v0.06",
description: "./release_changelog.txt",
assets: {
links: [
{
name: "cool-app.zip",
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
},
{
url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
}
]
}
}
}
}
end
end
end
end
shared_examples 'when ref is protected' do
let(:user) { create(:user) }

View File

@ -45,7 +45,7 @@ describe SpamService do
context 'when indicated as spam by akismet' do
shared_examples 'akismet spam' do
it 'doesnt check as spam when request is missing' do
it "doesn't check as spam when request is missing" do
check_spam(issue, nil, false)
expect(issue).not_to be_spam