Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-10-03 12:08:27 +00:00
parent 10d4625ed3
commit 077b0a79d5
64 changed files with 1841 additions and 1278 deletions

View File

@ -631,7 +631,6 @@ Layout/FirstHashElementIndentation:
- 'spec/support_specs/graphql/arguments_spec.rb'
- 'spec/support_specs/graphql/field_selection_spec.rb'
- 'spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tooling/danger/datateam_spec.rb'
- 'spec/tooling/lib/tooling/kubernetes_client_spec.rb'
- 'spec/views/projects/issues/_issue.html.haml_spec.rb'

View File

@ -5901,7 +5901,6 @@ Layout/LineLength:
- 'spec/tasks/dev_rake_spec.rb'
- 'spec/tasks/gitlab/artifacts/check_rake_spec.rb'
- 'spec/tasks/gitlab/background_migrations_rake_spec.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tasks/gitlab/db/validate_config_rake_spec.rb'
- 'spec/tasks/gitlab/db_rake_spec.rb'
- 'spec/tasks/gitlab/external_diffs_rake_spec.rb'

View File

@ -418,7 +418,6 @@ Layout/SpaceInsideParens:
- 'spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb'
- 'spec/support/shared_examples/requests/releases_shared_examples.rb'
- 'spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tasks/gitlab/db_rake_spec.rb'
- 'spec/validators/devise_email_validator_spec.rb'
- 'spec/views/shared/runners/_runner_details.html.haml_spec.rb'

View File

@ -3370,7 +3370,6 @@ RSpec/ContextWording:
- 'spec/tasks/cache/clear/redis_spec.rb'
- 'spec/tasks/dev_rake_spec.rb'
- 'spec/tasks/gettext_rake_spec.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tasks/gitlab/cleanup_rake_spec.rb'
- 'spec/tasks/gitlab/db/lock_writes_rake_spec.rb'
- 'spec/tasks/gitlab/db/validate_config_rake_spec.rb'

View File

@ -496,7 +496,6 @@ RSpec/PredicateMatcher:
- 'spec/support/shared_examples/requests/api/hooks_shared_examples.rb'
- 'spec/support/shared_examples/uploaders/object_storage_shared_examples.rb'
- 'spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tasks/gitlab/cleanup_rake_spec.rb'
- 'spec/uploaders/object_storage_spec.rb'
- 'spec/validators/any_field_validator_spec.rb'

View File

@ -72,4 +72,3 @@ Style/NumericLiteralPrefix:
- 'spec/support/import_export/export_file_helper.rb'
- 'spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb'
- 'spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'

View File

@ -1191,7 +1191,6 @@ Style/PercentLiteralDelimiters:
- 'spec/support_specs/graphql/arguments_spec.rb'
- 'spec/support_specs/helpers/active_record/query_recorder_spec.rb'
- 'spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb'
- 'spec/tasks/gitlab/backup_rake_spec.rb'
- 'spec/tasks/gitlab/db_rake_spec.rb'
- 'spec/tasks/gitlab/task_helpers_spec.rb'
- 'spec/tasks/gitlab/uploads/migrate_rake_spec.rb'

View File

@ -146,6 +146,3 @@ export const BIDI_CHAR_TOOLTIP = __(
export const HLJS_COMMENT_SELECTOR = 'hljs-comment';
export const HLJS_ON_AFTER_HIGHLIGHT = 'after:highlight';
export const NPM_URL = 'https://npmjs.com/package';
export const GEM_URL = 'https://rubygems.org/gems';

View File

@ -1,16 +1,7 @@
import { escape } from 'lodash';
import { setAttributes } from '~/lib/utils/dom_utils';
export const createLink = (href, innerText) => {
// eslint-disable-next-line @gitlab/require-i18n-strings
const rel = 'nofollow noreferrer noopener';
const link = document.createElement('a');
setAttributes(link, { href: escape(href), rel });
link.textContent = innerText;
return link.outerHTML;
};
export const createLink = (href, innerText) =>
`<a href="${escape(href)}" rel="nofollow noreferrer noopener">${escape(innerText)}</a>`;
export const generateHLJSOpenTag = (type, delimiter = '&quot;') =>
`<span class="hljs-${escape(type)}">${delimiter}`;

View File

@ -1,7 +1,6 @@
import { joinPaths } from '~/lib/utils/url_utility';
import { GEM_URL } from '../../constants';
import { createLink, generateHLJSOpenTag } from './dependency_linker_util';
const GEM_URL = 'https://rubygems.org/gems/';
const methodRegex = '.*add_dependency.*|.*add_runtime_dependency.*|.*add_development_dependency.*';
const openTagRegex = generateHLJSOpenTag('string', '(&.*;)');
const closeTagRegex = '&.*</span>';
@ -24,7 +23,7 @@ const DEPENDENCY_REGEX = new RegExp(
const handleReplace = (method, delimiter, packageName, closeTag, rest) => {
// eslint-disable-next-line @gitlab/require-i18n-strings
const openTag = generateHLJSOpenTag('string linked', delimiter);
const href = joinPaths(GEM_URL, packageName);
const href = `${GEM_URL}${packageName}`;
const packageLink = createLink(href, packageName);
return `${method}${openTag}${packageLink}${closeTag}${rest}`;

View File

@ -1,8 +1,7 @@
import { unescape } from 'lodash';
import { joinPaths } from '~/lib/utils/url_utility';
import { NPM_URL } from '../../constants';
import { createLink, generateHLJSOpenTag } from './dependency_linker_util';
const NPM_URL = 'https://npmjs.com/package/';
const attrOpenTag = generateHLJSOpenTag('attr');
const stringOpenTag = generateHLJSOpenTag('string');
const closeTag = '&quot;</span>';
@ -20,7 +19,7 @@ const DEPENDENCY_REGEX = new RegExp(
const handleReplace = (original, packageName, version, dependenciesToLink) => {
const unescapedPackageName = unescape(packageName);
const unescapedVersion = unescape(version);
const href = joinPaths(NPM_URL, unescapedPackageName);
const href = `${NPM_URL}${unescapedPackageName}`;
const packageLink = createLink(href, unescapedPackageName);
const versionLink = createLink(href, unescapedVersion);
const closeAndOpenTag = `${closeTag}: ${attrOpenTag}`;

View File

@ -0,0 +1,10 @@
import { highlight } from './highlight_utils';
/**
* A webworker for highlighting large amounts of content with Highlight.js
*/
// eslint-disable-next-line no-restricted-globals
self.addEventListener('message', ({ data: { fileType, content, language } }) => {
// eslint-disable-next-line no-restricted-globals
self.postMessage(highlight(fileType, content, language));
});

View File

@ -0,0 +1,15 @@
import hljs from 'highlight.js/lib/core';
import languageLoader from '~/content_editor/services/highlight_js_language_loader';
import { registerPlugins } from '../plugins/index';
const initHighlightJs = async (fileType, content, language) => {
const languageDefinition = await languageLoader[language]();
registerPlugins(hljs, fileType, content);
hljs.registerLanguage(language, languageDefinition.default);
};
export const highlight = (fileType, content, language) => {
initHighlightJs(fileType, content, language);
return hljs.highlight(content, { language }).value;
};

View File

@ -393,7 +393,7 @@ class GroupsController < Groups::ApplicationController
end
def captcha_enabled?
Gitlab::Recaptcha.enabled? && Feature.enabled?(:recaptcha_on_top_level_group_creation, type: :ops)
helpers.recaptcha_enabled? && Feature.enabled?(:recaptcha_on_top_level_group_creation, type: :ops)
end
def captcha_required?

View File

@ -107,11 +107,11 @@ class SessionsController < Devise::SessionsController
end
def captcha_enabled?
request.headers[CAPTCHA_HEADER] && Gitlab::Recaptcha.enabled?
request.headers[CAPTCHA_HEADER] && helpers.recaptcha_enabled?
end
def captcha_on_login_required?
Gitlab::Recaptcha.enabled_on_login? && unverified_anonymous_user?
helpers.recaptcha_enabled_on_login? && unverified_anonymous_user?
end
# From https://github.com/plataformatec/devise/wiki/How-To:-Use-Recaptcha-with-Devise#devisepasswordscontroller

View File

@ -2,9 +2,27 @@
module RecaptchaHelper
def recaptcha_enabled?
return false if gitlab_qa?
!!Gitlab::Recaptcha.enabled?
end
alias_method :show_recaptcha_sign_up?, :recaptcha_enabled?
def recaptcha_enabled_on_login?
return false if gitlab_qa?
Gitlab::Recaptcha.enabled_on_login?
end
private
def gitlab_qa?
return false unless Gitlab.com?
return false unless request.user_agent.present?
return false unless Gitlab::Environment.qa_user_agent.present?
ActiveSupport::SecurityUtils.secure_compare(request.user_agent, Gitlab::Environment.qa_user_agent)
end
end
RecaptchaHelper.prepend_mod
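The helper above becomes the single gate for reCAPTCHA: GroupsController and SessionsController (changed earlier in this commit) now call `helpers.recaptcha_enabled?` instead of `Gitlab::Recaptcha.enabled?`, so QA traffic on GitLab.com can be exempted by user agent. A minimal standalone sketch of the `gitlab_qa?` guard follows; the agent strings are placeholders, not GitLab's real values.

```ruby
# Standalone sketch of the user-agent guard: a constant-time comparison of the
# request's user agent against the expected QA agent, so the expected value
# cannot be probed through timing. Both strings below are placeholders.
require 'active_support/security_utils'

expected_agent = ENV['GITLAB_QA_USER_AGENT']     # only set for GitLab.com QA runs
request_agent  = 'gitlab-qa/hypothetical-agent'  # normally request.user_agent

qa_traffic =
  !expected_agent.to_s.empty? &&
  !request_agent.to_s.empty? &&
  ActiveSupport::SecurityUtils.secure_compare(request_agent, expected_agent)

puts(qa_traffic ? 'skip reCAPTCHA (QA request)' : 'apply the usual reCAPTCHA checks')
```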

View File

@ -38,6 +38,8 @@ module BulkImports
def execute
bulk_import = create_bulk_import
Gitlab::Tracking.event(self.class.name, 'create', label: 'bulk_import_group')
BulkImportWorker.perform_async(bulk_import.id)
ServiceResponse.success(payload: bulk_import)

View File

@ -26,6 +26,8 @@ module Groups
end
def execute
Gitlab::Tracking.event(self.class.name, 'create', label: 'import_group_from_file')
if valid_user_permissions? && import_file && restorers.all?(&:restore)
notify_success
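Both group-import services now emit a tracking event at the top of `execute`. A hedged, spec-style sketch of how that call could be asserted is below; the example block is illustrative and not part of this commit.

```ruby
# Spec-style sketch: assert the service reports the event before doing any work.
# `described_class` stands in for BulkImports::CreateService (the file-import
# service would use the 'import_group_from_file' label instead), and `subject`
# for an instance built the way its existing specs do.
it 'tracks creation of the group import' do
  expect(Gitlab::Tracking).to receive(:event)
    .with(described_class.name, 'create', label: 'bulk_import_group')

  subject.execute
end
```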

View File

@ -11,16 +11,12 @@ module MergeRequests
end
def commit
return unless enabled?
commit_logs
end
def instrument(mergeability_name:)
raise ArgumentError, 'block not given' unless block_given?
return yield unless enabled?
op_start_db_counters = current_db_counter_payload
op_started_at = current_monotonic_time
@ -38,15 +34,11 @@ module MergeRequests
attr_reader :destination, :merge_request
def observe(name, value)
return unless enabled?
observations[name.to_s].push(value)
end
def commit_logs
attributes = Gitlab::ApplicationContext.current.merge({
mergeability_project_id: merge_request.project.id
})
attributes = Gitlab::ApplicationContext.current.merge({ mergeability_project_id: merge_request.project.id })
attributes[:mergeability_merge_request_id] = merge_request.id
attributes.merge!(observations_hash)
@ -89,12 +81,6 @@ module MergeRequests
::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload
end
def enabled?
strong_memoize(:enabled) do
::Feature.enabled?(:mergeability_checks_logger, merge_request.project)
end
end
def current_monotonic_time
::Gitlab::Metrics::System.monotonic_time
end

View File

@ -1,8 +0,0 @@
---
name: mergeability_checks_logger
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96128
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/371717
milestone: '15.4'
type: development
group: group::code review
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: increase_branch_cache_expiry
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96739
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/372331
milestone: '15.4'
type: ops
group: group::code review
default_enabled: false

View File

@ -55,4 +55,5 @@ end
specs.changed_specs_files.each do |filename|
specs.add_suggestions_for_match_with_array(filename)
specs.add_suggestions_for_project_factory_usage(filename)
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class RemoveUniqueIndexBuildIdToCiBuildsMetadata < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
TABLE_NAME = :ci_builds_metadata
INDEX_NAME = :index_ci_builds_metadata_on_build_id
def up
remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
end
def down
add_concurrent_index(TABLE_NAME, :build_id, unique: true, name: INDEX_NAME)
end
end
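The migration drops the old unique index on `ci_builds_metadata.build_id` concurrently and keeps a reversible `down` that re-adds it. One way to sanity-check the result from a Rails console (a hedged sketch, it needs a GitLab database to run against):

```ruby
# After `up` this should return false; after `down` it should return true again.
ActiveRecord::Base.connection.index_exists?(
  :ci_builds_metadata, :build_id,
  unique: true, name: 'index_ci_builds_metadata_on_build_id'
)
```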

View File

@ -0,0 +1 @@
6a37ea8ea1ae2b90d12db67b2fa6adac2ed5b936f6c45d2142dc8390883f764a

View File

@ -28036,8 +28036,6 @@ CREATE UNIQUE INDEX index_ci_build_trace_chunks_on_build_id_and_chunk_index ON c
CREATE INDEX index_ci_build_trace_metadata_on_trace_artifact_id ON ci_build_trace_metadata USING btree (trace_artifact_id);
CREATE UNIQUE INDEX index_ci_builds_metadata_on_build_id ON ci_builds_metadata USING btree (build_id);
CREATE INDEX index_ci_builds_metadata_on_build_id_and_has_exposed_artifacts ON ci_builds_metadata USING btree (build_id) WHERE (has_exposed_artifacts IS TRUE);
CREATE INDEX index_ci_builds_metadata_on_build_id_and_id_and_interruptible ON ci_builds_metadata USING btree (build_id) INCLUDE (id) WHERE (interruptible = true);

View File

@ -5,6 +5,7 @@
# - https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#glfm_example_normalizationsyml
#
# NOTE: All YAML anchors which are shared across one or more entries are defined in the `00_shared` section.
# They must all start with `00_` in order to be skipped during example name validation.
00_shared:
00_uri: &00_uri
- regex: '(href|data-src)(=")(.*?)(test-file\.(png|zip)")'

View File

@ -52,19 +52,13 @@ module API
merged_branch_names = repository.merged_branch_names(branches.map(&:name))
expiry_time = if Feature.enabled?(:increase_branch_cache_expiry, type: :ops)
60.minutes
else
10.minutes
end
present_cached(
branches,
with: Entities::Branch,
current_user: current_user,
project: user_project,
merged_branch_names: merged_branch_names,
expires_in: expiry_time,
expires_in: 60.minutes,
cache_context: -> (branch) { [current_user&.cache_key, merged_branch_names.include?(branch.name)] }
)
end

View File

@ -5,5 +5,9 @@ module Gitlab
def self.hostname
@hostname ||= ENV['HOSTNAME'] || Socket.gethostname
end
def self.qa_user_agent
ENV['GITLAB_QA_USER_AGENT']
end
end
end

View File

@ -47,6 +47,7 @@ module Gitlab
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::PagesDomains::PagesDomainDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::PagesDomains::PagesDomainUpdatedEvent
store.subscribe ::MergeRequests::CreateApprovalEventWorker, to: ::MergeRequests::ApprovedEvent
store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent

View File

@ -3,9 +3,9 @@
module Gitlab
module Pages
class CacheControl
CACHE_KEY_FORMAT = 'pages_domain_for_%{type}_%{id}'
include Gitlab::Utils::StrongMemoize
attr_reader :cache_key
CACHE_KEY_FORMAT = 'pages_domain_for_%{type}_%{id}_%{settings}'
class << self
def for_project(project_id)
@ -20,12 +20,35 @@ module Gitlab
def initialize(type:, id:)
raise(ArgumentError, "type must be :namespace or :project") unless %i[namespace project].include?(type)
@cache_key = CACHE_KEY_FORMAT % { type: type, id: id }
@type = type
@id = id
end
def cache_key
strong_memoize(:cache_key) do
CACHE_KEY_FORMAT % {
type: @type,
id: @id,
settings: settings
}
end
end
def clear_cache
Rails.cache.delete(cache_key)
end
private
def settings
values = ::Gitlab.config.pages.dup
values['app_settings'] = ::Gitlab::CurrentSettings.attributes.slice(
'force_pages_access_control'
)
::Digest::SHA256.hexdigest(values.inspect)
end
end
end
end
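The Pages domain cache key now ends in a digest of the Pages configuration plus the `force_pages_access_control` application setting, so changing either produces a new key and stale cached access rules are no longer served. A self-contained sketch of the idea, with toy hashes standing in for `::Gitlab.config.pages` and `::Gitlab::CurrentSettings`:

```ruby
# Settings-aware cache key: only the digest-in-the-key technique is the point.
require 'digest'

def pages_cache_key(type, id, pages_config, app_settings)
  settings_digest = Digest::SHA256.hexdigest(
    pages_config.merge('app_settings' => app_settings).inspect
  )
  format('pages_domain_for_%{type}_%{id}_%{settings}',
         type: type, id: id, settings: settings_digest)
end

old_key = pages_cache_key(:project, 42, { 'host' => 'example.io' },
                          { 'force_pages_access_control' => false })
new_key = pages_cache_key(:project, 42, { 'host' => 'example.io' },
                          { 'force_pages_access_control' => true })

puts old_key == new_key # => false, so entries cached under the old settings are skipped
```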

View File

@ -85,6 +85,8 @@ module Sidebars
end
def contributors_menu_item
return false unless context.project.analytics_enabled?
::Sidebars::MenuItem.new(
title: _('Contributors'),
link: project_graph_path(context.project, context.current_ref),

View File

@ -4,121 +4,168 @@ require 'active_record/fixtures'
namespace :gitlab do
namespace :backup do
PID = Process.pid.freeze
PID_FILE = "#{Rails.application.root}/tmp/backup_restore.pid"
# Create backup of GitLab system
desc 'GitLab | Backup | Create a backup of the GitLab system'
task create: :gitlab_environment do
warn_user_is_not_gitlab
lock do
warn_user_is_not_gitlab
Backup::Manager.new(progress).create
Backup::Manager.new(progress).create
end
end
# Restore backup of GitLab system
desc 'GitLab | Backup | Restore a previously created backup'
task restore: :gitlab_environment do
warn_user_is_not_gitlab
lock do
warn_user_is_not_gitlab
Backup::Manager.new(progress).restore
Backup::Manager.new(progress).restore
end
end
namespace :repo do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('repositories')
lock do
Backup::Manager.new(progress).run_create_task('repositories')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('repositories')
lock do
Backup::Manager.new(progress).run_restore_task('repositories')
end
end
end
namespace :db do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('main_db')
Backup::Manager.new(progress).run_create_task('ci_db')
lock do
Backup::Manager.new(progress).run_create_task('main_db')
Backup::Manager.new(progress).run_create_task('ci_db')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('main_db')
Backup::Manager.new(progress).run_restore_task('ci_db')
lock do
Backup::Manager.new(progress).run_restore_task('main_db')
Backup::Manager.new(progress).run_restore_task('ci_db')
end
end
end
namespace :builds do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('builds')
lock do
Backup::Manager.new(progress).run_create_task('builds')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('builds')
lock do
Backup::Manager.new(progress).run_restore_task('builds')
end
end
end
namespace :uploads do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('uploads')
lock do
Backup::Manager.new(progress).run_create_task('uploads')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('uploads')
lock do
Backup::Manager.new(progress).run_restore_task('uploads')
end
end
end
namespace :artifacts do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('artifacts')
lock do
Backup::Manager.new(progress).run_create_task('artifacts')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('artifacts')
lock do
Backup::Manager.new(progress).run_restore_task('artifacts')
end
end
end
namespace :pages do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('pages')
lock do
Backup::Manager.new(progress).run_create_task('pages')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('pages')
lock do
Backup::Manager.new(progress).run_restore_task('pages')
end
end
end
namespace :lfs do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('lfs')
lock do
Backup::Manager.new(progress).run_create_task('lfs')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('lfs')
lock do
Backup::Manager.new(progress).run_restore_task('lfs')
end
end
end
namespace :terraform_state do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('terraform_state')
lock do
Backup::Manager.new(progress).run_create_task('terraform_state')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('terraform_state')
lock do
Backup::Manager.new(progress).run_restore_task('terraform_state')
end
end
end
namespace :registry do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('registry')
lock do
Backup::Manager.new(progress).run_create_task('registry')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('registry')
lock do
Backup::Manager.new(progress).run_restore_task('registry')
end
end
end
namespace :packages do
task create: :gitlab_environment do
Backup::Manager.new(progress).run_create_task('packages')
lock do
Backup::Manager.new(progress).run_create_task('packages')
end
end
task restore: :gitlab_environment do
Backup::Manager.new(progress).run_restore_task('packages')
lock do
Backup::Manager.new(progress).run_restore_task('packages')
end
end
end
@ -132,6 +179,35 @@ namespace :gitlab do
$stdout
end
end
def lock
File.open(PID_FILE, File::RDWR | File::CREAT, 0644) do |f|
f.flock(File::LOCK_EX)
unless f.read.empty?
# There is a PID inside so the process fails
progress.puts(<<~HEREDOC.color(:red))
Backup and restore in progress:
There is a backup and restore task in progress. Please, try to run the current task once the previous one ends.
If there is no other process running, please remove the PID file manually: rm #{PID_FILE}
HEREDOC
exit 1
end
f.write(PID)
f.flush
ensure
f.flock(File::LOCK_UN)
end
begin
yield
ensure
progress.puts "#{Time.now} " + "-- Deleting backup and restore lock file".color(:blue)
File.delete(PID_FILE)
end
end
end
# namespace end: backup
end
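Every backup and restore task is now wrapped in `lock`, which takes an exclusive `flock` on a PID file, refuses to start if another run already wrote its PID there, and deletes the file once the task finishes. A trimmed, self-contained sketch of that pattern (the path and messages are illustrative):

```ruby
# flock + PID-file guard: only one backup/restore task may run at a time.
PID_FILE = '/tmp/demo_backup_restore.pid'

def with_lock
  File.open(PID_FILE, File::RDWR | File::CREAT, 0o644) do |f|
    f.flock(File::LOCK_EX)            # serialize the check-and-write below
    abort('another backup/restore is already running') unless f.read.empty?
    f.write(Process.pid)
    f.flush
  ensure
    f.flock(File::LOCK_UN)            # keep the file; its contents are the marker
  end

  begin
    yield
  ensure
    File.delete(PID_FILE)             # always remove the marker afterwards
  end
end

with_lock { puts 'running backup...' }
```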

View File

@ -44588,6 +44588,9 @@ msgstr ""
msgid "Vulnerability|Tool"
msgstr ""
msgid "Vulnerability|Tool:"
msgstr ""
msgid "Vulnerability|Training"
msgstr ""
@ -47080,6 +47083,9 @@ msgstr ""
msgid "ciReport|Full Report"
msgstr ""
msgid "ciReport|Generic Report"
msgstr ""
msgid "ciReport|IaC Scanning"
msgstr ""

View File

@ -34,7 +34,7 @@ RSpec.describe 'Render Static HTML', :api, type: :request do # rubocop:disable R
it 'can create a project dependency graph using factories' do
markdown_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_MARKDOWN_YML_PATH')), symbolize_names: true)
metadata_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_METADATA_YML_PATH')), symbolize_names: true)
metadata_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_METADATA_YML_PATH')), symbolize_names: true) || {}
# NOTE: We cannot parallelize this loop like the Javascript WYSIWYG example generation does,
# because the rspec `post` API cannot be parallized (it is not thread-safe, it can't find
@ -66,8 +66,7 @@ RSpec.describe 'Render Static HTML', :api, type: :request do # rubocop:disable R
private
def write_output_file(static_html_hash)
tmpfile = File.open(ENV.fetch('OUTPUT_STATIC_HTML_TEMPFILE_PATH'), 'w')
yaml_string = dump_yaml_with_formatting(static_html_hash)
write_file(tmpfile, yaml_string)
write_file(ENV.fetch('OUTPUT_STATIC_HTML_TEMPFILE_PATH'), yaml_string)
end
end
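The `|| {}` added to the `YAML.safe_load` call here (and again in the GLFM snapshot script in the next changed file) guards against an empty metadata file: `safe_load` returns a falsy value for an empty document, and the scripts then index into the result. A tiny sketch of the failure mode and the fix:

```ruby
# YAML.safe_load of an empty document does not return a Hash, so chaining Hash
# methods onto the bare result would raise.
require 'yaml'

raw = YAML.safe_load('', symbolize_names: true)
puts raw.inspect                 # falsy: nil or false depending on the Psych version

metadata = raw || {}             # the fallback added in this commit
puts metadata.fetch(:anything, 'no metadata configured')
```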

View File

@ -5,6 +5,7 @@ require 'yaml'
require 'psych'
require 'tempfile'
require 'open3'
require 'active_support/core_ext/enumerable'
require_relative 'constants'
require_relative 'shared'
require_relative 'parse_examples'
@ -115,11 +116,13 @@ module Glfm
def write_snapshot_example_files(all_examples, skip_static_and_wysiwyg:)
output("Reading #{GLFM_EXAMPLE_STATUS_YML_PATH}...")
glfm_examples_statuses = YAML.safe_load(File.open(GLFM_EXAMPLE_STATUS_YML_PATH), symbolize_names: true)
glfm_examples_statuses = YAML.safe_load(File.open(GLFM_EXAMPLE_STATUS_YML_PATH), symbolize_names: true) || {}
validate_glfm_example_status_yml(glfm_examples_statuses)
write_examples_index_yml(all_examples)
validate_glfm_config_file_example_names(all_examples)
write_markdown_yml(all_examples)
if skip_static_and_wysiwyg
@ -151,6 +154,50 @@ module Glfm
end
end
def validate_glfm_config_file_example_names(all_examples)
valid_example_names = all_examples.pluck(:name).map(&:to_sym) # rubocop:disable CodeReuse/ActiveRecord
# We are re-reading GLFM_EXAMPLE_STATUS_YML_PATH here, but that's OK, it's a small file, and rereading it
# allows us to handle it in the same loop as the other manually-curated config files.
[
GLFM_EXAMPLE_STATUS_YML_PATH,
GLFM_EXAMPLE_METADATA_YML_PATH,
GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH
].each do |path|
output("Reading #{path}...")
io = File.open(path)
config_file_examples = YAML.safe_load(io, symbolize_names: true, aliases: true)
# Skip validation if the config file is empty
next unless config_file_examples
config_file_example_names = config_file_examples.keys
# Validate that all example names exist in the config file refer to an existing example in `examples_index.yml`,
# unless it starts with the special prefix `00_`, which is preserved for usage as YAML anchors.
invalid_name = config_file_example_names.detect do |name|
!name.start_with?('00_') && valid_example_names.exclude?(name)
end
next unless invalid_name
# NOTE: The extra spaces before punctuation in the error message allows for easier copy/pasting of the paths.
err_msg =
<<~TXT
Error in input specification config file #{path} :
Config file entry named #{invalid_name}
does not have a corresponding example entry in
#{ES_EXAMPLES_INDEX_YML_PATH} .
Please delete or rename this config file entry.
If this entry is being used as a YAML anchor, please rename it to start with '00_'.
TXT
raise err_msg
end
end
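The new `validate_glfm_config_file_example_names` cross-checks each manually curated config file against `examples_index.yml`: every key must name a real example unless it carries the `00_` prefix reserved for YAML anchors. A self-contained sketch of that check with inline sample data (the example names are made up for illustration):

```ruby
# Cross-check sketch: config keys must exist in the examples index unless they
# start with '00_'. Sample YAML stands in for the real files.
require 'yaml'

examples_index = YAML.safe_load(<<~YML, symbolize_names: true)
  02_01_00__inlines__strong__001: {}
  02_01_00__inlines__strong__002: {}
YML

config_file = YAML.safe_load(<<~YML, symbolize_names: true, aliases: true)
  00_shared: {}                      # anchor-only entry, always allowed
  02_01_00__inlines__strong__001: {}
  02_99_00__inlines__typo__001: {}   # has no matching example
YML

valid_names  = examples_index.keys
invalid_name = config_file.keys.detect do |name|
  !name.to_s.start_with?('00_') && !valid_names.include?(name)
end

puts(invalid_name ? "invalid config entry: #{invalid_name}" : 'all config entries match')
```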
def write_examples_index_yml(all_examples)
generate_and_write_for_all_examples(
all_examples, ES_EXAMPLES_INDEX_YML_PATH, literal_scalars: false
@ -244,7 +291,7 @@ module Glfm
wysiwyg_html_and_json_tempfile_path = Dir::Tmpname.create(WYSIWYG_HTML_AND_JSON_TEMPFILE_BASENAME) {}
ENV['OUTPUT_WYSIWYG_HTML_AND_JSON_TEMPFILE_PATH'] = wysiwyg_html_and_json_tempfile_path
cmd = %(yarn jest --testMatch '**/render_wysiwyg_html_and_json.js' #{__dir__}/render_wysiwyg_html_and_json.js)
cmd = "yarn jest --testMatch '**/render_wysiwyg_html_and_json.js' #{__dir__}/render_wysiwyg_html_and_json.js"
run_external_cmd(cmd)
output("Reading generated WYSIWYG HTML and prosemirror JSON from tempfile " \

View File

@ -154,12 +154,8 @@ function disable_sign_ups() {
true
fi
# Create the root token
local set_token_rb="token = User.find_by_username('root').personal_access_tokens.create(scopes: [:api], name: 'Token to disable sign-ups'); token.set_token('${REVIEW_APPS_ROOT_TOKEN}'); begin; token.save!; rescue(ActiveRecord::RecordNotUnique); end"
retry "run_task \"${set_token_rb}\""
# Disable sign-ups
local disable_signup_rb="Gitlab::CurrentSettings.current_application_settings.update!(signup_enabled: false)"
# Create the root token + Disable sign-ups
local disable_signup_rb="token = User.find_by_username('root').personal_access_tokens.create(scopes: [:api], name: 'Token to disable sign-ups'); token.set_token('${REVIEW_APPS_ROOT_TOKEN}'); begin; token.save!; rescue(ActiveRecord::RecordNotUnique); end; Gitlab::CurrentSettings.current_application_settings.update!(signup_enabled: false)"
if (retry "run_task \"${disable_signup_rb}\""); then
echoinfo "Sign-ups have been disabled successfully."
else

View File

@ -1,7 +1,7 @@
import Vue, { nextTick } from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import { projectData, branches } from 'jest/ide/mock_data';
import commitActions from '~/ide/components/commit_sidebar/actions.vue';
import CommitActions from '~/ide/components/commit_sidebar/actions.vue';
import { createStore } from '~/ide/stores';
import {
COMMIT_TO_NEW_BRANCH,
@ -18,32 +18,27 @@ const BRANCH_REGULAR_NO_ACCESS = 'regular/no-access';
describe('IDE commit sidebar actions', () => {
let store;
let vm;
let wrapper;
const createComponent = ({ hasMR = false, currentBranchId = 'main', emptyRepo = false } = {}) => {
const Component = Vue.extend(commitActions);
vm = createComponentWithStore(Component, store);
vm.$store.state.currentBranchId = currentBranchId;
vm.$store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = currentBranchId;
store.state.currentProjectId = 'abcproject';
const proj = { ...projectData };
proj.branches[currentBranchId] = branches.find((branch) => branch.name === currentBranchId);
proj.empty_repo = emptyRepo;
Vue.set(vm.$store.state.projects, 'abcproject', proj);
Vue.set(store.state.projects, 'abcproject', proj);
if (hasMR) {
vm.$store.state.currentMergeRequestId = '1';
vm.$store.state.projects[store.state.currentProjectId].mergeRequests[
store.state.currentMergeRequestId = '1';
store.state.projects[store.state.currentProjectId].mergeRequests[
store.state.currentMergeRequestId
] = { foo: 'bar' };
}
vm.$mount();
return vm;
wrapper = mount(CommitActions, { store });
return wrapper;
};
beforeEach(() => {
@ -52,17 +47,16 @@ describe('IDE commit sidebar actions', () => {
});
afterEach(() => {
vm.$destroy();
vm = null;
wrapper.destroy();
});
const findText = () => vm.$el.textContent;
const findRadios = () => Array.from(vm.$el.querySelectorAll('input[type="radio"]'));
const findText = () => wrapper.text();
const findRadios = () => wrapper.findAll('input[type="radio"]');
it('renders 2 groups', () => {
createComponent();
expect(findRadios().length).toBe(2);
expect(findRadios()).toHaveLength(2);
});
it('renders current branch text', () => {
@ -79,41 +73,38 @@ describe('IDE commit sidebar actions', () => {
expect(findText()).not.toContain('Create a new branch and merge request');
});
describe('currentBranchText', () => {
it('escapes current branch', () => {
const injectedSrc = '<img src="x" />';
createComponent({ currentBranchId: injectedSrc });
it('escapes current branch name', () => {
const injectedSrc = '<img src="x" />';
const escapedSrc = '&lt;img src=&quot;x&quot; /&gt';
createComponent({ currentBranchId: injectedSrc });
expect(vm.currentBranchText).not.toContain(injectedSrc);
});
expect(wrapper.text()).not.toContain(injectedSrc);
expect(wrapper.text).not.toContain(escapedSrc);
});
describe('updateSelectedCommitAction', () => {
it('does not return anything if currentBranch does not exist', () => {
createComponent({ currentBranchId: null });
expect(vm.$store.dispatch).not.toHaveBeenCalled();
expect(store.dispatch).not.toHaveBeenCalled();
});
it('is not called on mount if there is already a selected commitAction', () => {
store.state.commitAction = '1';
createComponent({ currentBranchId: null });
expect(vm.$store.dispatch).not.toHaveBeenCalled();
expect(store.dispatch).not.toHaveBeenCalled();
});
it('calls again after staged changes', async () => {
createComponent({ currentBranchId: null });
vm.$store.state.currentBranchId = 'main';
vm.$store.state.changedFiles.push({});
vm.$store.state.stagedFiles.push({});
store.state.currentBranchId = 'main';
store.state.changedFiles.push({});
store.state.stagedFiles.push({});
await nextTick();
expect(vm.$store.dispatch).toHaveBeenCalledWith(
ACTION_UPDATE_COMMIT_ACTION,
expect.anything(),
);
expect(store.dispatch).toHaveBeenCalledWith(ACTION_UPDATE_COMMIT_ACTION, expect.anything());
});
it.each`
@ -133,9 +124,7 @@ describe('IDE commit sidebar actions', () => {
({ input, expectedOption }) => {
createComponent(input);
expect(vm.$store.dispatch.mock.calls).toEqual([
[ACTION_UPDATE_COMMIT_ACTION, expectedOption],
]);
expect(store.dispatch.mock.calls).toEqual([[ACTION_UPDATE_COMMIT_ACTION, expectedOption]]);
},
);
});

View File

@ -1,14 +1,15 @@
import { mount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import listItem from '~/ide/components/commit_sidebar/list_item.vue';
import ListItem from '~/ide/components/commit_sidebar/list_item.vue';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
import { file } from '../../helpers';
describe('Multi-file editor commit sidebar list item', () => {
let vm;
let wrapper;
let f;
let findPathEl;
let store;
@ -16,118 +17,120 @@ describe('Multi-file editor commit sidebar list item', () => {
beforeEach(() => {
store = createStore();
router = createRouter(store);
jest.spyOn(store, 'dispatch');
const Component = Vue.extend(listItem);
router = createRouter(store);
f = file('test-file');
store.state.entries[f.path] = f;
vm = createComponentWithStore(Component, store, {
file: f,
activeFileKey: `staged-${f.key}`,
}).$mount();
wrapper = mount(ListItem, {
store,
propsData: {
file: f,
activeFileKey: `staged-${f.key}`,
},
});
findPathEl = vm.$el.querySelector('.multi-file-commit-list-path');
findPathEl = wrapper.find('.multi-file-commit-list-path');
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
const findPathText = () => trimText(findPathEl.textContent);
const findPathText = () => trimText(findPathEl.text());
it('renders file path', () => {
expect(findPathText()).toContain(f.path);
});
it('correctly renders renamed entries', async () => {
Vue.set(vm.file, 'prevName', 'Old name');
Vue.set(f, 'prevName', 'Old name');
await nextTick();
expect(findPathText()).toEqual(`Old name → ${f.name}`);
});
it('correctly renders entry, the name of which did not change after rename (as within a folder)', async () => {
Vue.set(vm.file, 'prevName', f.name);
Vue.set(f, 'prevName', f.name);
await nextTick();
expect(findPathText()).toEqual(f.name);
});
it('opens a closed file in the editor when clicking the file path', async () => {
jest.spyOn(vm, 'openPendingTab');
jest.spyOn(router, 'push').mockImplementation(() => {});
findPathEl.click();
await findPathEl.trigger('click');
await nextTick();
expect(vm.openPendingTab).toHaveBeenCalled();
expect(store.dispatch).toHaveBeenCalledWith('openPendingTab', expect.anything());
expect(router.push).toHaveBeenCalled();
});
it('calls updateViewer with diff when clicking file', async () => {
jest.spyOn(vm, 'openFileInEditor');
jest.spyOn(vm, 'updateViewer');
jest.spyOn(router, 'push').mockImplementation(() => {});
findPathEl.click();
await findPathEl.trigger('click');
await waitForPromises();
expect(vm.updateViewer).toHaveBeenCalledWith('diff');
expect(store.dispatch).toHaveBeenCalledWith('updateViewer', 'diff');
});
describe('computed', () => {
describe('iconName', () => {
it('returns modified when not a tempFile', () => {
expect(vm.iconName).toBe('file-modified');
});
describe('icon name', () => {
const getIconName = () => wrapper.findComponent(GlIcon).props('name');
it('returns addition when not a tempFile', () => {
f.tempFile = true;
expect(vm.iconName).toBe('file-addition');
});
it('returns deletion', () => {
f.deleted = true;
expect(vm.iconName).toBe('file-deletion');
});
it('is modified when not a tempFile', () => {
expect(getIconName()).toBe('file-modified');
});
describe('iconClass', () => {
it('returns modified when not a tempFile', () => {
expect(vm.iconClass).toContain('ide-file-modified');
});
it('is addition when is a tempFile', async () => {
f.tempFile = true;
await nextTick();
it('returns addition when not a tempFile', () => {
f.tempFile = true;
expect(getIconName()).toBe('file-addition');
});
expect(vm.iconClass).toContain('ide-file-addition');
});
it('is deletion when is deleted', async () => {
f.deleted = true;
await nextTick();
it('returns deletion', () => {
f.deleted = true;
expect(getIconName()).toBe('file-deletion');
});
});
expect(vm.iconClass).toContain('ide-file-deletion');
});
describe('icon class', () => {
const getIconClass = () => wrapper.findComponent(GlIcon).classes();
it('is modified when not a tempFile', () => {
expect(getIconClass()).toContain('ide-file-modified');
});
it('is addition when is a tempFile', async () => {
f.tempFile = true;
await nextTick();
expect(getIconClass()).toContain('ide-file-addition');
});
it('returns deletion when is deleted', async () => {
f.deleted = true;
await nextTick();
expect(getIconClass()).toContain('ide-file-deletion');
});
});
describe('is active', () => {
it('does not add active class when dont keys match', () => {
expect(vm.$el.querySelector('.is-active')).toBe(null);
expect(wrapper.find('.is-active').exists()).toBe(false);
});
it('adds active class when keys match', async () => {
vm.keyPrefix = 'staged';
await wrapper.setProps({ keyPrefix: 'staged' });
await nextTick();
expect(vm.$el.querySelector('.is-active')).not.toBe(null);
expect(wrapper.find('.is-active').exists()).toBe(true);
});
});
});

View File

@ -1,135 +1,121 @@
import Vue, { nextTick } from 'vue';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import createComponent from 'helpers/vue_mount_component_helper';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import CommitMessageField from '~/ide/components/commit_sidebar/message_field.vue';
describe('IDE commit message field', () => {
const Component = Vue.extend(CommitMessageField);
let vm;
let wrapper;
beforeEach(() => {
setHTMLFixture('<div id="app"></div>');
vm = createComponent(
Component,
{
wrapper = mount(CommitMessageField, {
propsData: {
text: '',
placeholder: 'testing',
},
'#app',
);
attachTo: document.body,
});
});
afterEach(() => {
vm.$destroy();
resetHTMLFixture();
wrapper.destroy();
});
it('adds is-focused class on focus', async () => {
vm.$el.querySelector('textarea').focus();
const findMessage = () => wrapper.find('textarea');
const findHighlights = () => wrapper.findAll('.highlights span');
const findMarks = () => wrapper.findAll('mark');
await nextTick();
expect(vm.$el.querySelector('.is-focused')).not.toBeNull();
it('adds is-focused class on focus', async () => {
await findMessage().trigger('focus');
expect(wrapper.find('.is-focused').exists()).toBe(true);
});
it('removed is-focused class on blur', async () => {
vm.$el.querySelector('textarea').focus();
await findMessage().trigger('focus');
await nextTick();
expect(vm.$el.querySelector('.is-focused')).not.toBeNull();
expect(wrapper.find('.is-focused').exists()).toBe(true);
vm.$el.querySelector('textarea').blur();
await findMessage().trigger('blur');
await nextTick();
expect(vm.$el.querySelector('.is-focused')).toBeNull();
expect(wrapper.find('.is-focused').exists()).toBe(false);
});
it('emits input event on input', () => {
jest.spyOn(vm, '$emit').mockImplementation();
it('emits input event on input', async () => {
await findMessage().setValue('testing');
const textarea = vm.$el.querySelector('textarea');
textarea.value = 'testing';
textarea.dispatchEvent(new Event('input'));
expect(vm.$emit).toHaveBeenCalledWith('input', 'testing');
expect(wrapper.emitted('input')[0]).toStrictEqual(['testing']);
});
describe('highlights', () => {
describe('subject line', () => {
it('does not highlight less than 50 characters', async () => {
vm.text = 'text less than 50 chars';
await wrapper.setProps({ text: 'text less than 50 chars' });
await nextTick();
expect(vm.$el.querySelector('.highlights span').textContent).toContain(
'text less than 50 chars',
);
expect(findHighlights()).toHaveLength(1);
expect(findHighlights().at(0).text()).toContain('text less than 50 chars');
expect(vm.$el.querySelector('mark').style.display).toBe('none');
expect(findMarks()).toHaveLength(1);
expect(findMarks().at(0).isVisible()).toBe(false);
});
it('highlights characters over 50 length', async () => {
vm.text =
'text less than 50 chars that should not highlighted. text more than 50 should be highlighted';
await wrapper.setProps({
text:
'text less than 50 chars that should not highlighted. text more than 50 should be highlighted',
});
await nextTick();
expect(vm.$el.querySelector('.highlights span').textContent).toContain(
expect(findHighlights()).toHaveLength(1);
expect(findHighlights().at(0).text()).toContain(
'text less than 50 chars that should not highlighte',
);
expect(vm.$el.querySelector('mark').style.display).not.toBe('none');
expect(vm.$el.querySelector('mark').textContent).toBe(
'd. text more than 50 should be highlighted',
);
expect(findMarks()).toHaveLength(1);
expect(findMarks().at(0).isVisible()).toBe(true);
expect(findMarks().at(0).text()).toBe('d. text more than 50 should be highlighted');
});
});
describe('body text', () => {
it('does not highlight body text less tan 72 characters', async () => {
vm.text = 'subject line\nbody content';
await wrapper.setProps({ text: 'subject line\nbody content' });
await nextTick();
expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
expect(vm.$el.querySelectorAll('mark')[1].style.display).toBe('none');
expect(findHighlights()).toHaveLength(2);
expect(findMarks().at(1).isVisible()).toBe(false);
});
it('highlights body text more than 72 characters', async () => {
vm.text =
'subject line\nbody content that will be highlighted when it is more than 72 characters in length';
await wrapper.setProps({
text:
'subject line\nbody content that will be highlighted when it is more than 72 characters in length',
});
await nextTick();
expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
expect(vm.$el.querySelectorAll('mark')[1].style.display).not.toBe('none');
expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length');
expect(findHighlights()).toHaveLength(2);
expect(findMarks().at(1).isVisible()).toBe(true);
expect(findMarks().at(1).text()).toBe('in length');
});
it('highlights body text & subject line', async () => {
vm.text =
'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length';
await wrapper.setProps({
text:
'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length',
});
await nextTick();
expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
expect(vm.$el.querySelectorAll('mark').length).toBe(2);
expect(findHighlights()).toHaveLength(2);
expect(findMarks()).toHaveLength(2);
expect(vm.$el.querySelectorAll('mark')[0].textContent).toContain('d');
expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length');
expect(findMarks().at(0).text()).toContain('d');
expect(findMarks().at(1).text()).toBe('in length');
});
});
});
describe('scrolling textarea', () => {
it('updates transform of highlights', async () => {
vm.text = 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content';
await wrapper.setProps({ text: 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content' });
findMessage().element.scrollTo(0, 50);
await nextTick();
vm.$el.querySelector('textarea').scrollTo(0, 50);
vm.handleScroll();
await nextTick();
expect(vm.scrollTop).toBe(50);
expect(vm.$el.querySelector('.highlights').style.transform).toBe('translate3d(0, -50px, 0)');
expect(wrapper.find('.highlights').element.style.transform).toBe('translate3d(0, -50px, 0)');
});
});
});

View File

@ -1,123 +1,116 @@
import Vue, { nextTick } from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { GlFormRadioGroup } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import RadioGroup from '~/ide/components/commit_sidebar/radio_group.vue';
import { createStore } from '~/ide/stores';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
describe('IDE commit sidebar radio group', () => {
let vm;
let wrapper;
let store;
beforeEach(async () => {
const createComponent = (config = {}) => {
store = createStore();
const Component = Vue.extend(RadioGroup);
store.state.commit.commitAction = '2';
store.state.commit.newBranchName = 'test-123';
vm = createComponentWithStore(Component, store, {
wrapper = mount(RadioGroup, {
store,
propsData: config.props,
slots: config.slots,
directives: {
GlTooltip: createMockDirective(),
},
});
};
afterEach(() => {
wrapper.destroy();
});
describe('without input', () => {
const props = {
value: '1',
label: 'test',
checked: true,
};
it('uses label if present', () => {
createComponent({ props });
expect(wrapper.text()).toContain('test');
});
vm.$mount();
it('uses slot if label is not present', () => {
createComponent({ props: { value: '1', checked: true }, slots: { default: 'Testing slot' } });
await nextTick();
});
afterEach(() => {
vm.$destroy();
});
it('uses label if present', () => {
expect(vm.$el.textContent).toContain('test');
});
it('uses slot if label is not present', async () => {
vm.$destroy();
vm = new Vue({
components: {
RadioGroup,
},
store,
render: (createElement) =>
createElement('radio-group', { props: { value: '1' } }, 'Testing slot'),
expect(wrapper.text()).toContain('Testing slot');
});
vm.$mount();
it('updates store when changing radio button', async () => {
createComponent({ props });
await nextTick();
expect(vm.$el.textContent).toContain('Testing slot');
});
await wrapper.find('input').trigger('change');
it('updates store when changing radio button', async () => {
vm.$el.querySelector('input').dispatchEvent(new Event('change'));
await nextTick();
expect(store.state.commit.commitAction).toBe('1');
expect(store.state.commit.commitAction).toBe('1');
});
});
describe('with input', () => {
beforeEach(async () => {
vm.$destroy();
const Component = Vue.extend(RadioGroup);
store.state.commit.commitAction = '1';
store.state.commit.newBranchName = 'test-123';
vm = createComponentWithStore(Component, store, {
value: '1',
label: 'test',
checked: true,
showInput: true,
});
vm.$mount();
await nextTick();
});
const props = {
value: '2',
label: 'test',
checked: true,
showInput: true,
};
it('renders input box when commitAction matches value', () => {
expect(vm.$el.querySelector('.form-control')).not.toBeNull();
createComponent({ props: { ...props, value: '2' } });
expect(wrapper.find('.form-control').exists()).toBe(true);
});
it('hides input when commitAction doesnt match value', async () => {
store.state.commit.commitAction = '2';
it('hides input when commitAction doesnt match value', () => {
createComponent({ props: { ...props, value: '1' } });
await nextTick();
expect(vm.$el.querySelector('.form-control')).toBeNull();
expect(wrapper.find('.form-control').exists()).toBe(false);
});
it('updates branch name in store on input', async () => {
const input = vm.$el.querySelector('.form-control');
input.value = 'testing-123';
input.dispatchEvent(new Event('input'));
createComponent({ props });
await wrapper.find('.form-control').setValue('testing-123');
await nextTick();
expect(store.state.commit.newBranchName).toBe('testing-123');
});
it('renders newBranchName if present', () => {
const input = vm.$el.querySelector('.form-control');
createComponent({ props });
expect(input.value).toBe('test-123');
const input = wrapper.find('.form-control');
expect(input.element.value).toBe('test-123');
});
});
describe('tooltipTitle', () => {
it('returns title when disabled', () => {
vm.title = 'test title';
vm.disabled = true;
createComponent({
props: { value: '1', label: 'test', disabled: true, title: 'test title' },
});
expect(vm.tooltipTitle).toBe('test title');
const tooltip = getBinding(wrapper.findComponent(GlFormRadioGroup).element, 'gl-tooltip');
expect(tooltip.value).toBe('test title');
});
it('returns blank when not disabled', () => {
vm.title = 'test title';
createComponent({
props: { value: '1', label: 'test', title: 'test title' },
});
expect(vm.tooltipTitle).not.toBe('test title');
const tooltip = getBinding(wrapper.findComponent(GlFormRadioGroup).element, 'gl-tooltip');
expect(tooltip.value).toBe('');
});
});
});

View File

@ -1,146 +1,146 @@
import Vue, { nextTick } from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
import FileRowExtra from '~/ide/components/file_row_extra.vue';
import { createStore } from '~/ide/stores';
import { createStoreOptions } from '~/ide/stores';
import { file } from '../helpers';
describe('IDE extra file row component', () => {
let Component;
let vm;
let wrapper;
let store;
let unstagedFilesCount = 0;
let stagedFilesCount = 0;
let changesCount = 0;
beforeAll(() => {
Component = Vue.extend(FileRowExtra);
});
const createComponent = (fileProps) => {
const storeConfig = createStoreOptions();
beforeEach(() => {
vm = createComponentWithStore(Component, createStore(), {
file: {
...file('test'),
store = new Vuex.Store({
...storeConfig,
getters: {
getUnstagedFilesCountForPath: () => () => unstagedFilesCount,
getStagedFilesCountForPath: () => () => stagedFilesCount,
getChangesInFolder: () => () => changesCount,
},
dropdownOpen: false,
});
jest.spyOn(vm, 'getUnstagedFilesCountForPath', 'get').mockReturnValue(() => unstagedFilesCount);
jest.spyOn(vm, 'getStagedFilesCountForPath', 'get').mockReturnValue(() => stagedFilesCount);
jest.spyOn(vm, 'getChangesInFolder', 'get').mockReturnValue(() => changesCount);
vm.$mount();
});
wrapper = mount(FileRowExtra, {
store,
propsData: {
file: {
...file('test'),
type: 'tree',
...fileProps,
},
dropdownOpen: false,
},
});
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
stagedFilesCount = 0;
unstagedFilesCount = 0;
changesCount = 0;
});
describe('folderChangesTooltip', () => {
it('returns undefined when changes count is 0', () => {
changesCount = 0;
expect(vm.folderChangesTooltip).toBe(undefined);
});
describe('folder changes tooltip', () => {
[
{ input: 1, output: '1 changed file' },
{ input: 2, output: '2 changed files' },
].forEach(({ input, output }) => {
it('returns changed files count if changes count is not 0', () => {
it('shows changed files count if changes count is not 0', () => {
changesCount = input;
createComponent();
expect(vm.folderChangesTooltip).toBe(output);
expect(wrapper.find('.ide-file-modified').attributes('title')).toBe(output);
});
});
});
describe('show tree changes count', () => {
it('does not show for blobs', () => {
vm.file.type = 'blob';
const findTreeChangesCount = () => wrapper.find('.ide-tree-changes');
expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
it('does not show for blobs', () => {
createComponent({ type: 'blob' });
expect(findTreeChangesCount().exists()).toBe(false);
});
it('does not show when changes count is 0', () => {
vm.file.type = 'tree';
createComponent({ type: 'tree' });
expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
expect(findTreeChangesCount().exists()).toBe(false);
});
it('does not show when tree is open', async () => {
vm.file.type = 'tree';
vm.file.opened = true;
it('does not show when tree is open', () => {
changesCount = 1;
createComponent({ type: 'tree', opened: true });
await nextTick();
expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
expect(findTreeChangesCount().exists()).toBe(false);
});
it('shows for trees with changes', async () => {
vm.file.type = 'tree';
vm.file.opened = false;
it('shows for trees with changes', () => {
changesCount = 1;
createComponent({ type: 'tree', opened: false });
await nextTick();
expect(vm.$el.querySelector('.ide-tree-changes')).not.toBe(null);
expect(findTreeChangesCount().exists()).toBe(true);
});
});
describe('changes file icon', () => {
const findChangedFileIcon = () => wrapper.find('.file-changed-icon');
it('hides when file is not changed', () => {
expect(vm.$el.querySelector('.file-changed-icon')).toBe(null);
createComponent();
expect(findChangedFileIcon().exists()).toBe(false);
});
it('shows when file is changed', async () => {
vm.file.changed = true;
it('shows when file is changed', () => {
createComponent({ type: 'blob', changed: true });
await nextTick();
expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
expect(findChangedFileIcon().exists()).toBe(true);
});
it('shows when file is staged', async () => {
vm.file.staged = true;
it('shows when file is staged', () => {
createComponent({ type: 'blob', staged: true });
await nextTick();
expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
expect(findChangedFileIcon().exists()).toBe(true);
});
it('shows when file is a tempFile', async () => {
vm.file.tempFile = true;
it('shows when file is a tempFile', () => {
createComponent({ type: 'blob', tempFile: true });
await nextTick();
expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
expect(findChangedFileIcon().exists()).toBe(true);
});
it('shows when file is renamed', async () => {
vm.file.prevPath = 'original-file';
it('shows when file is renamed', () => {
createComponent({ type: 'blob', prevPath: 'original-file' });
await nextTick();
expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
expect(findChangedFileIcon().exists()).toBe(true);
});
it('hides when file is renamed', async () => {
vm.file.prevPath = 'original-file';
vm.file.type = 'tree';
it('hides when tree is renamed', () => {
createComponent({ type: 'tree', prevPath: 'original-path' });
await nextTick();
expect(vm.$el.querySelector('.file-changed-icon')).toBe(null);
expect(findChangedFileIcon().exists()).toBe(false);
});
});
describe('merge request icon', () => {
const findMergeRequestIcon = () => wrapper.find('[data-testid="git-merge-icon"]');
it('hides when not a merge request change', () => {
expect(vm.$el.querySelector('[data-testid="git-merge-icon"]')).toBe(null);
createComponent();
expect(findMergeRequestIcon().exists()).toBe(false);
});
it('shows when a merge request change', async () => {
vm.file.mrChange = true;
it('shows when a merge request change', () => {
createComponent({ mrChange: true });
await nextTick();
expect(vm.$el.querySelector('[data-testid="git-merge-icon"]')).not.toBe(null);
expect(findMergeRequestIcon().exists()).toBe(true);
});
});
});

View File

@ -1,19 +1,16 @@
import Vue, { nextTick } from 'vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import Bar from '~/ide/components/file_templates/bar.vue';
import { createStore } from '~/ide/stores';
import { file } from '../../helpers';
describe('IDE file templates bar component', () => {
let Component;
let vm;
beforeAll(() => {
Component = Vue.extend(Bar);
});
let wrapper;
let store;
beforeEach(() => {
const store = createStore();
store = createStore();
jest.spyOn(store, 'dispatch').mockImplementation();
store.state.openFiles.push({
...file('file'),
@ -21,24 +18,22 @@ describe('IDE file templates bar component', () => {
active: true,
});
vm = mountComponentWithStore(Component, { store });
wrapper = mount(Bar, { store });
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
describe('template type dropdown', () => {
it('renders dropdown component', () => {
expect(vm.$el.querySelector('.dropdown').textContent).toContain('Choose a type');
expect(wrapper.find('.dropdown').text()).toContain('Choose a type');
});
it('calls setSelectedTemplateType when clicking item', () => {
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation();
it('calls setSelectedTemplateType when clicking item', async () => {
await wrapper.find('.dropdown-menu button').trigger('click');
vm.$el.querySelector('.dropdown-menu button').click();
expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({
expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/setSelectedTemplateType', {
name: '.gitlab-ci.yml',
key: 'gitlab_ci_ymls',
});
@ -46,60 +41,52 @@ describe('IDE file templates bar component', () => {
});
describe('template dropdown', () => {
beforeEach(async () => {
vm.$store.state.fileTemplates.templates = [
beforeEach(() => {
store.state.fileTemplates.templates = [
{
name: 'test',
},
];
vm.$store.state.fileTemplates.selectedTemplateType = {
store.state.fileTemplates.selectedTemplateType = {
name: '.gitlab-ci.yml',
key: 'gitlab_ci_ymls',
};
await nextTick();
});
it('renders dropdown component', () => {
expect(vm.$el.querySelectorAll('.dropdown')[1].textContent).toContain('Choose a template');
expect(wrapper.findAll('.dropdown').at(1).text()).toContain('Choose a template');
});
it('calls fetchTemplate on dropdown open', () => {
jest.spyOn(vm, 'fetchTemplate').mockImplementation();
it('calls fetchTemplate on dropdown open', async () => {
await wrapper.findAll('.dropdown-menu').at(1).find('button').trigger('click');
vm.$el.querySelectorAll('.dropdown-menu')[1].querySelector('button').click();
expect(vm.fetchTemplate).toHaveBeenCalledWith({
expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/fetchTemplate', {
name: 'test',
});
});
});
const findUndoButton = () => wrapper.find('.btn-default-secondary');
it('shows undo button if updateSuccess is true', async () => {
vm.$store.state.fileTemplates.updateSuccess = true;
store.state.fileTemplates.updateSuccess = true;
await nextTick();
expect(vm.$el.querySelector('.btn-default').style.display).not.toBe('none');
expect(findUndoButton().isVisible()).toBe(true);
});
it('calls undoFileTemplate when clicking undo button', () => {
jest.spyOn(vm, 'undoFileTemplate').mockImplementation();
it('calls undoFileTemplate when clicking undo button', async () => {
await findUndoButton().trigger('click');
vm.$el.querySelector('.btn-default-secondary').click();
expect(vm.undoFileTemplate).toHaveBeenCalled();
expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/undoFileTemplate', undefined);
});
it('calls setSelectedTemplateType if activeFile name matches a template', async () => {
const fileName = '.gitlab-ci.yml';
jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation(() => {});
vm.$store.state.openFiles[0].name = fileName;
vm.setInitialType();
store.state.openFiles = [{ ...file(fileName), opened: true, active: true }];
await nextTick();
expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({
expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/setSelectedTemplateType', {
name: fileName,
key: 'gitlab_ci_ymls',
});

View File

@ -1,44 +1,43 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
import Description from '~/ide/components/jobs/detail/description.vue';
import { jobs } from '../../../mock_data';
describe('IDE job description', () => {
const Component = Vue.extend(Description);
let vm;
let wrapper;
beforeEach(() => {
vm = mountComponent(Component, {
job: jobs[0],
wrapper = mount(Description, {
propsData: {
job: jobs[0],
},
});
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('renders job details', () => {
expect(vm.$el.textContent).toContain('#1');
expect(vm.$el.textContent).toContain('test');
expect(wrapper.text()).toContain('#1');
expect(wrapper.text()).toContain('test');
});
it('renders CI icon', () => {
expect(
vm.$el.querySelector('.ci-status-icon [data-testid="status_success_borderless-icon"]'),
).not.toBe(null);
expect(wrapper.find('.ci-status-icon').findComponent(GlIcon).exists()).toBe(true);
});
it('renders a borderless CI icon', () => {
expect(
vm.$el.querySelector('.borderless [data-testid="status_success_borderless-icon"]'),
).not.toBe(null);
expect(wrapper.find('.borderless').findComponent(GlIcon).exists()).toBe(true);
});
it('renders bridge job details without the job link', () => {
vm = mountComponent(Component, {
job: { ...jobs[0], path: undefined },
wrapper = mount(Description, {
propsData: {
job: { ...jobs[0], path: undefined },
},
});
expect(vm.$el.querySelector('[data-testid="description-detail-link"]')).toBe(null);
expect(wrapper.find('[data-testid="description-detail-link"]').exists()).toBe(false);
});
});

View File

@ -1,15 +1,17 @@
import Vue, { nextTick } from 'vue';
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import JobDetail from '~/ide/components/jobs/detail.vue';
import { createStore } from '~/ide/stores';
import { jobs } from '../../mock_data';
describe('IDE jobs detail view', () => {
let vm;
let wrapper;
let store;
const createComponent = () => {
const store = createStore();
store = createStore();
store.state.pipelines.detailJob = {
...jobs[0],
@ -18,163 +20,129 @@ describe('IDE jobs detail view', () => {
rawPath: `${TEST_HOST}/raw`,
};
return createComponentWithStore(Vue.extend(JobDetail), store);
jest.spyOn(store, 'dispatch');
store.dispatch.mockResolvedValue();
wrapper = mount(JobDetail, { store });
};
beforeEach(() => {
vm = createComponent();
const findBuildJobLog = () => wrapper.find('pre');
const findScrollToBottomButton = () => wrapper.find('button[aria-label="Scroll to bottom"]');
const findScrollToTopButton = () => wrapper.find('button[aria-label="Scroll to top"]');
jest.spyOn(vm, 'fetchJobLogs').mockResolvedValue();
beforeEach(() => {
createComponent();
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
describe('mounted', () => {
beforeEach(() => {
vm = vm.$mount();
});
const findJobOutput = () => wrapper.find('.bash');
const findBuildLoaderAnimation = () => wrapper.find('.build-loader-animation');
it('calls fetchJobLogs', () => {
expect(vm.fetchJobLogs).toHaveBeenCalled();
expect(store.dispatch).toHaveBeenCalledWith('pipelines/fetchJobLogs', undefined);
});
it('scrolls to bottom', () => {
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalled();
expect(findBuildJobLog().element.scrollTo).toHaveBeenCalled();
});
it('renders job output', () => {
expect(vm.$el.querySelector('.bash').textContent).toContain('testing');
expect(findJobOutput().text()).toContain('testing');
});
it('renders empty message output', async () => {
vm.$store.state.pipelines.detailJob.output = '';
store.state.pipelines.detailJob.output = '';
await nextTick();
expect(vm.$el.querySelector('.bash').textContent).toContain('No messages were logged');
expect(findJobOutput().text()).toContain('No messages were logged');
});
it('renders loading icon', () => {
expect(vm.$el.querySelector('.build-loader-animation')).not.toBe(null);
expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('');
expect(findBuildLoaderAnimation().exists()).toBe(true);
expect(findBuildLoaderAnimation().isVisible()).toBe(true);
});
it('hides output when loading', () => {
expect(vm.$el.querySelector('.bash')).not.toBe(null);
expect(vm.$el.querySelector('.bash').style.display).toBe('none');
expect(findJobOutput().exists()).toBe(true);
expect(findJobOutput().isVisible()).toBe(false);
});
it('hides loading icon when isLoading is false', async () => {
vm.$store.state.pipelines.detailJob.isLoading = false;
store.state.pipelines.detailJob.isLoading = false;
await nextTick();
expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('none');
expect(findBuildLoaderAnimation().isVisible()).toBe(false);
});
it('resets detailJob when clicking header button', () => {
jest.spyOn(vm, 'setDetailJob').mockImplementation();
it('resets detailJob when clicking header button', async () => {
await wrapper.find('.btn').trigger('click');
vm.$el.querySelector('.btn').click();
expect(vm.setDetailJob).toHaveBeenCalledWith(null);
expect(store.dispatch).toHaveBeenCalledWith('pipelines/setDetailJob', null);
});
it('renders raw path link', () => {
expect(vm.$el.querySelector('.controllers-buttons').getAttribute('href')).toBe(
`${TEST_HOST}/raw`,
);
expect(wrapper.find('.controllers-buttons').attributes('href')).toBe(`${TEST_HOST}/raw`);
});
});
describe('scroll buttons', () => {
beforeEach(() => {
vm = createComponent();
jest.spyOn(vm, 'fetchJobLogs').mockResolvedValue();
});
afterEach(() => {
vm.$destroy();
createComponent();
});
it.each`
fnName | btnName | scrollPos
${'scrollDown'} | ${'down'} | ${0}
${'scrollUp'} | ${'up'} | ${1}
`('triggers $fnName when clicking $btnName button', async ({ fnName, scrollPos }) => {
jest.spyOn(vm, fnName).mockImplementation();
fnName | btnName | scrollPos | targetScrollPos
${'scroll down'} | ${'down'} | ${0} | ${200}
${'scroll up'} | ${'up'} | ${200} | ${0}
`('triggers $fnName when clicking $btnName button', async ({ scrollPos, targetScrollPos }) => {
jest.spyOn(findBuildJobLog().element, 'offsetHeight', 'get').mockReturnValue(0);
jest.spyOn(findBuildJobLog().element, 'scrollHeight', 'get').mockReturnValue(200);
jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(scrollPos);
findBuildJobLog().element.scrollTo.mockReset();
vm = vm.$mount();
await findBuildJobLog().trigger('scroll'); // trigger button updates
vm.scrollPos = scrollPos;
await wrapper.find('.controllers button:not(:disabled)').trigger('click');
await nextTick();
vm.$el.querySelector('.btn-scroll:not([disabled])').click();
expect(vm[fnName]).toHaveBeenCalled();
expect(findBuildJobLog().element.scrollTo).toHaveBeenCalledWith(0, targetScrollPos);
});
});
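// Editorial aside (not part of the spec): under the mocks above (offsetHeight 0, scrollHeight 200),
// a plausible reading of targetScrollPos is the minimal sketch below; the names are illustrative
// assumptions rather than the component's real internals:
//   const el = findBuildJobLog().element;
//   const downTarget = el.scrollHeight - el.offsetHeight; // 200 - 0 = 200, the "down" row
//   const upTarget = 0;                                    // top of the log, the "up" row
// i.e. the enabled controller button is expected to call scrollTo(0, 200) or scrollTo(0, 0).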
describe('scrollDown', () => {
describe('scrolling build log', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
jest.spyOn(findBuildJobLog().element, 'offsetHeight', 'get').mockReturnValue(100);
jest.spyOn(findBuildJobLog().element, 'scrollHeight', 'get').mockReturnValue(200);
});
it('scrolls build trace to bottom', () => {
jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(1000);
it('keeps scroll at bottom when already at the bottom', async () => {
jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(100);
vm.scrollDown();
await findBuildJobLog().trigger('scroll');
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 1000);
});
});
describe('scrollUp', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
expect(findScrollToBottomButton().attributes('disabled')).toBe('disabled');
expect(findScrollToTopButton().attributes('disabled')).not.toBe('disabled');
});
it('scrolls build trace to top', () => {
vm.scrollUp();
it('keeps scroll at top when already at top', async () => {
jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(0);
expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 0);
});
});
await findBuildJobLog().trigger('scroll');
describe('scrollBuildLog', () => {
beforeEach(() => {
vm = vm.$mount();
jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
jest.spyOn(vm.$refs.buildJobLog, 'offsetHeight', 'get').mockReturnValue(100);
jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(200);
expect(findScrollToBottomButton().attributes('disabled')).not.toBe('disabled');
expect(findScrollToTopButton().attributes('disabled')).toBe('disabled');
});
it('sets scrollPos to bottom when at the bottom', () => {
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(100);
it('resets scroll when not at top or bottom', async () => {
jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(10);
vm.scrollBuildLog();
await findBuildJobLog().trigger('scroll');
expect(vm.scrollPos).toBe(1);
});
it('sets scrollPos to top when at the top', () => {
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(0);
vm.scrollPos = 1;
vm.scrollBuildLog();
expect(vm.scrollPos).toBe(0);
});
it('resets scrollPos when not at top or bottom', () => {
jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(10);
vm.scrollBuildLog();
expect(vm.scrollPos).toBe('');
expect(findScrollToBottomButton().attributes('disabled')).not.toBe('disabled');
expect(findScrollToTopButton().attributes('disabled')).not.toBe('disabled');
});
});
});

View File

@ -1,36 +1,38 @@
import Vue, { nextTick } from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import JobItem from '~/ide/components/jobs/item.vue';
import { jobs } from '../../mock_data';
describe('IDE jobs item', () => {
const Component = Vue.extend(JobItem);
const job = jobs[0];
let vm;
let wrapper;
beforeEach(() => {
vm = mountComponent(Component, {
job,
});
wrapper = mount(JobItem, { propsData: { job } });
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('renders job details', () => {
expect(vm.$el.textContent).toContain(job.name);
expect(vm.$el.textContent).toContain(`#${job.id}`);
expect(wrapper.text()).toContain(job.name);
expect(wrapper.text()).toContain(`#${job.id}`);
});
it('renders CI icon', () => {
expect(vm.$el.querySelector('[data-testid="status_success_borderless-icon"]')).not.toBe(null);
expect(wrapper.find('[data-testid="status_success_borderless-icon"]').exists()).toBe(true);
});
it('does not render view logs button if not started', async () => {
vm.job.started = false;
await wrapper.setProps({
job: {
...jobs[0],
started: false,
},
});
await nextTick();
expect(vm.$el.querySelector('.btn')).toBe(null);
expect(wrapper.findComponent(GlButton).exists()).toBe(false);
});
});

View File

@ -1,59 +1,60 @@
import Vue, { nextTick } from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import Button from '~/ide/components/new_dropdown/button.vue';
describe('IDE new entry dropdown button component', () => {
let Component;
let vm;
let wrapper;
beforeAll(() => {
Component = Vue.extend(Button);
});
beforeEach(() => {
vm = mountComponent(Component, {
label: 'Testing',
icon: 'doc-new',
const createComponent = (props = {}) => {
wrapper = mount(Button, {
propsData: {
label: 'Testing',
icon: 'doc-new',
...props,
},
});
jest.spyOn(vm, '$emit').mockImplementation(() => {});
});
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('renders button with label', () => {
expect(vm.$el.textContent).toContain('Testing');
createComponent();
expect(wrapper.text()).toContain('Testing');
});
it('renders icon', () => {
expect(vm.$el.querySelector('[data-testid="doc-new-icon"]')).not.toBe(null);
createComponent();
expect(wrapper.find('[data-testid="doc-new-icon"]').exists()).toBe(true);
});
it('emits click event', () => {
vm.$el.click();
it('emits click event', async () => {
createComponent();
expect(vm.$emit).toHaveBeenCalledWith('click');
await wrapper.trigger('click');
expect(wrapper.emitted('click')).toHaveLength(1);
});
it('hides label if showLabel is false', async () => {
vm.showLabel = false;
it('hides label if showLabel is false', () => {
createComponent({ showLabel: false });
await nextTick();
expect(vm.$el.textContent).not.toContain('Testing');
expect(wrapper.text()).not.toContain('Testing');
});
describe('tooltipTitle', () => {
describe('tooltip title', () => {
it('returns empty string when showLabel is true', () => {
expect(vm.tooltipTitle).toBe('');
createComponent({ showLabel: true });
expect(wrapper.attributes('title')).toBe('');
});
it('returns label', async () => {
vm.showLabel = false;
it('returns label', () => {
createComponent({ showLabel: false });
await nextTick();
expect(vm.tooltipTitle).toBe('Testing');
expect(wrapper.attributes('title')).toBe('Testing');
});
});
});

View File

@ -1,39 +1,34 @@
import Vue from 'vue';
import createComponent from 'helpers/vue_mount_component_helper';
import upload from '~/ide/components/new_dropdown/upload.vue';
import { mount } from '@vue/test-utils';
import Upload from '~/ide/components/new_dropdown/upload.vue';
describe('new dropdown upload', () => {
let vm;
let wrapper;
beforeEach(() => {
const Component = Vue.extend(upload);
vm = createComponent(Component, {
path: '',
wrapper = mount(Upload, {
propsData: {
path: '',
},
});
vm.entryName = 'testing';
jest.spyOn(vm, '$emit');
});
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
describe('openFile', () => {
it('calls for each file', () => {
const files = ['test', 'test2', 'test3'];
jest.spyOn(vm, 'readFile').mockImplementation(() => {});
jest.spyOn(vm.$refs.fileUpload, 'files', 'get').mockReturnValue(files);
jest.spyOn(wrapper.vm, 'readFile').mockImplementation(() => {});
jest.spyOn(wrapper.vm.$refs.fileUpload, 'files', 'get').mockReturnValue(files);
vm.openFile();
wrapper.vm.openFile();
expect(vm.readFile.mock.calls.length).toBe(3);
expect(wrapper.vm.readFile.mock.calls.length).toBe(3);
files.forEach((file, i) => {
expect(vm.readFile.mock.calls[i]).toEqual([file]);
expect(wrapper.vm.readFile.mock.calls[i]).toEqual([file]);
});
});
});
@ -48,7 +43,7 @@ describe('new dropdown upload', () => {
type: 'images/png',
};
vm.readFile(file);
wrapper.vm.readFile(file);
expect(FileReader.prototype.readAsDataURL).toHaveBeenCalledWith(file);
});
@ -71,35 +66,39 @@ describe('new dropdown upload', () => {
it('calls readAsText and creates file in plain text (without encoding) if the file content is plain text', async () => {
const waitForCreate = new Promise((resolve) => {
vm.$on('create', resolve);
wrapper.vm.$on('create', resolve);
});
vm.createFile(textTarget, textFile);
wrapper.vm.createFile(textTarget, textFile);
expect(FileReader.prototype.readAsText).toHaveBeenCalledWith(textFile);
await waitForCreate;
expect(vm.$emit).toHaveBeenCalledWith('create', {
name: textFile.name,
type: 'blob',
content: 'plain text',
rawPath: '',
mimeType: 'test/mime-text',
});
expect(wrapper.emitted('create')[0]).toStrictEqual([
{
name: textFile.name,
type: 'blob',
content: 'plain text',
rawPath: '',
mimeType: 'test/mime-text',
},
]);
});
it('creates a blob URL for the content if binary', () => {
vm.createFile(binaryTarget, binaryFile);
wrapper.vm.createFile(binaryTarget, binaryFile);
expect(FileReader.prototype.readAsText).not.toHaveBeenCalled();
expect(vm.$emit).toHaveBeenCalledWith('create', {
name: binaryFile.name,
type: 'blob',
content: 'ðððð',
rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
mimeType: 'test/mime-binary',
});
expect(wrapper.emitted('create')[0]).toStrictEqual([
{
name: binaryFile.name,
type: 'blob',
content: 'ðððð',
rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
mimeType: 'test/mime-binary',
},
]);
});
});
});
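// Editorial sketch (assumption, not part of the spec): a createFile consistent with the expectations
// above branches on whether the read result looks binary; isTextContent is a hypothetical helper and
// the field names simply mirror the emitted payloads:
//   if (isTextContent(result)) {
//     emit('create', { name, type: 'blob', content: result, rawPath: '', mimeType });
//   } else {
//     emit('create', { name, type: 'blob', content: result, rawPath: URL.createObjectURL(file), mimeType });
//   }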

View File

@ -1,5 +1,4 @@
import Vue, { nextTick } from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import TokenedInput from '~/ide/components/shared/tokened_input.vue';
const TEST_PLACEHOLDER = 'Searching in test';
@ -10,120 +9,106 @@ const TEST_TOKENS = [
];
const TEST_VALUE = 'lorem';
function getTokenElements(vm) {
return Array.from(vm.$el.querySelectorAll('.filtered-search-token button'));
}
function createBackspaceEvent() {
const e = new Event('keyup');
e.keyCode = 8;
e.which = e.keyCode;
e.altKey = false;
e.ctrlKey = true;
e.shiftKey = false;
e.metaKey = false;
return e;
function getTokenElements(wrapper) {
return wrapper.findAll('.filtered-search-token button');
}
describe('IDE shared/TokenedInput', () => {
const Component = Vue.extend(TokenedInput);
let vm;
let wrapper;
beforeEach(() => {
vm = mountComponent(Component, {
tokens: TEST_TOKENS,
placeholder: TEST_PLACEHOLDER,
value: TEST_VALUE,
const createComponent = (props = {}) => {
wrapper = mount(TokenedInput, {
propsData: {
tokens: TEST_TOKENS,
placeholder: TEST_PLACEHOLDER,
value: TEST_VALUE,
...props,
},
attachTo: document.body,
});
jest.spyOn(vm, '$emit').mockImplementation(() => {});
});
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
it('renders tokens', () => {
const renderedTokens = getTokenElements(vm).map((x) => x.textContent.trim());
createComponent();
const renderedTokens = getTokenElements(wrapper).wrappers.map((w) => w.text());
expect(renderedTokens).toEqual(TEST_TOKENS.map((x) => x.label));
});
it('renders input', () => {
expect(vm.$refs.input).toBeInstanceOf(HTMLInputElement);
expect(vm.$refs.input).toHaveValue(TEST_VALUE);
createComponent();
expect(wrapper.find('input').element).toBeInstanceOf(HTMLInputElement);
expect(wrapper.find('input').element).toHaveValue(TEST_VALUE);
});
it('renders placeholder, when tokens are empty', async () => {
vm.tokens = [];
it('renders placeholder, when tokens are empty', () => {
createComponent({ tokens: [] });
await nextTick();
expect(vm.$refs.input).toHaveAttr('placeholder', TEST_PLACEHOLDER);
expect(wrapper.find('input').attributes('placeholder')).toBe(TEST_PLACEHOLDER);
});
it('triggers "removeToken" on token click', () => {
getTokenElements(vm)[0].click();
it('triggers "removeToken" on token click', async () => {
createComponent();
await getTokenElements(wrapper).at(0).trigger('click');
expect(vm.$emit).toHaveBeenCalledWith('removeToken', TEST_TOKENS[0]);
expect(wrapper.emitted('removeToken')[0]).toStrictEqual([TEST_TOKENS[0]]);
});
it('when input triggers backspace event, it calls "onBackspace"', () => {
jest.spyOn(vm, 'onBackspace').mockImplementation(() => {});
it('removes token on backspace when value is empty', async () => {
createComponent({ value: '' });
vm.$refs.input.dispatchEvent(createBackspaceEvent());
vm.$refs.input.dispatchEvent(createBackspaceEvent());
expect(wrapper.emitted('removeToken')).toBeUndefined();
expect(vm.onBackspace).toHaveBeenCalledTimes(2);
await wrapper.find('input').trigger('keyup.delete');
await wrapper.find('input').trigger('keyup.delete');
expect(wrapper.emitted('removeToken')[0]).toStrictEqual([TEST_TOKENS[TEST_TOKENS.length - 1]]);
});
it('triggers "removeToken" on backspaces when value is empty', () => {
vm.value = '';
it('does not trigger "removeToken" on backspaces when value is not empty', async () => {
createComponent({ value: 'SOMETHING' });
vm.onBackspace();
await wrapper.find('input').trigger('keyup.delete');
await wrapper.find('input').trigger('keyup.delete');
expect(vm.$emit).not.toHaveBeenCalled();
expect(vm.backspaceCount).toEqual(1);
vm.onBackspace();
expect(vm.$emit).toHaveBeenCalledWith('removeToken', TEST_TOKENS[TEST_TOKENS.length - 1]);
expect(vm.backspaceCount).toEqual(0);
expect(wrapper.emitted('removeToken')).toBeUndefined();
});
it('does not trigger "removeToken" on backspaces when value is not empty', () => {
vm.onBackspace();
vm.onBackspace();
it('does not trigger "removeToken" on backspaces when tokens are empty', async () => {
createComponent({ value: '', tokens: [] });
expect(vm.backspaceCount).toEqual(0);
expect(vm.$emit).not.toHaveBeenCalled();
await wrapper.find('input').trigger('keyup.delete');
await wrapper.find('input').trigger('keyup.delete');
expect(wrapper.emitted('removeToken')).toBeUndefined();
});
it('does not trigger "removeToken" on backspaces when tokens are empty', () => {
vm.tokens = [];
it('triggers "focus" on input focus', async () => {
createComponent();
vm.onBackspace();
vm.onBackspace();
await wrapper.find('input').trigger('focus');
expect(vm.backspaceCount).toEqual(0);
expect(vm.$emit).not.toHaveBeenCalled();
expect(wrapper.emitted('focus')).toHaveLength(1);
});
it('triggers "focus" on input focus', () => {
vm.$refs.input.dispatchEvent(new Event('focus'));
it('triggers "blur" on input blur', async () => {
createComponent();
expect(vm.$emit).toHaveBeenCalledWith('focus');
await wrapper.find('input').trigger('blur');
expect(wrapper.emitted('blur')).toHaveLength(1);
});
it('triggers "blur" on input blur', () => {
vm.$refs.input.dispatchEvent(new Event('blur'));
it('triggers "input" with value on input change', async () => {
createComponent();
expect(vm.$emit).toHaveBeenCalledWith('blur');
});
await wrapper.find('input').setValue('something-else');
it('triggers "input" with value on input change', () => {
vm.$refs.input.value = 'something-else';
vm.$refs.input.dispatchEvent(new Event('input'));
expect(vm.$emit).toHaveBeenCalledWith('input', 'something-else');
expect(wrapper.emitted('input')[0]).toStrictEqual(['something-else']);
});
});
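// Editorial sketch (assumption, not part of the spec): the backspace expectations above fit a handler
// that removes the last token only on a second consecutive backspace while the input is empty;
// backspaceCount is illustrative here (it also appears in the removed assertions above):
//   if (value !== '' || tokens.length === 0) { backspaceCount = 0; return; }
//   backspaceCount += 1;
//   if (backspaceCount > 1) { $emit('removeToken', tokens[tokens.length - 1]); backspaceCount = 0; }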

View File

@ -0,0 +1,44 @@
import hljs from 'highlight.js/lib/core';
import languageLoader from '~/content_editor/services/highlight_js_language_loader';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import { highlight } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
jest.mock('highlight.js/lib/core', () => ({
highlight: jest.fn().mockReturnValue({}),
registerLanguage: jest.fn(),
}));
jest.mock('~/content_editor/services/highlight_js_language_loader', () => ({
javascript: jest.fn().mockReturnValue({ default: jest.fn() }),
}));
jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
registerPlugins: jest.fn(),
}));
const fileType = 'text';
const content = 'function test() { return true };';
const language = 'javascript';
describe('Highlight utility', () => {
beforeEach(() => highlight(fileType, content, language));
it('loads the language', () => {
expect(languageLoader.javascript).toHaveBeenCalled();
});
it('registers the plugins', () => {
expect(registerPlugins).toHaveBeenCalled();
});
it('registers the language', () => {
expect(hljs.registerLanguage).toHaveBeenCalledWith(
language,
languageLoader[language]().default,
);
});
it('highlights the content', () => {
expect(hljs.highlight).toHaveBeenCalledWith(content, { language });
});
});
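// Editorial sketch (assumption, not part of the spec): an implementation consistent with the mocks
// above could look roughly like this; the real module may differ (for example, it may be async, and
// the registerPlugins arguments are assumed rather than asserted above):
//   export const highlight = (fileType, content, language) => {
//     registerPlugins(hljs, fileType, content);
//     hljs.registerLanguage(language, languageLoader[language]().default);
//     return hljs.highlight(content, { language });
//   };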

View File

@ -15,7 +15,7 @@ describe('createLink', () => {
it('escapes the user-controlled content', () => {
const unescapedXSS = '<script>XSS</script>';
const escapedPackageName = '&lt;script&gt;XSS&lt;/script&gt;';
const escapedHref = '&amp;lt;script&amp;gt;XSS&amp;lt;/script&amp;gt;';
const escapedHref = '&lt;script&gt;XSS&lt;/script&gt;';
const href = `http://test.com/${unescapedXSS}`;
const innerText = `testing${unescapedXSS}`;
const result = `<a href="http://test.com/${escapedHref}" rel="nofollow noreferrer noopener">testing${escapedPackageName}</a>`;

View File

@ -9,21 +9,70 @@ RSpec.describe RecaptchaHelper, type: :helper do
allow(helper).to receive(:session) { session }
end
describe '.show_recaptcha_sign_up?' do
context 'when reCAPTCHA is disabled' do
it 'returns false' do
stub_application_setting(recaptcha_enabled: false)
shared_examples 'Gitlab QA bypass' do
context 'when GITLAB_QA_USER_AGENT env var is present' do
using RSpec::Parameterized::TableSyntax
expect(helper.show_recaptcha_sign_up?).to be_falsey
where(:dot_com, :user_agent, :qa_user_agent, :result) do
false | 'qa_user_agent' | 'qa_user_agent' | true
true | nil | 'qa_user_agent' | true
true | '' | 'qa_user_agent' | true
true | 'qa_user_agent' | '' | true
true | 'qa_user_agent' | nil | true
true | 'qa_user_agent' | 'qa_user_agent' | false
end
end
context 'when reCAPTCHA is enabled' do
it 'returns true' do
stub_application_setting(recaptcha_enabled: true)
with_them do
before do
allow(Gitlab).to receive(:com?).and_return(dot_com)
stub_env('GITLAB_QA_USER_AGENT', qa_user_agent)
expect(helper.show_recaptcha_sign_up?).to be_truthy
request_double = instance_double(ActionController::TestRequest, user_agent: user_agent)
allow(helper).to receive(:request).and_return(request_double)
end
it { is_expected.to eq result }
end
end
end
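# Editorial sketch (assumption, not part of the spec): the table above reads as a QA bypass that
# only applies on GitLab.com when the request user agent matches the env var, roughly:
#   bypass = Gitlab.com? && qa_user_agent.present? && request.user_agent == qa_user_agent
#   result = recaptcha_setting_enabled && !bypass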
describe '.show_recaptcha_sign_up?' do
let(:setting_state) { true }
before do
stub_application_setting(recaptcha_enabled: setting_state)
end
subject { helper.show_recaptcha_sign_up? }
it { is_expected.to eq true }
context 'when setting is disabled' do
let(:setting_state) { false }
it { is_expected.to eq false }
end
include_examples 'Gitlab QA bypass'
end
describe '.recaptcha_enabled_on_login?' do
let(:setting_state) { true }
before do
stub_application_setting(login_recaptcha_protection_enabled: setting_state)
end
subject { helper.recaptcha_enabled_on_login? }
it { is_expected.to eq true }
context 'when setting is disabled' do
let(:setting_state) { false }
it { is_expected.to eq false }
end
include_examples 'Gitlab QA bypass'
end
end

View File

@ -3,21 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Pages::CacheControl do
it 'fails with invalid type' do
expect { described_class.new(type: :unknown, id: nil) }
.to raise_error(ArgumentError, "type must be :namespace or :project")
end
describe '.for_namespace' do
let(:subject) { described_class.for_namespace(1) }
subject(:cache_control) { described_class.for_namespace(1) }
it { expect(subject.cache_key).to eq('pages_domain_for_namespace_1') }
it { expect(subject.cache_key).to match(/pages_domain_for_namespace_1_*/) }
describe '#clear_cache' do
it 'clears the cache' do
expect(Rails.cache)
.to receive(:delete)
.with('pages_domain_for_namespace_1')
.with(/pages_domain_for_namespace_1_*/)
subject.clear_cache
end
@ -25,18 +20,48 @@ RSpec.describe Gitlab::Pages::CacheControl do
end
describe '.for_project' do
let(:subject) { described_class.for_project(1) }
subject(:cache_control) { described_class.for_project(1) }
it { expect(subject.cache_key).to eq('pages_domain_for_project_1') }
it { expect(subject.cache_key).to match(/pages_domain_for_project_1_*/) }
describe '#clear_cache' do
it 'clears the cache' do
expect(Rails.cache)
.to receive(:delete)
.with('pages_domain_for_project_1')
.with(/pages_domain_for_project_1_*/)
subject.clear_cache
end
end
end
describe '#cache_key' do
it 'does not change the pages config' do
expect { described_class.new(type: :project, id: 1).cache_key }
.not_to change(Gitlab.config, :pages)
end
it 'is based on pages settings' do
access_control = Gitlab.config.pages.access_control
cache_key = described_class.new(type: :project, id: 1).cache_key
stub_config(pages: { access_control: !access_control })
expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
end
it 'is based on the force_pages_access_control settings' do
force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control
cache_key = described_class.new(type: :project, id: 1).cache_key
::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control
expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
end
end
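# Editorial sketch (assumption, not part of the spec): the expectations above are satisfied by a key
# that appends a digest of the relevant settings to the previous prefix, for example:
#   settings = [Gitlab.config.pages.access_control, ::Gitlab::CurrentSettings.force_pages_access_control]
#   "pages_domain_for_#{type}_#{id}_#{Digest::SHA256.hexdigest(settings.inspect)}"
# so flipping either setting changes the suffix and busts the cache.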
it 'fails with invalid type' do
expect { described_class.new(type: :unknown, id: nil) }
.to raise_error(ArgumentError, "type must be :namespace or :project")
end
end

View File

@ -34,5 +34,29 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu do
end
end
end
context 'for menu items' do
subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
describe 'Contributors' do
let_it_be(:item_id) { :contributors }
context 'when analytics is disabled' do
before do
project.project_feature.update!(analytics_access_level: ProjectFeature::DISABLED)
end
it { is_expected.to be_nil }
end
context 'when analytics is enabled' do
before do
project.project_feature.update!(analytics_access_level: ProjectFeature::ENABLED)
end
it { is_expected.not_to be_nil }
end
end
end
end
end

View File

@ -1962,7 +1962,7 @@ RSpec.describe Namespace do
it 'returns the virtual domain' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
expect(virtual_domain.cache_key).to eq("pages_domain_for_namespace_#{namespace.root_ancestor.id}")
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{namespace.root_ancestor.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do

View File

@ -563,7 +563,7 @@ RSpec.describe PagesDomain do
it 'returns the virtual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
expect(virtual_domain.cache_key).to eq("pages_domain_for_project_#{project.id}")
expect(virtual_domain.cache_key).to match(/pages_domain_for_project_#{project.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do

View File

@ -221,55 +221,25 @@ RSpec.describe API::Branches do
get api(route), params: { per_page: 1 }
end
context 'when increase_branch_cache_expiry is enabled' do
it 'uses the cache up to 60 minutes' do
time_of_request = Time.current
it 'uses the cache up to 60 minutes' do
time_of_request = Time.current
get api(route), params: { per_page: 1 }
travel_to time_of_request + 59.minutes do
expect(API::Entities::Branch).not_to receive(:represent)
get api(route), params: { per_page: 1 }
travel_to time_of_request + 59.minutes do
expect(API::Entities::Branch).not_to receive(:represent)
get api(route), params: { per_page: 1 }
end
end
it 'requests for new value after 60 minutes' do
get api(route), params: { per_page: 1 }
travel_to 61.minutes.from_now do
expect(API::Entities::Branch).to receive(:represent)
get api(route), params: { per_page: 1 }
end
end
end
context 'when increase_branch_cache_expiry is disabled' do
before do
stub_feature_flags(increase_branch_cache_expiry: false)
end
it 'requests for new value after 60 minutes' do
get api(route), params: { per_page: 1 }
it 'uses the cache up to 10 minutes' do
time_of_request = Time.current
travel_to 61.minutes.from_now do
expect(API::Entities::Branch).to receive(:represent)
get api(route), params: { per_page: 1 }
travel_to time_of_request + 9.minutes do
expect(API::Entities::Branch).not_to receive(:represent)
get api(route), params: { per_page: 1 }
end
end
it 'requests for new value after 10 minutes' do
get api(route), params: { per_page: 1 }
travel_to 11.minutes.from_now do
expect(API::Entities::Branch).to receive(:represent)
get api(route), params: { per_page: 1 }
end
end
end
end

View File

@ -27,6 +27,7 @@ require_relative '../../../../scripts/lib/glfm/update_example_snapshots'
#
# Also, the textual content of the individual fixture file entries is crafted to help
# indicate which scenarios they are covering.
# rubocop:disable RSpec/MultipleMemoizedHelpers
RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
subject { described_class.new }
@ -34,9 +35,8 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:glfm_spec_txt_path) { described_class::GLFM_SPEC_TXT_PATH }
let(:glfm_spec_txt_local_io) { StringIO.new(glfm_spec_txt_contents) }
let(:glfm_example_status_yml_path) { described_class::GLFM_EXAMPLE_STATUS_YML_PATH }
let(:glfm_example_status_yml_io) { StringIO.new(glfm_example_status_yml_contents) }
let(:glfm_example_metadata_yml_path) { described_class::GLFM_EXAMPLE_METADATA_YML_PATH }
let(:glfm_example_metadata_yml_io) { StringIO.new(glfm_example_metadata_yml_contents) }
let(:glfm_example_normalizations_yml_path) { described_class::GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH }
# Example Snapshot (ES) output files
let(:es_examples_index_yml_path) { described_class::ES_EXAMPLES_INDEX_YML_PATH }
@ -285,10 +285,25 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
YAML
end
let(:test1) { '\1\2URI_PREFIX\4' }
let(:glfm_example_normalizations_yml_contents) do
# NOTE: This heredoc identifier must be quoted because we are using control characters in the heredoc body.
# See https://stackoverflow.com/a/73831037/25192
<<~'YAML'
---
# If a config file entry starts with `00_`, it will be skipped for validation that it exists in `examples_index.yml`
00_shared:
00_uri: &00_uri
- regex: '(href|data-src)(=")(.*?)(test-file\.(png|zip)")'
replacement: '\1\2URI_PREFIX\4'
YAML
end
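# Editorial aside (illustration only, not part of the spec): quoting the heredoc identifier above
# disables escape processing, which matters for the '\1'-style back-references, e.g. in plain Ruby:
#   "\1".bytes  # => [1]      (escape-processed into a control character)
#   '\1'.bytes  # => [92, 49] (backslash kept verbatim, as the regex replacement requires)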
let(:es_html_yml_io_existing_contents) do
<<~YAML
---
00_00_00__obsolete_entry_to_be_deleted__001:
01_00_00__obsolete_entry_to_be_deleted__001:
canonical: |
This entry no longer exists in the spec.txt, so it will be deleted.
static: |-
@ -315,7 +330,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:es_prosemirror_json_yml_io_existing_contents) do
<<~YAML
---
00_00_00__obsolete_entry_to_be_deleted__001: |-
01_00_00__obsolete_entry_to_be_deleted__001: |-
{
"obsolete": "This entry is no longer exists in the spec.txt, and is not skipped, so it will be deleted."
}
@ -356,9 +371,14 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
# input files
allow(File).to receive(:open).with(glfm_spec_txt_path) { glfm_spec_txt_local_io }
allow(File).to receive(:open).with(glfm_example_status_yml_path) { glfm_example_status_yml_io }
allow(File).to receive(:open).with(glfm_example_status_yml_path) do
StringIO.new(glfm_example_status_yml_contents)
end
allow(File).to receive(:open).with(glfm_example_metadata_yml_path) do
glfm_example_metadata_yml_io
StringIO.new(glfm_example_metadata_yml_contents)
end
allow(File).to receive(:open).with(glfm_example_normalizations_yml_path) do
StringIO.new(glfm_example_normalizations_yml_contents)
end
# output files
@ -525,353 +545,404 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
end
end
# rubocop:disable RSpec/MultipleMemoizedHelpers
describe 'writing html.yml and prosemirror_json.yml' do
let(:es_html_yml_contents) { reread_io(es_html_yml_io) }
let(:es_prosemirror_json_yml_contents) { reread_io(es_prosemirror_json_yml_io) }
describe 'error handling when manually-curated input specification config files contain invalid example names:' do
let(:err_msg) do
/#{config_file}.*01_00_00__invalid__001.*does not have.*entry in.*#{described_class::ES_EXAMPLES_INDEX_YML_PATH}/m
end
# NOTE: This example_status.yml is crafted in conjunction with expected_html_yml_contents
# to test the behavior of the `skip_update_*` flags
let(:glfm_example_status_yml_contents) do
let(:invalid_example_name_file_contents) do
<<~YAML
---
02_01_00__inlines__strong__002:
# NOTE: 02_01_00__inlines__strong__002: is omitted from the existing prosemirror_json.yml file, and is also
# skipped here, to show that an example does not need to exist in order to be skipped.
# TODO: This should be changed to raise an error instead, to enforce that there cannot be orphaned
# entries in glfm_example_status.yml. This task is captured in
# https://gitlab.com/gitlab-org/gitlab/-/issues/361241#other-cleanup-tasks
skip_update_example_snapshot_prosemirror_json: "skipping because JSON isn't cool enough"
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
skip_update_example_snapshot_html_static: "skipping because there's too much static"
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
skip_update_example_snapshot_html_wysiwyg: 'skipping because what you see is NOT what you get'
skip_update_example_snapshot_prosemirror_json: "skipping because JSON still isn't cool enough"
05_01_00__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
skip_update_example_snapshots: 'skipping this example because it is very bad'
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
skip_update_example_snapshots: 'skipping this example because we have manually modified it'
01_00_00__invalid__001:
a: 1
YAML
end
let(:expected_html_yml_contents) do
<<~YAML
---
02_01_00__inlines__strong__001:
canonical: |
<p><strong>bold</strong></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
wysiwyg: |-
<p><strong>bold</strong></p>
02_01_00__inlines__strong__002:
canonical: |
<p><strong>bold with more text</strong></p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
wysiwyg: |-
<p><strong>bold with more text</strong></p>
02_03_00__inlines__strikethrough_extension__001:
canonical: |
<p><del>Hi</del> Hello, world!</p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
wysiwyg: |-
<p><s>Hi</s> Hello, world!</p>
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
canonical: |
<p><strong>bold</strong></p>
wysiwyg: |-
<p><strong>bold</strong></p>
03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001:
canonical: |
<p>Example in an H3</p>
static: |-
<p data-sourcepos="1:1-1:16" dir="auto">Example in an H3</p>
wysiwyg: |-
<p>Example in an H3</p>
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
canonical: |
<p><strong>
bold
</strong></p>
static: |-
<strong>
bold
</strong>
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
canonical: |
<p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
static: |-
<p>This is the manually modified static HTML which will be preserved</p>
wysiwyg: |-
<p>This is the manually modified WYSIWYG HTML which will be preserved</p>
06_01_00__api_request_overrides__group_upload_link__001:
canonical: |
<p><a href="groups-test-file">groups-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:45" dir="auto"><a href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uploads/groups-test-file">groups-test-file</a></p>
06_02_00__api_request_overrides__project_repo_link__001:
canonical: |
<p><a href="projects-test-file">projects-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:40" dir="auto"><a href="/glfm_group/glfm_project/-/blob/master/projects-test-file">projects-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="projects-test-file">projects-test-file</a></p>
06_03_00__api_request_overrides__project_snippet_ref__001:
canonical: |
<p>This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888">$88888</a>
static: |-
<p data-sourcepos="1:1-1:53" dir="auto">This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888" data-reference-type="snippet" data-original="$88888" data-link="false" data-link-reference="false" data-project="77777" data-snippet="88888" data-container="body" data-placement="top" title="glfm_project_snippet" class="gfm gfm-snippet has-tooltip">$88888</a></p>
wysiwyg: |-
<p>This project snippet ID reference IS filtered: $88888</p>
06_04_00__api_request_overrides__personal_snippet_ref__001:
canonical: |
<p>This personal snippet ID reference is NOT filtered: $99999</p>
static: |-
<p data-sourcepos="1:1-1:58" dir="auto">This personal snippet ID reference is NOT filtered: $99999</p>
wysiwyg: |-
<p>This personal snippet ID reference is NOT filtered: $99999</p>
06_05_00__api_request_overrides__project_wiki_link__001:
canonical: |
<p><a href="project-wikis-test-file">project-wikis-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:50" dir="auto"><a href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="project-wikis-test-file">project-wikis-test-file</a></p>
YAML
context 'for glfm_example_status.yml' do
let(:config_file) { described_class::GLFM_EXAMPLE_STATUS_YML_PATH }
let(:glfm_example_status_yml_contents) { invalid_example_name_file_contents }
it 'raises error' do
expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
end
end
let(:expected_prosemirror_json_contents) do
<<~YAML
---
02_01_00__inlines__strong__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "bold"
}
],
"text": "bold"
}
]
}
]
}
02_03_00__inlines__strikethrough_extension__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "strike"
}
],
"text": "Hi"
},
{
"type": "text",
"text": " Hello, world!"
}
]
}
]
}
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "bold"
}
],
"text": "bold"
}
]
}
]
}
03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "Example in an H3"
}
]
}
]
}
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
{
"existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
}
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
{
"existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
}
06_01_00__api_request_overrides__group_upload_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "/uploads/groups-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "/uploads/groups-test-file",
"isReference": false
}
}
],
"text": "groups-test-file"
}
]
}
]
}
06_02_00__api_request_overrides__project_repo_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "projects-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "projects-test-file",
"isReference": false
}
}
],
"text": "projects-test-file"
}
]
}
]
}
06_03_00__api_request_overrides__project_snippet_ref__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "This project snippet ID reference IS filtered: $88888"
}
]
}
]
}
06_04_00__api_request_overrides__personal_snippet_ref__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "This personal snippet ID reference is NOT filtered: $99999"
}
]
}
]
}
06_05_00__api_request_overrides__project_wiki_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "project-wikis-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "project-wikis-test-file",
"isReference": false
}
}
],
"text": "project-wikis-test-file"
}
]
}
]
}
YAML
context 'for glfm_example_metadata.yml' do
let(:config_file) { described_class::GLFM_EXAMPLE_METADATA_YML_PATH }
let(:glfm_example_metadata_yml_contents) { invalid_example_name_file_contents }
it 'raises error' do
expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
end
end
before do
# NOTE: This is necessary to avoid an `error Couldn't find an integrity file` error
# when invoking `yarn jest ...` on CI from within an RSpec job. It could be solved by
# including `.yarn-install` in the RSpec CI job, but that would be a performance
# hit to all RSpec jobs. We could also make a dedicated job just for this spec. However,
# since this is just a single script, those options may not be justified.
described_class.new.run_external_cmd('yarn install') if ENV['CI']
context 'for glfm_example_normalizations.yml' do
let(:config_file) { described_class::GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH }
let(:glfm_example_normalizations_yml_contents) { invalid_example_name_file_contents }
it 'raises error' do
expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
end
end
end
context 'with full processing of static and WYSIWYG HTML' do
before(:all) do
# NOTE: It is necessary to do a `yarn install` to ensure that
# `scripts/lib/glfm/render_wysiwyg_html_and_json.js` can be invoked successfully
# on the CI job (which is not set up for frontend specs, since this is an RSpec spec),
# or when the current yarn dependencies are not installed locally.
described_class.new.run_external_cmd('yarn install --frozen-lockfile')
end
# NOTE: Both `html.yml` and `prosemirror_json.yml` generation are tested in a single example, to
# avoid slower tests, because generating the static HTML is slow due to the need to invoke
# the rails environment. We could have separate sections, but this would require an extra flag
# to the `process` method to independently skip static vs. WYSIWYG, which is not worth the effort.
it 'writes the correct content', :unlimited_max_formatted_output_length do
# expectation that skipping message is only output once per example
expect(subject).to receive(:output).once.with(/reason.*skipping this example because it is very bad/i)
describe 'manually-curated input specification config files' do
let(:glfm_example_status_yml_contents) { '' }
let(:glfm_example_metadata_yml_contents) { '' }
let(:glfm_example_normalizations_yml_contents) { '' }
subject.process
it 'can be empty' do
expect { subject.process }.not_to raise_error
end
end
expect(es_html_yml_contents).to eq(expected_html_yml_contents)
expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json_contents)
describe 'writing html.yml and prosemirror_json.yml' do
let(:es_html_yml_contents) { reread_io(es_html_yml_io) }
let(:es_prosemirror_json_yml_contents) { reread_io(es_prosemirror_json_yml_io) }
# NOTE: This example_status.yml is crafted in conjunction with expected_html_yml_contents
# to test the behavior of the `skip_update_*` flags
let(:glfm_example_status_yml_contents) do
<<~YAML
---
02_01_00__inlines__strong__002:
# NOTE: 02_01_00__inlines__strong__002: is omitted from the existing prosemirror_json.yml file, and is also
# skipped here, to show that an example does not need to exist in order to be skipped.
# TODO: This should be changed to raise an error instead, to enforce that there cannot be orphaned
# entries in glfm_example_status.yml. This task is captured in
# https://gitlab.com/gitlab-org/gitlab/-/issues/361241#other-cleanup-tasks
skip_update_example_snapshot_prosemirror_json: "skipping because JSON isn't cool enough"
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
skip_update_example_snapshot_html_static: "skipping because there's too much static"
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
skip_update_example_snapshot_html_wysiwyg: 'skipping because what you see is NOT what you get'
skip_update_example_snapshot_prosemirror_json: "skipping because JSON still isn't cool enough"
05_01_00__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
skip_update_example_snapshots: 'skipping this example because it is very bad'
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
skip_update_example_snapshots: 'skipping this example because we have manually modified it'
YAML
end
let(:expected_html_yml_contents) do
<<~YAML
---
02_01_00__inlines__strong__001:
canonical: |
<p><strong>bold</strong></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
wysiwyg: |-
<p><strong>bold</strong></p>
02_01_00__inlines__strong__002:
canonical: |
<p><strong>bold with more text</strong></p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
wysiwyg: |-
<p><strong>bold with more text</strong></p>
02_03_00__inlines__strikethrough_extension__001:
canonical: |
<p><del>Hi</del> Hello, world!</p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
wysiwyg: |-
<p><s>Hi</s> Hello, world!</p>
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
canonical: |
<p><strong>bold</strong></p>
wysiwyg: |-
<p><strong>bold</strong></p>
03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001:
canonical: |
<p>Example in an H3</p>
static: |-
<p data-sourcepos="1:1-1:16" dir="auto">Example in an H3</p>
wysiwyg: |-
<p>Example in an H3</p>
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
canonical: |
<p><strong>
bold
</strong></p>
static: |-
<strong>
bold
</strong>
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
canonical: |
<p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
static: |-
<p>This is the manually modified static HTML which will be preserved</p>
wysiwyg: |-
<p>This is the manually modified WYSIWYG HTML which will be preserved</p>
06_01_00__api_request_overrides__group_upload_link__001:
canonical: |
<p><a href="groups-test-file">groups-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:45" dir="auto"><a href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uploads/groups-test-file">groups-test-file</a></p>
06_02_00__api_request_overrides__project_repo_link__001:
canonical: |
<p><a href="projects-test-file">projects-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:40" dir="auto"><a href="/glfm_group/glfm_project/-/blob/master/projects-test-file">projects-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="projects-test-file">projects-test-file</a></p>
06_03_00__api_request_overrides__project_snippet_ref__001:
canonical: |
<p>This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888">$88888</a>
static: |-
<p data-sourcepos="1:1-1:53" dir="auto">This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888" data-reference-type="snippet" data-original="$88888" data-link="false" data-link-reference="false" data-project="77777" data-snippet="88888" data-container="body" data-placement="top" title="glfm_project_snippet" class="gfm gfm-snippet has-tooltip">$88888</a></p>
wysiwyg: |-
<p>This project snippet ID reference IS filtered: $88888</p>
06_04_00__api_request_overrides__personal_snippet_ref__001:
canonical: |
<p>This personal snippet ID reference is NOT filtered: $99999</p>
static: |-
<p data-sourcepos="1:1-1:58" dir="auto">This personal snippet ID reference is NOT filtered: $99999</p>
wysiwyg: |-
<p>This personal snippet ID reference is NOT filtered: $99999</p>
06_05_00__api_request_overrides__project_wiki_link__001:
canonical: |
<p><a href="project-wikis-test-file">project-wikis-test-file</a></p>
static: |-
<p data-sourcepos="1:1-1:50" dir="auto"><a href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="project-wikis-test-file">project-wikis-test-file</a></p>
YAML
end
let(:expected_prosemirror_json_contents) do
<<~YAML
---
02_01_00__inlines__strong__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "bold"
}
],
"text": "bold"
}
]
}
]
}
02_03_00__inlines__strikethrough_extension__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "strike"
}
],
"text": "Hi"
},
{
"type": "text",
"text": " Hello, world!"
}
]
}
]
}
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "bold"
}
],
"text": "bold"
}
]
}
]
}
03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "Example in an H3"
}
]
}
]
}
04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
{
"existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
}
05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
{
"existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
}
06_01_00__api_request_overrides__group_upload_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "/uploads/groups-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "/uploads/groups-test-file",
"isReference": false
}
}
],
"text": "groups-test-file"
}
]
}
]
}
06_02_00__api_request_overrides__project_repo_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "projects-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "projects-test-file",
"isReference": false
}
}
],
"text": "projects-test-file"
}
]
}
]
}
06_03_00__api_request_overrides__project_snippet_ref__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "This project snippet ID reference IS filtered: $88888"
}
]
}
]
}
06_04_00__api_request_overrides__personal_snippet_ref__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"text": "This personal snippet ID reference is NOT filtered: $99999"
}
]
}
]
}
06_05_00__api_request_overrides__project_wiki_link__001: |-
{
"type": "doc",
"content": [
{
"type": "paragraph",
"content": [
{
"type": "text",
"marks": [
{
"type": "link",
"attrs": {
"href": "project-wikis-test-file",
"target": "_blank",
"class": null,
"title": null,
"canonicalSrc": "project-wikis-test-file",
"isReference": false
}
}
],
"text": "project-wikis-test-file"
}
]
}
]
}
YAML
end
# NOTE: Both `html.yml` and `prosemirror_json.yml` generation are tested in a single example, to
# avoid slower tests, because generating the static HTML is slow due to the need to invoke
# the rails environment. We could have separate sections, but this would require an extra flag
# to the `process` method to independently skip static vs. WYSIWYG, which is not worth the effort.
it 'writes the correct content', :unlimited_max_formatted_output_length do
# expectation that skipping message is only output once per example
expect(subject).to receive(:output).once.with(/reason.*skipping this example because it is very bad/i)
subject.process
expect(es_html_yml_contents).to eq(expected_html_yml_contents)
expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json_contents)
end
end
end
# rubocop:enable RSpec/MultipleMemoizedHelpers

View File

@ -50,6 +50,11 @@ RSpec.describe BulkImports::CreateService do
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_version).to eq(source_version.to_s)
expect(last_bulk_import.user).to eq(user)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group'
)
end
it 'creates bulk import entities' do

View File

@ -86,6 +86,16 @@ RSpec.describe Groups::ImportExport::ImportService do
service.execute
end
it 'tracks the event' do
service.execute
expect_snowplow_event(
category: 'Groups::ImportExport::ImportService',
action: 'create',
label: 'import_group_from_file'
)
end
end
context 'with a ndjson file' do
@ -105,12 +115,11 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when importing a ndjson export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject { service.execute }
subject(:service) { described_class.new(group: group, user: user) }
before do
ImportExportUpload.create!(group: group, import_file: import_file)
@ -128,11 +137,21 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'imports group structure successfully' do
expect(subject).to be_truthy
expect(service.execute).to be_truthy
end
it 'tracks the event' do
service.execute
expect_snowplow_event(
category: 'Groups::ImportExport::ImportService',
action: 'create',
label: 'import_group_from_file'
)
end
it 'removes import file' do
subject
service.execute
expect(group.import_export_upload.import_file.file).to be_nil
end
@ -141,7 +160,7 @@ RSpec.describe Groups::ImportExport::ImportService do
shared = Gitlab::ImportExport::Shared.new(group)
allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
subject
service.execute
expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
expect(Dir.exist?(shared.base_path)).to eq(false)
@ -154,7 +173,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
).once
subject
service.execute
end
end
@ -166,7 +185,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
it 'tracks the error' do
@ -177,7 +196,7 @@ RSpec.describe Groups::ImportExport::ImportService do
expect(param.message).to include 'does not have required permissions for'
end
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@ -191,7 +210,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
).once
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@ -203,7 +222,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'successfully imports the group' do
expect(subject).to be_truthy
expect(service.execute).to be_truthy
end
it 'logs the import success' do
@ -215,7 +234,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
)
subject
service.execute
end
end
end
@ -223,12 +242,11 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when importing a json export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject { service.execute }
subject(:service) { described_class.new(group: group, user: user) }
before do
ImportExportUpload.create!(group: group, import_file: import_file)
@ -246,11 +264,21 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'imports group structure successfully' do
expect(subject).to be_truthy
expect(service.execute).to be_truthy
end
it 'tracks the event' do
service.execute
expect_snowplow_event(
category: 'Groups::ImportExport::ImportService',
action: 'create',
label: 'import_group_from_file'
)
end
it 'removes import file' do
subject
service.execute
expect(group.import_export_upload.import_file.file).to be_nil
end
@ -259,7 +287,7 @@ RSpec.describe Groups::ImportExport::ImportService do
shared = Gitlab::ImportExport::Shared.new(group)
allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
subject
service.execute
expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
expect(Dir.exist?(shared.base_path)).to eq(false)
@ -272,7 +300,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
).once
subject
service.execute
end
end
@ -284,7 +312,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
)
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
it 'tracks the error' do
@ -295,7 +323,7 @@ RSpec.describe Groups::ImportExport::ImportService do
expect(param.message).to include 'does not have required permissions for'
end
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@ -309,7 +337,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
).once
expect { subject }.to raise_error(Gitlab::ImportExport::Error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@ -321,7 +349,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'successfully imports the group' do
expect(subject).to be_truthy
expect(service.execute).to be_truthy
end
it 'logs the import success' do
@ -333,7 +361,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
)
subject
service.execute
end
end
end

View File

@ -94,25 +94,6 @@ RSpec.describe MergeRequests::Mergeability::Logger, :request_store do
end
end
context 'when disabled' do
before do
stub_feature_flags(mergeability_checks_logger: false)
end
it "returns the block's value" do
expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
end
it 'does not call the logger' do
expect(Gitlab::AppJsonLogger).not_to receive(:new)
expect(logger.instrument(mergeability_name: :expensive_operation) { Project.count + MergeRequest.count })
.to eq(2)
logger.commit
end
end
it 'raises an error when block is not provided' do
expect { logger.instrument(mergeability_name: :expensive_operation) }
.to raise_error(ArgumentError, 'block not given')

View File

@ -4,9 +4,10 @@ require 'rake_helper'
RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:enable_registry) { true }
let(:backup_tasks) { %w{db repo uploads builds artifacts pages lfs terraform_state registry packages} }
let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
let(:backup_types) do
%w{main_db repositories uploads builds artifacts pages lfs terraform_state registry packages}.tap do |array|
%w[main_db repositories uploads builds artifacts pages lfs terraform_state registry packages].tap do |array|
array.insert(1, 'ci_db') if Gitlab::Database.has_config?(:ci)
end
end
@ -20,11 +21,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
def backup_files
%w(backup_information.yml artifacts.tar.gz builds.tar.gz lfs.tar.gz terraform_state.tar.gz pages.tar.gz packages.tar.gz)
%w[
backup_information.yml
artifacts.tar.gz
builds.tar.gz
lfs.tar.gz
terraform_state.tar.gz
pages.tar.gz
packages.tar.gz
]
end
def backup_directories
%w(db repositories)
%w[db repositories]
end
before(:all) do
@ -58,11 +67,88 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
describe 'lock parallel backups' do
using RSpec::Parameterized::TableSyntax
context 'when a process is running' do
let(:pid_file) { instance_double(File) }
it 'exits the new process' do
allow(File).to receive(:open).and_call_original
allow(File).to receive(:open).with(backup_restore_pid_path, any_args).and_yield(pid_file)
allow(pid_file).to receive(:read).and_return('123456')
allow(pid_file).to receive(:flock).with(any_args)
expect { run_rake_task('gitlab:backup:create') }.to raise_error(SystemExit).and output(
<<~HEREDOC
Backup and restore in progress:
There is a backup and restore task in progress. Please, try to run the current task once the previous one ends.
If there is no other process running, please remove the PID file manually: rm #{backup_restore_pid_path}
HEREDOC
).to_stdout
end
end
context 'when no processes are running' do
let(:progress) { $stdout }
let(:pid_file) { instance_double(File, write: 12345) }
where(:tasks_name, :rake_task) do
%w[main_db ci_db] | 'gitlab:backup:db:restore'
'repositories' | 'gitlab:backup:repo:restore'
'builds' | 'gitlab:backup:builds:restore'
'uploads' | 'gitlab:backup:uploads:restore'
'artifacts' | 'gitlab:backup:artifacts:restore'
'pages' | 'gitlab:backup:pages:restore'
'lfs' | 'gitlab:backup:lfs:restore'
'terraform_state' | 'gitlab:backup:terraform_state:restore'
'registry' | 'gitlab:backup:registry:restore'
'packages' | 'gitlab:backup:packages:restore'
end
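# `with_them` below runs the examples once per row of the table above, binding
# `tasks_name` and `rake_task` for each run.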
with_them do
before do
allow(Kernel).to receive(:system).and_return(true)
allow(YAML).to receive(:load_file).and_return({ gitlab_version: Gitlab::VERSION })
allow(File).to receive(:delete).with(backup_restore_pid_path).and_return(1)
allow(File).to receive(:open).and_call_original
allow(File).to receive(:open).with(backup_restore_pid_path, any_args).and_yield(pid_file)
allow(pid_file).to receive(:read).and_return('')
allow(pid_file).to receive(:flock).with(any_args)
allow(pid_file).to receive(:write).with(12345).and_return(true)
allow(pid_file).to receive(:flush)
allow(progress).to receive(:puts).at_least(:once)
allow_next_instance_of(::Backup::Manager) do |instance|
Array(tasks_name).each do |task|
allow(instance).to receive(:run_restore_task).with(task)
end
end
end
it 'locks the PID file' do
expect(pid_file).to receive(:flock).with(File::LOCK_EX)
expect(pid_file).to receive(:flock).with(File::LOCK_UN)
run_rake_task(rake_task)
end
it 'deletes the PID file and logs a message' do
expect(File).to receive(:delete).with(backup_restore_pid_path)
expect(progress).to receive(:puts).with(/-- Deleting backup and restore lock file/)
run_rake_task(rake_task)
end
end
end
end
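# For orientation, the stubs above model a PID-file lock around backup and restore tasks.
# A minimal sketch of that pattern, under the same assumptions the examples make
# (hypothetical helper, not the actual Backup::Manager implementation):
def with_backup_lock(pid_path)
  File.open(pid_path, File::RDWR | File::CREAT, 0o644) do |file|
    file.flock(File::LOCK_EX)
    other_pid = file.read

    unless other_pid.empty?
      puts "Backup and restore in progress: another task is running (PID #{other_pid}). " \
           "Remove #{pid_path} manually if no other process is running."
      exit 1 # SystemExit, as asserted in the examples above
    end

    file.write(Process.pid)
    file.flush

    begin
      yield
    ensure
      puts '-- Deleting backup and restore lock file'
      File.delete(pid_path)
      file.flock(File::LOCK_UN)
    end
  end
end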
describe 'backup_restore' do
context 'gitlab version' do
context 'with gitlab version' do
before do
allow(Dir).to receive(:glob).and_return(['1_gitlab_backup.tar'])
allow(File).to receive(:exist?).and_return(true)
allow(File).to receive(:exist?).with(backup_restore_pid_path).and_return(false)
allow(Kernel).to receive(:system).and_return(true)
allow(FileUtils).to receive(:cp_r).and_return(true)
allow(FileUtils).to receive(:mv).and_return(true)
@ -72,7 +158,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:gitlab_version) { Gitlab::VERSION }
context 'restore with matching gitlab version' do
context 'when restore matches gitlab version' do
before do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: gitlab_version })
@ -124,6 +210,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
backup_tar = Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar')).last
allow(Dir).to receive(:glob).and_return([backup_tar])
allow(File).to receive(:exist?).and_return(true)
allow(File).to receive(:exist?).with(backup_restore_pid_path).and_return(false)
allow(Kernel).to receive(:system).and_return(true)
allow(FileUtils).to receive(:cp_r).and_return(true)
allow(FileUtils).to receive(:mv).and_return(true)
@ -173,11 +260,11 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
FileUtils.touch(File.join(path, "dummy.txt"))
end
context 'project uses custom_hooks and successfully creates backup' do
context 'when project uses custom_hooks and successfully creates backup' do
it 'creates custom_hooks.tar and project bundle' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(%W{tar -tvf #{backup_tar}})
tar_contents, exit_status = Gitlab::Popen.popen(%W[tar -tvf #{backup_tar}])
expect(exit_status).to eq(0)
expect(tar_contents).to match(user_backup_path)
@ -196,7 +283,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
context 'specific backup tasks' do
context 'with specific backup tasks' do
it 'prints a progress message to stdout' do
backup_tasks.each do |task|
expect { run_rake_task("gitlab:backup:#{task}:create") }.to output(/Dumping /).to_stdout_from_any_process
@ -264,18 +351,18 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
context 'tar creation' do
context 'archive file permissions' do
context 'with tar creation' do
context 'with archive file permissions' do
it 'sets correct permissions on the tar file' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
expect(File.exist?(backup_tar)).to be_truthy
expect(File).to exist(backup_tar)
expect(File::Stat.new(backup_tar).mode.to_s(8)).to eq('100600')
end
context 'with custom archive_permissions' do
before do
allow(Gitlab.config.backup).to receive(:archive_permissions).and_return(0651)
allow(Gitlab.config.backup).to receive(:archive_permissions).and_return(0o651)
end
it 'uses the custom permissions' do
@ -290,11 +377,21 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz registry.tar.gz packages.tar.gz}
%W[
tar -tvf #{backup_tar}
db
uploads.tar.gz
repositories
builds.tar.gz
artifacts.tar.gz
pages.tar.gz
lfs.tar.gz
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
]
)
puts "CONTENT: #{tar_contents}"
expect(exit_status).to eq(0)
expect(tar_contents).to match('db')
expect(tar_contents).to match('uploads.tar.gz')
@ -306,27 +403,31 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(tar_contents).to match('terraform_state.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
expect(tar_contents).not_to match(
  %r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$}
)
end
it 'deletes temp directories' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
temp_dirs = Dir.glob(
File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,pages,lfs,terraform_state,registry,packages}')
File.join(
Gitlab.config.backup.path,
'{db,repositories,uploads,builds,artifacts,pages,lfs,terraform_state,registry,packages}'
)
)
expect(temp_dirs).to be_empty
end
context 'registry disabled' do
context 'when registry is disabled' do
let(:enable_registry) { false }
it 'does not create registry.tar.gz' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar}}
%W[tar -tvf #{backup_tar}]
)
expect(exit_status).to eq(0)
@ -335,7 +436,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
context 'multiple repository storages' do
context 'with multiple repository storages' do
include StubConfiguration
let(:default_storage_name) { 'default' }
@ -344,10 +445,10 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
stub_storage_settings( second_storage_name => {
'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
'path' => TestEnv::SECOND_STORAGE_PATH
})
stub_storage_settings(second_storage_name => {
'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
'path' => TestEnv::SECOND_STORAGE_PATH
})
end
shared_examples 'includes repositories in all repository storages' do
@ -368,27 +469,27 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar} repositories}
%W[tar -tvf #{backup_tar} repositories]
)
tar_lines = tar_contents.lines.grep(/\.bundle/)
expect(exit_status).to eq(0)
[
"#{project_a.disk_path}/.+/001.bundle",
"#{project_a.disk_path}.wiki/.+/001.bundle",
"#{project_a.disk_path}.design/.+/001.bundle",
"#{project_b.disk_path}/.+/001.bundle",
"#{project_snippet_a.disk_path}/.+/001.bundle",
"#{project_snippet_b.disk_path}/.+/001.bundle"
%W[
#{project_a.disk_path}/.+/001.bundle
#{project_a.disk_path}.wiki/.+/001.bundle
#{project_a.disk_path}.design/.+/001.bundle
#{project_b.disk_path}/.+/001.bundle
#{project_snippet_a.disk_path}/.+/001.bundle
#{project_snippet_b.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).to include(a_string_matching(repo_name))
end
end
end
context 'no concurrency' do
context 'with no concurrency' do
it_behaves_like 'includes repositories in all repository storages'
end
@ -400,7 +501,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
it_behaves_like 'includes repositories in all repository storages'
end
context 'REPOSITORIES_STORAGES set' do
context 'when REPOSITORIES_STORAGES is set' do
before do
stub_env('REPOSITORIES_STORAGES', default_storage_name)
end
@ -422,25 +523,25 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar} repositories}
%W[tar -tvf #{backup_tar} repositories]
)
tar_lines = tar_contents.lines.grep(/\.bundle/)
expect(exit_status).to eq(0)
[
"#{project_a.disk_path}/.+/001.bundle",
"#{project_a.disk_path}.wiki/.+/001.bundle",
"#{project_a.disk_path}.design/.+/001.bundle",
"#{project_snippet_a.disk_path}/.+/001.bundle"
%W[
#{project_a.disk_path}/.+/001.bundle
#{project_a.disk_path}.wiki/.+/001.bundle
#{project_a.disk_path}.design/.+/001.bundle
#{project_snippet_a.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).to include(a_string_matching(repo_name))
end
[
"#{project_b.disk_path}/.+/001.bundle",
"#{project_snippet_b.disk_path}/.+/001.bundle"
%W[
#{project_b.disk_path}/.+/001.bundle
#{project_snippet_b.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).not_to include(a_string_matching(repo_name))
end
@ -448,7 +549,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
context 'concurrency settings' do
context 'with concurrency settings' do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
@ -463,13 +564,18 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(::Backup::Repositories).to receive(:new)
.with(anything, strategy: anything, storages: [], paths: [])
.and_call_original
expect(::Backup::GitalyBackup).to receive(:new).with(anything, max_parallelism: 5, storage_parallelism: 2, incremental: false).and_call_original
expect(::Backup::GitalyBackup).to receive(:new).with(
anything,
max_parallelism: 5,
storage_parallelism: 2,
incremental: false
).and_call_original
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
end
end
context 'CRON env is set' do
context 'when CRON env is set' do
before do
stub_env('CRON', '1')
end
@ -481,7 +587,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
# backup_create task
describe "Skipping items in a backup" do
describe "skipping items in a backup" do
before do
stub_env('SKIP', 'an-unknown-type,repositories,uploads,anotherunknowntype')
@ -492,7 +598,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, _exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz registry.tar.gz packages.tar.gz}
%W[
tar -tvf #{backup_tar}
db
uploads.tar.gz
repositories
builds.tar.gz
artifacts.tar.gz
pages.tar.gz
lfs.tar.gz
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
]
)
expect(tar_contents).to match('db/')
@ -515,7 +633,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
.to receive(:invoke).and_return(true)
expect_next_instance_of(::Backup::Manager) do |instance|
(backup_types - %w{repositories uploads}).each do |subtask|
(backup_types - %w[repositories uploads]).each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
end
expect(instance).not_to receive(:run_restore_task)

View File

@ -13,7 +13,9 @@ RSpec.describe Tooling::Danger::Specs do
include_context "with dangerfile"
let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
let(:fake_project_helper) { double('fake-project-helper', helper: fake_helper).tap { |h| h.class.include(Tooling::Danger::ProjectHelper) } }
let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') }
let(:filename) { 'spec/foo_spec.rb' }
let(:file_lines) do
[
" describe 'foo' do",
@ -32,6 +34,7 @@ RSpec.describe Tooling::Danger::Specs do
let(:matching_lines) do
[
"+ expect(foo).to match(['should not error'])",
"+ expect(foo).to match(['bar'])",
"+ expect(foo).to match(['bar'])",
"+ expect(foo).to match ['bar']",
@ -42,31 +45,28 @@ RSpec.describe Tooling::Danger::Specs do
]
end
let(:changed_lines) do
[
" expect(foo).to match(['bar'])",
" expect(foo).to match(['bar'])",
" expect(foo).to match ['bar']",
" expect(foo).to eq(['bar'])",
" expect(foo).to eq ['bar']",
"- expect(foo).to match(['bar'])",
"- expect(foo).to match(['bar'])",
"- expect(foo).to match ['bar']",
"- expect(foo).to eq(['bar'])",
"- expect(foo).to eq ['bar']",
"+ expect(foo).to eq([])"
] + matching_lines
end
subject(:specs) { fake_danger.new(helper: fake_helper) }
before do
allow(specs).to receive(:project_helper).and_return(fake_project_helper)
end
describe '#add_suggestions_for_match_with_array' do
let(:filename) { 'spec/foo_spec.rb' }
before do
expect(specs).to receive(:added_line_matching_match_with_array).and_return(matching_lines)
allow(specs.project_helper).to receive(:file_lines).and_return(file_lines)
end
it 'adds suggestions at the correct lines' do
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 2)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 4)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 6)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 7)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 8)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 9)
expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 10)
specs.add_suggestions_for_match_with_array(filename)
end
allow(specs.helper).to receive(:changed_lines).with(filename).and_return(matching_lines)
allow(specs.project_helper).to receive(:file_lines).and_return(file_lines)
end
describe '#changed_specs_files' do
@ -105,30 +105,128 @@ RSpec.describe Tooling::Danger::Specs do
end
end
describe '#added_line_matching_match_with_array' do
let(:filename) { 'spec/foo_spec.rb' }
describe '#add_suggestions_for_match_with_array' do
let(:template) do
<<~MARKDOWN
```suggestion
%<suggested_line>s
```
If the order of the result is not important, please consider using `match_array` to avoid flakiness.
MARKDOWN
end
it 'adds suggestions at the correct lines' do
[
{ suggested_line: " expect(foo).to match_array(['bar'])", number: 2 },
{ suggested_line: " expect(foo).to match_array(['bar'])", number: 4 },
{ suggested_line: " expect(foo).to match_array ['bar']", number: 6 },
{ suggested_line: " expect(foo).to match_array(['bar'])", number: 7 },
{ suggested_line: " expect(foo).to match_array ['bar']", number: 8 },
{ suggested_line: " expect(foo).to(match_array(['bar']))", number: 9 },
{ suggested_line: " expect(foo).to(match_array(['bar']))", number: 10 }
].each do |test_case|
comment = format(template, suggested_line: test_case[:suggested_line])
expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number])
end
specs.add_suggestions_for_match_with_array(filename)
end
end
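# Background for the suggestion above (illustrative expectations, not fixtures used by this spec):
# `eq`/`match` against an array literal is order-sensitive, while `match_array` is not.
#
#   expect(names).to eq(%w[alpha beta])          # fails if the same elements come back reordered
#   expect(names).to match_array(%w[alpha beta]) # passes regardless of ordering, avoiding flakiness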
describe '#add_suggestions_for_project_factory_usage' do
let(:template) do
<<~MARKDOWN
```suggestion
%<suggested_line>s
```
Project creations are very slow. Use `let_it_be`, `build` or `build_stubbed` if possible.
See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage)
for background information and alternative options.
MARKDOWN
end
let(:file_lines) do
[
" let(:project) { create(:project) }",
" let_it_be(:project) { create(:project, :repository)",
" let!(:project) { create(:project) }",
" let(:var) { create(:project) }",
" let(:merge_request) { create(:merge_request, project: project)",
" context 'when merge request exists' do",
" it { is_expected.to be_success }",
" end",
" let!(:var) { create(:project) }",
" let(:project) { create(:thing) }",
" let(:project) { build(:project) }",
" let(:project) do",
" create(:project)",
" end",
" let(:project) { create(:project, :repository) }",
" str = 'let(:project) { create(:project) }'",
" let(:project) { create(:project_empty_repo) }",
" let(:project) { create(:project_broken_repo) }",
" let(:project) { create(:forked_project_with_submodules) }",
" let(:project) { create(:redmine_project) }",
" let(:project) { create(:jira_project) }",
" let(:project) { create(:prometheus_project) }",
" let(:project) { create(:project_with_design) }",
" let(:authorization) { create(:project_authorization) }"
]
end
let(:matching_lines) do
[
"+ let(:should_not_error) { create(:project) }",
"+ let(:project) { create(:project) }",
"+ let!(:project) { create(:project) }",
"+ let(:var) { create(:project) }",
"+ let!(:var) { create(:project) }",
"+ let(:project) { create(:project, :repository) }",
"+ let(:project) { create(:project_empty_repo) }",
"+ let(:project) { create(:project_broken_repo) }",
"+ let(:project) { create(:forked_project_with_submodules) }",
"+ let(:project) { create(:redmine_project) }",
"+ let(:project) { create(:jira_project) }",
"+ let(:project) { create(:prometheus_project) }",
"+ let(:project) { create(:project_with_design) }"
]
end
let(:changed_lines) do
[
" expect(foo).to match(['bar'])",
" expect(foo).to match(['bar'])",
" expect(foo).to match ['bar']",
" expect(foo).to eq(['bar'])",
" expect(foo).to eq ['bar']",
"- expect(foo).to match(['bar'])",
"- expect(foo).to match(['bar'])",
"- expect(foo).to match ['bar']",
"- expect(foo).to eq(['bar'])",
"- expect(foo).to eq ['bar']",
"+ expect(foo).to eq([])"
"+ line which doesn't exist in the file and should not cause an error",
"+ let_it_be(:project) { create(:project, :repository)",
"+ let(:project) { create(:thing) }",
"+ let(:project) do",
"+ create(:project)",
"+ end",
"+ str = 'let(:project) { create(:project) }'",
"+ let(:authorization) { create(:project_authorization) }"
] + matching_lines
end
before do
allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines)
end
it 'adds suggestions at the correct lines', :aggregate_failures do
[
{ suggested_line: " let_it_be(:project) { create(:project) }", number: 1 },
{ suggested_line: " let_it_be(:project) { create(:project) }", number: 3 },
{ suggested_line: " let_it_be(:var) { create(:project) }", number: 4 },
{ suggested_line: " let_it_be(:var) { create(:project) }", number: 9 },
{ suggested_line: " let_it_be(:project) { create(:project, :repository) }", number: 15 },
{ suggested_line: " let_it_be(:project) { create(:project_empty_repo) }", number: 17 },
{ suggested_line: " let_it_be(:project) { create(:project_broken_repo) }", number: 18 },
{ suggested_line: " let_it_be(:project) { create(:forked_project_with_submodules) }", number: 19 },
{ suggested_line: " let_it_be(:project) { create(:redmine_project) }", number: 20 },
{ suggested_line: " let_it_be(:project) { create(:jira_project) }", number: 21 },
{ suggested_line: " let_it_be(:project) { create(:prometheus_project) }", number: 22 },
{ suggested_line: " let_it_be(:project) { create(:project_with_design) }", number: 23 }
].each do |test_case|
comment = format(template, suggested_line: test_case[:suggested_line])
expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number])
end
it 'returns all lines using an array equality matcher' do
expect(specs.added_line_matching_match_with_array(filename)).to match_array(matching_lines)
specs.add_suggestions_for_project_factory_usage(filename)
end
end
end

View File

@ -139,6 +139,19 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
{ type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
event_class: PagesDomains::PagesDomainUpdatedEvent,
event_data: {
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
domain: 'somedomain.com'
},
caches: [
{ type: :project, id: 1 },
{ type: :namespace, id: 3 }
]
context 'when namespace based cache keys are duplicated' do
# de-dups namespace cache keys
it_behaves_like 'clears caches with',

View File

@ -6,14 +6,49 @@ module Tooling
SPEC_FILES_REGEX = 'spec/'
EE_PREFIX = 'ee/'
MATCH_WITH_ARRAY_REGEX = /(?<to>to\(?\s*)(?<matcher>match|eq)(?<expectation>[( ]?\[[^\]]+)/.freeze
SUGGEST_MR_COMMENT = <<~SUGGEST_COMMENT
MATCH_WITH_ARRAY_REPLACEMENT = '\k<to>match_array\k<expectation>'
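# Illustrative example (not part of this change): applied together, the regex and replacement above
# rewrite `expect(foo).to match(['bar'])` as `expect(foo).to match_array(['bar'])`.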
PROJECT_FACTORIES = %w[
:project
:project_empty_repo
:project_broken_repo
:forked_project_with_submodules
:redmine_project
:jira_project
:prometheus_project
:project_with_design
].freeze
PROJECT_FACTORY_REGEX = /
^\+? # Start of the line, which may or may not have a `+`
(?<head>\s*) # Zero or more leading whitespace characters, captured in a group named head
let!? # Literal `let` which may or may not end in `!`
(?<tail> # capture group named tail
\([^)]+\) # Two parentheses with any non-parenthesis characters between them
\s*\{\s* # Opening curly brace surrounded by zero or more whitespace characters
create\( # Literal `create(`
(?:#{PROJECT_FACTORIES.join('|')}) # Any of the project factory names
\W # Non-word character, to avoid matching factories like :project_authorization
) # end capture group named tail
/x.freeze
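# Illustrative example (not part of this change): with the replacement below, an added line such as
# `let(:project) { create(:project) }` yields the suggestion `let_it_be(:project) { create(:project) }`.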
PROJECT_FACTORY_REPLACEMENT = '\k<head>let_it_be\k<tail>'
SUGGESTION_MARKDOWN = <<~SUGGESTION_MARKDOWN
```suggestion
%<suggested_line>s
```
SUGGESTION_MARKDOWN
MATCH_WITH_ARRAY_SUGGESTION = <<~SUGGEST_COMMENT
If the order of the result is not important, please consider using `match_array` to avoid flakiness.
SUGGEST_COMMENT
PROJECT_FACTORY_SUGGESTION = <<~SUGGEST_COMMENT
Project creations are very slow. Use `let_it_be`, `build` or `build_stubbed` if possible.
See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage)
for background information and alternative options.
SUGGEST_COMMENT
def changed_specs_files(ee: :include)
changed_files = helper.all_changed_files
folder_prefix =
@ -30,29 +65,58 @@ module Tooling
end
def add_suggestions_for_match_with_array(filename)
added_lines = added_line_matching_match_with_array(filename)
add_suggestion(
filename,
MATCH_WITH_ARRAY_REGEX,
MATCH_WITH_ARRAY_REPLACEMENT,
MATCH_WITH_ARRAY_SUGGESTION
)
end
def add_suggestions_for_project_factory_usage(filename)
add_suggestion(
filename,
PROJECT_FACTORY_REGEX,
PROJECT_FACTORY_REPLACEMENT,
PROJECT_FACTORY_SUGGESTION
)
end
private
def added_lines_matching(filename, regex)
helper.changed_lines(filename).grep(/\A\+ /).grep(regex)
end
def add_suggestion(filename, regex, replacement, comment_text)
added_lines = added_lines_matching(filename, regex)
return if added_lines.empty?
spec_file_lines = project_helper.file_lines(filename)
added_lines.each_with_object([]) do |added_line, processed_line_numbers|
line_number = find_line_number(spec_file_lines, added_line.delete_prefix('+'), exclude_indexes: processed_line_numbers)
next unless line_number
processed_line_numbers << line_number
markdown(format(SUGGEST_MR_COMMENT, suggested_line: spec_file_lines[line_number].gsub(MATCH_WITH_ARRAY_REGEX, '\k<to>match_array\k<expectation>')), file: filename, line: line_number.succ)
text = format(comment(comment_text), suggested_line: spec_file_lines[line_number].gsub(regex, replacement))
markdown(text, file: filename, line: line_number.succ)
end
end
def added_line_matching_match_with_array(filename)
helper.changed_lines(filename).grep(/\A\+ /).grep(MATCH_WITH_ARRAY_REGEX)
def comment(comment_text)
<<~COMMENT_BODY.chomp
#{SUGGESTION_MARKDOWN}
#{comment_text}
COMMENT_BODY
end
private
def find_line_number(file_lines, searched_line, exclude_indexes: [])
file_lines.each_with_index do |file_line, index|
next if exclude_indexes.include?(index)
break index if file_line == searched_line
_, index = file_lines.each_with_index.find do |file_line, index|
file_line == searched_line && !exclude_indexes.include?(index)
end
index
end
end
end
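# A hedged sketch of how these helpers could be wired up from a Dangerfile (the actual Dangerfile
# is not part of this diff; `specs` stands for the Danger plugin instance that includes this module):
specs.changed_specs_files.each do |filename|
  specs.add_suggestions_for_match_with_array(filename)
  specs.add_suggestions_for_project_factory_usage(filename)
end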