Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-05-05 09:10:02 +00:00
parent 78bc17257c
commit 1c568d834d
37 changed files with 502 additions and 383 deletions

View File

@ -70,7 +70,7 @@ Are there any other stages or teams involved that need to be kept in the loop?
- [ ] Announce on the issue an estimated time this will be enabled on GitLab.com - [ ] Announce on the issue an estimated time this will be enabled on GitLab.com
- [ ] Check if the feature flag change needs to be accompagnied with a - [ ] Check if the feature flag change needs to be accompanied with a
[change management [change management
issue](https://about.gitlab.com/handbook/engineering/infrastructure/change-management/#feature-flags-and-the-change-management-process). Cross issue](https://about.gitlab.com/handbook/engineering/infrastructure/change-management/#feature-flags-and-the-change-management-process). Cross
link the issue here if it does. link the issue here if it does.

View File

@ -15,6 +15,7 @@ Please link to the respective test case in the testcases project
- [ ] Verify the tags to ensure it runs on the desired test environments. - [ ] Verify the tags to ensure it runs on the desired test environments.
- [ ] If this MR has a dependency on another MR, such as a GitLab QA MR, specify the order in which the MRs should be merged. - [ ] If this MR has a dependency on another MR, such as a GitLab QA MR, specify the order in which the MRs should be merged.
- [ ] (If applicable) Create a follow-up issue to document [the special setup](https://docs.gitlab.com/ee/development/testing_guide/end_to_end/running_tests_that_require_special_setup.html) necessary to run the test: ISSUE_LINK - [ ] (If applicable) Create a follow-up issue to document [the special setup](https://docs.gitlab.com/ee/development/testing_guide/end_to_end/running_tests_that_require_special_setup.html) necessary to run the test: ISSUE_LINK
- [ ] If the test requires an admin's personal access token, ensure that the test passes on your local with and without the `GITLAB_QA_ADMIN_ACCESS_TOKEN` provided.
<!-- Base labels. --> <!-- Base labels. -->
/label ~"Quality" ~"QA" ~test /label ~"Quality" ~"QA" ~test

View File

@ -0,0 +1,45 @@
<script>
import { GlBadge } from '@gitlab/ui';
import { s__ } from '~/locale';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '../constants';
const badge = {
[INSTANCE_TYPE]: {
variant: 'success',
text: s__('Runners|shared'),
},
[GROUP_TYPE]: {
variant: 'success',
text: s__('Runners|group'),
},
[PROJECT_TYPE]: {
variant: 'info',
text: s__('Runners|specific'),
},
};
export default {
components: {
GlBadge,
},
props: {
type: {
type: String,
required: true,
},
},
computed: {
variant() {
return badge[this.type]?.variant;
},
text() {
return badge[this.type]?.text;
},
},
};
</script>
<template>
<gl-badge v-if="text" :variant="variant" v-bind="$attrs">
{{ text }}
</gl-badge>
</template>

View File

@ -0,0 +1,11 @@
import { s__ } from '~/locale';

// Title template for the runner details page; %{runner_id} is
// interpolated with the runner's numeric id at render time.
export const I18N_DETAILS_TITLE = s__('Runners|Runner #%{runner_id}');

// Entity name passed to convertToGraphQLId to build a runner's
// global ID (e.g. gid://gitlab/Ci::Runner/<id>).
export const RUNNER_ENTITY_TYPE = 'Ci::Runner';

// Values of the CiRunnerType GraphQL enum.
export const INSTANCE_TYPE = 'INSTANCE_TYPE';
export const GROUP_TYPE = 'GROUP_TYPE';
export const PROJECT_TYPE = 'PROJECT_TYPE';

View File

@ -0,0 +1,6 @@
# Fetches a single CI runner by its global ID (CiRunnerID).
# runnerType is a CiRunnerType enum value (INSTANCE_TYPE,
# GROUP_TYPE or PROJECT_TYPE — see the runner constants module).
query getRunner($id: CiRunnerID!) {
runner(id: $id) {
id
runnerType
}
}

View File

@ -1,3 +0,0 @@
import { s__ } from '~/locale';
export const I18N_TITLE = s__('Runners|Runner #%{runner_id}');

View File

@ -1,7 +1,11 @@
import Vue from 'vue'; import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import RunnerDetailsApp from './runner_details_app.vue'; import RunnerDetailsApp from './runner_details_app.vue';
export const initRunnerDetail = (selector = '#js-runner-detail') => { Vue.use(VueApollo);
export const initRunnerDetail = (selector = '#js-runner-details') => {
const el = document.querySelector(selector); const el = document.querySelector(selector);
if (!el) { if (!el) {
@ -10,8 +14,18 @@ export const initRunnerDetail = (selector = '#js-runner-detail') => {
const { runnerId } = el.dataset; const { runnerId } = el.dataset;
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(
{},
{
assumeImmutableResults: true,
},
),
});
return new Vue({ return new Vue({
el, el,
apolloProvider,
render(h) { render(h) {
return h(RunnerDetailsApp, { return h(RunnerDetailsApp, {
props: { props: {

View File

@ -1,9 +1,15 @@
<script> <script>
import { I18N_TITLE } from './constants'; import { convertToGraphQLId } from '~/graphql_shared/utils';
import RunnerTypeBadge from '../components/runner_type_badge.vue';
import { I18N_DETAILS_TITLE, RUNNER_ENTITY_TYPE } from '../constants';
import getRunnerQuery from '../graphql/get_runner.query.graphql';
export default { export default {
components: {
RunnerTypeBadge,
},
i18n: { i18n: {
I18N_TITLE, I18N_DETAILS_TITLE,
}, },
props: { props: {
runnerId: { runnerId: {
@ -11,10 +17,27 @@ export default {
required: true, required: true,
}, },
}, },
data() {
return {
runner: {},
};
},
apollo: {
runner: {
query: getRunnerQuery,
variables() {
return {
id: convertToGraphQLId(RUNNER_ENTITY_TYPE, this.runnerId),
};
},
},
},
}; };
</script> </script>
<template> <template>
<h2 class="page-title"> <h2 class="page-title">
{{ sprintf($options.i18n.I18N_TITLE, { runner_id: runnerId }) }} {{ sprintf($options.i18n.I18N_DETAILS_TITLE, { runner_id: runnerId }) }}
<runner-type-badge v-if="runner.runnerType" :type="runner.runnerType" />
</h2> </h2>
</template> </template>

View File

@ -1,7 +1,7 @@
#import "~/graphql_shared/fragments/user.fragment.graphql" #import "~/graphql_shared/fragments/user.fragment.graphql"
#import "~/graphql_shared/fragments/user_availability.fragment.graphql" #import "~/graphql_shared/fragments/user_availability.fragment.graphql"
query issueParticipants($fullPath: ID!, $iid: String!) { query issueAssignees($fullPath: ID!, $iid: String!) {
workspace: project(fullPath: $fullPath) { workspace: project(fullPath: $fullPath) {
__typename __typename
issuable: issue(iid: $iid) { issuable: issue(iid: $iid) {

View File

@ -88,6 +88,9 @@ export default {
}, },
}, },
searchUsers: { searchUsers: {
// TODO Remove error policy
// https://gitlab.com/gitlab-org/gitlab/-/issues/329750
errorPolicy: 'all',
query: searchUsers, query: searchUsers,
variables() { variables() {
return { return {
@ -97,10 +100,26 @@ export default {
}; };
}, },
update(data) { update(data) {
return data.workspace?.users?.nodes.map(({ user }) => user) || []; // TODO Remove null filter (BE fix required)
// https://gitlab.com/gitlab-org/gitlab/-/issues/329750
return data.workspace?.users?.nodes.filter((x) => x).map(({ user }) => user) || [];
}, },
debounce: ASSIGNEES_DEBOUNCE_DELAY, debounce: ASSIGNEES_DEBOUNCE_DELAY,
error() { error({ graphQLErrors }) {
// TODO This error suppression is temporary (BE fix required)
// https://gitlab.com/gitlab-org/gitlab/-/issues/329750
if (
graphQLErrors.length === 1 &&
graphQLErrors[0]?.message === 'Cannot return null for non-nullable field GroupMember.user'
) {
// eslint-disable-next-line no-console
console.error(
"Suppressing the error 'Cannot return null for non-nullable field GroupMember.user'. Please see https://gitlab.com/gitlab-org/gitlab/-/issues/329750",
);
this.isSearching = false;
return;
}
this.$emit('error'); this.$emit('error');
this.isSearching = false; this.isSearching = false;
}, },
@ -117,15 +136,20 @@ export default {
if (!this.participants) { if (!this.participants) {
return []; return [];
} }
const mergedSearchResults = this.participants.reduce((acc, current) => {
if ( const filteredParticipants = this.participants.filter(
!acc.some((user) => current.username === user.username) && (user) => user.name.includes(this.search) || user.username.includes(this.search),
(current.name.includes(this.search) || current.username.includes(this.search)) );
) {
acc.push(current); // TODO this de-duplication is temporary (BE fix required)
} // https://gitlab.com/gitlab-org/gitlab/-/issues/327822
return acc; const mergedSearchResults = filteredParticipants
}, this.searchUsers); .concat(this.searchUsers)
.reduce(
(acc, current) => (acc.some((user) => current.id === user.id) ? acc : [...acc, current]),
[],
);
return this.moveCurrentUserToStart(mergedSearchResults); return this.moveCurrentUserToStart(mergedSearchResults);
}, },
isSearchEmpty() { isSearchEmpty() {

View File

@ -24,7 +24,7 @@ class Release < ApplicationRecord
before_create :set_released_at before_create :set_released_at
validates :project, :tag, presence: true validates :project, :tag, presence: true
validates :description, length: { maximum: Gitlab::Database::MAX_TEXT_SIZE_LIMIT }, if: :should_validate_description_length? validates :description, length: { maximum: Gitlab::Database::MAX_TEXT_SIZE_LIMIT }, if: :description_changed?
validates_associated :milestone_releases, message: -> (_, obj) { obj[:value].map(&:errors).map(&:full_messages).join(",") } validates_associated :milestone_releases, message: -> (_, obj) { obj[:value].map(&:errors).map(&:full_messages).join(",") }
validates :links, nested_attributes_duplicates: { scope: :release, child_attributes: %i[name url filepath] } validates :links, nested_attributes_duplicates: { scope: :release, child_attributes: %i[name url filepath] }
@ -102,11 +102,6 @@ class Release < ApplicationRecord
private private
def should_validate_description_length?
description_changed? &&
::Feature.enabled?(:validate_release_description_length, project, default_enabled: :yaml)
end
def actual_sha def actual_sha
sha || actual_tag&.dereferenced_target sha || actual_tag&.dereferenced_target
end end

View File

@ -5,7 +5,7 @@
- add_to_breadcrumbs _('Runners'), admin_runners_path - add_to_breadcrumbs _('Runners'), admin_runners_path
- if Feature.enabled?(:runner_detailed_view_vue_ui, current_user, default_enabled: :yaml) - if Feature.enabled?(:runner_detailed_view_vue_ui, current_user, default_enabled: :yaml)
#js-runner-detail{ data: {runner_id: @runner.id} } #js-runner-details{ data: {runner_id: @runner.id} }
- else - else
%h2.page-title %h2.page-title
= s_('Runners|Runner #%{runner_id}' % { runner_id: @runner.id }) = s_('Runners|Runner #%{runner_id}' % { runner_id: @runner.id })

View File

@ -3,21 +3,30 @@ module LimitedCapacity
class JobTracker # rubocop:disable Scalability/IdempotentWorker class JobTracker # rubocop:disable Scalability/IdempotentWorker
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
LUA_REGISTER_SCRIPT = <<~EOS
local set_key, element, max_elements = KEYS[1], ARGV[1], ARGV[2]
if redis.call("scard", set_key) < tonumber(max_elements) then
redis.call("sadd", set_key, element)
return true
end
return false
EOS
def initialize(namespace) def initialize(namespace)
@namespace = namespace @namespace = namespace
end end
def register(jid) def register(jid, max_jids)
_added, @count = with_redis_pipeline do |redis| with_redis do |redis|
register_job_keys(redis, jid) redis.eval(LUA_REGISTER_SCRIPT, keys: [counter_key], argv: [jid, max_jids])
get_job_count(redis) end.present?
end
end end
def remove(jid) def remove(jid)
_removed, @count = with_redis_pipeline do |redis| with_redis do |redis|
remove_job_keys(redis, jid) remove_job_keys(redis, jid)
get_job_count(redis)
end end
end end
@ -25,14 +34,13 @@ module LimitedCapacity
completed_jids = Gitlab::SidekiqStatus.completed_jids(running_jids) completed_jids = Gitlab::SidekiqStatus.completed_jids(running_jids)
return unless completed_jids.any? return unless completed_jids.any?
_removed, @count = with_redis_pipeline do |redis| with_redis do |redis|
remove_job_keys(redis, completed_jids) remove_job_keys(redis, completed_jids)
get_job_count(redis)
end end
end end
def count def count
@count ||= with_redis { |redis| get_job_count(redis) } with_redis { |redis| redis.scard(counter_key) }
end end
def running_jids def running_jids
@ -49,14 +57,6 @@ module LimitedCapacity
"worker:#{namespace.to_s.underscore}:running" "worker:#{namespace.to_s.underscore}:running"
end end
def get_job_count(redis)
redis.scard(counter_key)
end
def register_job_keys(redis, keys)
redis.sadd(counter_key, keys)
end
def remove_job_keys(redis, keys) def remove_job_keys(redis, keys)
redis.srem(counter_key, keys) redis.srem(counter_key, keys)
end end
@ -64,11 +64,5 @@ module LimitedCapacity
def with_redis(&block) def with_redis(&block)
Gitlab::Redis::Queues.with(&block) # rubocop: disable CodeReuse/ActiveRecord Gitlab::Redis::Queues.with(&block) # rubocop: disable CodeReuse/ActiveRecord
end end
def with_redis_pipeline(&block)
with_redis do |redis|
redis.pipelined(&block)
end
end
end end
end end

View File

@ -55,26 +55,14 @@ module LimitedCapacity
def perform_with_capacity(*args) def perform_with_capacity(*args)
worker = self.new worker = self.new
worker.remove_failed_jobs worker.remove_failed_jobs
worker.report_prometheus_metrics(*args)
required_jobs_count = worker.required_jobs_count(*args)
arguments = Array.new(required_jobs_count) { args } arguments = Array.new(worker.max_running_jobs) { args }
self.bulk_perform_async(arguments) # rubocop:disable Scalability/BulkPerformWithContext self.bulk_perform_async(arguments) # rubocop:disable Scalability/BulkPerformWithContext
end end
end end
def perform(*args) def perform(*args)
return unless has_capacity? perform_registered(*args) if job_tracker.register(jid, max_running_jobs)
job_tracker.register(jid)
report_running_jobs_metrics
perform_work(*args)
rescue StandardError => exception
raise
ensure
job_tracker.remove(jid)
report_prometheus_metrics(*args)
re_enqueue(*args) unless exception
end end
def perform_work(*args) def perform_work(*args)
@ -89,44 +77,33 @@ module LimitedCapacity
raise NotImplementedError raise NotImplementedError
end end
def has_capacity?
remaining_capacity > 0
end
def remaining_capacity
[
max_running_jobs - running_jobs_count - self.class.queue_size,
0
].max
end
def has_work?(*args)
remaining_work_count(*args) > 0
end
def remove_failed_jobs def remove_failed_jobs
job_tracker.clean_up job_tracker.clean_up
end end
def report_prometheus_metrics(*args) def report_prometheus_metrics(*args)
report_running_jobs_metrics report_running_jobs_metrics
remaining_work_gauge.set(prometheus_labels, remaining_work_count(*args)) set_metric(:remaining_work_gauge, remaining_work_count(*args))
max_running_jobs_gauge.set(prometheus_labels, max_running_jobs) set_metric(:max_running_jobs_gauge, max_running_jobs)
end
def report_running_jobs_metrics
running_jobs_gauge.set(prometheus_labels, running_jobs_count)
end
def required_jobs_count(*args)
[
remaining_work_count(*args),
remaining_capacity
].min
end end
private private
def perform_registered(*args)
report_running_jobs_metrics
perform_work(*args)
rescue StandardError => exception
raise
ensure
job_tracker.remove(jid)
report_prometheus_metrics(*args)
re_enqueue(*args) unless exception
end
def report_running_jobs_metrics
set_metric(:running_jobs_gauge, running_jobs_count)
end
def running_jobs_count def running_jobs_count
job_tracker.count job_tracker.count
end end
@ -138,32 +115,21 @@ module LimitedCapacity
end end
def re_enqueue(*args) def re_enqueue(*args)
return unless has_capacity? return unless remaining_work_count(*args) > 0
return unless has_work?(*args)
self.class.perform_async(*args) self.class.perform_async(*args)
end end
def running_jobs_gauge def set_metric(name, value)
strong_memoize(:running_jobs_gauge) do metrics = strong_memoize(:metrics) do
Gitlab::Metrics.gauge(:limited_capacity_worker_running_jobs, 'Number of running jobs') {
end running_jobs_gauge: Gitlab::Metrics.gauge(:limited_capacity_worker_running_jobs, 'Number of running jobs'),
max_running_jobs_gauge: Gitlab::Metrics.gauge(:limited_capacity_worker_max_running_jobs, 'Maximum number of running jobs'),
remaining_work_gauge: Gitlab::Metrics.gauge(:limited_capacity_worker_remaining_work_count, 'Number of jobs waiting to be enqueued')
}
end end
def max_running_jobs_gauge metrics[name].set({ worker: self.class.name }, value)
strong_memoize(:max_running_jobs_gauge) do
Gitlab::Metrics.gauge(:limited_capacity_worker_max_running_jobs, 'Maximum number of running jobs')
end
end
def remaining_work_gauge
strong_memoize(:remaining_work_gauge) do
Gitlab::Metrics.gauge(:limited_capacity_worker_remaining_work_count, 'Number of jobs waiting to be enqueued')
end
end
def prometheus_labels
{ worker: self.class.name }
end end
end end
end end

View File

@ -0,0 +1,5 @@
---
title: Validate release description length
merge_request: 60892
author:
type: changed

View File

@ -0,0 +1,5 @@
---
title: 'Github Importer: Add Cache to Pull Request Reviews importer'
merge_request: 60668
author:
type: changed

View File

@ -1,8 +0,0 @@
---
name: validate_release_description_length
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60380
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/329192
milestone: '13.12'
type: development
group: group::release
default_enabled: false

View File

@ -86,6 +86,7 @@ The generator takes three options:
- `--ee`: Indicates if the event is for EE. - `--ee`: Indicates if the event is for EE.
- `--category=CATEGORY`: Indicates the `category` of the event. - `--category=CATEGORY`: Indicates the `category` of the event.
- `--action=ACTION`: Indicates the `action` of the event. - `--action=ACTION`: Indicates the `action` of the event.
- `--force`: Overwrites the existing event definition, if one already exists.
```shell ```shell
bundle exec rails generate gitlab:snowplow_event_definition --category Groups::EmailCampaignsController --action click bundle exec rails generate gitlab:snowplow_event_definition --category Groups::EmailCampaignsController --action click

View File

@ -10460,6 +10460,30 @@ Status: `implemented`
Tiers: `premium`, `ultimate` Tiers: `premium`, `ultimate`
### `redis_hll_counters.epics_usage.g_project_management_users_awarding_epic_emoji_monthly`
Counts of MAU awarding emoji on epic
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/config/metrics/counts_28d/20210503011217_g_project_management_users_awarding_epic_emoji_monthly.yml)
Group: `group::product planning`
Status: `implemented`
Tiers: `premium`, `ultimate`
### `redis_hll_counters.epics_usage.g_project_management_users_awarding_epic_emoji_weekly`
Counts of WAU awarding emoji on epic
[YAML definition](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/config/metrics/counts_7d/20210503011355_g_project_management_users_awarding_epic_emoji_weekly.yml)
Group: `group::product planning`
Status: `implemented`
Tiers: `premium`, `ultimate`
### `redis_hll_counters.epics_usage.g_project_management_users_creating_epic_notes_monthly` ### `redis_hll_counters.epics_usage.g_project_management_users_creating_epic_notes_monthly`
Counts of MAU adding epic notes Counts of MAU adding epic notes

View File

@ -15,3 +15,7 @@ following compliance tools are available:
- [License Compliance](license_compliance/index.md): Search your project's dependencies for their - [License Compliance](license_compliance/index.md): Search your project's dependencies for their
licenses. This lets you determine if the licenses of your project's dependencies are compatible licenses. This lets you determine if the licenses of your project's dependencies are compatible
with your project's license. with your project's license.
- [Compliance framework labels](../project/settings/index.md#compliance-frameworks): Label your projects that have unique compliance requirements.
- [Compliance pipelines](../project/settings/index.md#compliance-pipeline-configuration): Ensure that needed compliance jobs are always run for compliance-labeled projects.
- [Audit Events](../../administration/audit_events.md): Get visibility into individual actions that have taken place in your GitLab instance, group, or project.
- [Audit Reports](../../administration/audit_reports.md): Create and access reports based on the audit events that have occurred. Use pre-built GitLab reports or the API to build your own.

View File

@ -14,11 +14,12 @@ module Gitlab
class_option :ee, type: :boolean, optional: true, default: false, desc: 'Indicates if event is for ee' class_option :ee, type: :boolean, optional: true, default: false, desc: 'Indicates if event is for ee'
class_option :category, type: :string, optional: false, desc: 'Category of the event' class_option :category, type: :string, optional: false, desc: 'Category of the event'
class_option :action, type: :string, optional: false, desc: 'Action of the event' class_option :action, type: :string, optional: false, desc: 'Action of the event'
class_option :force, type: :boolean, optional: true, default: false, desc: 'Overwrite existing definition'
def create_event_file def create_event_file
raise 'Event definition already exists' if definition_exists? raise "Event definition already exists at #{file_path}" if definition_exists? && !force_definition_override?
template "event_definition.yml", file_path template "event_definition.yml", file_path, force: force_definition_override?
end end
def distributions def distributions
@ -41,6 +42,10 @@ module Gitlab
options[:ee] options[:ee]
end end
def force_definition_override?
options[:force]
end
private private
def definition_exists? def definition_exists?

View File

@ -70,7 +70,7 @@ module Gitlab
end end
def pull_request_reviews(repo_name, iid) def pull_request_reviews(repo_name, iid)
with_rate_limit { octokit.pull_request_reviews(repo_name, iid) } each_object(:pull_request_reviews, repo_name, iid)
end end
# Returns the details of a GitHub repository. # Returns the details of a GitHub repository.

View File

@ -22,14 +22,18 @@ module Gitlab
:pull_requests_merged_by :pull_requests_merged_by
end end
def id_for_already_imported_cache(pr) def id_for_already_imported_cache(merge_request)
pr.number merge_request.id
end end
def each_object_to_import def each_object_to_import
project.merge_requests.with_state(:merged).find_each do |merge_request| project.merge_requests.with_state(:merged).find_each do |merge_request|
next if already_imported?(merge_request)
pull_request = client.pull_request(project.import_source, merge_request.iid) pull_request = client.pull_request(project.import_source, merge_request.iid)
yield(pull_request) yield(pull_request)
mark_as_imported(merge_request)
end end
end end
end end

View File

@ -22,17 +22,22 @@ module Gitlab
:pull_request_reviews :pull_request_reviews
end end
def id_for_already_imported_cache(review) def id_for_already_imported_cache(merge_request)
review.github_id merge_request.id
end end
def each_object_to_import def each_object_to_import
project.merge_requests.find_each do |merge_request| project.merge_requests.find_each do |merge_request|
reviews = client.pull_request_reviews(project.import_source, merge_request.iid) next if already_imported?(merge_request)
reviews.each do |review|
client
.pull_request_reviews(project.import_source, merge_request.iid)
.each do |review|
review.merge_request_id = merge_request.id review.merge_request_id = merge_request.id
yield(review) yield(review)
end end
mark_as_imported(merge_request)
end end
end end
end end

View File

@ -55,6 +55,14 @@
aggregation: daily aggregation: daily
feature_flag: track_epics_activity feature_flag: track_epics_activity
# emoji
- name: g_project_management_users_awarding_epic_emoji
category: epics_usage
redis_slot: project_management
aggregation: daily
feature_flag: track_epics_activity
# start date events # start date events
- name: g_project_management_users_setting_epic_start_date_as_fixed - name: g_project_management_users_setting_epic_start_date_as_fixed

View File

@ -0,0 +1,40 @@
import { GlBadge } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';

describe('RunnerTypeBadge', () => {
  let wrapper;

  // The badge rendered by the component under test.
  const findBadge = () => wrapper.findComponent(GlBadge);

  const createComponent = ({ props = {} } = {}) => {
    wrapper = shallowMount(RunnerTypeBadge, {
      propsData: {
        ...props,
      },
    });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  // Each known runner type maps to a fixed label and badge variant.
  // Description fixed: was 'displays $type runner with as "$text" …' with a
  // trailing space — garbled wording in the generated test names.
  it.each`
    type             | text          | variant
    ${INSTANCE_TYPE} | ${'shared'}   | ${'success'}
    ${GROUP_TYPE}    | ${'group'}    | ${'success'}
    ${PROJECT_TYPE}  | ${'specific'} | ${'info'}
  `('displays $type runner as "$text" with a $variant variant', ({ type, text, variant }) => {
    createComponent({ props: { type } });

    expect(findBadge().text()).toBe(text);
    expect(findBadge().props('variant')).toBe(variant);
  });

  it('does not display a badge when type is unknown', () => {
    createComponent({ props: { type: 'AN_UNKNOWN_VALUE' } });

    expect(findBadge().exists()).toBe(false);
  });
});

View File

@ -1,29 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
const mockRunnerId = '55';
describe('RunnerDetailsApp', () => {
let wrapper;
const createComponent = (props) => {
wrapper = shallowMount(RunnerDetailsApp, {
propsData: {
runnerId: mockRunnerId,
...props,
},
});
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
it('displays the runner id', () => {
expect(wrapper.text()).toContain('Runner #55');
});
});

View File

@ -0,0 +1,71 @@
import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
import { INSTANCE_TYPE } from '~/runner/constants';
import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';

const mockRunnerId = '55';

// Install VueApollo on a local Vue copy so the mocked apollo provider
// can be injected without polluting the global Vue instance.
const localVue = createLocalVue();
localVue.use(VueApollo);

describe('RunnerDetailsApp', () => {
let wrapper;
// jest.fn resolving the getRunner query; set in beforeEach, reset in afterEach.
let mockRunnerQuery;

const findRunnerTypeBadge = () => wrapper.findComponent(RunnerTypeBadge);

// Mounts the app with a mocked Apollo client and waits for the runner
// query to settle. mountFn defaults to shallowMount; pass `mount` when
// a child component's rendered text is asserted (e.g. the badge label).
const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
const handlers = [[getRunnerQuery, mockRunnerQuery]];

wrapper = mountFn(RunnerDetailsApp, {
localVue,
apolloProvider: createMockApollo(handlers),
propsData: {
runnerId: mockRunnerId,
...props,
},
});

return waitForPromises();
};

beforeEach(async () => {
// Default query response: an instance-type runner with a GraphQL global ID.
mockRunnerQuery = jest.fn().mockResolvedValue({
data: {
runner: {
id: `gid://gitlab/Ci::Runner/${mockRunnerId}`,
runnerType: INSTANCE_TYPE,
__typename: 'CiRunner',
},
},
});
});

afterEach(() => {
mockRunnerQuery.mockReset();
wrapper.destroy();
});

it('expect GraphQL ID to be requested', async () => {
await createComponentWithApollo();

expect(mockRunnerQuery).toHaveBeenCalledWith({ id: `gid://gitlab/Ci::Runner/${mockRunnerId}` });
});

it('displays the runner id', async () => {
await createComponentWithApollo();

expect(wrapper.text()).toContain('Runner #55');
});

it('displays the runner type', async () => {
// Full mount so the badge's own template renders its label text.
await createComponentWithApollo({ mountFn: mount });

expect(findRunnerTypeBadge().text()).toBe('shared');
});
});

View File

@ -380,6 +380,25 @@ export const subscriptionNullResponse = {
}, },
}; };
// Fixture users shared by the mock GraphQL responses below; their shape
// mirrors the user fragment (GraphQL global id, avatar, names, url, status).
const mockUser1 = {
id: 'gid://gitlab/User/1',
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
name: 'Administrator',
username: 'root',
webUrl: '/root',
status: null,
};

const mockUser2 = {
id: 'gid://gitlab/User/4',
avatarUrl: '/avatar2',
name: 'rookie',
username: 'rookie',
webUrl: 'rookie',
status: null,
};
export const searchResponse = { export const searchResponse = {
data: { data: {
workspace: { workspace: {
@ -387,24 +406,10 @@ export const searchResponse = {
users: { users: {
nodes: [ nodes: [
{ {
user: { user: mockUser1,
id: '1',
avatarUrl: '/avatar',
name: 'root',
username: 'root',
webUrl: 'root',
status: null,
},
}, },
{ {
user: { user: mockUser2,
id: '2',
avatarUrl: '/avatar2',
name: 'rookie',
username: 'rookie',
webUrl: 'rookie',
status: null,
},
}, },
], ],
}, },
@ -418,27 +423,13 @@ export const projectMembersResponse = {
__typename: 'Project', __typename: 'Project',
users: { users: {
nodes: [ nodes: [
{ // Remove nulls https://gitlab.com/gitlab-org/gitlab/-/issues/329750
user: { null,
id: 'gid://gitlab/User/1', null,
avatarUrl: // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon', mockUser1,
name: 'Administrator', mockUser1,
username: 'root', mockUser2,
webUrl: '/root',
status: null,
},
},
{
user: {
id: '2',
avatarUrl: '/avatar2',
name: 'rookie',
username: 'rookie',
webUrl: 'rookie',
status: null,
},
},
{ {
user: { user: {
id: 'gid://gitlab/User/2', id: 'gid://gitlab/User/2',
@ -468,15 +459,9 @@ export const participantsQueryResponse = {
iid: '1', iid: '1',
participants: { participants: {
nodes: [ nodes: [
{ // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
id: 'gid://gitlab/User/1', mockUser1,
avatarUrl: mockUser1,
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
name: 'Administrator',
username: 'root',
webUrl: '/root',
status: null,
},
{ {
id: 'gid://gitlab/User/2', id: 'gid://gitlab/User/2',
avatarUrl: avatarUrl:

View File

@ -32,6 +32,30 @@ RSpec.describe Gitlab::SnowplowEventDefinitionGenerator do
expect(::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!).to eq(sample_event) expect(::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!).to eq(sample_event)
end end
# Generator behavior when an event definition file already exists:
# --force overwrites it, otherwise the generator raises.
context 'event definition already exists' do
  before do
    stub_const('Gitlab::VERSION', '12.11.0-pre')
    described_class.new([], generator_options).invoke_all
  end

  # Description fixed: was 'overwrites event definition --force flag set to
  # true' (missing "when the … is").
  it 'overwrites the event definition when the --force flag is set to true' do
    sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event.yml'))).load_raw!

    # Re-run the generator at a different version; with --force the existing
    # definition file must be replaced instead of raising.
    stub_const('Gitlab::VERSION', '13.11.0-pre')
    described_class.new([], generator_options.merge('force' => true)).invoke_all

    event_definition_path = File.join(ce_temp_dir, 'groups__email_campaigns_controller_click.yml')
    event_data = ::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!

    expect(event_data).to eq(sample_event)
  end

  it 'raises an error when the --force flag is set to false' do
    expect { described_class.new([], generator_options.merge('force' => false)).invoke_all }
      .to raise_error(StandardError, /Event definition already exists at/)
  end
end
it 'creates EE event definition file using the template' do it 'creates EE event definition file using the template' do
sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event_ee.yml'))).load_raw! sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event_ee.yml'))).load_raw!

View File

@ -32,8 +32,9 @@ RSpec.describe Gitlab::GithubImport::Client do
it 'returns the pull request reviews' do it 'returns the pull request reviews' do
client = described_class.new('foo') client = described_class.new('foo')
expect(client.octokit).to receive(:pull_request_reviews).with('foo/bar', 999) expect(client)
expect(client).to receive(:with_rate_limit).and_yield .to receive(:each_object)
.with(:pull_request_reviews, 'foo/bar', 999)
client.pull_request_reviews('foo/bar', 999) client.pull_request_reviews('foo/bar', 999)
end end

View File

@ -23,12 +23,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
end end
describe '#id_for_already_imported_cache' do describe '#id_for_already_imported_cache' do
it { expect(subject.id_for_already_imported_cache(double(number: 1))).to eq(1) } it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end end
describe '#each_object_to_import' do describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do it 'fetchs the merged pull requests data' do
pull_request = double
create( create(
:merged_merge_request, :merged_merge_request,
iid: 999, iid: 999,
@ -36,12 +35,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
target_project: project target_project: project
) )
pull_request = double
allow(client) allow(client)
.to receive(:pull_request) .to receive(:pull_request)
.exactly(:once) # ensure to be cached on the second call
.with('http://somegithub.com', 999) .with('http://somegithub.com', 999)
.and_return(pull_request) .and_return(pull_request)
expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(pull_request) expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(pull_request)
subject.each_object_to_import {}
end end
end end
end end

View File

@ -23,12 +23,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end end
describe '#id_for_already_imported_cache' do describe '#id_for_already_imported_cache' do
it { expect(subject.id_for_already_imported_cache(double(github_id: 1))).to eq(1) } it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end end
describe '#each_object_to_import' do describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do it 'fetchs the merged pull requests data' do
merge_request = create(:merge_request, source_project: project) merge_request = create(
:merged_merge_request,
iid: 999,
source_project: project,
target_project: project
)
review = double review = double
expect(review) expect(review)
@ -37,10 +43,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
allow(client) allow(client)
.to receive(:pull_request_reviews) .to receive(:pull_request_reviews)
.exactly(:once) # ensure to be cached on the second call
.with('github/repo', merge_request.iid) .with('github/repo', merge_request.iid)
.and_return([review]) .and_return([review])
expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(review) expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(review)
subject.each_object_to_import {}
end end
end end
end end

View File

@ -48,18 +48,6 @@ RSpec.describe Release do
expect(release.errors.full_messages) expect(release.errors.full_messages)
.to include("Description is too long (maximum is #{Gitlab::Database::MAX_TEXT_SIZE_LIMIT} characters)") .to include("Description is too long (maximum is #{Gitlab::Database::MAX_TEXT_SIZE_LIMIT} characters)")
end end
context 'when validate_release_description_length feature flag is disabled' do
before do
stub_feature_flags(validate_release_description_length: false)
end
it 'does not create a validation error' do
release.validate
expect(release.errors.full_messages).to be_empty
end
end
end end
context 'when a release is tied to a milestone for another project' do context 'when a release is tied to a milestone for another project' do

View File

@ -71,7 +71,11 @@ module SnowplowHelpers
# expect_no_snowplow_event # expect_no_snowplow_event
# end # end
# end # end
def expect_no_snowplow_event def expect_no_snowplow_event(category: nil, action: nil, **kwargs)
if category && action
expect(Gitlab::Tracking).not_to have_received(:event).with(category, action, **kwargs) # rubocop:disable RSpec/ExpectGitlabTracking
else
expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
end end
end
end end

View File

@ -7,30 +7,30 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
described_class.new('namespace') described_class.new('namespace')
end end
let(:max_jids) { 10 }
describe '#register' do describe '#register' do
it 'adds jid to the set' do it 'adds jid to the set' do
job_tracker.register('a-job-id') expect(job_tracker.register('a-job-id', max_jids)). to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id') expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end end
it 'updates the counter' do it 'returns false if the jid was not added' do
expect { job_tracker.register('a-job-id') } max_jids = 2
.to change { job_tracker.count } %w[jid1 jid2].each do |jid|
.from(0) expect(job_tracker.register(jid, max_jids)).to be true
.to(1)
end end
it 'does it in only one Redis call' do expect(job_tracker.register('jid3', max_jids)).to be false
expect(job_tracker).to receive(:with_redis).once.and_call_original expect(job_tracker.running_jids).to contain_exactly(*%w[jid1 jid2])
job_tracker.register('a-job-id')
end end
end end
describe '#remove' do describe '#remove' do
before do before do
job_tracker.register(%w[a-job-id other-job-id]) %w[a-job-id other-job-id].each do |jid|
job_tracker.register(jid, max_jids)
end
end end
it 'removes jid from the set' do it 'removes jid from the set' do
@ -38,24 +38,11 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
expect(job_tracker.running_jids).to contain_exactly('a-job-id') expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end end
it 'updates the counter' do
expect { job_tracker.remove('other-job-id') }
.to change { job_tracker.count }
.from(2)
.to(1)
end
it 'does it in only one Redis call' do
expect(job_tracker).to receive(:with_redis).once.and_call_original
job_tracker.remove('other-job-id')
end
end end
describe '#clean_up' do describe '#clean_up' do
before do before do
job_tracker.register('a-job-id') job_tracker.register('a-job-id', max_jids)
end end
context 'with running jobs' do context 'with running jobs' do
@ -83,13 +70,6 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
.to change { job_tracker.running_jids.include?('a-job-id') } .to change { job_tracker.running_jids.include?('a-job-id') }
end end
it 'updates the counter' do
expect { job_tracker.clean_up }
.to change { job_tracker.count }
.from(1)
.to(0)
end
it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
expect(job_tracker).to receive(:with_redis).twice.and_call_original expect(job_tracker).to receive(:with_redis).twice.and_call_original

View File

@ -44,34 +44,17 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
describe '.perform_with_capacity' do describe '.perform_with_capacity' do
subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) } subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
let(:max_running_jobs) { 3 }
before do before do
expect_next_instance_of(worker_class) do |instance| expect_next_instance_of(worker_class) do |instance|
expect(instance).to receive(:remove_failed_jobs) expect(instance).to receive(:remove_failed_jobs)
expect(instance).to receive(:report_prometheus_metrics)
allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count) allow(instance).to receive(:max_running_jobs).and_return(max_running_jobs)
allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
end end
end end
context 'when capacity is larger than work' do it 'enqueues jobs' do
let(:remaining_work_count) { 2 }
let(:remaining_capacity) { 3 }
it 'enqueues jobs for remaining work' do
expect(worker_class)
.to receive(:bulk_perform_async)
.with([[:arg], [:arg]])
perform_with_capacity
end
end
context 'when capacity is lower than work' do
let(:remaining_work_count) { 5 }
let(:remaining_capacity) { 3 }
it 'enqueues jobs for remaining work' do
expect(worker_class) expect(worker_class)
.to receive(:bulk_perform_async) .to receive(:bulk_perform_async)
.with([[:arg], [:arg], [:arg]]) .with([[:arg], [:arg], [:arg]])
@ -79,7 +62,6 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform_with_capacity perform_with_capacity
end end
end end
end
describe '#perform' do describe '#perform' do
subject(:perform) { worker.perform(:arg) } subject(:perform) { worker.perform(:arg) }
@ -104,21 +86,6 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform perform
end end
it 'registers itself in the running set' do
allow(worker).to receive(:perform_work)
expect(job_tracker).to receive(:register).with('my-jid')
perform
end
it 'removes itself from the running set' do
expect(job_tracker).to receive(:remove).with('my-jid')
allow(worker).to receive(:perform_work)
perform
end
it 'reports prometheus metrics' do it 'reports prometheus metrics' do
allow(worker).to receive(:perform_work) allow(worker).to receive(:perform_work)
expect(worker).to receive(:report_prometheus_metrics).once.and_call_original expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
@ -126,12 +93,20 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform perform
end end
it 'updates the running set' do
expect(job_tracker.running_jids).to be_empty
allow(worker).to receive(:perform_work)
perform
expect(job_tracker.running_jids).to be_empty
end
end end
context 'with capacity and without work' do context 'with capacity and without work' do
before do before do
allow(worker).to receive(:max_running_jobs).and_return(10) allow(worker).to receive(:max_running_jobs).and_return(10)
allow(worker).to receive(:running_jobs_count).and_return(0)
allow(worker).to receive(:remaining_work_count).and_return(0) allow(worker).to receive(:remaining_work_count).and_return(0)
allow(worker).to receive(:perform_work) allow(worker).to receive(:perform_work)
end end
@ -146,7 +121,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
context 'without capacity' do context 'without capacity' do
before do before do
allow(worker).to receive(:max_running_jobs).and_return(10) allow(worker).to receive(:max_running_jobs).and_return(10)
allow(worker).to receive(:running_jobs_count).and_return(15) allow(job_tracker).to receive(:register).and_return(false)
allow(worker).to receive(:remaining_work_count).and_return(10) allow(worker).to receive(:remaining_work_count).and_return(10)
end end
@ -161,27 +136,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform perform
end end
it 'does not register in the running set' do
expect(job_tracker).not_to receive(:register)
perform
end
it 'removes itself from the running set' do
expect(job_tracker).to receive(:remove).with('my-jid')
perform
end
it 'reports prometheus metrics' do
expect(worker).to receive(:report_prometheus_metrics)
perform
end
end end
context 'when perform_work fails' do context 'when perform_work fails' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
allow(job_tracker).to receive(:register).and_return(true)
end
it 'does not re-enqueue itself' do it 'does not re-enqueue itself' do
expect(worker).not_to receive(:re_enqueue) expect(worker).not_to receive(:re_enqueue)
@ -189,7 +151,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end end
it 'removes itself from the running set' do it 'removes itself from the running set' do
expect(job_tracker).to receive(:remove) expect(job_tracker).to receive(:remove).with('my-jid')
expect { perform }.to raise_error(NotImplementedError) expect { perform }.to raise_error(NotImplementedError)
end end
@ -202,65 +164,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end end
end end
describe '#remaining_capacity' do
subject(:remaining_capacity) { worker.remaining_capacity }
before do
expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
end
context 'when changing the capacity to a lower value' do
let(:max_capacity) { -1 }
it { expect(remaining_capacity).to eq(0) }
end
context 'when registering new jobs' do
let(:max_capacity) { 2 }
before do
job_tracker.register('a-job-id')
end
it { expect(remaining_capacity).to eq(1) }
end
context 'with jobs in the queue' do
let(:max_capacity) { 2 }
before do
expect(worker_class).to receive(:queue_size).and_return(1)
end
it { expect(remaining_capacity).to eq(1) }
end
context 'with both running jobs and queued jobs' do
let(:max_capacity) { 10 }
before do
expect(worker_class).to receive(:queue_size).and_return(5)
expect(worker).to receive(:running_jobs_count).and_return(3)
end
it { expect(remaining_capacity).to eq(2) }
end
end
describe '#remove_failed_jobs' do describe '#remove_failed_jobs' do
subject(:remove_failed_jobs) { worker.remove_failed_jobs } subject(:remove_failed_jobs) { worker.remove_failed_jobs }
before do it 'removes failed jobs' do
job_tracker.register('a-job-id') job_tracker.register('a-job-id', 10)
allow(worker).to receive(:max_running_jobs).and_return(2)
expect(job_tracker).to receive(:clean_up).and_call_original expect(job_tracker).to receive(:clean_up).and_call_original
end expect { remove_failed_jobs }.to change { job_tracker.running_jids.size }.by(-1)
context 'with failed jobs' do
it 'update the available capacity' do
expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
end
end end
end end