Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-06-30 06:07:17 +00:00
parent dbb27a9153
commit 28fd41cf28
56 changed files with 548 additions and 183 deletions

View file

@ -1 +1 @@
d554b758056dd291fec6fbe6083e941ccf316fdb
d5777441938369d512a7825c57ccf6115d7afdfa

View file

@ -2,7 +2,9 @@ import Api from '~/api';
import createFlash from '~/flash';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from './constants';
import * as types from './mutation_types';
import { loadDataFromLS, setFrequentItemToLS } from './utils';
export const fetchGroups = ({ commit }, search) => {
commit(types.REQUEST_GROUPS);
@ -39,6 +41,24 @@ export const fetchProjects = ({ commit, state }, search) => {
}
};
export const loadFrequentGroups = ({ commit }) => {
const data = loadDataFromLS(GROUPS_LOCAL_STORAGE_KEY);
commit(types.LOAD_FREQUENT_ITEMS, { key: GROUPS_LOCAL_STORAGE_KEY, data });
};
export const loadFrequentProjects = ({ commit }) => {
const data = loadDataFromLS(PROJECTS_LOCAL_STORAGE_KEY);
commit(types.LOAD_FREQUENT_ITEMS, { key: PROJECTS_LOCAL_STORAGE_KEY, data });
};
export const setFrequentGroup = ({ state }, item) => {
setFrequentItemToLS(GROUPS_LOCAL_STORAGE_KEY, state.frequentItems, item);
};
export const setFrequentProject = ({ state }, item) => {
setFrequentItemToLS(PROJECTS_LOCAL_STORAGE_KEY, state.frequentItems, item);
};
export const setQuery = ({ commit }, { key, value }) => {
commit(types.SET_QUERY, { key, value });
};
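For orientation, a minimal sketch of how these new actions are expected to be dispatched. The store assembly is illustrative only (the real app wires `actions`, `mutations`, and `createState` together in its own store setup), the empty `query` object and the ids are assumptions for the example, and the import paths follow the `~/search/store/…` aliases used in the spec files further down.

```javascript
import Vue from 'vue';
import Vuex from 'vuex';
import * as actions from '~/search/store/actions';
import mutations from '~/search/store/mutations';
import createState from '~/search/store/state';

Vue.use(Vuex);

// Illustrative store; in the app the query is taken from the URL parameters.
const store = new Vuex.Store({
  actions,
  mutations,
  state: createState({ query: {} }),
});

// On page load, hydrate state.frequentItems from local storage.
store.dispatch('loadFrequentGroups');
store.dispatch('loadFrequentProjects');

// When a group or project is picked in a filter, remember the selection.
store.dispatch('setFrequentGroup', { id: 42 });
store.dispatch('setFrequentProject', { id: 7 });
```

The component-level wiring (`mapActions` in `created()` and in the change handlers) appears in the group and project filter diffs below.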

View file

@ -0,0 +1,7 @@
export const MAX_FREQUENT_ITEMS = 5;
export const MAX_FREQUENCY = 5;
export const GROUPS_LOCAL_STORAGE_KEY = 'global-search-frequent-groups';
export const PROJECTS_LOCAL_STORAGE_KEY = 'global-search-frequent-projects';

View file

@ -7,3 +7,5 @@ export const RECEIVE_PROJECTS_SUCCESS = 'RECEIVE_PROJECTS_SUCCESS';
export const RECEIVE_PROJECTS_ERROR = 'RECEIVE_PROJECTS_ERROR';
export const SET_QUERY = 'SET_QUERY';
export const LOAD_FREQUENT_ITEMS = 'LOAD_FREQUENT_ITEMS';

View file

@ -26,4 +26,7 @@ export default {
[types.SET_QUERY](state, { key, value }) {
state.query[key] = value;
},
[types.LOAD_FREQUENT_ITEMS](state, { key, data }) {
state.frequentItems[key] = data;
},
};

View file

@ -1,8 +1,14 @@
import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from './constants';
const createState = ({ query }) => ({
query,
groups: [],
fetchingGroups: false,
projects: [],
fetchingProjects: false,
frequentItems: {
[GROUPS_LOCAL_STORAGE_KEY]: [],
[PROJECTS_LOCAL_STORAGE_KEY]: [],
},
});
export default createState;

View file

@ -0,0 +1,50 @@
import AccessorUtilities from '../../lib/utils/accessor';
import { MAX_FREQUENT_ITEMS, MAX_FREQUENCY } from './constants';
export const loadDataFromLS = (key) => {
if (!AccessorUtilities.isLocalStorageAccessSafe()) {
return [];
}
try {
return JSON.parse(localStorage.getItem(key)) || [];
} catch {
// The LS got in a bad state, let's wipe it
localStorage.removeItem(key);
return [];
}
};
export const setFrequentItemToLS = (key, data, item) => {
if (!AccessorUtilities.isLocalStorageAccessSafe()) {
return;
}
try {
const frequentItems = data[key];
const existingItemIndex = frequentItems.findIndex((i) => i.id === item.id);
if (existingItemIndex >= 0) {
// Up the frequency (Max 5)
const currentFrequency = frequentItems[existingItemIndex].frequency;
frequentItems[existingItemIndex].frequency = Math.min(currentFrequency + 1, MAX_FREQUENCY);
} else {
// Only store a max of 5 items
if (frequentItems.length >= MAX_FREQUENT_ITEMS) {
frequentItems.pop();
}
frequentItems.push({ id: item.id, frequency: 1 });
}
// Sort by frequency
frequentItems.sort((a, b) => b.frequency - a.frequency);
// Note we do not need to commit a mutation here as immediately after this we refresh the page to
// update the search results.
localStorage.setItem(key, JSON.stringify(frequentItems));
} catch {
// The LS got in a bad state, let's wipe it
localStorage.removeItem(key);
}
};
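A small behavioural sketch of the two helpers above, assuming local storage is accessible (`AccessorUtilities.isLocalStorageAccessSafe()` returns `true`); the group id is made up for illustration.

```javascript
import { loadDataFromLS, setFrequentItemToLS } from '~/search/store/utils';
import { GROUPS_LOCAL_STORAGE_KEY } from '~/search/store/constants';

// Mirror the Vuex state shape: one array of { id, frequency } per key.
const frequentItems = {
  [GROUPS_LOCAL_STORAGE_KEY]: loadDataFromLS(GROUPS_LOCAL_STORAGE_KEY),
};

// First selection stores { id: 42, frequency: 1 }.
setFrequentItemToLS(GROUPS_LOCAL_STORAGE_KEY, frequentItems, { id: 42 });

// Selecting the same group again bumps the frequency, capped at MAX_FREQUENCY.
setFrequentItemToLS(GROUPS_LOCAL_STORAGE_KEY, frequentItems, { id: 42 });

// The persisted list stays sorted by frequency; once MAX_FREQUENT_ITEMS
// entries exist, the least frequent one is evicted to make room.
console.log(loadDataFromLS(GROUPS_LOCAL_STORAGE_KEY)); // [{ id: 42, frequency: 2 }]
```

The commit's new `utils_spec.js` below exercises exactly these cases: the frequency cap, eviction of the least frequent entry, sorting, and wiping bad data.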

View file

@ -23,9 +23,17 @@ export default {
return isEmpty(this.initialData) ? ANY_OPTION : this.initialData;
},
},
created() {
this.loadFrequentGroups();
},
methods: {
...mapActions(['fetchGroups']),
...mapActions(['fetchGroups', 'setFrequentGroup', 'loadFrequentGroups']),
handleGroupChange(group) {
// If group.id is null we are clearing the filter and don't need to store that in LS.
if (group.id) {
this.setFrequentGroup(group);
}
visitUrl(
setUrlParams({ [GROUP_DATA.queryParam]: group.id, [PROJECT_DATA.queryParam]: null }),
);

View file

@ -22,9 +22,17 @@ export default {
return this.initialData ? this.initialData : ANY_OPTION;
},
},
created() {
this.loadFrequentProjects();
},
methods: {
...mapActions(['fetchProjects']),
...mapActions(['fetchProjects', 'setFrequentProject', 'loadFrequentProjects']),
handleProjectChange(project) {
// If project.id is null we are clearing the filter and don't need to store that in LS.
if (project.id) {
this.setFrequentProject(project);
}
// This determines if we need to update the group filter or not
const queryParams = {
...(project.namespace?.id && { [GROUP_DATA.queryParam]: project.namespace.id }),

View file

@ -7,7 +7,7 @@ module AutoDevopsHelper
can?(current_user, :admin_pipeline, project) &&
project.has_auto_devops_implicitly_disabled? &&
!project.repository.gitlab_ci_yml &&
!project.ci_service
!project.ci_integration
end
def badge_for_auto_devops_scope(auto_devops_receiver)

View file

@ -248,10 +248,10 @@ module AlertManagement
"#{project.to_reference_base(from, full: full)}#{reference}"
end
def execute_services
return unless project.has_active_services?(:alert_hooks)
def execute_integrations
return unless project.has_active_integrations?(:alert_hooks)
project.execute_services(hook_data, :alert_hooks)
project.execute_integrations(hook_data, :alert_hooks)
end
# Representation of the alert's payload. Avoid accessing

View file

@ -772,7 +772,7 @@ module Ci
return unless project
project.execute_hooks(build_data.dup, :job_hooks) if project.has_active_hooks?(:job_hooks)
project.execute_services(build_data.dup, :job_hooks) if project.has_active_services?(:job_hooks)
project.execute_integrations(build_data.dup, :job_hooks) if project.has_active_integrations?(:job_hooks)
end
def browsable_artifacts?

View file

@ -854,7 +854,7 @@ module Ci
def execute_hooks
project.execute_hooks(pipeline_data, :pipeline_hooks) if project.has_active_hooks?(:pipeline_hooks)
project.execute_services(pipeline_data, :pipeline_hooks) if project.has_active_services?(:pipeline_hooks)
project.execute_integrations(pipeline_data, :pipeline_hooks) if project.has_active_integrations?(:pipeline_hooks)
end
# All the merge requests for which the current pipeline runs/ran against

View file

@ -189,7 +189,7 @@ class Deployment < ApplicationRecord
def execute_hooks(status_changed_at)
deployment_data = Gitlab::DataBuilder::Deployment.build(self, status_changed_at)
project.execute_hooks(deployment_data, :deployment_hooks)
project.execute_services(deployment_data, :deployment_hooks)
project.execute_integrations(deployment_data, :deployment_hooks)
end
def last?

View file

@ -666,7 +666,7 @@ class Group < Namespace
# TODO: group hooks https://gitlab.com/gitlab-org/gitlab/-/issues/216904
end
def execute_services(data, hooks_scope)
def execute_integrations(data, hooks_scope)
# NOOP
# TODO: group hooks https://gitlab.com/gitlab-org/gitlab/-/issues/216904
end

View file

@ -38,31 +38,6 @@ class Integration < ApplicationRecord
Integrations::BaseSlashCommands
].freeze
# used as part of the renaming effort (https://gitlab.com/groups/gitlab-org/-/epics/2504)
RENAMED_TO_INTEGRATION = %w[
asana assembla
bamboo bugzilla buildkite
campfire confluence custom_issue_tracker
datadog discord drone_ci
emails_on_push ewm emails_on_push external_wiki
flowdock
hangouts_chat
irker
jenkins jira
packagist pipelines_email pivotaltracker prometheus pushover
mattermost mattermost_slash_commands microsoft_teams mock_ci mock_monitoring
redmine
slack slack_slash_commands
teamcity
unify_circuit
webex_teams
youtrack
].to_set.freeze
def self.renamed?(name)
RENAMED_TO_INTEGRATION.include?(name)
end
serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize
attribute :type, Gitlab::Integrations::StiType.new
@ -207,14 +182,14 @@ class Integration < ApplicationRecord
end
def self.create_nonexistent_templates
nonexistent_services = build_nonexistent_services_for(for_template)
return if nonexistent_services.empty?
nonexistent_integrations = build_nonexistent_integrations_for(for_template)
return if nonexistent_integrations.empty?
# Create within a transaction to perform the lowest possible SQL queries.
transaction do
nonexistent_services.each do |service|
service.template = true
service.save
nonexistent_integrations.each do |integration|
integration.template = true
integration.save
end
end
end
@ -227,24 +202,24 @@ class Integration < ApplicationRecord
end
def self.find_or_initialize_all_non_project_specific(scope)
scope + build_nonexistent_services_for(scope)
scope + build_nonexistent_integrations_for(scope)
end
def self.build_nonexistent_services_for(scope)
nonexistent_services_types_for(scope).map do |service_type|
integration_type_to_model(service_type).new
def self.build_nonexistent_integrations_for(scope)
nonexistent_integration_types_for(scope).map do |type|
integration_type_to_model(type).new
end
end
private_class_method :build_nonexistent_services_for
private_class_method :build_nonexistent_integrations_for
# Returns a list of service types that do not exist in the given scope.
# Returns a list of integration types that do not exist in the given scope.
# Example: ["AsanaService", ...]
def self.nonexistent_services_types_for(scope)
def self.nonexistent_integration_types_for(scope)
# Using #map instead of #pluck to save one query count. This is because
# ActiveRecord loaded the object here, so we don't need to query again later.
available_integration_types(include_project_specific: false) - scope.map(&:type)
end
private_class_method :nonexistent_services_types_for
private_class_method :nonexistent_integration_types_for
# Returns a list of available integration names.
# Example: ["asana", ...]
@ -261,10 +236,6 @@ class Integration < ApplicationRecord
INTEGRATION_NAMES
end
def self.services_names
integration_names
end
def self.dev_integration_names
return [] unless Rails.env.development?
@ -283,14 +254,14 @@ class Integration < ApplicationRecord
end
end
# Returns the model for the given service name.
# Returns the model for the given integration name.
# Example: "asana" => Integrations::Asana
def self.integration_name_to_model(name)
type = integration_name_to_type(name)
integration_type_to_model(type)
end
# Returns the STI type for the given service name.
# Returns the STI type for the given integration name.
# Example: "asana" => "AsanaService"
def self.integration_name_to_type(name)
"#{name}_service".camelize
@ -419,7 +390,7 @@ class Integration < ApplicationRecord
%w[active]
end
def to_service_hash
def to_integration_hash
as_json(methods: :type, except: %w[id template instance project_id group_id])
end

View file

@ -1340,7 +1340,7 @@ class MergeRequest < ApplicationRecord
def has_ci?
return false if has_no_commits?
!!(head_pipeline_id || all_pipelines.any? || source_project&.ci_service)
!!(head_pipeline_id || all_pipelines.any? || source_project&.ci_integration)
end
def branch_missing?

View file

@ -147,11 +147,7 @@ class Project < ApplicationRecord
has_many :boards
def self.integration_association_name(name)
if ::Integration.renamed?(name)
"#{name}_integration"
else
"#{name}_service"
end
"#{name}_integration"
end
# Project integrations
@ -1425,20 +1421,12 @@ class Project < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
def ci_services
def ci_integrations
integrations.where(category: :ci)
end
def ci_service
@ci_service ||= ci_services.reorder(nil).find_by(active: true)
end
def monitoring_services
integrations.where(category: :monitoring)
end
def monitoring_service
@monitoring_service ||= monitoring_services.reorder(nil).find_by(active: true)
def ci_integration
@ci_integration ||= ci_integrations.reorder(nil).find_by(active: true)
end
def avatar_in_git
@ -1509,7 +1497,7 @@ class Project < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
def execute_services(data, hooks_scope = :push_hooks)
def execute_integrations(data, hooks_scope = :push_hooks)
# Call only service hooks that are active for this scope
run_after_commit_or_now do
integrations.public_send(hooks_scope).each do |integration| # rubocop:disable GitlabSecurity/PublicSend
@ -1522,7 +1510,7 @@ class Project < ApplicationRecord
hooks.hooks_for(hooks_scope).any? || SystemHook.hooks_for(hooks_scope).any? || Gitlab::FileHook.any?
end
def has_active_services?(hooks_scope = :push_hooks)
def has_active_integrations?(hooks_scope = :push_hooks)
integrations.public_send(hooks_scope).any? # rubocop:disable GitlabSecurity/PublicSend
end
@ -2699,10 +2687,10 @@ class Project < ApplicationRecord
end
def build_from_instance_or_template(name)
instance = find_integration(services_instances, name)
instance = find_integration(integration_instances, name)
return Integration.build_from_integration(instance, project_id: id) if instance
template = find_integration(services_templates, name)
template = find_integration(integration_templates, name)
return Integration.build_from_integration(template, project_id: id) if template
end
@ -2710,12 +2698,12 @@ class Project < ApplicationRecord
Integration.integration_name_to_model(name).new(project_id: id)
end
def services_templates
@services_templates ||= Integration.for_template
def integration_templates
@integration_templates ||= Integration.for_template
end
def services_instances
@services_instances ||= Integration.for_instance
def integration_instances
@integration_instances ||= Integration.for_instance
end
def closest_namespace_setting(name)

View file

@ -19,8 +19,8 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
status || "preparing"
else
ci_service = source_project.try(:ci_service)
ci_service&.commit_status(diff_head_sha, source_branch)
ci_integration = source_project.try(:ci_integration)
ci_integration&.commit_status(diff_head_sha, source_branch)
end
end

View file

@ -8,7 +8,7 @@ class BulkCreateIntegrationService
end
def execute
service_list = ServiceList.new(batch, service_hash, association).to_array
service_list = ServiceList.new(batch, integration_hash, association).to_array
Integration.transaction do
results = bulk_insert(*service_list)
@ -31,11 +31,11 @@ class BulkCreateIntegrationService
klass.insert_all(items_to_insert, returning: [:id])
end
def service_hash
def integration_hash
if integration.template?
integration.to_service_hash
integration.to_integration_hash
else
integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
integration.to_integration_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
end
end

View file

@ -9,7 +9,7 @@ class BulkUpdateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def execute
Integration.transaction do
Integration.where(id: batch.select(:id)).update_all(service_hash)
Integration.where(id: batch.select(:id)).update_all(integration_hash)
if integration.data_fields_present?
integration.data_fields.class.where(service_id: batch.select(:id)).update_all(data_fields_hash)
@ -22,8 +22,8 @@ class BulkUpdateIntegrationService
attr_reader :integration, :batch
def service_hash
integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
def integration_hash
integration.to_integration_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
end
def data_fields_hash

View file

@ -64,7 +64,7 @@ module AlertManagement
def process_new_alert
if alert.save
alert.execute_services
alert.execute_integrations
SystemNoteService.create_new_alert(alert, alert_source)
process_resolved_alert if resolving_alert?

View file

@ -69,7 +69,7 @@ module Git
# Creating push_data invokes one CommitDelta RPC per commit. Only
# build this data if we actually need it.
project.execute_hooks(push_data, hook_name) if project.has_active_hooks?(hook_name)
project.execute_services(push_data, hook_name) if project.has_active_services?(hook_name)
project.execute_integrations(push_data, hook_name) if project.has_active_integrations?(hook_name)
end
def enqueue_invalidate_cache

View file

@ -60,7 +60,7 @@ module Issues
issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) }
hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
issue.project.execute_hooks(issue_data, hooks_scope)
issue.project.execute_services(issue_data, hooks_scope)
issue.project.execute_integrations(issue_data, hooks_scope)
end
def update_project_counter_caches?(issue)

View file

@ -22,7 +22,7 @@ module MergeRequests
def execute_hooks(merge_request, action = 'open', old_rev: nil, old_associations: {})
merge_data = hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations)
merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
merge_request.project.execute_services(merge_data, :merge_request_hooks)
merge_request.project.execute_integrations(merge_data, :merge_request_hooks)
execute_external_hooks(merge_request, merge_data)

View file

@ -39,7 +39,7 @@ module Notes
hooks_scope = note.confidential?(include_noteable: true) ? :confidential_note_hooks : :note_hooks
note.project.execute_hooks(note_data, hooks_scope)
note.project.execute_services(note_data, hooks_scope)
note.project.execute_integrations(note_data, hooks_scope)
end
end
end

View file

@ -12,7 +12,7 @@ module WikiPages
def execute_hooks(page)
page_data = payload(page)
container.execute_hooks(page_data, :wiki_page_hooks)
container.execute_services(page_data, :wiki_page_hooks)
container.execute_integrations(page_data, :wiki_page_hooks)
increment_usage
create_wiki_event(page)
end

View file

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/333452
milestone: '14.1'
type: development
group: group::verify
default_enabled: false
default_enabled: true

View file

@ -831,12 +831,12 @@ build.dependencies.each do |d| { puts "status: #{d.status}, finished at: #{d.fin
completed: #{d.complete?}, artifacts_expired: #{d.artifacts_expired?}, erased: #{d.erased?}" }
```
### Try CI service
### Try CI integration
```ruby
p = Project.find_by_full_path('<project_path>')
m = project.merge_requests.find_by(iid: )
m.project.try(:ci_service)
m.project.try(:ci_integration)
```
### Validate the `.gitlab-ci.yml`

View file

@ -18,7 +18,7 @@
# batch_count(::Clusters::Cluster.aws_installed.enabled, :cluster_id)
# batch_count(Namespace.group(:type))
# batch_distinct_count(::Project, :creator_id)
# batch_distinct_count(::Project.with_active_services.service_desk_enabled.where(time_period), start: ::User.minimum(:id), finish: ::User.maximum(:id))
# batch_distinct_count(::Project.with_active_integrations.service_desk_enabled.where(time_period), start: ::User.minimum(:id), finish: ::User.maximum(:id))
# batch_distinct_count(Project.group(:visibility_level), :creator_id)
# batch_sum(User, :sign_in_count)
# batch_sum(Issue.group(:state_id), :weight))

View file

@ -11,17 +11,17 @@ module Gitlab
# In order to not use a possible complex time consuming query when calculating min and max values,
# the start and finish can be sent specifically, start and finish should contain max and min values for PRIMARY KEY of
# relation (most cases `id` column) rather than counted attribute eg:
# estimate_distinct_count(start: ::Project.with_active_services.minimum(:id), finish: ::Project.with_active_services.maximum(:id))
# estimate_distinct_count(start: ::Project.with_active_integrations.minimum(:id), finish: ::Project.with_active_integrations.maximum(:id))
#
# Grouped relations are NOT supported yet.
#
# @example Usage
# ::Gitlab::Database::PostgresHllBatchDistinctCount.new(::Project, :creator_id).execute
# ::Gitlab::Database::PostgresHllBatchDistinctCount.new(::Project.with_active_services.service_desk_enabled.where(time_period))
# ::Gitlab::Database::PostgresHllBatchDistinctCount.new(::Project.with_active_integrations.service_desk_enabled.where(time_period))
# .execute(
# batch_size: 1_000,
# start: ::Project.with_active_services.service_desk_enabled.where(time_period).minimum(:id),
# finish: ::Project.with_active_services.service_desk_enabled.where(time_period).maximum(:id)
# start: ::Project.with_active_integrations.service_desk_enabled.where(time_period).minimum(:id),
# finish: ::Project.with_active_integrations.service_desk_enabled.where(time_period).maximum(:id)
# )
#
# @note HyperLogLog is an PROBABILISTIC algorithm that ESTIMATES distinct count of given attribute value for supplied relation

View file

@ -11716,9 +11716,6 @@ msgstr ""
msgid "Edit iteration"
msgstr ""
msgid "Edit iteration cadence"
msgstr ""
msgid "Edit public deploy key"
msgstr ""
@ -18205,6 +18202,9 @@ msgstr ""
msgid "Iterations"
msgstr ""
msgid "Iterations|Add iteration"
msgstr ""
msgid "Iterations|Automated scheduling"
msgstr ""
@ -18286,6 +18286,9 @@ msgstr ""
msgid "Iterations|Title"
msgstr ""
msgid "Iterations|Unable to find iteration."
msgstr ""
msgid "Iteration|Dates cannot overlap with other existing Iterations within this group"
msgstr ""
@ -21796,9 +21799,6 @@ msgstr ""
msgid "New iteration"
msgstr ""
msgid "New iteration cadence"
msgstr ""
msgid "New iteration created"
msgstr ""

View file

@ -18,7 +18,7 @@ RSpec.describe 'OAuth Login', :js, :allow_forgery_protection do
providers = [:github, :twitter, :bitbucket, :gitlab, :google_oauth2,
:facebook, :cas3, :auth0, :authentiq, :salesforce]
around(:all) do |example|
around do |example|
with_omniauth_full_host { example.run }
end

View file

@ -63,3 +63,5 @@ export const MOCK_SORT_OPTIONS = [
},
},
];
export const MOCK_LS_KEY = 'mock-ls-key';

View file

@ -5,9 +5,11 @@ import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import * as actions from '~/search/store/actions';
import { GROUPS_LOCAL_STORAGE_KEY, PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import * as types from '~/search/store/mutation_types';
import createState from '~/search/store/state';
import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECT, MOCK_PROJECTS } from '../mock_data';
import * as storeUtils from '~/search/store/utils';
import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECT, MOCK_PROJECTS, MOCK_GROUP } from '../mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
@ -141,4 +143,86 @@ describe('Global Search Store Actions', () => {
});
});
});
describe('loadFrequentGroups', () => {
beforeEach(() => {
storeUtils.loadDataFromLS = jest.fn().mockReturnValue(MOCK_GROUPS);
});
it(`calls loadDataFromLS with ${GROUPS_LOCAL_STORAGE_KEY} and LOAD_FREQUENT_ITEMS mutation`, async () => {
await testAction({
action: actions.loadFrequentGroups,
state,
expectedMutations: [
{
type: types.LOAD_FREQUENT_ITEMS,
payload: { key: GROUPS_LOCAL_STORAGE_KEY, data: MOCK_GROUPS },
},
],
});
expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(GROUPS_LOCAL_STORAGE_KEY);
});
});
describe('loadFrequentProjects', () => {
beforeEach(() => {
storeUtils.loadDataFromLS = jest.fn().mockReturnValue(MOCK_PROJECTS);
});
it(`calls loadDataFromLS with ${PROJECTS_LOCAL_STORAGE_KEY} and LOAD_FREQUENT_ITEMS mutation`, async () => {
await testAction({
action: actions.loadFrequentProjects,
state,
expectedMutations: [
{
type: types.LOAD_FREQUENT_ITEMS,
payload: { key: PROJECTS_LOCAL_STORAGE_KEY, data: MOCK_PROJECTS },
},
],
});
expect(storeUtils.loadDataFromLS).toHaveBeenCalledWith(PROJECTS_LOCAL_STORAGE_KEY);
});
});
describe('setFrequentGroup', () => {
beforeEach(() => {
storeUtils.setFrequentItemToLS = jest.fn();
});
it(`calls setFrequentItemToLS with ${GROUPS_LOCAL_STORAGE_KEY} and item data`, async () => {
await testAction({
action: actions.setFrequentGroup,
payload: MOCK_GROUP,
state,
});
expect(storeUtils.setFrequentItemToLS).toHaveBeenCalledWith(
GROUPS_LOCAL_STORAGE_KEY,
state.frequentItems,
MOCK_GROUP,
);
});
});
describe('setFrequentProject', () => {
beforeEach(() => {
storeUtils.setFrequentItemToLS = jest.fn();
});
it(`calls setFrequentItemToLS with ${PROJECTS_LOCAL_STORAGE_KEY} and item data`, async () => {
await testAction({
action: actions.setFrequentProject,
payload: MOCK_PROJECT,
state,
});
expect(storeUtils.setFrequentItemToLS).toHaveBeenCalledWith(
PROJECTS_LOCAL_STORAGE_KEY,
state.frequentItems,
MOCK_PROJECT,
);
});
});
});

View file

@ -71,4 +71,13 @@ describe('Global Search Store Mutations', () => {
expect(state.query[payload.key]).toBe(payload.value);
});
});
describe('LOAD_FREQUENT_ITEMS', () => {
it('sets frequentItems[key] to data', () => {
const payload = { key: 'test-key', data: [1, 2, 3] };
mutations[types.LOAD_FREQUENT_ITEMS](state, payload);
expect(state.frequentItems[payload.key]).toStrictEqual(payload.data);
});
});
});

View file

@ -0,0 +1,147 @@
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { MAX_FREQUENCY } from '~/search/store/constants';
import { loadDataFromLS, setFrequentItemToLS } from '~/search/store/utils';
import { MOCK_LS_KEY, MOCK_GROUPS } from '../mock_data';
useLocalStorageSpy();
jest.mock('~/lib/utils/accessor', () => ({
isLocalStorageAccessSafe: jest.fn().mockReturnValue(true),
}));
describe('Global Search Store Utils', () => {
afterEach(() => {
localStorage.clear();
});
describe('loadDataFromLS', () => {
let res;
describe('with valid data', () => {
beforeEach(() => {
localStorage.setItem(MOCK_LS_KEY, JSON.stringify(MOCK_GROUPS));
res = loadDataFromLS(MOCK_LS_KEY);
});
it('returns parsed array', () => {
expect(res).toStrictEqual(MOCK_GROUPS);
});
});
describe('with invalid data', () => {
beforeEach(() => {
localStorage.setItem(MOCK_LS_KEY, '[}');
res = loadDataFromLS(MOCK_LS_KEY);
});
it('wipes local storage and returns an empty array', () => {
expect(localStorage.removeItem).toHaveBeenCalledWith(MOCK_LS_KEY);
expect(res).toStrictEqual([]);
});
});
});
describe('setFrequentItemToLS', () => {
const frequentItems = {};
describe('with existing data', () => {
describe(`when frequency is less than ${MAX_FREQUENCY}`, () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [{ id: MOCK_GROUPS[0].id, frequency: 1 }];
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
it('adds 1 to the frequency and calls localStorage.setItem', () => {
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
JSON.stringify([{ id: MOCK_GROUPS[0].id, frequency: 2 }]),
);
});
});
describe(`when frequency is equal to ${MAX_FREQUENCY}`, () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [{ id: MOCK_GROUPS[0].id, frequency: MAX_FREQUENCY }];
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
it(`does not further increase frequency past ${MAX_FREQUENCY} and calls localStorage.setItem`, () => {
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
JSON.stringify([{ id: MOCK_GROUPS[0].id, frequency: MAX_FREQUENCY }]),
);
});
});
});
describe('with no existing data', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [];
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
it('adds a new entry with frequency 1 and calls localStorage.setItem', () => {
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
JSON.stringify([{ id: MOCK_GROUPS[0].id, frequency: 1 }]),
);
});
});
describe('with multiple entries', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [
{ id: MOCK_GROUPS[0].id, frequency: 1 },
{ id: MOCK_GROUPS[1].id, frequency: 1 },
];
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[1]);
});
it('sorts the array by most frequent', () => {
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
JSON.stringify([
{ id: MOCK_GROUPS[1].id, frequency: 2 },
{ id: MOCK_GROUPS[0].id, frequency: 1 },
]),
);
});
});
describe('with max entries', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = [
{ id: 1, frequency: 5 },
{ id: 2, frequency: 4 },
{ id: 3, frequency: 3 },
{ id: 4, frequency: 2 },
{ id: 5, frequency: 1 },
];
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, { id: 6 });
});
it('removes the least frequent', () => {
expect(localStorage.setItem).toHaveBeenCalledWith(
MOCK_LS_KEY,
JSON.stringify([
{ id: 1, frequency: 5 },
{ id: 2, frequency: 4 },
{ id: 3, frequency: 3 },
{ id: 4, frequency: 2 },
{ id: 6, frequency: 1 },
]),
);
});
});
describe('with null data loaded in', () => {
beforeEach(() => {
frequentItems[MOCK_LS_KEY] = null;
setFrequentItemToLS(MOCK_LS_KEY, frequentItems, MOCK_GROUPS[0]);
});
it('wipes local storage', () => {
expect(localStorage.removeItem).toHaveBeenCalledWith(MOCK_LS_KEY);
});
});
});
});

View file

@ -1,13 +1,14 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_GROUP, MOCK_QUERY } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { GROUPS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import GroupFilter from '~/search/topbar/components/group_filter.vue';
import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@ -19,6 +20,8 @@ describe('GroupFilter', () => {
const actionSpies = {
fetchGroups: jest.fn(),
setFrequentGroup: jest.fn(),
loadFrequentGroups: jest.fn(),
};
const defaultProps = {
@ -35,7 +38,6 @@ describe('GroupFilter', () => {
});
wrapper = shallowMount(GroupFilter, {
localVue,
store,
propsData: {
...defaultProps,
@ -77,14 +79,35 @@ describe('GroupFilter', () => {
});
});
describe('when @change is emitted', () => {
describe('when @change is emitted with Any', () => {
beforeEach(() => {
createComponent();
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
});
it('calls setUrlParams with group null, project id null, and then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: null,
[PROJECT_DATA.queryParam]: null,
});
expect(visitUrl).toHaveBeenCalled();
});
it('does not call setFrequentGroup', () => {
expect(actionSpies.setFrequentGroup).not.toHaveBeenCalled();
});
});
describe('when @change is emitted with a group', () => {
beforeEach(() => {
createComponent();
findSearchableDropdown().vm.$emit('change', MOCK_GROUP);
});
it('calls calls setUrlParams with group id, project id null, and visitUrl', () => {
it('calls setUrlParams with group id, project id null, and then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[GROUP_DATA.queryParam]: MOCK_GROUP.id,
[PROJECT_DATA.queryParam]: null,
@ -92,6 +115,10 @@ describe('GroupFilter', () => {
expect(visitUrl).toHaveBeenCalled();
});
it(`calls setFrequentGroup with the group and ${GROUPS_LOCAL_STORAGE_KEY}`, () => {
expect(actionSpies.setFrequentGroup).toHaveBeenCalledWith(expect.any(Object), MOCK_GROUP);
});
});
});
@ -118,4 +145,14 @@ describe('GroupFilter', () => {
});
});
});
describe('onCreate', () => {
beforeEach(() => {
createComponent();
});
it('calls loadFrequentGroups', () => {
expect(actionSpies.loadFrequentGroups).toHaveBeenCalledTimes(1);
});
});
});

View file

@ -1,13 +1,14 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import { MOCK_PROJECT, MOCK_QUERY } from 'jest/search/mock_data';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { PROJECTS_LOCAL_STORAGE_KEY } from '~/search/store/constants';
import ProjectFilter from '~/search/topbar/components/project_filter.vue';
import SearchableDropdown from '~/search/topbar/components/searchable_dropdown.vue';
import { ANY_OPTION, GROUP_DATA, PROJECT_DATA } from '~/search/topbar/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@ -19,6 +20,8 @@ describe('ProjectFilter', () => {
const actionSpies = {
fetchProjects: jest.fn(),
setFrequentProject: jest.fn(),
loadFrequentProjects: jest.fn(),
};
const defaultProps = {
@ -35,7 +38,6 @@ describe('ProjectFilter', () => {
});
wrapper = shallowMount(ProjectFilter, {
localVue,
store,
propsData: {
...defaultProps,
@ -84,12 +86,16 @@ describe('ProjectFilter', () => {
findSearchableDropdown().vm.$emit('change', ANY_OPTION);
});
it('calls setUrlParams with project id, not group id, then calls visitUrl', () => {
it('calls setUrlParams with null, no group id, then calls visitUrl', () => {
expect(setUrlParams).toHaveBeenCalledWith({
[PROJECT_DATA.queryParam]: ANY_OPTION.id,
[PROJECT_DATA.queryParam]: null,
});
expect(visitUrl).toHaveBeenCalled();
});
it('does not call setFrequentProject', () => {
expect(actionSpies.setFrequentProject).not.toHaveBeenCalled();
});
});
describe('with a Project', () => {
@ -104,6 +110,13 @@ describe('ProjectFilter', () => {
});
expect(visitUrl).toHaveBeenCalled();
});
it(`calls setFrequentProject with the group and ${PROJECTS_LOCAL_STORAGE_KEY}`, () => {
expect(actionSpies.setFrequentProject).toHaveBeenCalledWith(
expect.any(Object),
MOCK_PROJECT,
);
});
});
});
});
@ -131,4 +144,14 @@ describe('ProjectFilter', () => {
});
});
});
describe('onCreate', () => {
beforeEach(() => {
createComponent();
});
it('calls loadFrequentProjects', () => {
expect(actionSpies.loadFrequentProjects).toHaveBeenCalledTimes(1);
});
});
});

View file

@ -102,7 +102,7 @@ RSpec.describe Types::GlobalIDType do
end
context 'with a deprecation' do
around(:all) do |example|
around do |example|
# Unset all previously memoized GlobalIDTypes to allow us to define one
# that will use the constants stubbed in the `before` block.
previous_id_types = Types::GlobalIDType.instance_variable_get(:@id_types)

View file

@ -915,7 +915,7 @@ RSpec.describe Integration do
described_class.available_integration_names(include_project_specific: false)
end
it 'does not call dev_services_names with include_dev false' do
it 'does not call dev_integration_names with include_dev false' do
expect(described_class).to receive(:integration_names).and_call_original
expect(described_class).not_to receive(:dev_integration_names)
expect(described_class).to receive(:project_specific_integration_names).and_call_original

View file

@ -18,7 +18,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:token) }
it { is_expected.to validate_presence_of(:drone_url) }
it_behaves_like 'issue tracker service URL attribute', :drone_url
it_behaves_like 'issue tracker integration URL attribute', :drone_url
end
context 'inactive' do
@ -66,7 +66,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
end
end
describe "service page/path methods" do
describe "integration page/path methods" do
include_context :drone_ci_integration
it { expect(drone.build_page(sha, branch)).to eq(build_page) }

View file

@ -1898,7 +1898,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'has ci' do
it 'returns true if MR has head_pipeline_id and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { double }
allow(merge_request).to receive(:has_no_commits?) { false }
@ -1906,7 +1906,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns true if MR has any pipeline and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [double] }
@ -1914,8 +1914,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(merge_request.has_ci?).to be(true)
end
it 'returns true if MR has CI service and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { double }
it 'returns true if MR has CI integration and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { double }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [] }
@ -1925,8 +1925,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
context 'has no ci' do
it 'returns false if MR has no CI service nor pipeline, and no commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
it 'returns false if MR has no CI integration nor pipeline, and no commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_integration) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:all_pipelines) { [] }
allow(merge_request).to receive(:has_no_commits?) { true }

View file

@ -5418,7 +5418,7 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#execute_services' do
describe '#execute_integrations' do
let(:integration) { create(:integrations_slack, push_events: true, merge_requests_events: false, active: true) }
it 'executes integrations with the specified scope' do
@ -5428,7 +5428,7 @@ RSpec.describe Project, factory_default: :keep do
expect(instance).to receive(:async_execute).with(data).once
end
integration.project.execute_services(data, :push_hooks)
integration.project.execute_integrations(data, :push_hooks)
end
it 'does not execute integration that don\'t match the specified scope' do
@ -5438,7 +5438,7 @@ RSpec.describe Project, factory_default: :keep do
end
end
integration.project.execute_services(anything, :merge_request_hooks)
integration.project.execute_integrations(anything, :merge_request_hooks)
end
end
@ -5469,16 +5469,16 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#has_active_services?' do
describe '#has_active_integrations?' do
let_it_be(:project) { create(:project) }
it { expect(project.has_active_services?).to be_falsey }
it { expect(project.has_active_integrations?).to be_falsey }
it 'returns true when a matching service exists' do
create(:custom_issue_tracker_integration, push_events: true, merge_requests_events: false, project: project)
expect(project.has_active_services?(:merge_request_hooks)).to be_falsey
expect(project.has_active_services?).to be_truthy
expect(project.has_active_integrations?(:merge_request_hooks)).to be_falsey
expect(project.has_active_integrations?).to be_truthy
end
end
@ -5911,7 +5911,7 @@ RSpec.describe Project, factory_default: :keep do
allow(subject).to receive(:disabled_integrations).and_return(%w[prometheus])
end
it 'returns only enabled services sorted' do
it 'returns only enabled integrations sorted' do
expect(subject.find_or_initialize_integrations).to match [
have_attributes(title: 'JetBrains TeamCity'),
have_attributes(title: 'Pushover')
@ -5955,7 +5955,7 @@ RSpec.describe Project, factory_default: :keep do
create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
end
it 'builds the service from the instance integration' do
it 'builds the integration from the instance integration' do
expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.instance.com/')
end
end
@ -5965,13 +5965,13 @@ RSpec.describe Project, factory_default: :keep do
create(:prometheus_integration, :template, api_url: 'https://prometheus.template.com/')
end
it 'builds the service from the template' do
it 'builds the integration from the template' do
expect(subject.find_or_initialize_integration('prometheus').api_url).to eq('https://prometheus.template.com/')
end
end
context 'without an exisiting integration, or instance-level or template' do
it 'builds the service' do
it 'builds the integration' do
expect(subject.find_or_initialize_integration('prometheus')).to be_a(::Integrations::Prometheus)
expect(subject.find_or_initialize_integration('prometheus').api_url).to be_nil
end
@ -6662,16 +6662,16 @@ RSpec.describe Project, factory_default: :keep do
create(:prometheus_integration, project: project, manual_configuration: manual_configuration)
end
context 'when project has an activated prometheus service' do
context 'when project has an activated prometheus integration' do
let(:manual_configuration) { true }
it { is_expected.to be_truthy }
end
context 'when project has an inactive prometheus service' do
context 'when project has an inactive prometheus integration' do
let(:manual_configuration) { false }
it 'the service is marked as inactive' do
it 'the integration is marked as inactive' do
expect(subject).to be_falsey
end
end

View file

@ -11,17 +11,17 @@ RSpec.describe MergeRequestPresenter do
subject { described_class.new(resource).ci_status }
context 'when no head pipeline' do
it 'return status using CiService' do
ci_service = double(Integrations::MockCi)
it 'return status from Ci integration' do
ci_integration = double(Integrations::MockCi)
ci_status = double
allow(resource.source_project)
.to receive(:ci_service)
.and_return(ci_service)
.to receive(:ci_integration)
.and_return(ci_integration)
allow(resource).to receive(:head_pipeline).and_return(nil)
expect(ci_service).to receive(:commit_status)
expect(ci_integration).to receive(:commit_status)
.with(resource.diff_head_sha, resource.source_branch)
.and_return(ci_status)

View file

@ -59,7 +59,7 @@ RSpec.describe Git::BaseHooksService do
end
end
describe 'project hooks and services' do
describe 'project hooks and integrations' do
context 'hooks' do
before do
expect(project).to receive(:has_active_hooks?).and_return(active)
@ -88,45 +88,45 @@ RSpec.describe Git::BaseHooksService do
end
end
context 'services' do
context 'with integrations' do
before do
expect(project).to receive(:has_active_services?).and_return(active)
expect(project).to receive(:has_active_integrations?).and_return(active)
end
context 'active services' do
context 'with active integrations' do
let(:active) { true }
it 'executes the services' do
expect(subject).to receive(:push_data).at_least(:once).and_call_original
expect(project).to receive(:execute_services)
expect(project).to receive(:execute_integrations)
subject.execute
end
end
context 'inactive services' do
context 'with inactive integrations' do
let(:active) { false }
it 'does not execute the services' do
expect(subject).not_to receive(:push_data)
expect(project).not_to receive(:execute_services)
expect(project).not_to receive(:execute_integrations)
subject.execute
end
end
end
context 'execute_project_hooks param set to false' do
context 'when execute_project_hooks param is set to false' do
before do
params[:execute_project_hooks] = false
allow(project).to receive(:has_active_hooks?).and_return(true)
allow(project).to receive(:has_active_services?).and_return(true)
allow(project).to receive(:has_active_integrations?).and_return(true)
end
it 'does not execute hooks and services' do
it 'does not execute hooks and integrations' do
expect(project).not_to receive(:execute_hooks)
expect(project).not_to receive(:execute_services)
expect(project).not_to receive(:execute_integrations)
subject.execute
end

View file

@ -323,7 +323,7 @@ RSpec.describe Issues::CloseService do
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project: project, current_user: user).close_issue(issue)
end
@ -334,7 +334,7 @@ RSpec.describe Issues::CloseService do
issue = create(:issue, :confidential, project: project)
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project: project, current_user: user).close_issue(issue)
end

View file

@ -230,7 +230,7 @@ RSpec.describe Issues::CreateService do
opts = { title: 'Title', description: 'Description', confidential: false }
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
@ -239,7 +239,7 @@ RSpec.describe Issues::CreateService do
opts = { title: 'Title', description: 'Description', confidential: true }
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end

View file

@ -65,7 +65,7 @@ RSpec.describe Issues::ReopenService do
context 'when issue is not confidential' do
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
described_class.new(project: project, current_user: user).execute(issue)
end
@ -76,7 +76,7 @@ RSpec.describe Issues::ReopenService do
issue = create(:issue, :confidential, :closed, project: project)
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
described_class.new(project: project, current_user: user).execute(issue)
end

View file

@ -537,7 +537,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'executes confidential issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks)
update_issue(confidential: true)
end

View file

@ -104,9 +104,9 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
context 'when execute_hooks option is set to true' do
let(:options) { { execute_hooks: true } }
it 'execute hooks and services' do
it 'executes hooks and integrations' do
expect(merge_request.project).to receive(:execute_hooks).with(anything, :merge_request_hooks)
expect(merge_request.project).to receive(:execute_services).with(anything, :merge_request_hooks)
expect(merge_request.project).to receive(:execute_integrations).with(anything, :merge_request_hooks)
expect(service).to receive(:enqueue_jira_connect_messages_for).with(merge_request)
execute

View file

@ -21,7 +21,7 @@ RSpec.describe Notes::PostProcessService do
it do
expect(project).to receive(:execute_hooks)
expect(project).to receive(:execute_services)
expect(project).to receive(:execute_integrations)
described_class.new(@note).execute
end
@ -29,16 +29,16 @@ RSpec.describe Notes::PostProcessService do
context 'with a confidential issue' do
let(:issue) { create(:issue, :confidential, project: project) }
it "doesn't call note hooks/services" do
it "doesn't call note hooks/integrations" do
expect(project).not_to receive(:execute_hooks).with(anything, :note_hooks)
expect(project).not_to receive(:execute_services).with(anything, :note_hooks)
expect(project).not_to receive(:execute_integrations).with(anything, :note_hooks)
described_class.new(@note).execute
end
it "calls confidential-note hooks/services" do
it "calls confidential-note hooks/integrations" do
expect(project).to receive(:execute_hooks).with(anything, :confidential_note_hooks)
expect(project).to receive(:execute_services).with(anything, :confidential_note_hooks)
expect(project).to receive(:execute_integrations).with(anything, :confidential_note_hooks)
described_class.new(@note).execute
end

View file

@ -29,7 +29,7 @@ end
RSpec.configure do |config|
config.include StrategyHelpers, type: :strategy
config.around(:all, type: :strategy) do |example|
config.around(type: :strategy) do |example|
StrategyHelpers.without_test_mode do
example.run
end

View file

@ -13,7 +13,7 @@ RSpec.shared_examples 'creates an alert management alert or errors' do
it 'executes the alert service hooks' do
expect_next_instance_of(AlertManagement::Alert) do |alert|
expect(alert).to receive(:execute_services)
expect(alert).to receive(:execute_integrations)
end
subject
@ -84,7 +84,7 @@ end
# - `alert`, the alert for which events should be incremented
RSpec.shared_examples 'adds an alert management alert event' do
specify do
expect(alert).not_to receive(:execute_services)
expect(alert).not_to receive(:execute_integrations)
expect { subject }.to change { alert.reload.events }.by(1)

View file

@ -20,7 +20,7 @@ end
RSpec.shared_examples 'processes incident issues' do |with_issue: false|
before do
allow_next_instance_of(AlertManagement::Alert) do |alert|
allow(alert).to receive(:execute_services)
allow(alert).to receive(:execute_integrations)
end
end

View file

@ -378,7 +378,7 @@ RSpec.describe PostReceive do
allow(Project).to receive(:find_by).and_return(project)
expect(project).to receive(:execute_hooks).twice
expect(project).to receive(:execute_services).twice
expect(project).to receive(:execute_integrations).twice
perform
end