Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2019-12-07 00:07:51 +00:00
parent 99ddca0d88
commit 4e375367b7
80 changed files with 988 additions and 781 deletions

View File

@ -1,8 +1,8 @@
<script>
import { mapActions, mapState } from 'vuex';
import _ from 'underscore';
import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import Item from './item.vue';
export default {

View File

@ -1,8 +1,8 @@
<script>
import $ from 'jquery';
import { mapActions, mapState } from 'vuex';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
export default {
components: {

View File

@ -1,7 +1,7 @@
<script>
import { mapActions, mapGetters, mapState } from 'vuex';
import Icon from '~/vue_shared/components/icon.vue';
import { GlSkeletonLoading } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
import FileRow from '~/vue_shared/components/file_row.vue';
import NavDropdown from './nav_dropdown.vue';
import FileRowExtra from './file_row_extra.vue';

View File

@ -1,9 +1,9 @@
<script>
import { mapActions, mapState } from 'vuex';
import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import Item from './item.vue';
import TokenedInput from '../shared/tokened_input.vue';

View File

@ -1,8 +1,8 @@
<script>
import $ from 'jquery';
import { mapActions, mapState, mapGetters } from 'vuex';
import flash from '~/flash';
import { __, sprintf, s__ } from '~/locale';
import { mapActions, mapState, mapGetters } from 'vuex';
import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
import { modalTypes } from '../../constants';

View File

@ -1,7 +1,7 @@
<script>
import { listen } from 'codesandbox-api';
import Icon from '~/vue_shared/components/icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import Icon from '~/vue_shared/components/icon.vue';
export default {
components: {

View File

@ -1,6 +1,6 @@
<script>
import { __, sprintf } from '~/locale';
import { mapActions } from 'vuex';
import { __, sprintf } from '~/locale';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import Icon from '~/vue_shared/components/icon.vue';

View File

@ -1,9 +1,9 @@
import $ from 'jquery';
import Vue from 'vue';
import _ from 'underscore';
import { __, sprintf } from '~/locale';
import { visitUrl } from '~/lib/utils/url_utility';
import flash from '~/flash';
import _ from 'underscore';
import * as types from './mutation_types';
import { decorateFiles } from '../lib/files';
import { stageKeys } from '../constants';

View File

@ -246,7 +246,7 @@ class Commit
def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader|
users = User.by_any_email(emails).includes(:emails)
users = User.by_any_email(emails, confirmed: true).includes(:emails)
emails.each do |email|
user = users.find { |u| u.any_email?(email) }
@ -263,8 +263,8 @@ class Commit
end
request_cache(:author) { author_email.downcase }
def committer
@committer ||= User.find_by_any_email(committer_email)
def committer(confirmed: true)
@committer ||= User.find_by_any_email(committer_email, confirmed: confirmed)
end
def parents

View File

@ -0,0 +1,5 @@
---
title: Add nonunique indexes to Labels
merge_request: 21230
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Do not attribute unverified commit e-mails to GitLab users
merge_request: 21214
author:
type: fixed

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddLabelProjectGroupPartialIndexes < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
PROJECT_AND_TITLE = [:project_id, :title]
GROUP_AND_TITLE = [:group_id, :title]
def up
add_concurrent_index :labels, PROJECT_AND_TITLE, unique: false, where: "labels.group_id = null"
add_concurrent_index :labels, GROUP_AND_TITLE, unique: false, where: "labels.project_id = null"
end
def down
remove_concurrent_index :labels, PROJECT_AND_TITLE
remove_concurrent_index :labels, GROUP_AND_TITLE
end
end

View File

@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_12_04_070713) do
ActiveRecord::Schema.define(version: 2019_12_04_093410) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@ -2228,6 +2228,8 @@ ActiveRecord::Schema.define(version: 2019_12_04_070713) do
t.integer "group_id"
t.integer "cached_markdown_version"
t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true
t.index ["group_id", "title"], name: "index_labels_on_group_id_and_title", where: "(project_id = NULL::integer)"
t.index ["project_id", "title"], name: "index_labels_on_project_id_and_title", where: "(group_id = NULL::integer)"
t.index ["project_id"], name: "index_labels_on_project_id"
t.index ["template"], name: "index_labels_on_template", where: "template"
t.index ["title"], name: "index_labels_on_title"

View File

@ -61,7 +61,7 @@ describe 'Member autocomplete', :js do
before do
allow(User).to receive(:find_by_any_email)
.with(noteable.author_email.downcase).and_return(author)
.with(noteable.author_email.downcase, confirmed: true).and_return(author)
visit project_commit_path(project, noteable)
end

View File

@ -76,16 +76,23 @@ describe 'User browses commits' do
end
context 'secondary email' do
let(:user) { create(:user) }
it 'finds a commit by a secondary email' do
user =
create(:user) do |user|
create(:email, { user: user, email: 'dmitriy.zaporozhets@gmail.com' })
end
create(:email, :confirmed, user: user, email: 'dmitriy.zaporozhets@gmail.com')
visit(project_commit_path(project, sample_commit.parent_id))
check_author_link(sample_commit.author_email, user)
end
it 'links to an unverified e-mail address instead of the user' do
create(:email, user: user, email: 'dmitriy.zaporozhets@gmail.com')
visit(project_commit_path(project, sample_commit.parent_id))
check_author_email(sample_commit.author_email)
end
end
context 'when the blob does not exist' do
@ -263,3 +270,9 @@ def check_author_link(email, author)
expect(author_link['href']).to eq(user_path(author))
expect(find('.commit-author-name').text).to eq(author.name)
end
def check_author_email(email)
author_link = find('.commit-author-link')
expect(author_link['href']).to eq("mailto:#{email}")
end

View File

@ -1,5 +1,5 @@
import Vue from 'vue';
import mountCompontent from 'spec/helpers/vue_mount_component_helper';
import mountCompontent from 'helpers/vue_mount_component_helper';
import router from '~/ide/ide_router';
import Item from '~/ide/components/branches/item.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
@ -30,7 +30,7 @@ describe('IDE branch item', () => {
it('renders branch name and timeago', () => {
const timeText = getTimeago().format(TEST_BRANCH.committedDate);
expect(vm.$el).toContainText(TEST_BRANCH.name);
expect(vm.$el.textContent).toContain(TEST_BRANCH.name);
expect(vm.$el.querySelector('time')).toHaveText(timeText);
expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
});
@ -39,7 +39,7 @@ describe('IDE branch item', () => {
const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
.href;
expect(vm.$el).toMatch('a');
expect(vm.$el.textContent).toMatch('a');
expect(vm.$el).toHaveAttr('href', expectedHref);
});

View File

@ -1,9 +1,9 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import List from '~/ide/components/branches/search_list.vue';
import Item from '~/ide/components/branches/item.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { branches } from '../../mock_data';
const localVue = createLocalVue();

View File

@ -1,9 +1,9 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import List from '~/ide/components/merge_requests/list.vue';
import Item from '~/ide/components/merge_requests/item.vue';
import TokenedInput from '~/ide/components/shared/tokened_input.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { mergeRequests as mergeRequestsMock } from '../../mock_data';
const localVue = createLocalVue();

View File

@ -3,7 +3,7 @@ import '~/behaviors/markdown/render_gfm';
import { createStore } from '~/ide/stores';
import RightPane from '~/ide/components/panes/right.vue';
import { rightSidebarViews } from '~/ide/constants';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
describe('IDE right pane', () => {
let Component;
@ -56,7 +56,7 @@ describe('IDE right pane', () => {
describe('click', () => {
beforeEach(() => {
spyOn(vm, 'open');
jest.spyOn(vm, 'open').mockReturnValue();
});
it('sets view to merge request', done => {
@ -74,7 +74,9 @@ describe('IDE right pane', () => {
describe('live preview', () => {
it('renders live preview button', done => {
Vue.set(vm.$store.state.entries, 'package.json', { name: 'package.json' });
Vue.set(vm.$store.state.entries, 'package.json', {
name: 'package.json',
});
vm.$store.state.clientsidePreviewEnabled = true;
vm.$nextTick(() => {

View File

@ -1,11 +1,11 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import List from '~/ide/components/pipelines/list.vue';
import JobsList from '~/ide/components/jobs/list.vue';
import Tab from '~/vue_shared/components/tabs/tab.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import { pipelines } from '../../../../javascripts/ide/mock_data';
const localVue = createLocalVue();

View File

@ -1,5 +1,6 @@
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import { TEST_HOST } from 'helpers/test_constants';
import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
describe('IDE clientside preview navigator', () => {
@ -12,14 +13,9 @@ describe('IDE clientside preview navigator', () => {
});
beforeEach(() => {
manager = {
bundlerURL: gl.TEST_HOST,
iframe: { src: '' },
};
manager = { bundlerURL: TEST_HOST, iframe: { src: '' } };
vm = mountComponent(Component, {
manager,
});
vm = mountComponent(Component, { manager });
});
afterEach(() => {
@ -47,7 +43,7 @@ describe('IDE clientside preview navigator', () => {
it('calls back method when clicking back button', done => {
vm.navigationStack.push('/test');
vm.navigationStack.push('/test2');
spyOn(vm, 'back');
jest.spyOn(vm, 'back').mockReturnValue();
vm.$nextTick(() => {
vm.$el.querySelector('.ide-navigator-btn').click();
@ -60,7 +56,7 @@ describe('IDE clientside preview navigator', () => {
it('calls forward method when clicking forward button', done => {
vm.forwardNavigationStack.push('/test');
spyOn(vm, 'forward');
jest.spyOn(vm, 'forward').mockReturnValue();
vm.$nextTick(() => {
vm.$el.querySelectorAll('.ide-navigator-btn')[1].click();
@ -73,49 +69,35 @@ describe('IDE clientside preview navigator', () => {
describe('onUrlChange', () => {
it('updates the path', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.path).toBe('/url');
});
it('sets currentBrowsingIndex 0 if not already set', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.currentBrowsingIndex).toBe(0);
});
it('increases currentBrowsingIndex if path doesnt match', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
vm.onUrlChange({
url: `${gl.TEST_HOST}/url2`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url2` });
expect(vm.currentBrowsingIndex).toBe(1);
});
it('does not increase currentBrowsingIndex if path matches', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.currentBrowsingIndex).toBe(0);
});
it('pushes path into navigation stack', () => {
vm.onUrlChange({
url: `${gl.TEST_HOST}/url`,
});
vm.onUrlChange({ url: `${TEST_HOST}/url` });
expect(vm.navigationStack).toEqual(['/url']);
});
@ -128,7 +110,7 @@ describe('IDE clientside preview navigator', () => {
vm.navigationStack.push('/test');
vm.navigationStack.push('/test2');
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.back();
});
@ -152,7 +134,7 @@ describe('IDE clientside preview navigator', () => {
describe('forward', () => {
it('calls visitPath with first entry in forwardNavigationStack', () => {
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.forwardNavigationStack.push('/test');
vm.forwardNavigationStack.push('/test2');
@ -165,7 +147,7 @@ describe('IDE clientside preview navigator', () => {
describe('refresh', () => {
it('calls refresh with current path', () => {
spyOn(vm, 'visitPath');
jest.spyOn(vm, 'visitPath').mockReturnValue();
vm.path = '/test';
@ -179,7 +161,7 @@ describe('IDE clientside preview navigator', () => {
it('updates iframe src with passed in path', () => {
vm.visitPath('/testpath');
expect(manager.iframe.src).toBe(`${gl.TEST_HOST}/testpath`);
expect(manager.iframe.src).toBe(`${TEST_HOST}/testpath`);
});
});
});
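As in the navigator spec above, the global gl.TEST_HOST is replaced throughout by the TEST_HOST constant imported from 'helpers/test_constants' (presumably resolved through the Jest module mapping), and — in the pipelines spec further down — jasmine.clock() gives way to Jest's fake timers. A small illustrative sketch, not code from this commit:

import { TEST_HOST } from 'helpers/test_constants';

describe('fake timers and TEST_HOST (illustrative only)', () => {
  it('advances timers without jasmine.clock()', () => {
    jest.useFakeTimers();

    const callback = jest.fn();
    setTimeout(() => callback(`${TEST_HOST}/ping`), 1000);

    // Jasmine: jasmine.clock().tick(1000)
    jest.advanceTimersByTime(1000);

    expect(callback).toHaveBeenCalledWith(`${TEST_HOST}/ping`);
  });
});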

View File

@ -0,0 +1,52 @@
import * as pathUtils from 'path';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
export const resetStore = store => {
const newState = {
...state(),
commit: commitState(),
mergeRequests: mergeRequestsState(),
pipelines: pipelinesState(),
branches: branchesState(),
fileTemplates: fileTemplatesState(),
rightPane: paneState(),
};
store.replaceState(newState);
};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
type,
icon: 'icon',
url: 'url',
name,
path: parent ? `${parent.path}/${name}` : name,
parentPath: parent ? parent.path : '',
lastCommit: {},
});
export const createEntriesFromPaths = paths =>
paths
.map(path => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
}))
.reduce((entries, path, idx) => {
const { name } = path;
const parent = path.dir ? entries[path.dir] : null;
const type = path.ext ? 'blob' : 'tree';
const entry = file(name, (idx + 1).toString(), type, parent);
return {
[entry.path]: entry,
...entries,
};
}, {});
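A brief illustrative usage of the new spec/frontend IDE helpers above (the paths and assertions are examples, not part of this commit): createEntriesFromPaths turns extension-less paths into 'tree' entries and paths with extensions into 'blob' entries nested under their parents.

// Relative import; the exact path depends on where the spec lives under spec/frontend/ide.
import { createEntriesFromPaths, file } from '../helpers';

it('builds IDE entries from a list of paths (illustrative only)', () => {
  const entries = createEntriesFromPaths(['src', 'src/index.js']);

  // 'src' has no extension, so it becomes a 'tree'; 'src/index.js' becomes a 'blob' under it.
  expect(entries.src.type).toBe('tree');
  expect(entries['src/index.js'].type).toBe('blob');
  expect(entries['src/index.js'].path).toBe('src/index.js');

  // file() on its own creates a single decorated entry.
  expect(file('README.md').name).toBe('README.md');
});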

View File

@ -31,7 +31,7 @@ describe('IDE router', () => {
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`,
].forEach(route => {
it(`finds project path when route is "${route}"`, () => {
spyOn(store, 'dispatch').and.returnValue(new Promise(() => {}));
jest.spyOn(store, 'dispatch').mockReturnValue(new Promise(() => {}));
router.push(route);

View File

@ -45,7 +45,9 @@ describe('IDE store getters', () => {
localState.currentMergeRequestId = 1;
localState.projects.abcproject = {
mergeRequests: {
1: { mergeId: 1 },
1: {
mergeId: 1,
},
},
};
@ -62,9 +64,21 @@ describe('IDE store getters', () => {
describe('allBlobs', () => {
beforeEach(() => {
Object.assign(localState.entries, {
index: { type: 'blob', name: 'index', lastOpenedAt: 0 },
app: { type: 'blob', name: 'blob', lastOpenedAt: 0 },
folder: { type: 'folder', name: 'folder', lastOpenedAt: 0 },
index: {
type: 'blob',
name: 'index',
lastOpenedAt: 0,
},
app: {
type: 'blob',
name: 'blob',
lastOpenedAt: 0,
},
folder: {
type: 'folder',
name: 'folder',
lastOpenedAt: 0,
},
});
});
@ -174,7 +188,7 @@ describe('IDE store getters', () => {
},
};
const localGetters = {
findBranch: jasmine.createSpy('findBranchSpy'),
findBranch: jest.fn(),
};
getters.currentBranch(localState, localGetters);
@ -251,7 +265,9 @@ describe('IDE store getters', () => {
describe('packageJson', () => {
it('returns package.json entry', () => {
localState.entries['package.json'] = { name: 'package.json' };
localState.entries['package.json'] = {
name: 'package.json',
};
expect(getters.packageJson(localState)).toEqual({
name: 'package.json',
@ -273,7 +289,9 @@ describe('IDE store getters', () => {
currentProject: {
default_branch: 'master',
},
currentBranch: { can_push: true },
currentBranch: {
can_push: true,
},
};
expect(getters.canPushToBranch({}, localGetters)).toBeTruthy();
@ -284,7 +302,9 @@ describe('IDE store getters', () => {
currentProject: {
default_branch: 'master',
},
currentBranch: { can_push: false },
currentBranch: {
can_push: false,
},
};
expect(getters.canPushToBranch({}, localGetters)).toBeFalsy();

View File

@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'spec/helpers/vuex_action_helper';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import state from '~/ide/stores/modules/branches/state';
import * as types from '~/ide/stores/modules/branches/mutation_types';
@ -21,12 +21,8 @@ describe('IDE branches actions', () => {
beforeEach(() => {
mockedContext = {
dispatch() {},
rootState: {
currentProjectId: projectData.name_with_namespace,
},
rootGetters: {
currentProject: projectData,
},
rootState: { currentProjectId: projectData.name_with_namespace },
rootGetters: { currentProject: projectData },
state: state(),
};
@ -70,7 +66,7 @@ describe('IDE branches actions', () => {
type: 'setErrorMessage',
payload: {
text: 'Error loading branches.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: { search: TEST_SEARCH },
},
@ -105,15 +101,12 @@ describe('IDE branches actions', () => {
});
it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchBranches(mockedContext, { search: TEST_SEARCH });
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: jasmine.objectContaining({
search: TEST_SEARCH,
sort: 'updated_desc',
}),
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: expect.objectContaining({ search: TEST_SEARCH, sort: 'updated_desc' }),
});
});
@ -126,10 +119,7 @@ describe('IDE branches actions', () => {
[
{ type: 'requestBranches' },
{ type: 'resetBranches' },
{
type: 'receiveBranchesSuccess',
payload: branches,
},
{ type: 'receiveBranchesSuccess', payload: branches },
],
done,
);
@ -150,10 +140,7 @@ describe('IDE branches actions', () => {
[
{ type: 'requestBranches' },
{ type: 'resetBranches' },
{
type: 'receiveBranchesError',
payload: { search: TEST_SEARCH },
},
{ type: 'receiveBranchesError', payload: { search: TEST_SEARCH } },
],
done,
);

View File

@ -10,7 +10,7 @@ describe('IDE branches mutations', () => {
mockedState = state();
});
describe(types.REQUEST_BRANCHES, () => {
describe('REQUEST_BRANCHES', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_BRANCHES](mockedState);
@ -18,7 +18,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RECEIVE_BRANCHES_ERROR, () => {
describe('RECEIVE_BRANCHES_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_BRANCHES_ERROR](mockedState);
@ -26,7 +26,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RECEIVE_BRANCHES_SUCCESS, () => {
describe('RECEIVE_BRANCHES_SUCCESS', () => {
it('sets branches', () => {
const expectedBranches = branches.map(branch => ({
name: branch.name,
@ -39,7 +39,7 @@ describe('IDE branches mutations', () => {
});
});
describe(types.RESET_BRANCHES, () => {
describe('RESET_BRANCHES', () => {
it('clears branches array', () => {
mockedState.branches = ['test'];

View File

@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'spec/helpers/vuex_action_helper';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import createState from '~/ide/stores/modules/file_templates/state';
import * as actions from '~/ide/stores/modules/file_templates/actions';
@ -43,7 +43,7 @@ describe('IDE file templates actions', () => {
{
type: 'setErrorMessage',
payload: {
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
text: 'Error loading template types.',
},
@ -82,7 +82,7 @@ describe('IDE file templates actions', () => {
});
it('rejects if selectedTemplateType is empty', done => {
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
actions
.fetchTemplateTypes({ dispatch, state })
@ -95,9 +95,7 @@ describe('IDE file templates actions', () => {
});
it('dispatches actions', done => {
state.selectedTemplateType = {
key: 'licenses',
};
state.selectedTemplateType = { key: 'licenses' };
testAction(
actions.fetchTemplateTypes,
@ -105,17 +103,9 @@ describe('IDE file templates actions', () => {
state,
[],
[
{
type: 'requestTemplateTypes',
},
{
type: 'receiveTemplateTypesSuccess',
payload: pages[0],
},
{
type: 'receiveTemplateTypesSuccess',
payload: pages[0].concat(pages[1]),
},
{ type: 'requestTemplateTypes' },
{ type: 'receiveTemplateTypesSuccess', payload: pages[0] },
{ type: 'receiveTemplateTypesSuccess', payload: pages[0].concat(pages[1]) },
{
type: 'receiveTemplateTypesSuccess',
payload: pages[0].concat(pages[1]).concat(pages[2]),
@ -132,23 +122,14 @@ describe('IDE file templates actions', () => {
});
it('dispatches actions', done => {
state.selectedTemplateType = {
key: 'licenses',
};
state.selectedTemplateType = { key: 'licenses' };
testAction(
actions.fetchTemplateTypes,
null,
state,
[],
[
{
type: 'requestTemplateTypes',
},
{
type: 'receiveTemplateTypesError',
},
],
[{ type: 'requestTemplateTypes' }, { type: 'receiveTemplateTypesError' }],
done,
);
});
@ -157,16 +138,11 @@ describe('IDE file templates actions', () => {
describe('setSelectedTemplateType', () => {
it('commits SET_SELECTED_TEMPLATE_TYPE', () => {
const commit = jasmine.createSpy('commit');
const commit = jest.fn().mockName('commit');
const options = {
commit,
dispatch() {},
rootGetters: {
activeFile: {
name: 'test',
prevPath: '',
},
},
rootGetters: { activeFile: { name: 'test', prevPath: '' } },
};
actions.setSelectedTemplateType(options, { name: 'test' });
@ -175,17 +151,12 @@ describe('IDE file templates actions', () => {
});
it('dispatches discardFileChanges if prevPath matches templates name', () => {
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const options = {
commit() {},
dispatch,
rootGetters: {
activeFile: {
name: 'test',
path: 'test',
prevPath: 'test',
},
},
rootGetters: { activeFile: { name: 'test', path: 'test', prevPath: 'test' } },
};
actions.setSelectedTemplateType(options, { name: 'test' });
@ -194,27 +165,19 @@ describe('IDE file templates actions', () => {
});
it('dispatches renameEntry if file name doesnt match', () => {
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const options = {
commit() {},
dispatch,
rootGetters: {
activeFile: {
name: 'oldtest',
path: 'oldtest',
prevPath: '',
},
},
rootGetters: { activeFile: { name: 'oldtest', path: 'oldtest', prevPath: '' } },
};
actions.setSelectedTemplateType(options, { name: 'test' });
expect(dispatch).toHaveBeenCalledWith(
'renameEntry',
{
path: 'oldtest',
name: 'test',
},
{ path: 'oldtest', name: 'test' },
{ root: true },
);
});
@ -231,7 +194,7 @@ describe('IDE file templates actions', () => {
{
type: 'setErrorMessage',
payload: {
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
text: 'Error loading template.',
actionPayload: 'test',
@ -246,18 +209,16 @@ describe('IDE file templates actions', () => {
describe('fetchTemplate', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/api\/(.*)\/templates\/licenses\/mit/).replyOnce(200, {
content: 'MIT content',
});
mock.onGet(/api\/(.*)\/templates\/licenses\/testing/).replyOnce(200, {
content: 'testing content',
});
mock
.onGet(/api\/(.*)\/templates\/licenses\/mit/)
.replyOnce(200, { content: 'MIT content' });
mock
.onGet(/api\/(.*)\/templates\/licenses\/testing/)
.replyOnce(200, { content: 'testing content' });
});
it('dispatches setFileTemplate if template already has content', done => {
const template = {
content: 'already has content',
};
const template = { content: 'already has content' };
testAction(
actions.fetchTemplate,
@ -270,13 +231,9 @@ describe('IDE file templates actions', () => {
});
it('dispatches success', done => {
const template = {
key: 'mit',
};
const template = { key: 'mit' };
state.selectedTemplateType = {
key: 'licenses',
};
state.selectedTemplateType = { key: 'licenses' };
testAction(
actions.fetchTemplate,
@ -289,13 +246,9 @@ describe('IDE file templates actions', () => {
});
it('dispatches success and uses name key for API call', done => {
const template = {
name: 'testing',
};
const template = { name: 'testing' };
state.selectedTemplateType = {
key: 'licenses',
};
state.selectedTemplateType = { key: 'licenses' };
testAction(
actions.fetchTemplate,
@ -314,13 +267,9 @@ describe('IDE file templates actions', () => {
});
it('dispatches error', done => {
const template = {
name: 'testing',
};
const template = { name: 'testing' };
state.selectedTemplateType = {
key: 'licenses',
};
state.selectedTemplateType = { key: 'licenses' };
testAction(
actions.fetchTemplate,
@ -336,11 +285,9 @@ describe('IDE file templates actions', () => {
describe('setFileTemplate', () => {
it('dispatches changeFileContent', () => {
const dispatch = jasmine.createSpy('dispatch');
const commit = jasmine.createSpy('commit');
const rootGetters = {
activeFile: { path: 'test' },
};
const dispatch = jest.fn().mockName('dispatch');
const commit = jest.fn().mockName('commit');
const rootGetters = { activeFile: { path: 'test' } };
actions.setFileTemplate({ dispatch, commit, rootGetters }, { content: 'content' });
@ -352,11 +299,9 @@ describe('IDE file templates actions', () => {
});
it('commits SET_UPDATE_SUCCESS', () => {
const dispatch = jasmine.createSpy('dispatch');
const commit = jasmine.createSpy('commit');
const rootGetters = {
activeFile: { path: 'test' },
};
const dispatch = jest.fn().mockName('dispatch');
const commit = jest.fn().mockName('commit');
const rootGetters = { activeFile: { path: 'test' } };
actions.setFileTemplate({ dispatch, commit, rootGetters }, { content: 'content' });
@ -366,11 +311,9 @@ describe('IDE file templates actions', () => {
describe('undoFileTemplate', () => {
it('dispatches changeFileContent', () => {
const dispatch = jasmine.createSpy('dispatch');
const commit = jasmine.createSpy('commit');
const rootGetters = {
activeFile: { path: 'test', raw: 'raw content' },
};
const dispatch = jest.fn().mockName('dispatch');
const commit = jest.fn().mockName('commit');
const rootGetters = { activeFile: { path: 'test', raw: 'raw content' } };
actions.undoFileTemplate({ dispatch, commit, rootGetters });
@ -382,11 +325,9 @@ describe('IDE file templates actions', () => {
});
it('commits SET_UPDATE_SUCCESS', () => {
const dispatch = jasmine.createSpy('dispatch');
const commit = jasmine.createSpy('commit');
const rootGetters = {
activeFile: { path: 'test', raw: 'raw content' },
};
const dispatch = jest.fn().mockName('dispatch');
const commit = jest.fn().mockName('commit');
const rootGetters = { activeFile: { path: 'test', raw: 'raw content' } };
actions.undoFileTemplate({ dispatch, commit, rootGetters });
@ -394,18 +335,12 @@ describe('IDE file templates actions', () => {
});
it('dispatches discardFileChanges if file has prevPath', () => {
const dispatch = jasmine.createSpy('dispatch');
const rootGetters = {
activeFile: { path: 'test', prevPath: 'newtest', raw: 'raw content' },
};
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = { activeFile: { path: 'test', prevPath: 'newtest', raw: 'raw content' } };
actions.undoFileTemplate({ dispatch, commit() {}, rootGetters });
expect(dispatch.calls.mostRecent().args).toEqual([
'discardFileChanges',
'test',
{ root: true },
]);
expect(dispatch).toHaveBeenCalledWith('discardFileChanges', 'test', { root: true });
});
});
});
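The spec rewrites above follow the same Karma/Jasmine-to-Jest conversion applied across this commit: 'spec/helpers' imports become 'helpers' imports, jasmine.createSpy becomes jest.fn().mockName, spyOn becomes jest.spyOn, and the jasmine.any/anything/objectContaining matchers become their expect.* counterparts. A minimal illustrative Jest spec showing the equivalences (the logger object is hypothetical, not code from this commit):

describe('jasmine to jest equivalents (illustrative only)', () => {
  // Hypothetical object to spy on; not part of the GitLab codebase.
  const logger = { log: message => message };

  it('replaces jasmine spies and matchers with jest ones', () => {
    // Jasmine: spyOn(logger, 'log').and.returnValue('ok')
    const logSpy = jest.spyOn(logger, 'log').mockReturnValue('ok');

    // Jasmine: jasmine.createSpy('callback')
    const callback = jest.fn().mockName('callback');

    callback(logger.log('hello'));

    // Jasmine: jasmine.any(String) / jasmine.anything() / jasmine.objectContaining(...)
    expect(logSpy).toHaveBeenCalledWith(expect.any(String));
    expect(callback).toHaveBeenCalledWith(expect.anything());
    expect({ status: 'ok', extra: 1 }).toEqual(expect.objectContaining({ status: 'ok' }));
  });
});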

View File

@ -19,9 +19,7 @@ describe('IDE merge requests actions', () => {
beforeEach(() => {
mockedState = state();
mockedRootState = {
currentProjectId: 7,
};
mockedRootState = { currentProjectId: 7 };
mock = new MockAdapter(axios);
});
@ -54,7 +52,7 @@ describe('IDE merge requests actions', () => {
type: 'setErrorMessage',
payload: {
text: 'Error loading merge requests.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: { type: 'created', search: '' },
},
@ -71,12 +69,7 @@ describe('IDE merge requests actions', () => {
receiveMergeRequestsSuccess,
mergeRequests,
mockedState,
[
{
type: types.RECEIVE_MERGE_REQUESTS_SUCCESS,
payload: mergeRequests,
},
],
[{ type: types.RECEIVE_MERGE_REQUESTS_SUCCESS, payload: mergeRequests }],
[],
done,
);
@ -94,36 +87,34 @@ describe('IDE merge requests actions', () => {
});
it('calls API with params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: 'created' },
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
scope: 'created-by-me',
state: 'opened',
search: '',
},
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: '' },
});
});
it('calls API with search', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: 'created', search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
scope: 'created-by-me',
state: 'opened',
search: 'testing search',
},
expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
params: { scope: 'created-by-me', state: 'opened', search: 'testing search' },
});
});
@ -136,10 +127,7 @@ describe('IDE merge requests actions', () => {
[
{ type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' },
{
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
],
done,
);
@ -152,21 +140,19 @@ describe('IDE merge requests actions', () => {
});
it('calls API with project', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
const apiSpy = jest.spyOn(axios, 'get');
fetchMergeRequests(
{ dispatch() {}, state: mockedState, rootState: mockedRootState },
{
dispatch() {},
state: mockedState,
rootState: mockedRootState,
},
{ type: null, search: 'testing search' },
);
expect(apiSpy).toHaveBeenCalledWith(
jasmine.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
{
params: {
state: 'opened',
search: 'testing search',
},
},
expect.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
{ params: { state: 'opened', search: 'testing search' } },
);
});
@ -179,10 +165,7 @@ describe('IDE merge requests actions', () => {
[
{ type: 'requestMergeRequests' },
{ type: 'resetMergeRequests' },
{
type: 'receiveMergeRequestsSuccess',
payload: mergeRequests,
},
{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
],
done,
);

View File

@ -1,3 +1,4 @@
import { TEST_HOST } from 'helpers/test_constants';
import state from '~/ide/stores/modules/merge_requests/state';
import mutations from '~/ide/stores/modules/merge_requests/mutations';
import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
@ -10,7 +11,7 @@ describe('IDE merge requests mutations', () => {
mockedState = state();
});
describe(types.REQUEST_MERGE_REQUESTS, () => {
describe('REQUEST_MERGE_REQUESTS', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_MERGE_REQUESTS](mockedState);
@ -18,7 +19,7 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => {
describe('RECEIVE_MERGE_REQUESTS_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState);
@ -26,9 +27,9 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => {
describe('RECEIVE_MERGE_REQUESTS_SUCCESS', () => {
it('sets merge requests', () => {
gon.gitlab_url = gl.TEST_HOST;
gon.gitlab_url = TEST_HOST;
mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests);
expect(mockedState.mergeRequests).toEqual([
@ -43,7 +44,7 @@ describe('IDE merge requests mutations', () => {
});
});
describe(types.RESET_MERGE_REQUESTS, () => {
describe('RESET_MERGE_REQUESTS', () => {
it('clears merge request array', () => {
mockedState.mergeRequests = ['test'];

View File

@ -0,0 +1,66 @@
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
describe('IDE pane module actions', () => {
const TEST_VIEW = { name: 'test' };
const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
describe('toggleOpen', () => {
it('dispatches open if closed', done => {
testAction(
actions.toggleOpen,
TEST_VIEW,
{ isOpen: false },
[],
[{ type: 'open', payload: TEST_VIEW }],
done,
);
});
it('dispatches close if opened', done => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
});
});
describe('open', () => {
it('commits SET_OPEN', done => {
testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
});
it('commits SET_CURRENT_VIEW if view is given', done => {
testAction(
actions.open,
TEST_VIEW,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
],
[],
done,
);
});
it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
testAction(
actions.open,
TEST_VIEW_KEEP_ALIVE,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
{ type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
],
[],
done,
);
});
});
describe('close', () => {
it('commits SET_OPEN', done => {
testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
});
});
});
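For reference, the testAction helper used in the new pane spec above takes, as the calls suggest, the action, its payload, the state, the expected mutations, the expected dispatched actions, and the done callback. The same 'close' assertion as above, annotated (comments are editorial; assumes the same imports as the spec above):

it('commits SET_OPEN when closing (annotated copy of the assertion above)', done => {
  testAction(
    actions.close,                              // Vuex action under test
    null,                                       // payload passed to the action
    {},                                         // module state the action runs against
    [{ type: types.SET_OPEN, payload: false }], // mutations expected to be committed
    [],                                         // actions expected to be dispatched
    done,                                       // async callback that ends the test
  );
});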

View File

@ -1,5 +1,6 @@
import Visibility from 'visibilityjs';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import {
requestLatestPipeline,
@ -78,7 +79,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the latest pipeline.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
@ -91,38 +92,28 @@ describe('IDE pipelines actions', () => {
});
describe('receiveLatestPipelineSuccess', () => {
const rootGetters = {
lastCommit: { id: '123' },
};
const rootGetters = { lastCommit: { id: '123' } };
let commit;
beforeEach(() => {
commit = jasmine.createSpy('commit');
commit = jest.fn().mockName('commit');
});
it('commits pipeline', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines });
expect(commit.calls.argsFor(0)).toEqual([
types.RECEIVE_LASTEST_PIPELINE_SUCCESS,
pipelines[0],
]);
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipelines[0]);
});
it('commits false when there are no pipelines', () => {
receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines: [] });
expect(commit.calls.argsFor(0)).toEqual([types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false]);
expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false);
});
});
describe('fetchLatestPipeline', () => {
beforeEach(() => {
jasmine.clock().install();
});
beforeEach(() => {});
afterEach(() => {
jasmine.clock().uninstall();
stopPipelinePolling();
clearEtagPoll();
});
@ -135,10 +126,10 @@ describe('IDE pipelines actions', () => {
});
it('dispatches request', done => {
spyOn(axios, 'get').and.callThrough();
spyOn(Visibility, 'hidden').and.returnValue(false);
jest.spyOn(axios, 'get');
jest.spyOn(Visibility, 'hidden').mockReturnValue(false);
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' },
@ -146,31 +137,29 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters });
expect(dispatch.calls.argsFor(0)).toEqual(['requestLatestPipeline']);
expect(dispatch).toHaveBeenCalledWith('requestLatestPipeline');
jasmine.clock().tick(1000);
jest.advanceTimersByTime(1000);
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(1);
expect(dispatch.calls.argsFor(1)).toEqual([
expect(axios.get).toHaveBeenCalledTimes(1);
expect(dispatch).toHaveBeenCalledWith(
'receiveLatestPipelineSuccess',
jasmine.anything(),
]);
expect.anything(),
);
jasmine.clock().tick(10000);
jest.advanceTimersByTime(10000);
})
.then(() => new Promise(resolve => requestAnimationFrame(resolve)))
.then(() => {
expect(axios.get).toHaveBeenCalled();
expect(axios.get.calls.count()).toBe(2);
expect(dispatch.calls.argsFor(2)).toEqual([
expect(axios.get).toHaveBeenCalledTimes(2);
expect(dispatch).toHaveBeenCalledWith(
'receiveLatestPipelineSuccess',
jasmine.anything(),
]);
expect.anything(),
);
})
.then(done)
.catch(done.fail);
@ -183,7 +172,7 @@ describe('IDE pipelines actions', () => {
});
it('dispatches error', done => {
const dispatch = jasmine.createSpy('dispatch');
const dispatch = jest.fn().mockName('dispatch');
const rootGetters = {
lastCommit: { id: 'abc123def456ghi789jkl' },
currentProject: { path_with_namespace: 'abc/def' },
@ -191,14 +180,11 @@ describe('IDE pipelines actions', () => {
fetchLatestPipeline({ dispatch, rootGetters });
jasmine.clock().tick(1500);
jest.advanceTimersByTime(1500);
new Promise(resolve => requestAnimationFrame(resolve))
.then(() => {
expect(dispatch.calls.argsFor(1)).toEqual([
'receiveLatestPipelineError',
jasmine.anything(),
]);
expect(dispatch).toHaveBeenCalledWith('receiveLatestPipelineError', expect.anything());
})
.then(done)
.catch(done.fail);
@ -224,7 +210,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst loading the pipelines jobs.',
action: jasmine.anything(),
action: expect.anything(),
actionText: 'Please try again',
actionPayload: { id: 1 },
},
@ -249,10 +235,7 @@ describe('IDE pipelines actions', () => {
});
describe('fetchJobs', () => {
const stage = {
id: 1,
dropdownPath: `${gl.TEST_HOST}/jobs`,
};
const stage = { id: 1, dropdownPath: `${TEST_HOST}/jobs` };
describe('success', () => {
beforeEach(() => {
@ -361,7 +344,7 @@ describe('IDE pipelines actions', () => {
type: 'setErrorMessage',
payload: {
text: 'An error occurred whilst fetching the job trace.',
action: jasmine.any(Function),
action: expect.any(Function),
actionText: 'Please try again',
actionPayload: null,
},
@ -387,15 +370,13 @@ describe('IDE pipelines actions', () => {
describe('fetchJobTrace', () => {
beforeEach(() => {
mockedState.detailJob = {
path: `${gl.TEST_HOST}/project/builds`,
};
mockedState.detailJob = { path: `${TEST_HOST}/project/builds` };
});
describe('success', () => {
beforeEach(() => {
spyOn(axios, 'get').and.callThrough();
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
jest.spyOn(axios, 'get');
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
});
it('dispatches request', done => {
@ -413,9 +394,12 @@ describe('IDE pipelines actions', () => {
});
it('sends get request to correct URL', () => {
fetchJobTrace({ state: mockedState, dispatch() {} });
fetchJobTrace({
state: mockedState,
dispatch() {},
});
expect(axios.get).toHaveBeenCalledWith(`${gl.TEST_HOST}/project/builds/trace`, {
expect(axios.get).toHaveBeenCalledWith(`${TEST_HOST}/project/builds/trace`, {
params: { format: 'json' },
});
});
@ -423,7 +407,7 @@ describe('IDE pipelines actions', () => {
describe('error', () => {
beforeEach(() => {
mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(500);
mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(500);
});
it('dispatches error', done => {

View File

@ -10,7 +10,7 @@ describe('IDE pipelines mutations', () => {
mockedState = state();
});
describe(types.REQUEST_LATEST_PIPELINE, () => {
describe('REQUEST_LATEST_PIPELINE', () => {
it('sets loading to true', () => {
mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
@ -18,7 +18,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
describe('RECEIVE_LASTEST_PIPELINE_ERROR', () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
@ -26,7 +26,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
describe('RECEIVE_LASTEST_PIPELINE_SUCCESS', () => {
const itSetsPipelineLoadingStates = () => {
it('sets has loaded to true', () => {
expect(mockedState.hasLoadedPipeline).toBe(true);
@ -52,7 +52,7 @@ describe('IDE pipelines mutations', () => {
id: '51',
path: 'test',
commit: { id: '123' },
details: { status: jasmine.any(Object) },
details: { status: expect.any(Object) },
yamlError: undefined,
});
});
@ -95,12 +95,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.REQUEST_JOBS, () => {
describe('REQUEST_JOBS', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
});
it('sets isLoading on stage', () => {
@ -110,12 +107,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOBS_ERROR, () => {
describe('RECEIVE_JOBS_ERROR', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
});
it('sets isLoading on stage after error', () => {
@ -125,29 +119,22 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOBS_SUCCESS, () => {
describe('RECEIVE_JOBS_SUCCESS', () => {
let data;
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
data = {
latest_statuses: [...jobs],
};
data = { latest_statuses: [...jobs] };
});
it('updates loading', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].isLoading).toBe(false);
});
it('sets jobs on stage', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
expect(mockedState.stages[0].jobs.length).toBe(jobs.length);
expect(mockedState.stages[0].jobs).toEqual(
jobs.map(job => ({
@ -164,13 +151,9 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.TOGGLE_STAGE_COLLAPSE, () => {
describe('TOGGLE_STAGE_COLLAPSE', () => {
beforeEach(() => {
mockedState.stages = stages.map((stage, i) => ({
...stage,
id: i,
isCollapsed: false,
}));
mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i, isCollapsed: false }));
});
it('toggles collapsed state', () => {
@ -184,7 +167,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.SET_DETAIL_JOB, () => {
describe('SET_DETAIL_JOB', () => {
it('sets detail job', () => {
mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]);
@ -192,7 +175,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.REQUEST_JOB_TRACE, () => {
describe('REQUEST_JOB_TRACE', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0] };
});
@ -204,7 +187,7 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOB_TRACE_ERROR, () => {
describe('RECEIVE_JOB_TRACE_ERROR', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
@ -216,14 +199,13 @@ describe('IDE pipelines mutations', () => {
});
});
describe(types.RECEIVE_JOB_TRACE_SUCCESS, () => {
describe('RECEIVE_JOB_TRACE_SUCCESS', () => {
beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true };
});
it('sets output on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' });
expect(mockedState.detailJob.output).toBe('html');
expect(mockedState.detailJob.isLoading).toBe(false);
});

View File

@ -9,10 +9,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => {
localState = state();
localFile = {
...file(),
type: 'blob',
};
localFile = { ...file(), type: 'blob' };
localState.entries[localFile.path] = localFile;
});
@ -28,11 +25,7 @@ describe('IDE store file mutations', () => {
});
it('sets pending tab as not active', () => {
localState.openFiles.push({
...localFile,
pending: true,
active: true,
});
localState.openFiles.push({ ...localFile, pending: true, active: true });
mutations.SET_FILE_ACTIVE(localState, {
path: localFile.path,
@ -132,7 +125,7 @@ describe('IDE store file mutations', () => {
localFile,
].forEach(f => {
expect(f).toEqual(
jasmine.objectContaining({
expect.objectContaining({
path,
name,
raw: null,
@ -154,10 +147,7 @@ describe('IDE store file mutations', () => {
});
it('adds raw data to open pending file', () => {
localState.openFiles.push({
...localFile,
pending: true,
});
localState.openFiles.push({ ...localFile, pending: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
@ -168,11 +158,7 @@ describe('IDE store file mutations', () => {
});
it('does not add raw data to open pending tempFile file', () => {
localState.openFiles.push({
...localFile,
pending: true,
tempFile: true,
});
localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
mutations.SET_FILE_RAW_DATA(localState, {
file: localFile,
@ -234,7 +220,9 @@ describe('IDE store file mutations', () => {
it('sets file mr change', () => {
mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, {
file: localFile,
mrChange: { diff: 'ABC' },
mrChange: {
diff: 'ABC',
},
});
expect(localFile.mrChange.diff).toBe('ABC');
@ -311,12 +299,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([
{
...localFile,
deleted: false,
},
]);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([{ ...localFile, deleted: false }]);
});
it('adds to parent tree if deleted', () => {
@ -328,12 +311,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.entries.parentPath.tree).toEqual([
{
...localFile,
deleted: false,
},
]);
expect(localState.entries.parentPath.tree).toEqual([{ ...localFile, deleted: false }]);
});
});
@ -379,11 +357,7 @@ describe('IDE store file mutations', () => {
let f;
beforeEach(() => {
f = {
...file(),
type: 'blob',
staged: true,
};
f = { ...file(), type: 'blob', staged: true };
localState.stagedFiles.push(f);
localState.changedFiles.push(f);
@ -422,19 +396,16 @@ describe('IDE store file mutations', () => {
describe('ADD_PENDING_TAB', () => {
beforeEach(() => {
const f = {
...file('openFile'),
path: 'openFile',
active: true,
opened: true,
};
const f = { ...file('openFile'), path: 'openFile', active: true, opened: true };
localState.entries[f.path] = f;
localState.openFiles.push(f);
});
it('adds file into openFiles as pending', () => {
mutations.ADD_PENDING_TAB(localState, { file: localFile });
mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].pending).toBe(true);
@ -445,11 +416,15 @@ describe('IDE store file mutations', () => {
const newFile = file('test');
localState.entries[newFile.path] = newFile;
mutations.ADD_PENDING_TAB(localState, { file: localFile });
mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1);
mutations.ADD_PENDING_TAB(localState, { file: file('test') });
mutations.ADD_PENDING_TAB(localState, {
file: file('test'),
});
expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].name).toBe('test');

View File

@ -51,7 +51,9 @@ describe('Multi-file store tree mutations', () => {
});
it('keeps loading state', () => {
mutations.CREATE_TREE(localState, { treePath: 'project/master' });
mutations.CREATE_TREE(localState, {
treePath: 'project/master',
});
mutations.SET_DIRECTORY_DATA(localState, {
data,
treePath: 'project/master',

View File

@ -1,3 +1,4 @@
import { TEST_HOST } from 'helpers/test_constants';
import mutations from '~/ide/stores/mutations';
import state from '~/ide/stores/state';
import { file } from '../helpers';
@ -25,21 +26,30 @@ describe('Multi-file store mutations', () => {
describe('TOGGLE_LOADING', () => {
it('toggles loading of entry', () => {
mutations.TOGGLE_LOADING(localState, { entry });
mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeTruthy();
mutations.TOGGLE_LOADING(localState, { entry });
mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeFalsy();
});
it('toggles loading of entry and sets specific value', () => {
mutations.TOGGLE_LOADING(localState, { entry });
mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeTruthy();
mutations.TOGGLE_LOADING(localState, { entry, forceValue: true });
mutations.TOGGLE_LOADING(localState, {
entry,
forceValue: true,
});
expect(entry.loading).toBeTruthy();
});
@ -123,11 +133,7 @@ describe('Multi-file store mutations', () => {
mutations.CREATE_TMP_ENTRY(localState, {
data: {
entries: {
test: {
...tmpFile,
tempFile: true,
changed: true,
},
test: { ...tmpFile, tempFile: true, changed: true },
},
treeList: [tmpFile],
},
@ -141,18 +147,11 @@ describe('Multi-file store mutations', () => {
it('marks entry as replacing previous entry if the old one has been deleted', () => {
const tmpFile = file('test');
localState.entries.test = {
...tmpFile,
deleted: true,
};
localState.entries.test = { ...tmpFile, deleted: true };
mutations.CREATE_TMP_ENTRY(localState, {
data: {
entries: {
test: {
...tmpFile,
tempFile: true,
changed: true,
},
test: { ...tmpFile, tempFile: true, changed: true },
},
treeList: [tmpFile],
},
@ -167,21 +166,23 @@ describe('Multi-file store mutations', () => {
describe('UPDATE_TEMP_FLAG', () => {
beforeEach(() => {
localState.entries.test = {
...file(),
tempFile: true,
changed: true,
};
localState.entries.test = { ...file(), tempFile: true, changed: true };
});
it('updates tempFile flag', () => {
mutations.UPDATE_TEMP_FLAG(localState, { path: 'test', tempFile: false });
mutations.UPDATE_TEMP_FLAG(localState, {
path: 'test',
tempFile: false,
});
expect(localState.entries.test.tempFile).toBe(false);
});
it('updates changed flag', () => {
mutations.UPDATE_TEMP_FLAG(localState, { path: 'test', tempFile: false });
mutations.UPDATE_TEMP_FLAG(localState, {
path: 'test',
tempFile: false,
});
expect(localState.entries.test.changed).toBe(false);
});
@ -303,23 +304,28 @@ describe('Multi-file store mutations', () => {
const f = {
...file('test'),
prevPath: 'testing-123',
rawPath: `${gl.TEST_HOST}/testing-123`,
permalink: `${gl.TEST_HOST}/testing-123`,
commitsPath: `${gl.TEST_HOST}/testing-123`,
blamePath: `${gl.TEST_HOST}/testing-123`,
rawPath: `${TEST_HOST}/testing-123`,
permalink: `${TEST_HOST}/testing-123`,
commitsPath: `${TEST_HOST}/testing-123`,
blamePath: `${TEST_HOST}/testing-123`,
replaces: true,
};
localState.entries.test = f;
localState.changedFiles.push(f);
mutations.UPDATE_FILE_AFTER_COMMIT(localState, { file: f, lastCommit: { commit: {} } });
mutations.UPDATE_FILE_AFTER_COMMIT(localState, {
file: f,
lastCommit: {
commit: {},
},
});
expect(f).toEqual(
jasmine.objectContaining({
rawPath: `${gl.TEST_HOST}/test`,
permalink: `${gl.TEST_HOST}/test`,
commitsPath: `${gl.TEST_HOST}/test`,
blamePath: `${gl.TEST_HOST}/test`,
expect.objectContaining({
rawPath: `${TEST_HOST}/test`,
permalink: `${TEST_HOST}/test`,
commitsPath: `${TEST_HOST}/test`,
blamePath: `${TEST_HOST}/test`,
replaces: false,
prevId: undefined,
prevPath: undefined,
@ -335,7 +341,10 @@ describe('Multi-file store mutations', () => {
it('sets entryModal', () => {
localState.entries.testPath = file();
mutations.OPEN_NEW_ENTRY_MODAL(localState, { type: 'test', path: 'testPath' });
mutations.OPEN_NEW_ENTRY_MODAL(localState, {
type: 'test',
path: 'testPath',
});
expect(localState.entryModal).toEqual({
type: 'test',
@ -348,7 +357,9 @@ describe('Multi-file store mutations', () => {
describe('RENAME_ENTRY', () => {
beforeEach(() => {
localState.trees = {
'gitlab-ce/master': { tree: [] },
'gitlab-ce/master': {
tree: [],
},
};
localState.currentProjectId = 'gitlab-ce';
localState.currentBranchId = 'master';
@ -365,7 +376,7 @@ describe('Multi-file store mutations', () => {
});
expect(localState.entries).toEqual({
newPath: jasmine.objectContaining({
newPath: expect.objectContaining({
path: 'newPath',
prevPath: 'oldPath',
}),
@ -386,7 +397,7 @@ describe('Multi-file store mutations', () => {
});
expect(localState.entries).toEqual({
newestPath: jasmine.objectContaining({
newestPath: expect.objectContaining({
path: 'newestPath',
prevPath: 'oldPath',
}),
@ -396,10 +407,7 @@ describe('Multi-file store mutations', () => {
it('correctly handles the same entry within a consecutively renamed folder', () => {
const oldPath = file('root-folder/oldPath', 'root-folder/oldPath', 'blob');
localState.entries = {
'root-folder': {
...file('root-folder', 'root-folder', 'tree'),
tree: [oldPath],
},
'root-folder': { ...file('root-folder', 'root-folder', 'tree'), tree: [oldPath] },
'root-folder/oldPath': oldPath,
};
Object.assign(localState.entries['root-folder/oldPath'], {
@ -422,10 +430,10 @@ describe('Multi-file store mutations', () => {
});
expect(localState.entries).toEqual({
'root-folder': jasmine.objectContaining({
'root-folder': expect.objectContaining({
path: 'root-folder',
}),
'simply-renamed/oldPath': jasmine.objectContaining({
'simply-renamed/oldPath': expect.objectContaining({
path: 'simply-renamed/oldPath',
prevPath: 'root-folder/oldPath',
}),
@ -450,8 +458,7 @@ describe('Multi-file store mutations', () => {
path: 'newPath',
name: 'newPath',
url: `project/-/newPath`,
key: jasmine.stringMatching('newPath'),
key: expect.stringMatching('newPath'),
prevId: 'oldPath',
prevName: 'oldPath',
prevPath: 'oldPath',
@ -473,13 +480,13 @@ describe('Multi-file store mutations', () => {
});
expect(localState.entries.newPath).not.toEqual(
jasmine.objectContaining({
prevId: jasmine.anything(),
prevName: jasmine.anything(),
prevPath: jasmine.anything(),
prevUrl: jasmine.anything(),
prevKey: jasmine.anything(),
prevParentPath: jasmine.anything(),
expect.objectContaining({
prevId: expect.anything(),
prevName: expect.anything(),
prevPath: expect.anything(),
prevUrl: expect.anything(),
prevKey: expect.anything(),
prevParentPath: expect.anything(),
}),
);
});
@ -487,10 +494,7 @@ describe('Multi-file store mutations', () => {
it('properly handles files with spaces in name', () => {
const path = 'my fancy path';
const newPath = 'new path';
const oldEntry = {
...file(path, path, 'blob'),
url: `project/-/${encodeURI(path)}`,
};
const oldEntry = { ...file(path, path, 'blob'), url: `project/-/${encodeURI(path)}` };
localState.entries[path] = oldEntry;
@ -507,8 +511,7 @@ describe('Multi-file store mutations', () => {
path: newPath,
name: newPath,
url: `project/-/new%20path`,
key: jasmine.stringMatching(newPath),
key: expect.stringMatching(newPath),
prevId: path,
prevName: path,
prevPath: path,
@ -540,7 +543,11 @@ describe('Multi-file store mutations', () => {
const alpha = file('alpha', 'alpha', 'blob');
const beta = file('beta', 'beta', 'blob');
const gamma = file('gamma', 'gamma', 'blob');
localState.entries = { alpha, beta, gamma };
localState.entries = {
alpha,
beta,
gamma,
};
localState.trees['gitlab-ce/master'].tree = [alpha, beta, gamma];
@ -552,9 +559,13 @@ describe('Multi-file store mutations', () => {
});
expect(localState.trees['gitlab-ce/master'].tree).toEqual([
jasmine.objectContaining({ name: 'beta' }),
jasmine.objectContaining({ name: 'gamma' }),
jasmine.objectContaining({
expect.objectContaining({
name: 'beta',
}),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
path: 'theta',
name: 'theta',
}),
@ -570,23 +581,26 @@ describe('Multi-file store mutations', () => {
openFiles: [localState.entries.oldPath],
});
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].path).toBe('newPath');
});
it('does not add renamed entry to changedFiles', () => {
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.changedFiles.length).toBe(0);
});
it('updates existing changedFiles entry with the renamed one', () => {
const origFile = {
...file('oldPath', 'oldPath', 'blob'),
content: 'Foo',
};
const origFile = { ...file('oldPath', 'oldPath', 'blob'), content: 'Foo' };
Object.assign(localState, {
changedFiles: [origFile],
@ -595,10 +609,13 @@ describe('Multi-file store mutations', () => {
oldPath: origFile,
});
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.changedFiles).toEqual([
jasmine.objectContaining({
expect.objectContaining({
path: 'newPath',
content: 'Foo',
}),
@ -613,13 +630,19 @@ describe('Multi-file store mutations', () => {
{},
);
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.entries.newPath).toEqual(jasmine.objectContaining(expectedObj));
expect(localState.entries.newPath).toEqual(expect.objectContaining(expectedObj));
mutations.RENAME_ENTRY(localState, { path: 'newPath', name: 'newer' });
mutations.RENAME_ENTRY(localState, {
path: 'newPath',
name: 'newer',
});
expect(localState.entries.newer).toEqual(jasmine.objectContaining(expectedObj));
expect(localState.entries.newer).toEqual(expect.objectContaining(expectedObj));
});
describe('renaming back to original', () => {
@ -638,12 +661,16 @@ describe('Multi-file store mutations', () => {
renamed: renamedEntry,
};
mutations.RENAME_ENTRY(localState, { path: 'renamed', name: 'orig', parentPath: 'lorem' });
mutations.RENAME_ENTRY(localState, {
path: 'renamed',
name: 'orig',
parentPath: 'lorem',
});
});
it('renames entry and clears prev properties', () => {
expect(localState.entries).toEqual({
'lorem/orig': jasmine.objectContaining({
'lorem/orig': expect.objectContaining({
id: 'lorem/orig',
path: 'lorem/orig',
name: 'orig',
@ -672,7 +699,10 @@ describe('Multi-file store mutations', () => {
it('sets properly constructed key while preserving the original one', () => {
const key = 'oldPath.txt-blob-oldPath.txt';
localState.entries['oldPath.txt'].key = key;
mutations.RENAME_ENTRY(localState, { path: 'oldPath.txt', name: 'newPath.md' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath.txt',
name: 'newPath.md',
});
expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md');
expect(localState.entries['newPath.md'].prevKey).toBe(key);
@ -680,14 +710,20 @@ describe('Multi-file store mutations', () => {
it('correctly updates key for an entry without an extension', () => {
localState.entries.oldPath.key = 'oldPath-blob-oldPath';
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath.md' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath.md',
});
expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md');
});
it('correctly updates key when new name does not have an extension', () => {
localState.entries['oldPath.txt'].key = 'oldPath.txt-blob-oldPath.txt';
mutations.RENAME_ENTRY(localState, { path: 'oldPath.txt', name: 'newPath' });
mutations.RENAME_ENTRY(localState, {
path: 'oldPath.txt',
name: 'newPath',
});
expect(localState.entries.newPath.key).toBe('newPath-blob-newPath');
});

View File

@ -104,21 +104,9 @@ describe('Multi-file store utils', () => {
base64: true,
lastCommitSha: '123456789',
},
{
...file('deletedFile'),
path: 'deletedFile',
deleted: true,
},
{
...file('renamedFile'),
path: 'renamedFile',
prevPath: 'prevPath',
},
{
...file('replacingFile'),
path: 'replacingFile',
replaces: true,
},
{ ...file('deletedFile'), path: 'deletedFile', deleted: true },
{ ...file('renamedFile'), path: 'renamedFile', prevPath: 'prevPath' },
{ ...file('replacingFile'), path: 'replacingFile', replaces: true },
],
currentBranchId: 'master',
};
@ -237,15 +225,27 @@ describe('Multi-file store utils', () => {
describe('commitActionForFile', () => {
it('returns deleted for deleted file', () => {
expect(utils.commitActionForFile({ deleted: true })).toBe(commitActionTypes.delete);
expect(
utils.commitActionForFile({
deleted: true,
}),
).toBe(commitActionTypes.delete);
});
it('returns create for tempFile', () => {
expect(utils.commitActionForFile({ tempFile: true })).toBe(commitActionTypes.create);
expect(
utils.commitActionForFile({
tempFile: true,
}),
).toBe(commitActionTypes.create);
});
it('returns move for moved file', () => {
expect(utils.commitActionForFile({ prevPath: 'test' })).toBe(commitActionTypes.move);
expect(
utils.commitActionForFile({
prevPath: 'test',
}),
).toBe(commitActionTypes.move);
});
it('returns update by default', () => {
@ -341,12 +341,7 @@ describe('Multi-file store utils', () => {
fromTree[0].tree.push({
...file('alpha'),
path: 'foo/alpha',
tree: [
{
...file('beta.md'),
path: 'foo/alpha/beta.md',
},
],
tree: [{ ...file('beta.md'), path: 'foo/alpha/beta.md' }],
});
toTree.push({
@ -355,12 +350,7 @@ describe('Multi-file store utils', () => {
{
...file('alpha'),
path: 'foo/alpha',
tree: [
{
...file('gamma.md'),
path: 'foo/alpha/gamma.md',
},
],
tree: [{ ...file('gamma.md'), path: 'foo/alpha/gamma.md' }],
},
],
});
@ -381,12 +371,7 @@ describe('Multi-file store utils', () => {
fromTree[0].tree.push({
...file('alpha'),
path: 'foo/alpha',
tree: [
{
...file('beta.md'),
path: 'foo/alpha/beta.md',
},
],
tree: [{ ...file('beta.md'), path: 'foo/alpha/beta.md' }],
});
toTree.push({
@ -395,12 +380,7 @@ describe('Multi-file store utils', () => {
{
...file('alpha'),
path: 'foo/alpha',
tree: [
{
...file('gamma.md'),
path: 'foo/alpha/gamma.md',
},
],
tree: [{ ...file('gamma.md'), path: 'foo/alpha/gamma.md' }],
},
],
});
@ -431,10 +411,7 @@ describe('Multi-file store utils', () => {
});
it('swaps existing entry with a new one', () => {
const file1 = {
...file('old'),
key: 'foo',
};
const file1 = { ...file('old'), key: 'foo' };
const file2 = file('new');
const arr = [file1];
@ -511,8 +488,12 @@ describe('Multi-file store utils', () => {
expect(branchInfo.tree.length).toBe(2);
expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }),
jasmine.objectContaining({ name: 'oldPath' }),
expect.objectContaining({
name: 'newPath',
}),
expect.objectContaining({
name: 'oldPath',
}),
]);
});
@ -521,7 +502,9 @@ describe('Multi-file store utils', () => {
expect(localState.entries.parentPath.tree.length).toBe(1);
expect(localState.entries.parentPath.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }),
expect.objectContaining({
name: 'newPath',
}),
]);
localState.entries.parentPath.tree = [localState.entries.oldPath];
@ -530,8 +513,12 @@ describe('Multi-file store utils', () => {
expect(localState.entries.parentPath.tree.length).toBe(2);
expect(localState.entries.parentPath.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }),
jasmine.objectContaining({ name: 'oldPath' }),
expect.objectContaining({
name: 'newPath',
}),
expect.objectContaining({
name: 'oldPath',
}),
]);
});
});
@ -542,11 +529,19 @@ describe('Multi-file store utils', () => {
utils.swapInParentTreeWithSorting(localState, localState.entries.oldPath.key, 'newPath');
expect(branchInfo.tree).toEqual([jasmine.objectContaining({ name: 'newPath' })]);
expect(branchInfo.tree).toEqual([
expect.objectContaining({
name: 'newPath',
}),
]);
utils.swapInParentTreeWithSorting(localState, localState.entries.newPath.key, 'oldPath');
expect(branchInfo.tree).toEqual([jasmine.objectContaining({ name: 'oldPath' })]);
expect(branchInfo.tree).toEqual([
expect.objectContaining({
name: 'oldPath',
}),
]);
});
it('sorts tree after swapping the entries', () => {
@ -554,32 +549,55 @@ describe('Multi-file store utils', () => {
const beta = file('beta', 'beta', 'blob');
const gamma = file('gamma', 'gamma', 'blob');
const theta = file('theta', 'theta', 'blob');
localState.entries = { alpha, beta, gamma, theta };
localState.entries = {
alpha,
beta,
gamma,
theta,
};
branchInfo.tree = [alpha, beta, gamma];
utils.swapInParentTreeWithSorting(localState, alpha.key, 'theta');
expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'beta' }),
jasmine.objectContaining({ name: 'gamma' }),
jasmine.objectContaining({ name: 'theta' }),
expect.objectContaining({
name: 'beta',
}),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
name: 'theta',
}),
]);
utils.swapInParentTreeWithSorting(localState, gamma.key, 'alpha');
expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'alpha' }),
jasmine.objectContaining({ name: 'beta' }),
jasmine.objectContaining({ name: 'theta' }),
expect.objectContaining({
name: 'alpha',
}),
expect.objectContaining({
name: 'beta',
}),
expect.objectContaining({
name: 'theta',
}),
]);
utils.swapInParentTreeWithSorting(localState, beta.key, 'gamma');
expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'alpha' }),
jasmine.objectContaining({ name: 'gamma' }),
jasmine.objectContaining({ name: 'theta' }),
expect.objectContaining({
name: 'alpha',
}),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
name: 'theta',
}),
]);
});
});
@ -587,11 +605,26 @@ describe('Multi-file store utils', () => {
describe('cleanTrailingSlash', () => {
[
{ input: '', output: '' },
{ input: 'abc', output: 'abc' },
{ input: 'abc/', output: 'abc' },
{ input: 'abc/def', output: 'abc/def' },
{ input: 'abc/def/', output: 'abc/def' },
{
input: '',
output: '',
},
{
input: 'abc',
output: 'abc',
},
{
input: 'abc/',
output: 'abc',
},
{
input: 'abc/def',
output: 'abc/def',
},
{
input: 'abc/def/',
output: 'abc/def',
},
].forEach(({ input, output }) => {
it(`cleans trailing slash from string "${input}"`, () => {
expect(utils.cleanTrailingSlash(input)).toEqual(output);
@ -601,13 +634,34 @@ describe('Multi-file store utils', () => {
describe('pathsAreEqual', () => {
[
{ args: ['abc', 'abc'], output: true },
{ args: ['abc', 'def'], output: false },
{ args: ['abc/', 'abc'], output: true },
{ args: ['abc/abc', 'abc'], output: false },
{ args: ['/', ''], output: true },
{ args: ['', '/'], output: true },
{ args: [false, '/'], output: true },
{
args: ['abc', 'abc'],
output: true,
},
{
args: ['abc', 'def'],
output: false,
},
{
args: ['abc/', 'abc'],
output: true,
},
{
args: ['abc/abc', 'abc'],
output: false,
},
{
args: ['/', ''],
output: true,
},
{
args: ['', '/'],
output: true,
},
{
args: [false, '/'],
output: true,
},
].forEach(({ args, output }) => {
it(`cleans and tests equality (${JSON.stringify(args)})`, () => {
expect(utils.pathsAreEqual(...args)).toEqual(output);
@ -618,10 +672,22 @@ describe('Multi-file store utils', () => {
describe('addFinalNewlineIfNeeded', () => {
it("adds a newline if it doesn't already exist", () => {
[
{ input: 'some text', output: 'some text\n' },
{ input: 'some text\n', output: 'some text\n' },
{ input: 'some text\n\n', output: 'some text\n\n' },
{ input: 'some\n text', output: 'some\n text\n' },
{
input: 'some text',
output: 'some text\n',
},
{
input: 'some text\n',
output: 'some text\n',
},
{
input: 'some text\n\n',
output: 'some text\n\n',
},
{
input: 'some\n text',
output: 'some\n text\n',
},
].forEach(({ input, output }) => {
expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output);
});

View File

@ -26,15 +26,18 @@ describe('WebIDE utils', () => {
entry.deleted = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted);
});
it('renders "addition" icon for temp entries', () => {
entry.tempFile = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition);
});
it('renders "modified" icon for newly-renamed entries', () => {
entry.prevPath = 'foo/bar';
entry.tempFile = false;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified);
});
it('renders "modified" icon even for temp entries if they are newly-renamed', () => {
entry.prevPath = 'foo/bar';
entry.tempFile = true;

View File

@ -1,54 +1 @@
import * as pathUtils from 'path';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
export const resetStore = store => {
const newState = {
...state(),
commit: commitState(),
mergeRequests: mergeRequestsState(),
pipelines: pipelinesState(),
branches: branchesState(),
fileTemplates: fileTemplatesState(),
rightPane: paneState(),
};
store.replaceState(newState);
};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
type,
icon: 'icon',
url: 'url',
name,
path: parent ? `${parent.path}/${name}` : name,
parentPath: parent ? parent.path : '',
lastCommit: {},
});
export const createEntriesFromPaths = paths =>
paths
.map(path => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
}))
.reduce((entries, path, idx) => {
const { name } = path;
const parent = path.dir ? entries[path.dir] : null;
const type = path.ext ? 'blob' : 'tree';
const entry = file(name, (idx + 1).toString(), type, parent);
return {
[entry.path]: entry,
...entries,
};
}, {});
export * from '../../frontend/ide/helpers';

View File

@ -1,66 +0,0 @@
import testAction from 'spec/helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
describe('IDE pane module actions', () => {
const TEST_VIEW = { name: 'test' };
const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
describe('toggleOpen', () => {
it('dispatches open if closed', done => {
testAction(
actions.toggleOpen,
TEST_VIEW,
{ isOpen: false },
[],
[{ type: 'open', payload: TEST_VIEW }],
done,
);
});
it('dispatches close if opened', done => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
});
});
describe('open', () => {
it('commits SET_OPEN', done => {
testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
});
it('commits SET_CURRENT_VIEW if view is given', done => {
testAction(
actions.open,
TEST_VIEW,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
],
[],
done,
);
});
it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
testAction(
actions.open,
TEST_VIEW_KEEP_ALIVE,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
{ type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
],
[],
done,
);
});
});
describe('close', () => {
it('commits SET_OPEN', done => {
testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
});
});
});

View File

@ -136,7 +136,9 @@ describe Gitlab::Auth::LDAP::Access do
context 'without ActiveDirectory enabled' do
before do
allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:active_directory).and_return(false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:active_directory).and_return(false)
end
end
it 'returns true' do

View File

@ -58,7 +58,9 @@ describe Gitlab::Auth::LDAP::AuthHash do
end
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:attributes).and_return(attributes)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:attributes).and_return(attributes)
end
end
it "has the correct username" do

View File

@ -18,8 +18,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_truthy
end
@ -27,8 +28,9 @@ describe Gitlab::Auth::LDAP::Authentication do
it "is false if the user does not exist" do
# try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey
end
@ -38,8 +40,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call
adapter = double('adapter', bind_as: nil).as_null_object
allow_any_instance_of(described_class)
.to receive(:adapter).and_return(adapter)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey
end

View File

@ -396,7 +396,9 @@ describe Gitlab::Auth::OAuth::User do
context "and no account for the LDAP user" do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
@ -408,7 +410,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do
@ -424,7 +428,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
@ -436,7 +442,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do
@ -480,7 +488,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end
it do
@ -492,7 +502,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do
before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true)
allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end
it do

View File

@ -75,7 +75,9 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
end
it 'does not schedule an import' do
expect_any_instance_of(Project).not_to receive(:import_schedule)
expect_next_instance_of(Project) do |instance|
expect(instance).not_to receive(:import_schedule)
end
importer.create_project_if_needed
end

View File

@ -9,7 +9,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
describe '.load_for_project' do
it "loads the status" do
expect_any_instance_of(described_class).to receive(:load_status)
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:load_status)
end
described_class.load_for_project(project)
end

View File

@ -32,7 +32,9 @@ describe Gitlab::Checks::BranchCheck do
end
it 'raises an error if the user is not allowed to merge to protected branches' do
expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
expect_next_instance_of(Gitlab::Checks::MatchingMergeRequest) do |instance|
expect(instance).to receive(:match?).and_return(true)
end
expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false)

View File

@ -14,31 +14,41 @@ describe Gitlab::Checks::ChangeAccess do
end
it 'calls pushes checks' do
expect_any_instance_of(Gitlab::Checks::PushCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::PushCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls branches checks' do
expect_any_instance_of(Gitlab::Checks::BranchCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::BranchCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls tags checks' do
expect_any_instance_of(Gitlab::Checks::TagCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::TagCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls lfs checks' do
expect_any_instance_of(Gitlab::Checks::LfsCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
it 'calls diff checks' do
expect_any_instance_of(Gitlab::Checks::DiffCheck).to receive(:validate!)
expect_next_instance_of(Gitlab::Checks::DiffCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec
end
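
The Ruby spec hunks above and below repeatedly replace `allow_any_instance_of` / `expect_any_instance_of` with GitLab's `allow_next_instance_of` / `expect_next_instance_of` block helpers, which intercept `Klass.new` and yield the freshly built instance, so the stub or message expectation is attached to that object rather than to every instance of the class. The following is a minimal illustrative sketch, assuming it runs inside GitLab's spec suite where the `NextInstanceOf` helper module is already included; `WidgetService` is a hypothetical class used only for this example.

# frozen_string_literal: true

# Sketch only: assumes GitLab's spec_helper, which wires up the
# NextInstanceOf helper used throughout the hunks in this commit.
require 'spec_helper'

RSpec.describe 'next_instance_of spec helpers' do
  let(:widget_class) do
    # Hypothetical class, defined here purely for illustration.
    Class.new do
      def validate!
        :ok
      end
    end
  end

  before do
    stub_const('WidgetService', widget_class)
  end

  it 'stubs the instance yielded by allow_next_instance_of' do
    # Replaces the deprecated allow_any_instance_of(WidgetService) form.
    allow_next_instance_of(WidgetService) do |instance|
      allow(instance).to receive(:validate!).and_return(:stubbed)
    end

    expect(WidgetService.new.validate!).to eq(:stubbed)
  end

  it 'sets a message expectation with expect_next_instance_of' do
    # Replaces expect_any_instance_of(WidgetService).to receive(:validate!).
    expect_next_instance_of(WidgetService) do |instance|
      expect(instance).to receive(:validate!)
    end

    WidgetService.new.validate!
  end
end
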

View File

@ -12,12 +12,16 @@ describe Gitlab::Ci::Build::Credentials::Factory do
end
before do
allow_any_instance_of(described_class).to receive(:providers).and_return([TestProvider])
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:providers).and_return([TestProvider])
end
end
context 'when provider is valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true)
allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(true)
end
end
it 'generates an array of credentials objects' do
@ -29,7 +33,9 @@ describe Gitlab::Ci::Build::Credentials::Factory do
context 'when provider is not valid' do
before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false)
allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(false)
end
end
it 'generates an array without specific credential object' do

View File

@ -15,8 +15,9 @@ describe Gitlab::Ci::Config::External::File::Project do
before do
project.add_developer(user)
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do
@ -159,8 +160,8 @@ describe Gitlab::Ci::Config::External::File::Project do
private
def stub_project_blob(ref, path)
allow_any_instance_of(Repository)
.to receive(:blob_data_at)
.with(ref, path) { yield }
allow_next_instance_of(Repository) do |instance|
allow(instance).to receive(:blob_data_at).with(ref, path) { yield }
end
end
end

View File

@ -21,8 +21,9 @@ describe Gitlab::Ci::Config::External::File::Remote do
end
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do

View File

@ -14,8 +14,9 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template_file) { described_class.new(params, context) }
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#matching?' do

View File

@ -23,8 +23,9 @@ describe Gitlab::Ci::Config::External::Mapper do
before do
stub_full_request(remote_url).to_return(body: file_content)
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
describe '#process' do

View File

@ -8,8 +8,9 @@ describe Gitlab::Ci::Config do
set(:user) { create(:user) }
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!)
end
end
let(:config) do
@ -358,18 +359,11 @@ describe Gitlab::Ci::Config do
context "when it takes too long to evaluate includes" do
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:set_deadline)
.with(described_class::TIMEOUT_SECONDS)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:execution_expired?)
.and_return(true)
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!).and_call_original
allow(instance).to receive(:set_deadline).with(described_class::TIMEOUT_SECONDS).and_call_original
allow(instance).to receive(:execution_expired?).and_return(true)
end
end
it 'raises error TimeoutError' do
@ -384,9 +378,9 @@ describe Gitlab::Ci::Config do
context 'when context expansion timeout is disabled' do
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:check_execution_time!)
.and_call_original
allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
allow(instance).to receive(:check_execution_time!).and_call_original
end
allow(Feature)
.to receive(:enabled?)

View File

@ -81,7 +81,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is protected' do
before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(true)
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(true)
end
end
it 'returns protected builds' do
@ -91,7 +93,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is not protected' do
before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(false)
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(false)
end
end
it 'returns unprotected builds' do

View File

@ -112,8 +112,9 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end
it 'calls get_chunk only once' do
expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO)
.to receive(:current_chunk).once.and_call_original
expect_next_instance_of(Gitlab::Ci::Trace::ChunkedIO) do |instance|
expect(instance).to receive(:current_chunk).once.and_call_original
end
chunked_io.each_line { |line| }
end

View File

@ -9,7 +9,9 @@ shared_examples 'base stage' do
before do
allow(stage).to receive(:project_median).and_return(1.12)
allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({})
allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
allow(instance).to receive(:event_result).and_return({})
end
end
it 'has the median data value' do

View File

@ -17,7 +17,9 @@ describe Gitlab::CycleAnalytics::UsageData do
projects.each_with_index do |project, time|
issue = create(:issue, project: project, created_at: (time + 1).hour.ago)
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
allow(instance).to receive(:issues).and_return([issue])
end
milestone = create(:milestone, project: project)
mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}")

View File

@ -10,17 +10,25 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
describe '#diff_files' do
it 'does not highlight binary files' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false)
allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:text?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files
end
it 'does not highlight files marked as undiffable in .gitattributes' do
allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
allow_next_instance_of(Gitlab::Diff::File) do |instance|
allow(instance).to receive(:diffable?).and_return(false)
end
expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
expect_next_instance_of(Gitlab::Diff::File) do |instance|
expect(instance).not_to receive(:highlighted_diff_lines)
end
diff_files
end

View File

@ -95,7 +95,9 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do
context "something is wrong" do
context "when the merge request could not be saved" do
before do
allow_any_instance_of(MergeRequest).to receive(:save).and_return(false)
allow_next_instance_of(MergeRequest) do |instance|
allow(instance).to receive(:save).and_return(false)
end
end
it "raises an InvalidMergeRequestError" do

View File

@ -38,8 +38,9 @@ describe Gitlab::EtagCaching::Middleware do
end
it 'generates ETag' do
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:touch).and_return('123')
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:touch).and_return('123')
end
middleware.call(build_request(path, if_none_match))
end
@ -177,9 +178,9 @@ describe Gitlab::EtagCaching::Middleware do
'SCRIPT_NAME' => '/relative-gitlab'
}
expect_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:get).with("/relative-gitlab#{enabled_path}")
.and_return(nil)
expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
expect(instance).to receive(:get).with("/relative-gitlab#{enabled_path}").and_return(nil)
end
middleware.call(env)
end
@ -190,8 +191,9 @@ describe Gitlab::EtagCaching::Middleware do
end
def mock_value_in_store(value)
allow_any_instance_of(Gitlab::EtagCaching::Store)
.to receive(:get).and_return(value)
allow_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
allow(instance).to receive(:get).and_return(value)
end
end
def build_request(path, if_none_match)

View File

@ -158,7 +158,9 @@ describe Gitlab::Experimentation do
context 'the user is part of the control group' do
before do
allow_any_instance_of(described_class).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
end
end
it 'pushes the right parameters to gon' do

View File

@ -20,6 +20,8 @@ describe Gitlab::FogbugzImport::Client do
end
def stub_api(users)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listPeople).and_return(users)
allow_next_instance_of(::Fogbugz::Interface) do |instance|
allow(instance).to receive(:command).with(:listPeople).and_return(users)
end
end
end

View File

@ -0,0 +1,73 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::FogbugzImport::Importer do
let(:project) { create(:project_empty_repo) }
let(:importer) { described_class.new(project) }
let(:repo) do
instance_double(Gitlab::FogbugzImport::Repository,
safe_name: 'vim',
path: 'vim',
raw_data: '')
end
let(:import_data) { { 'repo' => repo } }
let(:credentials) do
{
'fb_session' => {
'uri' => 'https://testing.fogbugz.com',
'token' => 'token'
}
}
end
let(:closed_bug) do
{
fOpen: 'false',
sTitle: 'Closed bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:opened_bug) do
{
fOpen: 'true',
sTitle: 'Opened bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:fogbugz_bugs) { [opened_bug, closed_bug] }
before do
project.create_import_data(data: import_data, credentials: credentials)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listCategories).and_return([])
allow_any_instance_of(Gitlab::FogbugzImport::Client).to receive(:cases).and_return(fogbugz_bugs)
end
it 'imports bugs' do
expect { importer.execute }.to change { Issue.count }.by(2)
end
it 'imports opened bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(opened_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:opened])
end
it 'imports closed bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(closed_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:closed])
end
end

View File

@ -134,7 +134,9 @@ describe Gitlab::Git::Blob, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg')
end

View File

@ -176,7 +176,9 @@ describe Gitlab::Git::Commit, :seed_helper do
describe '.find with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.find(repository, SeedRepo::Commit::ID)
end
@ -438,7 +440,9 @@ describe Gitlab::Git::Commit, :seed_helper do
it_should_behave_like '.batch_by_oid'
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
end
described_class.batch_by_oid(repository, [SeedRepo::Commit::ID])
end

View File

@ -145,7 +145,9 @@ describe Gitlab::Git::Tree, :seed_helper do
describe '.where with Rugged enabled', :enable_rugged do
it 'calls out to the Rugged implementation' do
allow_any_instance_of(Rugged).to receive(:lookup).with(SeedRepo::Commit::ID)
allow_next_instance_of(Rugged) do |instance|
allow(instance).to receive(:lookup).with(SeedRepo::Commit::ID)
end
described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
end

View File

@ -730,7 +730,9 @@ describe Gitlab::GitAccess do
it 'checks LFS integrity only for first change' do
allow(project).to receive(:lfs_enabled?).and_return(true)
expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times
expect_next_instance_of(Gitlab::Checks::LfsIntegrity) do |instance|
expect(instance).to receive(:objects_missing?).exactly(1).times
end
push_access_check
end

View File

@ -10,10 +10,11 @@ describe Gitlab::GitalyClient::CleanupService do
describe '#apply_bfg_object_map_stream' do
it 'sends an apply_bfg_object_map_stream message' do
expect_any_instance_of(Gitaly::CleanupService::Stub)
.to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash))
.and_return([])
expect_next_instance_of(Gitaly::CleanupService::Stub) do |instance|
expect(instance).to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash))
.and_return([])
end
client.apply_bfg_object_map_stream(StringIO.new)
end

View File

@ -55,7 +55,9 @@ describe Gitlab::GitalyClient do
it 'returns an empty string when the storage is not found in the response' do
response = double("response")
allow(response).to receive(:storage_statuses).and_return([])
allow_any_instance_of(Gitlab::GitalyClient::ServerService).to receive(:info).and_return(response)
allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
allow(instance).to receive(:info).and_return(response)
end
expect(described_class.filesystem_id('default')).to eq(nil)
end

View File

@ -144,9 +144,9 @@ describe Gitlab::GithubImport::Importer::DiffNoteImporter do
describe '#find_merge_request_id' do
it 'returns a merge request ID' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_merge_request_id).to eq(10)
end

View File

@ -74,9 +74,9 @@ describe Gitlab::GithubImport::Importer::LabelLinksImporter do
describe '#find_target_id' do
it 'returns the ID of the issuable to create the label link for' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_target_id).to eq(10)
end

View File

@ -50,8 +50,9 @@ describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cac
describe '#build_labels_cache' do
it 'builds the labels cache' do
expect_any_instance_of(Gitlab::GithubImport::LabelFinder)
.to receive(:build_cache)
expect_next_instance_of(Gitlab::GithubImport::LabelFinder) do |instance|
expect(instance).to receive(:build_cache)
end
importer.build_labels_cache
end

View File

@ -80,8 +80,9 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
describe '#build_milestones_cache' do
it 'builds the milestones cache' do
expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder)
.to receive(:build_cache)
expect_next_instance_of(Gitlab::GithubImport::MilestoneFinder) do |instance|
expect(instance).to receive(:build_cache)
end
importer.build_milestones_cache
end

View File

@ -143,9 +143,9 @@ describe Gitlab::GithubImport::Importer::NoteImporter do
describe '#find_noteable_id' do
it 'returns the ID of the noteable' do
expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
.to receive(:database_id)
.and_return(10)
expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
expect(instance).to receive(:database_id).and_return(10)
end
expect(importer.find_noteable_id).to eq(10)
end

View File

@ -9,8 +9,9 @@ describe Gitlab::GithubImport::SequentialImporter do
project = double(:project, id: 1, repository: repository)
importer = described_class.new(project, token: 'foo')
expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
.to receive(:execute)
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
expect(instance).to receive(:execute)
end
described_class::SEQUENTIAL_IMPORTERS.each do |klass|
instance = double(:instance)

View File

@ -21,18 +21,24 @@ describe Gitlab::GitlabImport::Client do
it 'uses membership and simple flags' do
stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true')
expect_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
expect_next_instance_of(OAuth2::Response) do |instance|
expect(instance).to receive(:parsed).and_return([])
end
expect(client.projects.to_a).to eq []
end
shared_examples 'pagination params' do
before do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return([])
end
end
it 'allows page_limit param' do
allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return(element_list)
allow_next_instance_of(OAuth2::Response) do |instance|
allow(instance).to receive(:parsed).and_return(element_list)
end
expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original

View File

@ -109,7 +109,9 @@ describe Gitlab::HttpIO do
end
it 'calls get_chunk only once' do
expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
expect_next_instance_of(Net::HTTP) do |instance|
expect(instance).to receive(:request).once.and_call_original
end
http_io.each_line { |line| }
end

View File

@ -43,7 +43,9 @@ describe Gitlab::RequestContext do
let(:ip) { '192.168.1.11' }
before do
allow_any_instance_of(Rack::Request).to receive(:ip).and_return(ip)
allow_next_instance_of(Rack::Request) do |instance|
allow(instance).to receive(:ip).and_return(ip)
end
described_class.new(app).call(env)
end

View File

@ -80,6 +80,17 @@ describe Commit do
expect(commit.author).to eq(user)
end
context 'with a user with an unconfirmed e-mail' do
before do
user = create(:user)
create(:email, user: user, email: commit.author_email)
end
it 'returns no user' do
expect(commit.author).to be_nil
end
end
context 'using eager loading' do
let!(:alice) { create(:user, email: 'alice@example.com') }
let!(:bob) { create(:user, email: 'hunter2@example.com') }
@ -115,7 +126,7 @@ describe Commit do
let!(:commits) { [alice_commit, bob_commit, eve_commit, jeff_commit] }
before do
create(:email, user: bob, email: 'bob@example.com')
create(:email, :confirmed, user: bob, email: 'bob@example.com')
end
it 'executes only two SQL queries' do
@ -179,6 +190,32 @@ describe Commit do
end
end
describe '#committer' do
context 'with a confirmed e-mail' do
it 'returns the user' do
user = create(:user, email: commit.committer_email)
expect(commit.committer).to eq(user)
end
end
context 'with an unconfirmed e-mail' do
let(:user) { create(:user) }
before do
create(:email, user: user, email: commit.committer_email)
end
it 'returns no user' do
expect(commit.committer).to be_nil
end
it 'returns the user' do
expect(commit.committer(confirmed: false)).to eq(user)
end
end
end
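
The examples above pin down the new keyword on the committer lookup: by default only confirmed e-mail addresses resolve to a user, and `confirmed: false` opts back into unconfirmed ones. A compact usage sketch follows, purely illustrative and not part of the diff, assuming a Rails console on a GitLab instance where the commit's committer e-mail has been added to a user but not yet confirmed; 'group/project' is a hypothetical project path.

# Hedged illustration only; 'group/project' is a hypothetical path.
commit = Project.find_by_full_path('group/project').commit

commit.committer                   # => nil, since only confirmed e-mails match by default
commit.committer(confirmed: false) # => the user who owns the still-unconfirmed e-mail
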
describe '#to_reference' do
let(:project) { create(:project, :repository, path: 'sample-project') }