Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2019-12-07 00:07:51 +00:00
parent 99ddca0d88
commit 4e375367b7
80 changed files with 988 additions and 781 deletions


@@ -1,8 +1,8 @@
 <script>
 import { mapActions, mapState } from 'vuex';
 import _ from 'underscore';
-import Icon from '~/vue_shared/components/icon.vue';
 import { GlLoadingIcon } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
 import Item from './item.vue';
 export default {


@@ -1,8 +1,8 @@
 <script>
 import $ from 'jquery';
 import { mapActions, mapState } from 'vuex';
-import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
 import { GlLoadingIcon } from '@gitlab/ui';
+import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
 export default {
 components: {


@@ -1,7 +1,7 @@
 <script>
 import { mapActions, mapGetters, mapState } from 'vuex';
-import Icon from '~/vue_shared/components/icon.vue';
 import { GlSkeletonLoading } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
 import FileRow from '~/vue_shared/components/file_row.vue';
 import NavDropdown from './nav_dropdown.vue';
 import FileRowExtra from './file_row_extra.vue';


@@ -1,9 +1,9 @@
 <script>
 import { mapActions, mapState } from 'vuex';
 import _ from 'underscore';
+import { GlLoadingIcon } from '@gitlab/ui';
 import { __ } from '~/locale';
 import Icon from '~/vue_shared/components/icon.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
 import Item from './item.vue';
 import TokenedInput from '../shared/tokened_input.vue';


@@ -1,8 +1,8 @@
 <script>
 import $ from 'jquery';
+import { mapActions, mapState, mapGetters } from 'vuex';
 import flash from '~/flash';
 import { __, sprintf, s__ } from '~/locale';
-import { mapActions, mapState, mapGetters } from 'vuex';
 import DeprecatedModal2 from '~/vue_shared/components/deprecated_modal_2.vue';
 import { modalTypes } from '../../constants';


@@ -1,7 +1,7 @@
 <script>
 import { listen } from 'codesandbox-api';
-import Icon from '~/vue_shared/components/icon.vue';
 import { GlLoadingIcon } from '@gitlab/ui';
+import Icon from '~/vue_shared/components/icon.vue';
 export default {
 components: {


@@ -1,6 +1,6 @@
 <script>
-import { __, sprintf } from '~/locale';
 import { mapActions } from 'vuex';
+import { __, sprintf } from '~/locale';
 import FileIcon from '~/vue_shared/components/file_icon.vue';
 import Icon from '~/vue_shared/components/icon.vue';


@@ -1,9 +1,9 @@
 import $ from 'jquery';
 import Vue from 'vue';
+import _ from 'underscore';
 import { __, sprintf } from '~/locale';
 import { visitUrl } from '~/lib/utils/url_utility';
 import flash from '~/flash';
-import _ from 'underscore';
 import * as types from './mutation_types';
 import { decorateFiles } from '../lib/files';
 import { stageKeys } from '../constants';
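
The reordered imports in the hunks above follow the usual ESLint import/order grouping: external packages first, then internal `~/` aliases, then relative paths. A minimal sketch of the target ordering (module names taken from the hunks; the grouping comments are mine, not from the diff):

```javascript
// External packages come first...
import _ from 'underscore';
import { GlLoadingIcon } from '@gitlab/ui';

// ...then internal aliases...
import { __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';

// ...then relative imports.
import Item from './item.vue';
```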


@@ -246,7 +246,7 @@ class Commit
 def lazy_author
 BatchLoader.for(author_email.downcase).batch do |emails, loader|
-users = User.by_any_email(emails).includes(:emails)
+users = User.by_any_email(emails, confirmed: true).includes(:emails)
 emails.each do |email|
 user = users.find { |u| u.any_email?(email) }
@@ -263,8 +263,8 @@ class Commit
 end
 request_cache(:author) { author_email.downcase }
-def committer
-@committer ||= User.find_by_any_email(committer_email)
+def committer(confirmed: true)
+@committer ||= User.find_by_any_email(committer_email, confirmed: confirmed)
 end
 def parents


@@ -0,0 +1,5 @@
+---
+title: Add nonunique indexes to Labels
+merge_request: 21230
+author:
+type: fixed


@@ -0,0 +1,5 @@
+---
+title: Do not attribute unverified commit e-mails to GitLab users
+merge_request: 21214
+author:
+type: fixed


@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+class AddLabelProjectGroupPartialIndexes < ActiveRecord::Migration[5.2]
+include Gitlab::Database::MigrationHelpers
+DOWNTIME = false
+disable_ddl_transaction!
+PROJECT_AND_TITLE = [:project_id, :title]
+GROUP_AND_TITLE = [:group_id, :title]
+def up
+add_concurrent_index :labels, PROJECT_AND_TITLE, unique: false, where: "labels.group_id = null"
+add_concurrent_index :labels, GROUP_AND_TITLE, unique: false, where: "labels.project_id = null"
+end
+def down
+remove_concurrent_index :labels, PROJECT_AND_TITLE
+remove_concurrent_index :labels, GROUP_AND_TITLE
+end
+end


@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 2019_12_04_070713) do
+ActiveRecord::Schema.define(version: 2019_12_04_093410) do
 # These are extensions that must be enabled in order to support this database
 enable_extension "pg_trgm"
@@ -2228,6 +2228,8 @@ ActiveRecord::Schema.define(version: 2019_12_04_070713) do
 t.integer "group_id"
 t.integer "cached_markdown_version"
 t.index ["group_id", "project_id", "title"], name: "index_labels_on_group_id_and_project_id_and_title", unique: true
+t.index ["group_id", "title"], name: "index_labels_on_group_id_and_title", where: "(project_id = NULL::integer)"
+t.index ["project_id", "title"], name: "index_labels_on_project_id_and_title", where: "(group_id = NULL::integer)"
 t.index ["project_id"], name: "index_labels_on_project_id"
 t.index ["template"], name: "index_labels_on_template", where: "template"
 t.index ["title"], name: "index_labels_on_title"


@@ -61,7 +61,7 @@ describe 'Member autocomplete', :js do
 before do
 allow(User).to receive(:find_by_any_email)
-.with(noteable.author_email.downcase).and_return(author)
+.with(noteable.author_email.downcase, confirmed: true).and_return(author)
 visit project_commit_path(project, noteable)
 end


@@ -76,16 +76,23 @@ describe 'User browses commits' do
 end
 context 'secondary email' do
+let(:user) { create(:user) }
 it 'finds a commit by a secondary email' do
-user =
-create(:user) do |user|
-create(:email, { user: user, email: 'dmitriy.zaporozhets@gmail.com' })
-end
+create(:email, :confirmed, user: user, email: 'dmitriy.zaporozhets@gmail.com')
 visit(project_commit_path(project, sample_commit.parent_id))
 check_author_link(sample_commit.author_email, user)
 end
+it 'links to an unverified e-mail address instead of the user' do
+create(:email, user: user, email: 'dmitriy.zaporozhets@gmail.com')
+visit(project_commit_path(project, sample_commit.parent_id))
+check_author_email(sample_commit.author_email)
+end
 end
 context 'when the blob does not exist' do
@@ -263,3 +270,9 @@ def check_author_link(email, author)
 expect(author_link['href']).to eq(user_path(author))
 expect(find('.commit-author-name').text).to eq(author.name)
 end
+def check_author_email(email)
+author_link = find('.commit-author-link')
+expect(author_link['href']).to eq("mailto:#{email}")
+end


@@ -1,5 +1,5 @@
 import Vue from 'vue';
-import mountCompontent from 'spec/helpers/vue_mount_component_helper';
+import mountCompontent from 'helpers/vue_mount_component_helper';
 import router from '~/ide/ide_router';
 import Item from '~/ide/components/branches/item.vue';
 import { getTimeago } from '~/lib/utils/datetime_utility';
@@ -30,7 +30,7 @@ describe('IDE branch item', () => {
 it('renders branch name and timeago', () => {
 const timeText = getTimeago().format(TEST_BRANCH.committedDate);
-expect(vm.$el).toContainText(TEST_BRANCH.name);
+expect(vm.$el.textContent).toContain(TEST_BRANCH.name);
 expect(vm.$el.querySelector('time')).toHaveText(timeText);
 expect(vm.$el.querySelector('.ic-mobile-issue-close')).toBe(null);
 });
@@ -39,7 +39,7 @@ describe('IDE branch item', () => {
 const expectedHref = router.resolve(`/project/${TEST_PROJECT_ID}/edit/${TEST_BRANCH.name}`)
 .href;
-expect(vm.$el).toMatch('a');
+expect(vm.$el.textContent).toMatch('a');
 expect(vm.$el).toHaveAttr('href', expectedHref);
 });


@@ -1,9 +1,9 @@
 import { shallowMount, createLocalVue } from '@vue/test-utils';
 import Vuex from 'vuex';
+import { GlLoadingIcon } from '@gitlab/ui';
 import { __ } from '~/locale';
 import List from '~/ide/components/branches/search_list.vue';
 import Item from '~/ide/components/branches/item.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
 import { branches } from '../../mock_data';
 const localVue = createLocalVue();


@@ -1,9 +1,9 @@
 import { shallowMount, createLocalVue } from '@vue/test-utils';
 import Vuex from 'vuex';
+import { GlLoadingIcon } from '@gitlab/ui';
 import List from '~/ide/components/merge_requests/list.vue';
 import Item from '~/ide/components/merge_requests/item.vue';
 import TokenedInput from '~/ide/components/shared/tokened_input.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
 import { mergeRequests as mergeRequestsMock } from '../../mock_data';
 const localVue = createLocalVue();


@@ -3,7 +3,7 @@ import '~/behaviors/markdown/render_gfm';
 import { createStore } from '~/ide/stores';
 import RightPane from '~/ide/components/panes/right.vue';
 import { rightSidebarViews } from '~/ide/constants';
-import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
+import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
 describe('IDE right pane', () => {
 let Component;
@@ -56,7 +56,7 @@ describe('IDE right pane', () => {
 describe('click', () => {
 beforeEach(() => {
-spyOn(vm, 'open');
+jest.spyOn(vm, 'open').mockReturnValue();
 });
 it('sets view to merge request', done => {
@@ -74,7 +74,9 @@ describe('IDE right pane', () => {
 describe('live preview', () => {
 it('renders live preview button', done => {
-Vue.set(vm.$store.state.entries, 'package.json', { name: 'package.json' });
+Vue.set(vm.$store.state.entries, 'package.json', {
+name: 'package.json',
+});
 vm.$store.state.clientsidePreviewEnabled = true;
 vm.$nextTick(() => {
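
The spec hunks above and below repeatedly swap Jasmine spies for Jest ones as part of the Karma-to-Jest move. A minimal sketch of the pattern with placeholder names (`vm` and `open` here are stand-ins, not taken from the diff):

```javascript
// Object under test (placeholder).
const vm = { open: () => 'original' };

// Jasmine (old Karma suite): spyOn(vm, 'open');
// Jest equivalent, stubbed so the original implementation is not invoked:
const openSpy = jest.spyOn(vm, 'open').mockReturnValue();

vm.open();
expect(openSpy).toHaveBeenCalledTimes(1);

// Standalone spies and matchers migrate the same way:
// jasmine.createSpy('dispatch') -> jest.fn().mockName('dispatch')
// jasmine.any(Function)         -> expect.any(Function)
```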


@@ -1,11 +1,11 @@
 import { shallowMount, createLocalVue } from '@vue/test-utils';
 import Vuex from 'vuex';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
 import List from '~/ide/components/pipelines/list.vue';
 import JobsList from '~/ide/components/jobs/list.vue';
 import Tab from '~/vue_shared/components/tabs/tab.vue';
 import CiIcon from '~/vue_shared/components/ci_icon.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
-import { TEST_HOST } from 'helpers/test_constants';
 import { pipelines } from '../../../../javascripts/ide/mock_data';
 const localVue = createLocalVue();


@@ -1,5 +1,6 @@
 import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import mountComponent from 'helpers/vue_mount_component_helper';
+import { TEST_HOST } from 'helpers/test_constants';
 import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
 describe('IDE clientside preview navigator', () => {
@@ -12,14 +13,9 @@ describe('IDE clientside preview navigator', () => {
 });
 beforeEach(() => {
-manager = {
-bundlerURL: gl.TEST_HOST,
-iframe: { src: '' },
-};
+manager = { bundlerURL: TEST_HOST, iframe: { src: '' } };
-vm = mountComponent(Component, {
-manager,
-});
+vm = mountComponent(Component, { manager });
 });
 afterEach(() => {
@@ -47,7 +43,7 @@ describe('IDE clientside preview navigator', () => {
 it('calls back method when clicking back button', done => {
 vm.navigationStack.push('/test');
 vm.navigationStack.push('/test2');
-spyOn(vm, 'back');
+jest.spyOn(vm, 'back').mockReturnValue();
 vm.$nextTick(() => {
 vm.$el.querySelector('.ide-navigator-btn').click();
@@ -60,7 +56,7 @@ describe('IDE clientside preview navigator', () => {
 it('calls forward method when clicking forward button', done => {
 vm.forwardNavigationStack.push('/test');
-spyOn(vm, 'forward');
+jest.spyOn(vm, 'forward').mockReturnValue();
 vm.$nextTick(() => {
 vm.$el.querySelectorAll('.ide-navigator-btn')[1].click();
@@ -73,49 +69,35 @@ describe('IDE clientside preview navigator', () => {
 describe('onUrlChange', () => {
 it('updates the path', () => {
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
 expect(vm.path).toBe('/url');
 });
 it('sets currentBrowsingIndex 0 if not already set', () => {
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
 expect(vm.currentBrowsingIndex).toBe(0);
 });
 it('increases currentBrowsingIndex if path doesnt match', () => {
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url2`,
-});
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
+vm.onUrlChange({ url: `${TEST_HOST}/url2` });
 expect(vm.currentBrowsingIndex).toBe(1);
 });
 it('does not increase currentBrowsingIndex if path matches', () => {
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
 expect(vm.currentBrowsingIndex).toBe(0);
 });
 it('pushes path into navigation stack', () => {
-vm.onUrlChange({
-url: `${gl.TEST_HOST}/url`,
-});
+vm.onUrlChange({ url: `${TEST_HOST}/url` });
 expect(vm.navigationStack).toEqual(['/url']);
 });
@@ -128,7 +110,7 @@ describe('IDE clientside preview navigator', () => {
 vm.navigationStack.push('/test');
 vm.navigationStack.push('/test2');
-spyOn(vm, 'visitPath');
+jest.spyOn(vm, 'visitPath').mockReturnValue();
 vm.back();
 });
@@ -152,7 +134,7 @@ describe('IDE clientside preview navigator', () => {
 describe('forward', () => {
 it('calls visitPath with first entry in forwardNavigationStack', () => {
-spyOn(vm, 'visitPath');
+jest.spyOn(vm, 'visitPath').mockReturnValue();
 vm.forwardNavigationStack.push('/test');
 vm.forwardNavigationStack.push('/test2');
@@ -165,7 +147,7 @@ describe('IDE clientside preview navigator', () => {
 describe('refresh', () => {
 it('calls refresh with current path', () => {
-spyOn(vm, 'visitPath');
+jest.spyOn(vm, 'visitPath').mockReturnValue();
 vm.path = '/test';
@@ -179,7 +161,7 @@ describe('IDE clientside preview navigator', () => {
 it('updates iframe src with passed in path', () => {
 vm.visitPath('/testpath');
-expect(manager.iframe.src).toBe(`${gl.TEST_HOST}/testpath`);
+expect(manager.iframe.src).toBe(`${TEST_HOST}/testpath`);
 });
 });
 });


@@ -0,0 +1,52 @@
+import * as pathUtils from 'path';
+import { decorateData } from '~/ide/stores/utils';
+import state from '~/ide/stores/state';
+import commitState from '~/ide/stores/modules/commit/state';
+import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
+import pipelinesState from '~/ide/stores/modules/pipelines/state';
+import branchesState from '~/ide/stores/modules/branches/state';
+import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
+import paneState from '~/ide/stores/modules/pane/state';
+export const resetStore = store => {
+const newState = {
+...state(),
+commit: commitState(),
+mergeRequests: mergeRequestsState(),
+pipelines: pipelinesState(),
+branches: branchesState(),
+fileTemplates: fileTemplatesState(),
+rightPane: paneState(),
+};
+store.replaceState(newState);
+};
+export const file = (name = 'name', id = name, type = '', parent = null) =>
+decorateData({
+id,
+type,
+icon: 'icon',
+url: 'url',
+name,
+path: parent ? `${parent.path}/${name}` : name,
+parentPath: parent ? parent.path : '',
+lastCommit: {},
+});
+export const createEntriesFromPaths = paths =>
+paths
+.map(path => ({
+name: pathUtils.basename(path),
+dir: pathUtils.dirname(path),
+ext: pathUtils.extname(path),
+}))
+.reduce((entries, path, idx) => {
+const { name } = path;
+const parent = path.dir ? entries[path.dir] : null;
+const type = path.ext ? 'blob' : 'tree';
+const entry = file(name, (idx + 1).toString(), type, parent);
+return {
+[entry.path]: entry,
+...entries,
+};
+}, {});
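
A quick usage sketch for the helper added above (the input paths are invented for illustration): `createEntriesFromPaths` keys the result by full path, treats extension-less paths as trees and the rest as blobs, and resolves each entry's parent from its directory name, so directories must be listed before their children.

```javascript
// Hypothetical call; assumes the decorated entries keep the passed-in fields.
const entries = createEntriesFromPaths(['src', 'src/index.js']);

expect(entries['src'].type).toBe('tree');
expect(entries['src/index.js'].type).toBe('blob');
expect(entries['src/index.js'].parentPath).toBe('src');
```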


@@ -31,7 +31,7 @@ describe('IDE router', () => {
 `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`,
 ].forEach(route => {
 it(`finds project path when route is "${route}"`, () => {
-spyOn(store, 'dispatch').and.returnValue(new Promise(() => {}));
+jest.spyOn(store, 'dispatch').mockReturnValue(new Promise(() => {}));
 router.push(route);


@@ -45,7 +45,9 @@ describe('IDE store getters', () => {
 localState.currentMergeRequestId = 1;
 localState.projects.abcproject = {
 mergeRequests: {
-1: { mergeId: 1 },
+1: {
+mergeId: 1,
+},
 },
 };
@@ -62,9 +64,21 @@ describe('IDE store getters', () => {
 describe('allBlobs', () => {
 beforeEach(() => {
 Object.assign(localState.entries, {
-index: { type: 'blob', name: 'index', lastOpenedAt: 0 },
-app: { type: 'blob', name: 'blob', lastOpenedAt: 0 },
-folder: { type: 'folder', name: 'folder', lastOpenedAt: 0 },
+index: {
+type: 'blob',
+name: 'index',
+lastOpenedAt: 0,
+},
+app: {
+type: 'blob',
+name: 'blob',
+lastOpenedAt: 0,
+},
+folder: {
+type: 'folder',
+name: 'folder',
+lastOpenedAt: 0,
+},
 });
 });
@@ -174,7 +188,7 @@ describe('IDE store getters', () => {
 },
 };
 const localGetters = {
-findBranch: jasmine.createSpy('findBranchSpy'),
+findBranch: jest.fn(),
 };
 getters.currentBranch(localState, localGetters);
@@ -251,7 +265,9 @@ describe('IDE store getters', () => {
 describe('packageJson', () => {
 it('returns package.json entry', () => {
-localState.entries['package.json'] = { name: 'package.json' };
+localState.entries['package.json'] = {
+name: 'package.json',
+};
 expect(getters.packageJson(localState)).toEqual({
 name: 'package.json',
@@ -273,7 +289,9 @@ describe('IDE store getters', () => {
 currentProject: {
 default_branch: 'master',
 },
-currentBranch: { can_push: true },
+currentBranch: {
+can_push: true,
+},
 };
 expect(getters.canPushToBranch({}, localGetters)).toBeTruthy();
@@ -284,7 +302,9 @@ describe('IDE store getters', () => {
 currentProject: {
 default_branch: 'master',
 },
-currentBranch: { can_push: false },
+currentBranch: {
+can_push: false,
+},
 };
 expect(getters.canPushToBranch({}, localGetters)).toBeFalsy();


@@ -1,5 +1,5 @@
 import MockAdapter from 'axios-mock-adapter';
-import testAction from 'spec/helpers/vuex_action_helper';
+import testAction from 'helpers/vuex_action_helper';
 import axios from '~/lib/utils/axios_utils';
 import state from '~/ide/stores/modules/branches/state';
 import * as types from '~/ide/stores/modules/branches/mutation_types';
@@ -21,12 +21,8 @@ describe('IDE branches actions', () => {
 beforeEach(() => {
 mockedContext = {
 dispatch() {},
-rootState: {
-currentProjectId: projectData.name_with_namespace,
-},
-rootGetters: {
-currentProject: projectData,
-},
+rootState: { currentProjectId: projectData.name_with_namespace },
+rootGetters: { currentProject: projectData },
 state: state(),
 };
@@ -70,7 +66,7 @@ describe('IDE branches actions', () => {
 type: 'setErrorMessage',
 payload: {
 text: 'Error loading branches.',
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 actionPayload: { search: TEST_SEARCH },
 },
@@ -105,15 +101,12 @@ describe('IDE branches actions', () => {
 });
 it('calls API with params', () => {
-const apiSpy = spyOn(axios, 'get').and.callThrough();
+const apiSpy = jest.spyOn(axios, 'get');
 fetchBranches(mockedContext, { search: TEST_SEARCH });
-expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
-params: jasmine.objectContaining({
-search: TEST_SEARCH,
-sort: 'updated_desc',
-}),
+expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
+params: expect.objectContaining({ search: TEST_SEARCH, sort: 'updated_desc' }),
 });
 });
@@ -126,10 +119,7 @@ describe('IDE branches actions', () => {
 [
 { type: 'requestBranches' },
 { type: 'resetBranches' },
-{
-type: 'receiveBranchesSuccess',
-payload: branches,
-},
+{ type: 'receiveBranchesSuccess', payload: branches },
 ],
 done,
 );
@@ -150,10 +140,7 @@ describe('IDE branches actions', () => {
 [
 { type: 'requestBranches' },
 { type: 'resetBranches' },
-{
-type: 'receiveBranchesError',
-payload: { search: TEST_SEARCH },
-},
+{ type: 'receiveBranchesError', payload: { search: TEST_SEARCH } },
 ],
 done,
 );


@@ -10,7 +10,7 @@ describe('IDE branches mutations', () => {
 mockedState = state();
 });
-describe(types.REQUEST_BRANCHES, () => {
+describe('REQUEST_BRANCHES', () => {
 it('sets loading to true', () => {
 mutations[types.REQUEST_BRANCHES](mockedState);
@@ -18,7 +18,7 @@ describe('IDE branches mutations', () => {
 });
 });
-describe(types.RECEIVE_BRANCHES_ERROR, () => {
+describe('RECEIVE_BRANCHES_ERROR', () => {
 it('sets loading to false', () => {
 mutations[types.RECEIVE_BRANCHES_ERROR](mockedState);
@@ -26,7 +26,7 @@ describe('IDE branches mutations', () => {
 });
 });
-describe(types.RECEIVE_BRANCHES_SUCCESS, () => {
+describe('RECEIVE_BRANCHES_SUCCESS', () => {
 it('sets branches', () => {
 const expectedBranches = branches.map(branch => ({
 name: branch.name,
@@ -39,7 +39,7 @@ describe('IDE branches mutations', () => {
 });
 });
-describe(types.RESET_BRANCHES, () => {
+describe('RESET_BRANCHES', () => {
 it('clears branches array', () => {
 mockedState.branches = ['test'];


@@ -1,5 +1,5 @@
 import MockAdapter from 'axios-mock-adapter';
-import testAction from 'spec/helpers/vuex_action_helper';
+import testAction from 'helpers/vuex_action_helper';
 import axios from '~/lib/utils/axios_utils';
 import createState from '~/ide/stores/modules/file_templates/state';
 import * as actions from '~/ide/stores/modules/file_templates/actions';
@@ -43,7 +43,7 @@ describe('IDE file templates actions', () => {
 {
 type: 'setErrorMessage',
 payload: {
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 text: 'Error loading template types.',
 },
@@ -82,7 +82,7 @@ describe('IDE file templates actions', () => {
 });
 it('rejects if selectedTemplateType is empty', done => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
 actions
 .fetchTemplateTypes({ dispatch, state })
@@ -95,9 +95,7 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches actions', done => {
-state.selectedTemplateType = {
-key: 'licenses',
-};
+state.selectedTemplateType = { key: 'licenses' };
 testAction(
 actions.fetchTemplateTypes,
@@ -105,17 +103,9 @@ describe('IDE file templates actions', () => {
 state,
 [],
 [
-{
-type: 'requestTemplateTypes',
-},
-{
-type: 'receiveTemplateTypesSuccess',
-payload: pages[0],
-},
-{
-type: 'receiveTemplateTypesSuccess',
-payload: pages[0].concat(pages[1]),
-},
+{ type: 'requestTemplateTypes' },
+{ type: 'receiveTemplateTypesSuccess', payload: pages[0] },
+{ type: 'receiveTemplateTypesSuccess', payload: pages[0].concat(pages[1]) },
 {
 type: 'receiveTemplateTypesSuccess',
 payload: pages[0].concat(pages[1]).concat(pages[2]),
@@ -132,23 +122,14 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches actions', done => {
-state.selectedTemplateType = {
-key: 'licenses',
-};
+state.selectedTemplateType = { key: 'licenses' };
 testAction(
 actions.fetchTemplateTypes,
 null,
 state,
 [],
-[
-{
-type: 'requestTemplateTypes',
-},
-{
-type: 'receiveTemplateTypesError',
-},
-],
+[{ type: 'requestTemplateTypes' }, { type: 'receiveTemplateTypesError' }],
 done,
 );
 });
@@ -157,16 +138,11 @@ describe('IDE file templates actions', () => {
 describe('setSelectedTemplateType', () => {
 it('commits SET_SELECTED_TEMPLATE_TYPE', () => {
-const commit = jasmine.createSpy('commit');
+const commit = jest.fn().mockName('commit');
 const options = {
 commit,
 dispatch() {},
-rootGetters: {
-activeFile: {
-name: 'test',
-prevPath: '',
-},
-},
+rootGetters: { activeFile: { name: 'test', prevPath: '' } },
 };
 actions.setSelectedTemplateType(options, { name: 'test' });
@@ -175,17 +151,12 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches discardFileChanges if prevPath matches templates name', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
 const options = {
 commit() {},
 dispatch,
-rootGetters: {
-activeFile: {
-name: 'test',
-path: 'test',
-prevPath: 'test',
-},
-},
+rootGetters: { activeFile: { name: 'test', path: 'test', prevPath: 'test' } },
 };
 actions.setSelectedTemplateType(options, { name: 'test' });
@@ -194,27 +165,19 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches renameEntry if file name doesnt match', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
 const options = {
 commit() {},
 dispatch,
-rootGetters: {
-activeFile: {
-name: 'oldtest',
-path: 'oldtest',
-prevPath: '',
-},
-},
+rootGetters: { activeFile: { name: 'oldtest', path: 'oldtest', prevPath: '' } },
 };
 actions.setSelectedTemplateType(options, { name: 'test' });
 expect(dispatch).toHaveBeenCalledWith(
 'renameEntry',
-{
-path: 'oldtest',
-name: 'test',
-},
+{ path: 'oldtest', name: 'test' },
 { root: true },
 );
 });
@@ -231,7 +194,7 @@ describe('IDE file templates actions', () => {
 {
 type: 'setErrorMessage',
 payload: {
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 text: 'Error loading template.',
 actionPayload: 'test',
@@ -246,18 +209,16 @@ describe('IDE file templates actions', () => {
 describe('fetchTemplate', () => {
 describe('success', () => {
 beforeEach(() => {
-mock.onGet(/api\/(.*)\/templates\/licenses\/mit/).replyOnce(200, {
-content: 'MIT content',
-});
+mock
+.onGet(/api\/(.*)\/templates\/licenses\/mit/)
+.replyOnce(200, { content: 'MIT content' });
-mock.onGet(/api\/(.*)\/templates\/licenses\/testing/).replyOnce(200, {
-content: 'testing content',
-});
+mock
+.onGet(/api\/(.*)\/templates\/licenses\/testing/)
+.replyOnce(200, { content: 'testing content' });
 });
 it('dispatches setFileTemplate if template already has content', done => {
-const template = {
-content: 'already has content',
-};
+const template = { content: 'already has content' };
 testAction(
 actions.fetchTemplate,
@@ -270,13 +231,9 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches success', done => {
-const template = {
-key: 'mit',
-};
+const template = { key: 'mit' };
-state.selectedTemplateType = {
-key: 'licenses',
-};
+state.selectedTemplateType = { key: 'licenses' };
 testAction(
 actions.fetchTemplate,
@@ -289,13 +246,9 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches success and uses name key for API call', done => {
-const template = {
-name: 'testing',
-};
+const template = { name: 'testing' };
-state.selectedTemplateType = {
-key: 'licenses',
-};
+state.selectedTemplateType = { key: 'licenses' };
 testAction(
 actions.fetchTemplate,
@@ -314,13 +267,9 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches error', done => {
-const template = {
-name: 'testing',
-};
+const template = { name: 'testing' };
-state.selectedTemplateType = {
-key: 'licenses',
-};
+state.selectedTemplateType = { key: 'licenses' };
 testAction(
 actions.fetchTemplate,
@@ -336,11 +285,9 @@ describe('IDE file templates actions', () => {
 describe('setFileTemplate', () => {
 it('dispatches changeFileContent', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
-const commit = jasmine.createSpy('commit');
+const commit = jest.fn().mockName('commit');
-const rootGetters = {
-activeFile: { path: 'test' },
-};
+const rootGetters = { activeFile: { path: 'test' } };
 actions.setFileTemplate({ dispatch, commit, rootGetters }, { content: 'content' });
@@ -352,11 +299,9 @@ describe('IDE file templates actions', () => {
 });
 it('commits SET_UPDATE_SUCCESS', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
-const commit = jasmine.createSpy('commit');
+const commit = jest.fn().mockName('commit');
-const rootGetters = {
-activeFile: { path: 'test' },
-};
+const rootGetters = { activeFile: { path: 'test' } };
 actions.setFileTemplate({ dispatch, commit, rootGetters }, { content: 'content' });
@@ -366,11 +311,9 @@ describe('IDE file templates actions', () => {
 describe('undoFileTemplate', () => {
 it('dispatches changeFileContent', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
-const commit = jasmine.createSpy('commit');
+const commit = jest.fn().mockName('commit');
-const rootGetters = {
-activeFile: { path: 'test', raw: 'raw content' },
-};
+const rootGetters = { activeFile: { path: 'test', raw: 'raw content' } };
 actions.undoFileTemplate({ dispatch, commit, rootGetters });
@@ -382,11 +325,9 @@ describe('IDE file templates actions', () => {
 });
 it('commits SET_UPDATE_SUCCESS', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
-const commit = jasmine.createSpy('commit');
+const commit = jest.fn().mockName('commit');
-const rootGetters = {
-activeFile: { path: 'test', raw: 'raw content' },
-};
+const rootGetters = { activeFile: { path: 'test', raw: 'raw content' } };
 actions.undoFileTemplate({ dispatch, commit, rootGetters });
@@ -394,18 +335,12 @@ describe('IDE file templates actions', () => {
 });
 it('dispatches discardFileChanges if file has prevPath', () => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
-const rootGetters = {
-activeFile: { path: 'test', prevPath: 'newtest', raw: 'raw content' },
-};
+const rootGetters = { activeFile: { path: 'test', prevPath: 'newtest', raw: 'raw content' } };
 actions.undoFileTemplate({ dispatch, commit() {}, rootGetters });
-expect(dispatch.calls.mostRecent().args).toEqual([
-'discardFileChanges',
-'test',
-{ root: true },
-]);
+expect(dispatch).toHaveBeenCalledWith('discardFileChanges', 'test', { root: true });
 });
 });
 });


@@ -19,9 +19,7 @@ describe('IDE merge requests actions', () => {
 beforeEach(() => {
 mockedState = state();
-mockedRootState = {
-currentProjectId: 7,
-};
+mockedRootState = { currentProjectId: 7 };
 mock = new MockAdapter(axios);
 });
@@ -54,7 +52,7 @@ describe('IDE merge requests actions', () => {
 type: 'setErrorMessage',
 payload: {
 text: 'Error loading merge requests.',
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 actionPayload: { type: 'created', search: '' },
 },
@@ -71,12 +69,7 @@ describe('IDE merge requests actions', () => {
 receiveMergeRequestsSuccess,
 mergeRequests,
 mockedState,
-[
-{
-type: types.RECEIVE_MERGE_REQUESTS_SUCCESS,
-payload: mergeRequests,
-},
-],
+[{ type: types.RECEIVE_MERGE_REQUESTS_SUCCESS, payload: mergeRequests }],
 [],
 done,
 );
@@ -94,36 +87,34 @@ describe('IDE merge requests actions', () => {
 });
 it('calls API with params', () => {
-const apiSpy = spyOn(axios, 'get').and.callThrough();
+const apiSpy = jest.spyOn(axios, 'get');
 fetchMergeRequests(
-{ dispatch() {}, state: mockedState, rootState: mockedRootState },
+{
+dispatch() {},
+state: mockedState,
+rootState: mockedRootState,
+},
 { type: 'created' },
 );
-expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
-params: {
-scope: 'created-by-me',
-state: 'opened',
-search: '',
-},
+expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
+params: { scope: 'created-by-me', state: 'opened', search: '' },
 });
 });
 it('calls API with search', () => {
-const apiSpy = spyOn(axios, 'get').and.callThrough();
+const apiSpy = jest.spyOn(axios, 'get');
 fetchMergeRequests(
-{ dispatch() {}, state: mockedState, rootState: mockedRootState },
+{
+dispatch() {},
+state: mockedState,
+rootState: mockedRootState,
+},
 { type: 'created', search: 'testing search' },
 );
-expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
-params: {
-scope: 'created-by-me',
-state: 'opened',
-search: 'testing search',
-},
+expect(apiSpy).toHaveBeenCalledWith(expect.anything(), {
+params: { scope: 'created-by-me', state: 'opened', search: 'testing search' },
 });
 });
@@ -136,10 +127,7 @@ describe('IDE merge requests actions', () => {
 [
 { type: 'requestMergeRequests' },
 { type: 'resetMergeRequests' },
-{
-type: 'receiveMergeRequestsSuccess',
-payload: mergeRequests,
-},
+{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
 ],
 done,
 );
@@ -152,21 +140,19 @@ describe('IDE merge requests actions', () => {
 });
 it('calls API with project', () => {
-const apiSpy = spyOn(axios, 'get').and.callThrough();
+const apiSpy = jest.spyOn(axios, 'get');
 fetchMergeRequests(
-{ dispatch() {}, state: mockedState, rootState: mockedRootState },
+{
+dispatch() {},
+state: mockedState,
+rootState: mockedRootState,
+},
 { type: null, search: 'testing search' },
 );
 expect(apiSpy).toHaveBeenCalledWith(
-jasmine.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
-{
-params: {
-state: 'opened',
-search: 'testing search',
-},
-},
+expect.stringMatching(`projects/${mockedRootState.currentProjectId}/merge_requests`),
+{ params: { state: 'opened', search: 'testing search' } },
 );
 });
@@ -179,10 +165,7 @@ describe('IDE merge requests actions', () => {
 [
 { type: 'requestMergeRequests' },
 { type: 'resetMergeRequests' },
-{
-type: 'receiveMergeRequestsSuccess',
-payload: mergeRequests,
-},
+{ type: 'receiveMergeRequestsSuccess', payload: mergeRequests },
 ],
 done,
 );


@@ -1,3 +1,4 @@
+import { TEST_HOST } from 'helpers/test_constants';
 import state from '~/ide/stores/modules/merge_requests/state';
 import mutations from '~/ide/stores/modules/merge_requests/mutations';
 import * as types from '~/ide/stores/modules/merge_requests/mutation_types';
@@ -10,7 +11,7 @@ describe('IDE merge requests mutations', () => {
 mockedState = state();
 });
-describe(types.REQUEST_MERGE_REQUESTS, () => {
+describe('REQUEST_MERGE_REQUESTS', () => {
 it('sets loading to true', () => {
 mutations[types.REQUEST_MERGE_REQUESTS](mockedState);
@@ -18,7 +19,7 @@ describe('IDE merge requests mutations', () => {
 });
 });
-describe(types.RECEIVE_MERGE_REQUESTS_ERROR, () => {
+describe('RECEIVE_MERGE_REQUESTS_ERROR', () => {
 it('sets loading to false', () => {
 mutations[types.RECEIVE_MERGE_REQUESTS_ERROR](mockedState);
@@ -26,9 +27,9 @@ describe('IDE merge requests mutations', () => {
 });
 });
-describe(types.RECEIVE_MERGE_REQUESTS_SUCCESS, () => {
+describe('RECEIVE_MERGE_REQUESTS_SUCCESS', () => {
 it('sets merge requests', () => {
-gon.gitlab_url = gl.TEST_HOST;
+gon.gitlab_url = TEST_HOST;
 mutations[types.RECEIVE_MERGE_REQUESTS_SUCCESS](mockedState, mergeRequests);
 expect(mockedState.mergeRequests).toEqual([
@@ -43,7 +44,7 @@ describe('IDE merge requests mutations', () => {
 });
 });
-describe(types.RESET_MERGE_REQUESTS, () => {
+describe('RESET_MERGE_REQUESTS', () => {
 it('clears merge request array', () => {
 mockedState.mergeRequests = ['test'];


@@ -0,0 +1,66 @@
+import testAction from 'helpers/vuex_action_helper';
+import * as actions from '~/ide/stores/modules/pane/actions';
+import * as types from '~/ide/stores/modules/pane/mutation_types';
+describe('IDE pane module actions', () => {
+const TEST_VIEW = { name: 'test' };
+const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
+describe('toggleOpen', () => {
+it('dispatches open if closed', done => {
+testAction(
+actions.toggleOpen,
+TEST_VIEW,
+{ isOpen: false },
+[],
+[{ type: 'open', payload: TEST_VIEW }],
+done,
+);
+});
+it('dispatches close if opened', done => {
+testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
+});
+});
+describe('open', () => {
+it('commits SET_OPEN', done => {
+testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
+});
+it('commits SET_CURRENT_VIEW if view is given', done => {
+testAction(
+actions.open,
+TEST_VIEW,
+{},
+[
+{ type: types.SET_OPEN, payload: true },
+{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
+],
+[],
+done,
+);
+});
+it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
+testAction(
+actions.open,
+TEST_VIEW_KEEP_ALIVE,
+{},
+[
+{ type: types.SET_OPEN, payload: true },
+{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
+{ type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
+],
+[],
+done,
+);
+});
+});
+describe('close', () => {
+it('commits SET_OPEN', done => {
+testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
+});
+});
+});


@@ -1,5 +1,6 @@
 import Visibility from 'visibilityjs';
 import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
 import axios from '~/lib/utils/axios_utils';
 import {
 requestLatestPipeline,
@@ -78,7 +79,7 @@ describe('IDE pipelines actions', () => {
 type: 'setErrorMessage',
 payload: {
 text: 'An error occurred whilst fetching the latest pipeline.',
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 actionPayload: null,
 },
@@ -91,38 +92,28 @@ describe('IDE pipelines actions', () => {
 });
 describe('receiveLatestPipelineSuccess', () => {
-const rootGetters = {
-lastCommit: { id: '123' },
-};
+const rootGetters = { lastCommit: { id: '123' } };
 let commit;
 beforeEach(() => {
-commit = jasmine.createSpy('commit');
+commit = jest.fn().mockName('commit');
 });
 it('commits pipeline', () => {
 receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines });
-expect(commit.calls.argsFor(0)).toEqual([
-types.RECEIVE_LASTEST_PIPELINE_SUCCESS,
-pipelines[0],
-]);
+expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipelines[0]);
 });
 it('commits false when there are no pipelines', () => {
 receiveLatestPipelineSuccess({ rootGetters, commit }, { pipelines: [] });
-expect(commit.calls.argsFor(0)).toEqual([types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false]);
+expect(commit).toHaveBeenCalledWith(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, false);
 });
 });
 describe('fetchLatestPipeline', () => {
-beforeEach(() => {
-jasmine.clock().install();
-});
+beforeEach(() => {});
 afterEach(() => {
-jasmine.clock().uninstall();
 stopPipelinePolling();
 clearEtagPoll();
 });
@@ -135,10 +126,10 @@ describe('IDE pipelines actions', () => {
 });
 it('dispatches request', done => {
-spyOn(axios, 'get').and.callThrough();
+jest.spyOn(axios, 'get');
-spyOn(Visibility, 'hidden').and.returnValue(false);
+jest.spyOn(Visibility, 'hidden').mockReturnValue(false);
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
 const rootGetters = {
 lastCommit: { id: 'abc123def456ghi789jkl' },
 currentProject: { path_with_namespace: 'abc/def' },
@@ -146,31 +137,29 @@ describe('IDE pipelines actions', () => {
 fetchLatestPipeline({ dispatch, rootGetters });
-expect(dispatch.calls.argsFor(0)).toEqual(['requestLatestPipeline']);
+expect(dispatch).toHaveBeenCalledWith('requestLatestPipeline');
-jasmine.clock().tick(1000);
+jest.advanceTimersByTime(1000);
 new Promise(resolve => requestAnimationFrame(resolve))
 .then(() => {
 expect(axios.get).toHaveBeenCalled();
-expect(axios.get.calls.count()).toBe(1);
+expect(axios.get).toHaveBeenCalledTimes(1);
-expect(dispatch.calls.argsFor(1)).toEqual([
+expect(dispatch).toHaveBeenCalledWith(
 'receiveLatestPipelineSuccess',
-jasmine.anything(),
-]);
+expect.anything(),
+);
-jasmine.clock().tick(10000);
+jest.advanceTimersByTime(10000);
 })
 .then(() => new Promise(resolve => requestAnimationFrame(resolve)))
 .then(() => {
 expect(axios.get).toHaveBeenCalled();
-expect(axios.get.calls.count()).toBe(2);
+expect(axios.get).toHaveBeenCalledTimes(2);
-expect(dispatch.calls.argsFor(2)).toEqual([
+expect(dispatch).toHaveBeenCalledWith(
 'receiveLatestPipelineSuccess',
-jasmine.anything(),
-]);
+expect.anything(),
+);
 })
 .then(done)
 .catch(done.fail);
@@ -183,7 +172,7 @@ describe('IDE pipelines actions', () => {
 });
 it('dispatches error', done => {
-const dispatch = jasmine.createSpy('dispatch');
+const dispatch = jest.fn().mockName('dispatch');
 const rootGetters = {
 lastCommit: { id: 'abc123def456ghi789jkl' },
 currentProject: { path_with_namespace: 'abc/def' },
@@ -191,14 +180,11 @@ describe('IDE pipelines actions', () => {
 fetchLatestPipeline({ dispatch, rootGetters });
-jasmine.clock().tick(1500);
+jest.advanceTimersByTime(1500);
 new Promise(resolve => requestAnimationFrame(resolve))
 .then(() => {
-expect(dispatch.calls.argsFor(1)).toEqual([
-'receiveLatestPipelineError',
-jasmine.anything(),
-]);
+expect(dispatch).toHaveBeenCalledWith('receiveLatestPipelineError', expect.anything());
 })
 .then(done)
 .catch(done.fail);
@@ -224,7 +210,7 @@ describe('IDE pipelines actions', () => {
 type: 'setErrorMessage',
 payload: {
 text: 'An error occurred whilst loading the pipelines jobs.',
-action: jasmine.anything(),
+action: expect.anything(),
 actionText: 'Please try again',
 actionPayload: { id: 1 },
 },
@@ -249,10 +235,7 @@ describe('IDE pipelines actions', () => {
 });
 describe('fetchJobs', () => {
-const stage = {
-id: 1,
-dropdownPath: `${gl.TEST_HOST}/jobs`,
-};
+const stage = { id: 1, dropdownPath: `${TEST_HOST}/jobs` };
 describe('success', () => {
 beforeEach(() => {
@@ -361,7 +344,7 @@ describe('IDE pipelines actions', () => {
 type: 'setErrorMessage',
 payload: {
 text: 'An error occurred whilst fetching the job trace.',
-action: jasmine.any(Function),
+action: expect.any(Function),
 actionText: 'Please try again',
 actionPayload: null,
 },
@@ -387,15 +370,13 @@ describe('IDE pipelines actions', () => {
 describe('fetchJobTrace', () => {
 beforeEach(() => {
-mockedState.detailJob = {
-path: `${gl.TEST_HOST}/project/builds`,
-};
+mockedState.detailJob = { path: `${TEST_HOST}/project/builds` };
 });
 describe('success', () => {
 beforeEach(() => {
-spyOn(axios, 'get').and.callThrough();
+jest.spyOn(axios, 'get');
-mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
+mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(200, { html: 'html' });
 });
 it('dispatches request', done => {
@@ -413,9 +394,12 @@ describe('IDE pipelines actions', () => {
 });
 it('sends get request to correct URL', () => {
-fetchJobTrace({ state: mockedState, dispatch() {} });
-expect(axios.get).toHaveBeenCalledWith(`${gl.TEST_HOST}/project/builds/trace`, {
+fetchJobTrace({
+state: mockedState,
+dispatch() {},
+});
+expect(axios.get).toHaveBeenCalledWith(`${TEST_HOST}/project/builds/trace`, {
 params: { format: 'json' },
 });
 });
@@ -423,7 +407,7 @@ describe('IDE pipelines actions', () => {
 describe('error', () => {
 beforeEach(() => {
-mock.onGet(`${gl.TEST_HOST}/project/builds/trace`).replyOnce(500);
+mock.onGet(`${TEST_HOST}/project/builds/trace`).replyOnce(500);
 });
 it('dispatches error', done => {
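
This spec's hunks also replace jasmine.clock() with Jest's timer control. A small sketch of the equivalent fake-timer pattern (the polled function is a stand-in, not from the diff; jest.useFakeTimers() is shown explicitly in case the suite does not enable fake timers globally):

```javascript
jest.useFakeTimers();

// Stand-in for the polling work the real actions trigger.
const poll = jest.fn();
setInterval(poll, 1000);

// Jasmine: jasmine.clock().tick(1000)  ->  Jest:
jest.advanceTimersByTime(1000);
expect(poll).toHaveBeenCalledTimes(1);

jest.advanceTimersByTime(10000);
expect(poll).toHaveBeenCalledTimes(11);
```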


@@ -10,7 +10,7 @@ describe('IDE pipelines mutations', () => {
 mockedState = state();
 });
-describe(types.REQUEST_LATEST_PIPELINE, () => {
+describe('REQUEST_LATEST_PIPELINE', () => {
 it('sets loading to true', () => {
 mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
@@ -18,7 +18,7 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
+describe('RECEIVE_LASTEST_PIPELINE_ERROR', () => {
 it('sets loading to false', () => {
 mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
@@ -26,7 +26,7 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
+describe('RECEIVE_LASTEST_PIPELINE_SUCCESS', () => {
 const itSetsPipelineLoadingStates = () => {
 it('sets has loaded to true', () => {
 expect(mockedState.hasLoadedPipeline).toBe(true);
@@ -52,7 +52,7 @@ describe('IDE pipelines mutations', () => {
 id: '51',
 path: 'test',
 commit: { id: '123' },
-details: { status: jasmine.any(Object) },
+details: { status: expect.any(Object) },
 yamlError: undefined,
 });
 });
@@ -95,12 +95,9 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.REQUEST_JOBS, () => {
+describe('REQUEST_JOBS', () => {
 beforeEach(() => {
-mockedState.stages = stages.map((stage, i) => ({
-...stage,
-id: i,
-}));
+mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
 });
 it('sets isLoading on stage', () => {
@@ -110,12 +107,9 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.RECEIVE_JOBS_ERROR, () => {
+describe('RECEIVE_JOBS_ERROR', () => {
 beforeEach(() => {
-mockedState.stages = stages.map((stage, i) => ({
-...stage,
-id: i,
-}));
+mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
 });
 it('sets isLoading on stage after error', () => {
@@ -125,29 +119,22 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.RECEIVE_JOBS_SUCCESS, () => {
+describe('RECEIVE_JOBS_SUCCESS', () => {
 let data;
 beforeEach(() => {
-mockedState.stages = stages.map((stage, i) => ({
-...stage,
-id: i,
-}));
+mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i }));
-data = {
-latest_statuses: [...jobs],
-};
+data = { latest_statuses: [...jobs] };
 });
 it('updates loading', () => {
 mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
 expect(mockedState.stages[0].isLoading).toBe(false);
 });
 it('sets jobs on stage', () => {
 mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, { id: mockedState.stages[0].id, data });
 expect(mockedState.stages[0].jobs.length).toBe(jobs.length);
 expect(mockedState.stages[0].jobs).toEqual(
 jobs.map(job => ({
@@ -164,13 +151,9 @@ describe('IDE pipelines mutations', () => {
 });
 });
-describe(types.TOGGLE_STAGE_COLLAPSE, () => {
+describe('TOGGLE_STAGE_COLLAPSE', () => {
 beforeEach(() => {
-mockedState.stages = stages.map((stage, i) => ({
+mockedState.stages = stages.map((stage, i) => ({ ...stage, id: i, isCollapsed: false }));
...stage,
id: i,
isCollapsed: false,
}));
}); });
it('toggles collapsed state', () => { it('toggles collapsed state', () => {
@ -184,7 +167,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.SET_DETAIL_JOB, () => { describe('SET_DETAIL_JOB', () => {
it('sets detail job', () => { it('sets detail job', () => {
mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]); mutations[types.SET_DETAIL_JOB](mockedState, jobs[0]);
@ -192,7 +175,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.REQUEST_JOB_TRACE, () => { describe('REQUEST_JOB_TRACE', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0] }; mockedState.detailJob = { ...jobs[0] };
}); });
@ -204,7 +187,7 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOB_TRACE_ERROR, () => { describe('RECEIVE_JOB_TRACE_ERROR', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true }; mockedState.detailJob = { ...jobs[0], isLoading: true };
}); });
@ -216,14 +199,13 @@ describe('IDE pipelines mutations', () => {
}); });
}); });
describe(types.RECEIVE_JOB_TRACE_SUCCESS, () => { describe('RECEIVE_JOB_TRACE_SUCCESS', () => {
beforeEach(() => { beforeEach(() => {
mockedState.detailJob = { ...jobs[0], isLoading: true }; mockedState.detailJob = { ...jobs[0], isLoading: true };
}); });
it('sets output on detail job', () => { it('sets output on detail job', () => {
mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' }); mutations[types.RECEIVE_JOB_TRACE_SUCCESS](mockedState, { html: 'html' });
expect(mockedState.detailJob.output).toBe('html'); expect(mockedState.detailJob.output).toBe('html');
expect(mockedState.detailJob.isLoading).toBe(false); expect(mockedState.detailJob.isLoading).toBe(false);
}); });
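
The mutations spec above also renames its describe blocks from the types.* constants to plain string literals, while the mutation under test is still looked up through the constant. A small self-contained sketch of that convention, using a made-up SET_FOO mutation:

```js
// Self-contained sketch: a local mutation map stands in for ~/ide/stores/mutations.
const types = { SET_FOO: 'SET_FOO' };
const mutations = {
  [types.SET_FOO](state, value) {
    state.foo = value;
  },
};

// The describe title is a plain string; the mutation is still looked up via the constant.
describe('SET_FOO', () => {
  it('sets foo', () => {
    const state = { foo: null };

    mutations[types.SET_FOO](state, 'bar');

    expect(state.foo).toBe('bar');
  });
});
```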

View File

@ -9,10 +9,7 @@ describe('IDE store file mutations', () => {
beforeEach(() => { beforeEach(() => {
localState = state(); localState = state();
localFile = { localFile = { ...file(), type: 'blob' };
...file(),
type: 'blob',
};
localState.entries[localFile.path] = localFile; localState.entries[localFile.path] = localFile;
}); });
@ -28,11 +25,7 @@ describe('IDE store file mutations', () => {
}); });
it('sets pending tab as not active', () => { it('sets pending tab as not active', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true, active: true });
...localFile,
pending: true,
active: true,
});
mutations.SET_FILE_ACTIVE(localState, { mutations.SET_FILE_ACTIVE(localState, {
path: localFile.path, path: localFile.path,
@ -132,7 +125,7 @@ describe('IDE store file mutations', () => {
localFile, localFile,
].forEach(f => { ].forEach(f => {
expect(f).toEqual( expect(f).toEqual(
jasmine.objectContaining({ expect.objectContaining({
path, path,
name, name,
raw: null, raw: null,
@ -154,10 +147,7 @@ describe('IDE store file mutations', () => {
}); });
it('adds raw data to open pending file', () => { it('adds raw data to open pending file', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true });
...localFile,
pending: true,
});
mutations.SET_FILE_RAW_DATA(localState, { mutations.SET_FILE_RAW_DATA(localState, {
file: localFile, file: localFile,
@ -168,11 +158,7 @@ describe('IDE store file mutations', () => {
}); });
it('does not add raw data to open pending tempFile file', () => { it('does not add raw data to open pending tempFile file', () => {
localState.openFiles.push({ localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
...localFile,
pending: true,
tempFile: true,
});
mutations.SET_FILE_RAW_DATA(localState, { mutations.SET_FILE_RAW_DATA(localState, {
file: localFile, file: localFile,
@ -234,7 +220,9 @@ describe('IDE store file mutations', () => {
it('sets file mr change', () => { it('sets file mr change', () => {
mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, { mutations.SET_FILE_MERGE_REQUEST_CHANGE(localState, {
file: localFile, file: localFile,
mrChange: { diff: 'ABC' }, mrChange: {
diff: 'ABC',
},
}); });
expect(localFile.mrChange.diff).toBe('ABC'); expect(localFile.mrChange.diff).toBe('ABC');
@ -311,12 +299,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path); mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.trees['gitlab-ce/master'].tree).toEqual([ expect(localState.trees['gitlab-ce/master'].tree).toEqual([{ ...localFile, deleted: false }]);
{
...localFile,
deleted: false,
},
]);
}); });
it('adds to parent tree if deleted', () => { it('adds to parent tree if deleted', () => {
@ -328,12 +311,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path); mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localState.entries.parentPath.tree).toEqual([ expect(localState.entries.parentPath.tree).toEqual([{ ...localFile, deleted: false }]);
{
...localFile,
deleted: false,
},
]);
}); });
}); });
@ -379,11 +357,7 @@ describe('IDE store file mutations', () => {
let f; let f;
beforeEach(() => { beforeEach(() => {
f = { f = { ...file(), type: 'blob', staged: true };
...file(),
type: 'blob',
staged: true,
};
localState.stagedFiles.push(f); localState.stagedFiles.push(f);
localState.changedFiles.push(f); localState.changedFiles.push(f);
@ -422,19 +396,16 @@ describe('IDE store file mutations', () => {
describe('ADD_PENDING_TAB', () => { describe('ADD_PENDING_TAB', () => {
beforeEach(() => { beforeEach(() => {
const f = { const f = { ...file('openFile'), path: 'openFile', active: true, opened: true };
...file('openFile'),
path: 'openFile',
active: true,
opened: true,
};
localState.entries[f.path] = f; localState.entries[f.path] = f;
localState.openFiles.push(f); localState.openFiles.push(f);
}); });
it('adds file into openFiles as pending', () => { it('adds file into openFiles as pending', () => {
mutations.ADD_PENDING_TAB(localState, { file: localFile }); mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].pending).toBe(true); expect(localState.openFiles[0].pending).toBe(true);
@ -445,11 +416,15 @@ describe('IDE store file mutations', () => {
const newFile = file('test'); const newFile = file('test');
localState.entries[newFile.path] = newFile; localState.entries[newFile.path] = newFile;
mutations.ADD_PENDING_TAB(localState, { file: localFile }); mutations.ADD_PENDING_TAB(localState, {
file: localFile,
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
mutations.ADD_PENDING_TAB(localState, { file: file('test') }); mutations.ADD_PENDING_TAB(localState, {
file: file('test'),
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].name).toBe('test'); expect(localState.openFiles[0].name).toBe('test');
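
Beyond the one-line spread fixtures, the assertions above swap jasmine.objectContaining for expect.objectContaining, which compares only the listed keys and ignores the rest of the object. A short self-contained sketch with illustrative fixture fields:

```js
// Partial-match assertion: only the listed keys are compared, other keys are ignored.
describe('partial object matching', () => {
  it('checks only the fields the test cares about', () => {
    const base = { path: 'oldPath', name: 'oldPath', raw: null, opened: false };
    const localFile = { ...base, pending: true, active: true };

    expect(localFile).toEqual(
      expect.objectContaining({
        path: 'oldPath',
        pending: true,
      }),
    );
  });
});
```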

View File

@ -51,7 +51,9 @@ describe('Multi-file store tree mutations', () => {
}); });
it('keeps loading state', () => { it('keeps loading state', () => {
mutations.CREATE_TREE(localState, { treePath: 'project/master' }); mutations.CREATE_TREE(localState, {
treePath: 'project/master',
});
mutations.SET_DIRECTORY_DATA(localState, { mutations.SET_DIRECTORY_DATA(localState, {
data, data,
treePath: 'project/master', treePath: 'project/master',

View File

@ -1,3 +1,4 @@
import { TEST_HOST } from 'helpers/test_constants';
import mutations from '~/ide/stores/mutations'; import mutations from '~/ide/stores/mutations';
import state from '~/ide/stores/state'; import state from '~/ide/stores/state';
import { file } from '../helpers'; import { file } from '../helpers';
@ -25,21 +26,30 @@ describe('Multi-file store mutations', () => {
describe('TOGGLE_LOADING', () => { describe('TOGGLE_LOADING', () => {
it('toggles loading of entry', () => { it('toggles loading of entry', () => {
mutations.TOGGLE_LOADING(localState, { entry }); mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeTruthy(); expect(entry.loading).toBeTruthy();
mutations.TOGGLE_LOADING(localState, { entry }); mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeFalsy(); expect(entry.loading).toBeFalsy();
}); });
it('toggles loading of entry and sets specific value', () => { it('toggles loading of entry and sets specific value', () => {
mutations.TOGGLE_LOADING(localState, { entry }); mutations.TOGGLE_LOADING(localState, {
entry,
});
expect(entry.loading).toBeTruthy(); expect(entry.loading).toBeTruthy();
mutations.TOGGLE_LOADING(localState, { entry, forceValue: true }); mutations.TOGGLE_LOADING(localState, {
entry,
forceValue: true,
});
expect(entry.loading).toBeTruthy(); expect(entry.loading).toBeTruthy();
}); });
@ -123,11 +133,7 @@ describe('Multi-file store mutations', () => {
mutations.CREATE_TMP_ENTRY(localState, { mutations.CREATE_TMP_ENTRY(localState, {
data: { data: {
entries: { entries: {
test: { test: { ...tmpFile, tempFile: true, changed: true },
...tmpFile,
tempFile: true,
changed: true,
},
}, },
treeList: [tmpFile], treeList: [tmpFile],
}, },
@ -141,18 +147,11 @@ describe('Multi-file store mutations', () => {
it('marks entry as replacing previous entry if the old one has been deleted', () => { it('marks entry as replacing previous entry if the old one has been deleted', () => {
const tmpFile = file('test'); const tmpFile = file('test');
localState.entries.test = { localState.entries.test = { ...tmpFile, deleted: true };
...tmpFile,
deleted: true,
};
mutations.CREATE_TMP_ENTRY(localState, { mutations.CREATE_TMP_ENTRY(localState, {
data: { data: {
entries: { entries: {
test: { test: { ...tmpFile, tempFile: true, changed: true },
...tmpFile,
tempFile: true,
changed: true,
},
}, },
treeList: [tmpFile], treeList: [tmpFile],
}, },
@ -167,21 +166,23 @@ describe('Multi-file store mutations', () => {
describe('UPDATE_TEMP_FLAG', () => { describe('UPDATE_TEMP_FLAG', () => {
beforeEach(() => { beforeEach(() => {
localState.entries.test = { localState.entries.test = { ...file(), tempFile: true, changed: true };
...file(),
tempFile: true,
changed: true,
};
}); });
it('updates tempFile flag', () => { it('updates tempFile flag', () => {
mutations.UPDATE_TEMP_FLAG(localState, { path: 'test', tempFile: false }); mutations.UPDATE_TEMP_FLAG(localState, {
path: 'test',
tempFile: false,
});
expect(localState.entries.test.tempFile).toBe(false); expect(localState.entries.test.tempFile).toBe(false);
}); });
it('updates changed flag', () => { it('updates changed flag', () => {
mutations.UPDATE_TEMP_FLAG(localState, { path: 'test', tempFile: false }); mutations.UPDATE_TEMP_FLAG(localState, {
path: 'test',
tempFile: false,
});
expect(localState.entries.test.changed).toBe(false); expect(localState.entries.test.changed).toBe(false);
}); });
@ -303,23 +304,28 @@ describe('Multi-file store mutations', () => {
const f = { const f = {
...file('test'), ...file('test'),
prevPath: 'testing-123', prevPath: 'testing-123',
rawPath: `${gl.TEST_HOST}/testing-123`, rawPath: `${TEST_HOST}/testing-123`,
permalink: `${gl.TEST_HOST}/testing-123`, permalink: `${TEST_HOST}/testing-123`,
commitsPath: `${gl.TEST_HOST}/testing-123`, commitsPath: `${TEST_HOST}/testing-123`,
blamePath: `${gl.TEST_HOST}/testing-123`, blamePath: `${TEST_HOST}/testing-123`,
replaces: true, replaces: true,
}; };
localState.entries.test = f; localState.entries.test = f;
localState.changedFiles.push(f); localState.changedFiles.push(f);
mutations.UPDATE_FILE_AFTER_COMMIT(localState, { file: f, lastCommit: { commit: {} } }); mutations.UPDATE_FILE_AFTER_COMMIT(localState, {
file: f,
lastCommit: {
commit: {},
},
});
expect(f).toEqual( expect(f).toEqual(
jasmine.objectContaining({ expect.objectContaining({
rawPath: `${gl.TEST_HOST}/test`, rawPath: `${TEST_HOST}/test`,
permalink: `${gl.TEST_HOST}/test`, permalink: `${TEST_HOST}/test`,
commitsPath: `${gl.TEST_HOST}/test`, commitsPath: `${TEST_HOST}/test`,
blamePath: `${gl.TEST_HOST}/test`, blamePath: `${TEST_HOST}/test`,
replaces: false, replaces: false,
prevId: undefined, prevId: undefined,
prevPath: undefined, prevPath: undefined,
@ -335,7 +341,10 @@ describe('Multi-file store mutations', () => {
it('sets entryModal', () => { it('sets entryModal', () => {
localState.entries.testPath = file(); localState.entries.testPath = file();
mutations.OPEN_NEW_ENTRY_MODAL(localState, { type: 'test', path: 'testPath' }); mutations.OPEN_NEW_ENTRY_MODAL(localState, {
type: 'test',
path: 'testPath',
});
expect(localState.entryModal).toEqual({ expect(localState.entryModal).toEqual({
type: 'test', type: 'test',
@ -348,7 +357,9 @@ describe('Multi-file store mutations', () => {
describe('RENAME_ENTRY', () => { describe('RENAME_ENTRY', () => {
beforeEach(() => { beforeEach(() => {
localState.trees = { localState.trees = {
'gitlab-ce/master': { tree: [] }, 'gitlab-ce/master': {
tree: [],
},
}; };
localState.currentProjectId = 'gitlab-ce'; localState.currentProjectId = 'gitlab-ce';
localState.currentBranchId = 'master'; localState.currentBranchId = 'master';
@ -365,7 +376,7 @@ describe('Multi-file store mutations', () => {
}); });
expect(localState.entries).toEqual({ expect(localState.entries).toEqual({
newPath: jasmine.objectContaining({ newPath: expect.objectContaining({
path: 'newPath', path: 'newPath',
prevPath: 'oldPath', prevPath: 'oldPath',
}), }),
@ -386,7 +397,7 @@ describe('Multi-file store mutations', () => {
}); });
expect(localState.entries).toEqual({ expect(localState.entries).toEqual({
newestPath: jasmine.objectContaining({ newestPath: expect.objectContaining({
path: 'newestPath', path: 'newestPath',
prevPath: 'oldPath', prevPath: 'oldPath',
}), }),
@ -396,10 +407,7 @@ describe('Multi-file store mutations', () => {
it('correctly handles the same entry within a consecutively renamed folder', () => { it('correctly handles the same entry within a consecutively renamed folder', () => {
const oldPath = file('root-folder/oldPath', 'root-folder/oldPath', 'blob'); const oldPath = file('root-folder/oldPath', 'root-folder/oldPath', 'blob');
localState.entries = { localState.entries = {
'root-folder': { 'root-folder': { ...file('root-folder', 'root-folder', 'tree'), tree: [oldPath] },
...file('root-folder', 'root-folder', 'tree'),
tree: [oldPath],
},
'root-folder/oldPath': oldPath, 'root-folder/oldPath': oldPath,
}; };
Object.assign(localState.entries['root-folder/oldPath'], { Object.assign(localState.entries['root-folder/oldPath'], {
@ -422,10 +430,10 @@ describe('Multi-file store mutations', () => {
}); });
expect(localState.entries).toEqual({ expect(localState.entries).toEqual({
'root-folder': jasmine.objectContaining({ 'root-folder': expect.objectContaining({
path: 'root-folder', path: 'root-folder',
}), }),
'simply-renamed/oldPath': jasmine.objectContaining({ 'simply-renamed/oldPath': expect.objectContaining({
path: 'simply-renamed/oldPath', path: 'simply-renamed/oldPath',
prevPath: 'root-folder/oldPath', prevPath: 'root-folder/oldPath',
}), }),
@ -450,8 +458,7 @@ describe('Multi-file store mutations', () => {
path: 'newPath', path: 'newPath',
name: 'newPath', name: 'newPath',
url: `project/-/newPath`, url: `project/-/newPath`,
key: jasmine.stringMatching('newPath'), key: expect.stringMatching('newPath'),
prevId: 'oldPath', prevId: 'oldPath',
prevName: 'oldPath', prevName: 'oldPath',
prevPath: 'oldPath', prevPath: 'oldPath',
@ -473,13 +480,13 @@ describe('Multi-file store mutations', () => {
}); });
expect(localState.entries.newPath).not.toEqual( expect(localState.entries.newPath).not.toEqual(
jasmine.objectContaining({ expect.objectContaining({
prevId: jasmine.anything(), prevId: expect.anything(),
prevName: jasmine.anything(), prevName: expect.anything(),
prevPath: jasmine.anything(), prevPath: expect.anything(),
prevUrl: jasmine.anything(), prevUrl: expect.anything(),
prevKey: jasmine.anything(), prevKey: expect.anything(),
prevParentPath: jasmine.anything(), prevParentPath: expect.anything(),
}), }),
); );
}); });
@ -487,10 +494,7 @@ describe('Multi-file store mutations', () => {
it('properly handles files with spaces in name', () => { it('properly handles files with spaces in name', () => {
const path = 'my fancy path'; const path = 'my fancy path';
const newPath = 'new path'; const newPath = 'new path';
const oldEntry = { const oldEntry = { ...file(path, path, 'blob'), url: `project/-/${encodeURI(path)}` };
...file(path, path, 'blob'),
url: `project/-/${encodeURI(path)}`,
};
localState.entries[path] = oldEntry; localState.entries[path] = oldEntry;
@ -507,8 +511,7 @@ describe('Multi-file store mutations', () => {
path: newPath, path: newPath,
name: newPath, name: newPath,
url: `project/-/new%20path`, url: `project/-/new%20path`,
key: jasmine.stringMatching(newPath), key: expect.stringMatching(newPath),
prevId: path, prevId: path,
prevName: path, prevName: path,
prevPath: path, prevPath: path,
@ -540,7 +543,11 @@ describe('Multi-file store mutations', () => {
const alpha = file('alpha', 'alpha', 'blob'); const alpha = file('alpha', 'alpha', 'blob');
const beta = file('beta', 'beta', 'blob'); const beta = file('beta', 'beta', 'blob');
const gamma = file('gamma', 'gamma', 'blob'); const gamma = file('gamma', 'gamma', 'blob');
localState.entries = { alpha, beta, gamma }; localState.entries = {
alpha,
beta,
gamma,
};
localState.trees['gitlab-ce/master'].tree = [alpha, beta, gamma]; localState.trees['gitlab-ce/master'].tree = [alpha, beta, gamma];
@ -552,9 +559,13 @@ describe('Multi-file store mutations', () => {
}); });
expect(localState.trees['gitlab-ce/master'].tree).toEqual([ expect(localState.trees['gitlab-ce/master'].tree).toEqual([
jasmine.objectContaining({ name: 'beta' }), expect.objectContaining({
jasmine.objectContaining({ name: 'gamma' }), name: 'beta',
jasmine.objectContaining({ }),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
path: 'theta', path: 'theta',
name: 'theta', name: 'theta',
}), }),
@ -570,23 +581,26 @@ describe('Multi-file store mutations', () => {
openFiles: [localState.entries.oldPath], openFiles: [localState.entries.oldPath],
}); });
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.openFiles.length).toBe(1); expect(localState.openFiles.length).toBe(1);
expect(localState.openFiles[0].path).toBe('newPath'); expect(localState.openFiles[0].path).toBe('newPath');
}); });
it('does not add renamed entry to changedFiles', () => { it('does not add renamed entry to changedFiles', () => {
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.changedFiles.length).toBe(0); expect(localState.changedFiles.length).toBe(0);
}); });
it('updates existing changedFiles entry with the renamed one', () => { it('updates existing changedFiles entry with the renamed one', () => {
const origFile = { const origFile = { ...file('oldPath', 'oldPath', 'blob'), content: 'Foo' };
...file('oldPath', 'oldPath', 'blob'),
content: 'Foo',
};
Object.assign(localState, { Object.assign(localState, {
changedFiles: [origFile], changedFiles: [origFile],
@ -595,10 +609,13 @@ describe('Multi-file store mutations', () => {
oldPath: origFile, oldPath: origFile,
}); });
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.changedFiles).toEqual([ expect(localState.changedFiles).toEqual([
jasmine.objectContaining({ expect.objectContaining({
path: 'newPath', path: 'newPath',
content: 'Foo', content: 'Foo',
}), }),
@ -613,13 +630,19 @@ describe('Multi-file store mutations', () => {
{}, {},
); );
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath',
});
expect(localState.entries.newPath).toEqual(jasmine.objectContaining(expectedObj)); expect(localState.entries.newPath).toEqual(expect.objectContaining(expectedObj));
mutations.RENAME_ENTRY(localState, { path: 'newPath', name: 'newer' }); mutations.RENAME_ENTRY(localState, {
path: 'newPath',
name: 'newer',
});
expect(localState.entries.newer).toEqual(jasmine.objectContaining(expectedObj)); expect(localState.entries.newer).toEqual(expect.objectContaining(expectedObj));
}); });
describe('renaming back to original', () => { describe('renaming back to original', () => {
@ -638,12 +661,16 @@ describe('Multi-file store mutations', () => {
renamed: renamedEntry, renamed: renamedEntry,
}; };
mutations.RENAME_ENTRY(localState, { path: 'renamed', name: 'orig', parentPath: 'lorem' }); mutations.RENAME_ENTRY(localState, {
path: 'renamed',
name: 'orig',
parentPath: 'lorem',
});
}); });
it('renames entry and clears prev properties', () => { it('renames entry and clears prev properties', () => {
expect(localState.entries).toEqual({ expect(localState.entries).toEqual({
'lorem/orig': jasmine.objectContaining({ 'lorem/orig': expect.objectContaining({
id: 'lorem/orig', id: 'lorem/orig',
path: 'lorem/orig', path: 'lorem/orig',
name: 'orig', name: 'orig',
@ -672,7 +699,10 @@ describe('Multi-file store mutations', () => {
it('sets properly constucted key while preserving the original one', () => { it('sets properly constucted key while preserving the original one', () => {
const key = 'oldPath.txt-blob-oldPath.txt'; const key = 'oldPath.txt-blob-oldPath.txt';
localState.entries['oldPath.txt'].key = key; localState.entries['oldPath.txt'].key = key;
mutations.RENAME_ENTRY(localState, { path: 'oldPath.txt', name: 'newPath.md' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath.txt',
name: 'newPath.md',
});
expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md'); expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md');
expect(localState.entries['newPath.md'].prevKey).toBe(key); expect(localState.entries['newPath.md'].prevKey).toBe(key);
@ -680,14 +710,20 @@ describe('Multi-file store mutations', () => {
it('correctly updates key for an entry without an extension', () => { it('correctly updates key for an entry without an extension', () => {
localState.entries.oldPath.key = 'oldPath-blob-oldPath'; localState.entries.oldPath.key = 'oldPath-blob-oldPath';
mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath.md' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath',
name: 'newPath.md',
});
expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md'); expect(localState.entries['newPath.md'].key).toBe('newPath.md-blob-newPath.md');
}); });
it('correctly updates key when new name does not have an extension', () => { it('correctly updates key when new name does not have an extension', () => {
localState.entries['oldPath.txt'].key = 'oldPath.txt-blob-oldPath.txt'; localState.entries['oldPath.txt'].key = 'oldPath.txt-blob-oldPath.txt';
mutations.RENAME_ENTRY(localState, { path: 'oldPath.txt', name: 'newPath' }); mutations.RENAME_ENTRY(localState, {
path: 'oldPath.txt',
name: 'newPath',
});
expect(localState.entries.newPath.key).toBe('newPath-blob-newPath'); expect(localState.entries.newPath.key).toBe('newPath-blob-newPath');
}); });
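
The RENAME_ENTRY assertions above rely on two more Jest matchers: expect.stringMatching for the rebuilt key, and a negated expect.objectContaining with expect.anything() to prove the prev* bookkeeping fields were cleared. A compact sketch of both, with an illustrative entry object:

```js
// Illustrative entry object; the real spec builds it through RENAME_ENTRY.
describe('string and negative partial matching', () => {
  it('matches the key by substring and verifies prev fields were cleared', () => {
    const entry = { key: 'newPath-blob-newPath', prevPath: undefined };

    // expect.stringMatching replaces jasmine.stringMatching.
    expect(entry.key).toEqual(expect.stringMatching('newPath'));

    // expect.anything() matches any defined value, so the containment fails
    // (and the negation passes) once prevPath has been reset to undefined.
    expect(entry).not.toEqual(
      expect.objectContaining({
        prevPath: expect.anything(),
      }),
    );
  });
});
```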

View File

@ -104,21 +104,9 @@ describe('Multi-file store utils', () => {
base64: true, base64: true,
lastCommitSha: '123456789', lastCommitSha: '123456789',
}, },
{ { ...file('deletedFile'), path: 'deletedFile', deleted: true },
...file('deletedFile'), { ...file('renamedFile'), path: 'renamedFile', prevPath: 'prevPath' },
path: 'deletedFile', { ...file('replacingFile'), path: 'replacingFile', replaces: true },
deleted: true,
},
{
...file('renamedFile'),
path: 'renamedFile',
prevPath: 'prevPath',
},
{
...file('replacingFile'),
path: 'replacingFile',
replaces: true,
},
], ],
currentBranchId: 'master', currentBranchId: 'master',
}; };
@ -237,15 +225,27 @@ describe('Multi-file store utils', () => {
describe('commitActionForFile', () => { describe('commitActionForFile', () => {
it('returns deleted for deleted file', () => { it('returns deleted for deleted file', () => {
expect(utils.commitActionForFile({ deleted: true })).toBe(commitActionTypes.delete); expect(
utils.commitActionForFile({
deleted: true,
}),
).toBe(commitActionTypes.delete);
}); });
it('returns create for tempFile', () => { it('returns create for tempFile', () => {
expect(utils.commitActionForFile({ tempFile: true })).toBe(commitActionTypes.create); expect(
utils.commitActionForFile({
tempFile: true,
}),
).toBe(commitActionTypes.create);
}); });
it('returns move for moved file', () => { it('returns move for moved file', () => {
expect(utils.commitActionForFile({ prevPath: 'test' })).toBe(commitActionTypes.move); expect(
utils.commitActionForFile({
prevPath: 'test',
}),
).toBe(commitActionTypes.move);
}); });
it('returns update by default', () => { it('returns update by default', () => {
@ -341,12 +341,7 @@ describe('Multi-file store utils', () => {
fromTree[0].tree.push({ fromTree[0].tree.push({
...file('alpha'), ...file('alpha'),
path: 'foo/alpha', path: 'foo/alpha',
tree: [ tree: [{ ...file('beta.md'), path: 'foo/alpha/beta.md' }],
{
...file('beta.md'),
path: 'foo/alpha/beta.md',
},
],
}); });
toTree.push({ toTree.push({
@ -355,12 +350,7 @@ describe('Multi-file store utils', () => {
{ {
...file('alpha'), ...file('alpha'),
path: 'foo/alpha', path: 'foo/alpha',
tree: [ tree: [{ ...file('gamma.md'), path: 'foo/alpha/gamma.md' }],
{
...file('gamma.md'),
path: 'foo/alpha/gamma.md',
},
],
}, },
], ],
}); });
@ -381,12 +371,7 @@ describe('Multi-file store utils', () => {
fromTree[0].tree.push({ fromTree[0].tree.push({
...file('alpha'), ...file('alpha'),
path: 'foo/alpha', path: 'foo/alpha',
tree: [ tree: [{ ...file('beta.md'), path: 'foo/alpha/beta.md' }],
{
...file('beta.md'),
path: 'foo/alpha/beta.md',
},
],
}); });
toTree.push({ toTree.push({
@ -395,12 +380,7 @@ describe('Multi-file store utils', () => {
{ {
...file('alpha'), ...file('alpha'),
path: 'foo/alpha', path: 'foo/alpha',
tree: [ tree: [{ ...file('gamma.md'), path: 'foo/alpha/gamma.md' }],
{
...file('gamma.md'),
path: 'foo/alpha/gamma.md',
},
],
}, },
], ],
}); });
@ -431,10 +411,7 @@ describe('Multi-file store utils', () => {
}); });
it('swaps existing entry with a new one', () => { it('swaps existing entry with a new one', () => {
const file1 = { const file1 = { ...file('old'), key: 'foo' };
...file('old'),
key: 'foo',
};
const file2 = file('new'); const file2 = file('new');
const arr = [file1]; const arr = [file1];
@ -511,8 +488,12 @@ describe('Multi-file store utils', () => {
expect(branchInfo.tree.length).toBe(2); expect(branchInfo.tree.length).toBe(2);
expect(branchInfo.tree).toEqual([ expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }), expect.objectContaining({
jasmine.objectContaining({ name: 'oldPath' }), name: 'newPath',
}),
expect.objectContaining({
name: 'oldPath',
}),
]); ]);
}); });
@ -521,7 +502,9 @@ describe('Multi-file store utils', () => {
expect(localState.entries.parentPath.tree.length).toBe(1); expect(localState.entries.parentPath.tree.length).toBe(1);
expect(localState.entries.parentPath.tree).toEqual([ expect(localState.entries.parentPath.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }), expect.objectContaining({
name: 'newPath',
}),
]); ]);
localState.entries.parentPath.tree = [localState.entries.oldPath]; localState.entries.parentPath.tree = [localState.entries.oldPath];
@ -530,8 +513,12 @@ describe('Multi-file store utils', () => {
expect(localState.entries.parentPath.tree.length).toBe(2); expect(localState.entries.parentPath.tree.length).toBe(2);
expect(localState.entries.parentPath.tree).toEqual([ expect(localState.entries.parentPath.tree).toEqual([
jasmine.objectContaining({ name: 'newPath' }), expect.objectContaining({
jasmine.objectContaining({ name: 'oldPath' }), name: 'newPath',
}),
expect.objectContaining({
name: 'oldPath',
}),
]); ]);
}); });
}); });
@ -542,11 +529,19 @@ describe('Multi-file store utils', () => {
utils.swapInParentTreeWithSorting(localState, localState.entries.oldPath.key, 'newPath'); utils.swapInParentTreeWithSorting(localState, localState.entries.oldPath.key, 'newPath');
expect(branchInfo.tree).toEqual([jasmine.objectContaining({ name: 'newPath' })]); expect(branchInfo.tree).toEqual([
expect.objectContaining({
name: 'newPath',
}),
]);
utils.swapInParentTreeWithSorting(localState, localState.entries.newPath.key, 'oldPath'); utils.swapInParentTreeWithSorting(localState, localState.entries.newPath.key, 'oldPath');
expect(branchInfo.tree).toEqual([jasmine.objectContaining({ name: 'oldPath' })]); expect(branchInfo.tree).toEqual([
expect.objectContaining({
name: 'oldPath',
}),
]);
}); });
it('sorts tree after swapping the entries', () => { it('sorts tree after swapping the entries', () => {
@ -554,32 +549,55 @@ describe('Multi-file store utils', () => {
const beta = file('beta', 'beta', 'blob'); const beta = file('beta', 'beta', 'blob');
const gamma = file('gamma', 'gamma', 'blob'); const gamma = file('gamma', 'gamma', 'blob');
const theta = file('theta', 'theta', 'blob'); const theta = file('theta', 'theta', 'blob');
localState.entries = { alpha, beta, gamma, theta }; localState.entries = {
alpha,
beta,
gamma,
theta,
};
branchInfo.tree = [alpha, beta, gamma]; branchInfo.tree = [alpha, beta, gamma];
utils.swapInParentTreeWithSorting(localState, alpha.key, 'theta'); utils.swapInParentTreeWithSorting(localState, alpha.key, 'theta');
expect(branchInfo.tree).toEqual([ expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'beta' }), expect.objectContaining({
jasmine.objectContaining({ name: 'gamma' }), name: 'beta',
jasmine.objectContaining({ name: 'theta' }), }),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
name: 'theta',
}),
]); ]);
utils.swapInParentTreeWithSorting(localState, gamma.key, 'alpha'); utils.swapInParentTreeWithSorting(localState, gamma.key, 'alpha');
expect(branchInfo.tree).toEqual([ expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'alpha' }), expect.objectContaining({
jasmine.objectContaining({ name: 'beta' }), name: 'alpha',
jasmine.objectContaining({ name: 'theta' }), }),
expect.objectContaining({
name: 'beta',
}),
expect.objectContaining({
name: 'theta',
}),
]); ]);
utils.swapInParentTreeWithSorting(localState, beta.key, 'gamma'); utils.swapInParentTreeWithSorting(localState, beta.key, 'gamma');
expect(branchInfo.tree).toEqual([ expect(branchInfo.tree).toEqual([
jasmine.objectContaining({ name: 'alpha' }), expect.objectContaining({
jasmine.objectContaining({ name: 'gamma' }), name: 'alpha',
jasmine.objectContaining({ name: 'theta' }), }),
expect.objectContaining({
name: 'gamma',
}),
expect.objectContaining({
name: 'theta',
}),
]); ]);
}); });
}); });
@ -587,11 +605,26 @@ describe('Multi-file store utils', () => {
describe('cleanTrailingSlash', () => { describe('cleanTrailingSlash', () => {
[ [
{ input: '', output: '' }, {
{ input: 'abc', output: 'abc' }, input: '',
{ input: 'abc/', output: 'abc' }, output: '',
{ input: 'abc/def', output: 'abc/def' }, },
{ input: 'abc/def/', output: 'abc/def' }, {
input: 'abc',
output: 'abc',
},
{
input: 'abc/',
output: 'abc',
},
{
input: 'abc/def',
output: 'abc/def',
},
{
input: 'abc/def/',
output: 'abc/def',
},
].forEach(({ input, output }) => { ].forEach(({ input, output }) => {
it(`cleans trailing slash from string "${input}"`, () => { it(`cleans trailing slash from string "${input}"`, () => {
expect(utils.cleanTrailingSlash(input)).toEqual(output); expect(utils.cleanTrailingSlash(input)).toEqual(output);
@ -601,13 +634,34 @@ describe('Multi-file store utils', () => {
describe('pathsAreEqual', () => { describe('pathsAreEqual', () => {
[ [
{ args: ['abc', 'abc'], output: true }, {
{ args: ['abc', 'def'], output: false }, args: ['abc', 'abc'],
{ args: ['abc/', 'abc'], output: true }, output: true,
{ args: ['abc/abc', 'abc'], output: false }, },
{ args: ['/', ''], output: true }, {
{ args: ['', '/'], output: true }, args: ['abc', 'def'],
{ args: [false, '/'], output: true }, output: false,
},
{
args: ['abc/', 'abc'],
output: true,
},
{
args: ['abc/abc', 'abc'],
output: false,
},
{
args: ['/', ''],
output: true,
},
{
args: ['', '/'],
output: true,
},
{
args: [false, '/'],
output: true,
},
].forEach(({ args, output }) => { ].forEach(({ args, output }) => {
it(`cleans and tests equality (${JSON.stringify(args)})`, () => { it(`cleans and tests equality (${JSON.stringify(args)})`, () => {
expect(utils.pathsAreEqual(...args)).toEqual(output); expect(utils.pathsAreEqual(...args)).toEqual(output);
@ -618,10 +672,22 @@ describe('Multi-file store utils', () => {
describe('addFinalNewlineIfNeeded', () => { describe('addFinalNewlineIfNeeded', () => {
it('adds a newline if it doesnt already exist', () => { it('adds a newline if it doesnt already exist', () => {
[ [
{ input: 'some text', output: 'some text\n' }, {
{ input: 'some text\n', output: 'some text\n' }, input: 'some text',
{ input: 'some text\n\n', output: 'some text\n\n' }, output: 'some text\n',
{ input: 'some\n text', output: 'some\n text\n' }, },
{
input: 'some text\n',
output: 'some text\n',
},
{
input: 'some text\n\n',
output: 'some text\n\n',
},
{
input: 'some\n text',
output: 'some\n text\n',
},
].forEach(({ input, output }) => { ].forEach(({ input, output }) => {
expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output); expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output);
}); });
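
The cleanTrailingSlash, pathsAreEqual and addFinalNewlineIfNeeded blocks above keep their table-driven shape: an array of fixtures feeds forEach, and each fixture yields its own it block. A minimal sketch of the pattern with an inline stand-in utility:

```js
// Inline stand-in for the utility under test.
const stripTrailingSlash = path => path.replace(/\/$/, '');

describe('stripTrailingSlash', () => {
  [
    { input: 'abc', output: 'abc' },
    { input: 'abc/', output: 'abc' },
    { input: 'abc/def/', output: 'abc/def' },
  ].forEach(({ input, output }) => {
    // Each fixture produces its own named test case.
    it(`strips trailing slash from "${input}"`, () => {
      expect(stripTrailingSlash(input)).toEqual(output);
    });
  });
});
```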

View File

@ -26,15 +26,18 @@ describe('WebIDE utils', () => {
entry.deleted = true; entry.deleted = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.deleted);
}); });
it('renders "addition" icon for temp entries', () => { it('renders "addition" icon for temp entries', () => {
entry.tempFile = true; entry.tempFile = true;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.addition);
}); });
it('renders "modified" icon for newly-renamed entries', () => { it('renders "modified" icon for newly-renamed entries', () => {
entry.prevPath = 'foo/bar'; entry.prevPath = 'foo/bar';
entry.tempFile = false; entry.tempFile = false;
expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified); expect(getCommitIconMap(entry)).toEqual(commitItemIconMap.modified);
}); });
it('renders "modified" icon even for temp entries if they are newly-renamed', () => { it('renders "modified" icon even for temp entries if they are newly-renamed', () => {
entry.prevPath = 'foo/bar'; entry.prevPath = 'foo/bar';
entry.tempFile = true; entry.tempFile = true;

View File

@ -1,54 +1 @@
import * as pathUtils from 'path'; export * from '../../frontend/ide/helpers';
import { decorateData } from '~/ide/stores/utils';
import state from '~/ide/stores/state';
import commitState from '~/ide/stores/modules/commit/state';
import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
import pipelinesState from '~/ide/stores/modules/pipelines/state';
import branchesState from '~/ide/stores/modules/branches/state';
import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
import paneState from '~/ide/stores/modules/pane/state';
export const resetStore = store => {
const newState = {
...state(),
commit: commitState(),
mergeRequests: mergeRequestsState(),
pipelines: pipelinesState(),
branches: branchesState(),
fileTemplates: fileTemplatesState(),
rightPane: paneState(),
};
store.replaceState(newState);
};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
id,
type,
icon: 'icon',
url: 'url',
name,
path: parent ? `${parent.path}/${name}` : name,
parentPath: parent ? parent.path : '',
lastCommit: {},
});
export const createEntriesFromPaths = paths =>
paths
.map(path => ({
name: pathUtils.basename(path),
dir: pathUtils.dirname(path),
ext: pathUtils.extname(path),
}))
.reduce((entries, path, idx) => {
const { name } = path;
const parent = path.dir ? entries[path.dir] : null;
const type = path.ext ? 'blob' : 'tree';
const entry = file(name, (idx + 1).toString(), type, parent);
return {
[entry.path]: entry,
...entries,
};
}, {});
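
The hunk above collapses the Karma-era spec/javascripts/ide/helpers.js into a single re-export, so helpers such as file (and whatever else the frontend module exports) have one implementation shared by both suites:

```js
// spec/javascripts/ide/helpers.js (legacy Karma suite), as introduced above:
// re-export the shared Jest helpers so the two suites cannot drift apart.
export * from '../../frontend/ide/helpers';
```

Karma specs that already do `import { file } from '../helpers';` keep working unchanged, while the helper implementations now live only under spec/frontend/ide.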

View File

@ -1,66 +0,0 @@
import testAction from 'spec/helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/pane/actions';
import * as types from '~/ide/stores/modules/pane/mutation_types';
describe('IDE pane module actions', () => {
const TEST_VIEW = { name: 'test' };
const TEST_VIEW_KEEP_ALIVE = { name: 'test-keep-alive', keepAlive: true };
describe('toggleOpen', () => {
it('dispatches open if closed', done => {
testAction(
actions.toggleOpen,
TEST_VIEW,
{ isOpen: false },
[],
[{ type: 'open', payload: TEST_VIEW }],
done,
);
});
it('dispatches close if opened', done => {
testAction(actions.toggleOpen, TEST_VIEW, { isOpen: true }, [], [{ type: 'close' }], done);
});
});
describe('open', () => {
it('commits SET_OPEN', done => {
testAction(actions.open, null, {}, [{ type: types.SET_OPEN, payload: true }], [], done);
});
it('commits SET_CURRENT_VIEW if view is given', done => {
testAction(
actions.open,
TEST_VIEW,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW.name },
],
[],
done,
);
});
it('commits KEEP_ALIVE_VIEW if keepAlive is true', done => {
testAction(
actions.open,
TEST_VIEW_KEEP_ALIVE,
{},
[
{ type: types.SET_OPEN, payload: true },
{ type: types.SET_CURRENT_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
{ type: types.KEEP_ALIVE_VIEW, payload: TEST_VIEW_KEEP_ALIVE.name },
],
[],
done,
);
});
});
describe('close', () => {
it('commits SET_OPEN', done => {
testAction(actions.close, null, {}, [{ type: types.SET_OPEN, payload: false }], [], done);
});
});
});
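
The Karma copy of the pane actions spec above is deleted rather than rewritten; its Jest counterpart lives under spec/frontend and keeps using the testAction helper to assert which mutations and actions a Vuex action produces. A sketch of that helper's usage with an inline stand-in action; the helpers/vuex_action_helper import path is an assumption, mirroring the helpers/test_constants mapping used elsewhere in this commit:

```js
// The helpers/vuex_action_helper path is assumed; adjust to wherever the Jest
// suite exposes the helper. The toggleOpen stand-in is defined inline for illustration.
import testAction from 'helpers/vuex_action_helper';

const toggleOpen = ({ state, dispatch }, view) => {
  if (!state.isOpen) {
    dispatch('open', view);
  }
};

describe('toggleOpen', () => {
  it('dispatches open if closed', done => {
    testAction(
      toggleOpen, // action under test
      { name: 'test' }, // payload
      { isOpen: false }, // state
      [], // expected mutations
      [{ type: 'open', payload: { name: 'test' } }], // expected dispatched actions
      done,
    );
  });
});
```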

View File

@ -136,7 +136,9 @@ describe Gitlab::Auth::LDAP::Access do
context 'without ActiveDirectory enabled' do context 'without ActiveDirectory enabled' do
before do before do
allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true) allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:active_directory).and_return(false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:active_directory).and_return(false)
end
end end
it 'returns true' do it 'returns true' do

View File

@ -58,7 +58,9 @@ describe Gitlab::Auth::LDAP::AuthHash do
end end
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive(:attributes).and_return(attributes) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive(:attributes).and_return(attributes)
end
end end
it "has the correct username" do it "has the correct username" do

View File

@ -18,8 +18,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_truthy expect(described_class.login(login, password)).to be_truthy
end end
@ -27,8 +28,9 @@ describe Gitlab::Auth::LDAP::Authentication do
it "is false if the user does not exist" do it "is false if the user does not exist" do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', dn: dn).as_null_object adapter = double('adapter', dn: dn).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey expect(described_class.login(login, password)).to be_falsey
end end
@ -38,8 +40,9 @@ describe Gitlab::Auth::LDAP::Authentication do
# try only to fake the LDAP call # try only to fake the LDAP call
adapter = double('adapter', bind_as: nil).as_null_object adapter = double('adapter', bind_as: nil).as_null_object
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:adapter).and_return(adapter) allow(instance).to receive(:adapter).and_return(adapter)
end
expect(described_class.login(login, password)).to be_falsey expect(described_class.login(login, password)).to be_falsey
end end

View File

@ -396,7 +396,9 @@ describe Gitlab::Auth::OAuth::User do
context "and no account for the LDAP user" do context "and no account for the LDAP user" do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
@ -408,7 +410,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do
@ -424,7 +428,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
@ -436,7 +442,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do
@ -480,7 +488,9 @@ describe Gitlab::Auth::OAuth::User do
context 'dont block on create (LDAP)' do context 'dont block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: false) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: false)
end
end end
it do it do
@ -492,7 +502,9 @@ describe Gitlab::Auth::OAuth::User do
context 'block on create (LDAP)' do context 'block on create (LDAP)' do
before do before do
allow_any_instance_of(Gitlab::Auth::LDAP::Config).to receive_messages(block_auto_created_users: true) allow_next_instance_of(Gitlab::Auth::LDAP::Config) do |instance|
allow(instance).to receive_messages(block_auto_created_users: true)
end
end end
it do it do

View File

@ -75,7 +75,9 @@ describe Gitlab::BareRepositoryImport::Importer, :seed_helper do
end end
it 'does not schedule an import' do it 'does not schedule an import' do
expect_any_instance_of(Project).not_to receive(:import_schedule) expect_next_instance_of(Project) do |instance|
expect(instance).not_to receive(:import_schedule)
end
importer.create_project_if_needed importer.create_project_if_needed
end end

View File

@ -9,7 +9,9 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
describe '.load_for_project' do describe '.load_for_project' do
it "loads the status" do it "loads the status" do
expect_any_instance_of(described_class).to receive(:load_status) expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:load_status)
end
described_class.load_for_project(project) described_class.load_for_project(project)
end end

View File

@ -32,7 +32,9 @@ describe Gitlab::Checks::BranchCheck do
end end
it 'raises an error if the user is not allowed to merge to protected branches' do it 'raises an error if the user is not allowed to merge to protected branches' do
expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true) expect_next_instance_of(Gitlab::Checks::MatchingMergeRequest) do |instance|
expect(instance).to receive(:match?).and_return(true)
end
expect(user_access).to receive(:can_merge_to_branch?).and_return(false) expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false) expect(user_access).to receive(:can_push_to_branch?).and_return(false)

View File

@ -14,31 +14,41 @@ describe Gitlab::Checks::ChangeAccess do
end end
it 'calls pushes checks' do it 'calls pushes checks' do
expect_any_instance_of(Gitlab::Checks::PushCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::PushCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls branches checks' do it 'calls branches checks' do
expect_any_instance_of(Gitlab::Checks::BranchCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::BranchCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls tags checks' do it 'calls tags checks' do
expect_any_instance_of(Gitlab::Checks::TagCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::TagCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls lfs checks' do it 'calls lfs checks' do
expect_any_instance_of(Gitlab::Checks::LfsCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end
it 'calls diff checks' do it 'calls diff checks' do
expect_any_instance_of(Gitlab::Checks::DiffCheck).to receive(:validate!) expect_next_instance_of(Gitlab::Checks::DiffCheck) do |instance|
expect(instance).to receive(:validate!)
end
subject.exec subject.exec
end end

View File

@ -12,12 +12,16 @@ describe Gitlab::Ci::Build::Credentials::Factory do
end end
before do before do
allow_any_instance_of(described_class).to receive(:providers).and_return([TestProvider]) allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:providers).and_return([TestProvider])
end
end end
context 'when provider is valid' do context 'when provider is valid' do
before do before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true) allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(true)
end
end end
it 'generates an array of credentials objects' do it 'generates an array of credentials objects' do
@ -29,7 +33,9 @@ describe Gitlab::Ci::Build::Credentials::Factory do
context 'when provider is not valid' do context 'when provider is not valid' do
before do before do
allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false) allow_next_instance_of(TestProvider) do |instance|
allow(instance).to receive(:valid?).and_return(false)
end
end end
it 'generates an array without specific credential object' do it 'generates an array without specific credential object' do

View File

@ -15,8 +15,9 @@ describe Gitlab::Ci::Config::External::File::Project do
before do before do
project.add_developer(user) project.add_developer(user)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do
@ -159,8 +160,8 @@ describe Gitlab::Ci::Config::External::File::Project do
private private
def stub_project_blob(ref, path) def stub_project_blob(ref, path)
allow_any_instance_of(Repository) allow_next_instance_of(Repository) do |instance|
.to receive(:blob_data_at) allow(instance).to receive(:blob_data_at).with(ref, path) { yield }
.with(ref, path) { yield } end
end end
end end

View File

@ -21,8 +21,9 @@ describe Gitlab::Ci::Config::External::File::Remote do
end end
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do

View File

@ -14,8 +14,9 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template_file) { described_class.new(params, context) } let(:template_file) { described_class.new(params, context) }
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#matching?' do describe '#matching?' do

View File

@ -23,8 +23,9 @@ describe Gitlab::Ci::Config::External::Mapper do
before do before do
stub_full_request(remote_url).to_return(body: file_content) stub_full_request(remote_url).to_return(body: file_content)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
describe '#process' do describe '#process' do

View File

@ -8,8 +8,9 @@ describe Gitlab::Ci::Config do
set(:user) { create(:user) } set(:user) { create(:user) }
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!)
end
end end
let(:config) do let(:config) do
@ -358,18 +359,11 @@ describe Gitlab::Ci::Config do
context "when it takes too long to evaluate includes" do context "when it takes too long to evaluate includes" do
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!).and_call_original
.and_call_original allow(instance).to receive(:set_deadline).with(described_class::TIMEOUT_SECONDS).and_call_original
allow(instance).to receive(:execution_expired?).and_return(true)
allow_any_instance_of(Gitlab::Ci::Config::External::Context) end
.to receive(:set_deadline)
.with(described_class::TIMEOUT_SECONDS)
.and_call_original
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
.to receive(:execution_expired?)
.and_return(true)
end end
it 'raises error TimeoutError' do it 'raises error TimeoutError' do
@ -384,9 +378,9 @@ describe Gitlab::Ci::Config do
context 'when context expansion timeout is disabled' do context 'when context expansion timeout is disabled' do
before do before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context) allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
.to receive(:check_execution_time!) allow(instance).to receive(:check_execution_time!).and_call_original
.and_call_original end
allow(Feature) allow(Feature)
.to receive(:enabled?) .to receive(:enabled?)

View File

@ -81,7 +81,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is protected' do context 'when a ref is protected' do
before do before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(true) allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(true)
end
end end
it 'returns protected builds' do it 'returns protected builds' do
@ -91,7 +93,9 @@ describe Gitlab::Ci::Pipeline::Seed::Stage do
context 'when a ref is not protected' do context 'when a ref is not protected' do
before do before do
allow_any_instance_of(Project).to receive(:protected_for?).and_return(false) allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:protected_for?).and_return(false)
end
end end
it 'returns unprotected builds' do it 'returns unprotected builds' do

View File

@ -112,8 +112,9 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
end end
it 'calls get_chunk only once' do it 'calls get_chunk only once' do
expect_any_instance_of(Gitlab::Ci::Trace::ChunkedIO) expect_next_instance_of(Gitlab::Ci::Trace::ChunkedIO) do |instance|
.to receive(:current_chunk).once.and_call_original expect(instance).to receive(:current_chunk).once.and_call_original
end
chunked_io.each_line { |line| } chunked_io.each_line { |line| }
end end

View File

@@ -9,7 +9,9 @@ shared_examples 'base stage' do
   before do
     allow(stage).to receive(:project_median).and_return(1.12)
-    allow_any_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher).to receive(:event_result).and_return({})
+    allow_next_instance_of(Gitlab::CycleAnalytics::BaseEventFetcher) do |instance|
+      allow(instance).to receive(:event_result).and_return({})
+    end
   end

   it 'has the median data value' do

View File

@@ -17,7 +17,9 @@ describe Gitlab::CycleAnalytics::UsageData do
       projects.each_with_index do |project, time|
         issue = create(:issue, project: project, created_at: (time + 1).hour.ago)
-        allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
+        allow_next_instance_of(Gitlab::ReferenceExtractor) do |instance|
+          allow(instance).to receive(:issues).and_return([issue])
+        end

         milestone = create(:milestone, project: project)
         mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}")

View File

@@ -10,17 +10,25 @@ describe Gitlab::Diff::FileCollection::MergeRequestDiff do
   describe '#diff_files' do
     it 'does not highlight binary files' do
-      allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false)
+      allow_next_instance_of(Gitlab::Diff::File) do |instance|
+        allow(instance).to receive(:text?).and_return(false)
+      end

-      expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
+      expect_next_instance_of(Gitlab::Diff::File) do |instance|
+        expect(instance).not_to receive(:highlighted_diff_lines)
+      end

       diff_files
     end

     it 'does not highlight files marked as undiffable in .gitattributes' do
-      allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
+      allow_next_instance_of(Gitlab::Diff::File) do |instance|
+        allow(instance).to receive(:diffable?).and_return(false)
+      end

-      expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
+      expect_next_instance_of(Gitlab::Diff::File) do |instance|
+        expect(instance).not_to receive(:highlighted_diff_lines)
+      end

       diff_files
     end

View File

@@ -95,7 +95,9 @@ describe Gitlab::Email::Handler::CreateMergeRequestHandler do
   context "something is wrong" do
     context "when the merge request could not be saved" do
       before do
-        allow_any_instance_of(MergeRequest).to receive(:save).and_return(false)
+        allow_next_instance_of(MergeRequest) do |instance|
+          allow(instance).to receive(:save).and_return(false)
+        end
       end

       it "raises an InvalidMergeRequestError" do

View File

@@ -38,8 +38,9 @@ describe Gitlab::EtagCaching::Middleware do
     end

     it 'generates ETag' do
-      expect_any_instance_of(Gitlab::EtagCaching::Store)
-        .to receive(:touch).and_return('123')
+      expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
+        expect(instance).to receive(:touch).and_return('123')
+      end

       middleware.call(build_request(path, if_none_match))
     end
@@ -177,9 +178,9 @@ describe Gitlab::EtagCaching::Middleware do
         'SCRIPT_NAME' => '/relative-gitlab'
       }

-      expect_any_instance_of(Gitlab::EtagCaching::Store)
-        .to receive(:get).with("/relative-gitlab#{enabled_path}")
-        .and_return(nil)
+      expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
+        expect(instance).to receive(:get).with("/relative-gitlab#{enabled_path}").and_return(nil)
+      end

       middleware.call(env)
     end
@@ -190,8 +191,9 @@ describe Gitlab::EtagCaching::Middleware do
   end

   def mock_value_in_store(value)
-    allow_any_instance_of(Gitlab::EtagCaching::Store)
-      .to receive(:get).and_return(value)
+    allow_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
+      allow(instance).to receive(:get).and_return(value)
+    end
   end

   def build_request(path, if_none_match)

View File

@@ -158,7 +158,9 @@ describe Gitlab::Experimentation do
     context 'the user is part of the control group' do
      before do
-        allow_any_instance_of(described_class).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+        allow_next_instance_of(described_class) do |instance|
+          allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+        end
       end

       it 'pushes the right parameters to gon' do

View File

@@ -20,6 +20,8 @@ describe Gitlab::FogbugzImport::Client do
   end

   def stub_api(users)
-    allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listPeople).and_return(users)
+    allow_next_instance_of(::Fogbugz::Interface) do |instance|
+      allow(instance).to receive(:command).with(:listPeople).and_return(users)
+    end
   end
 end
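
For context on how a next-instance helper of this shape can work: a minimal sketch built on rspec-mocks' and_wrap_original, under the assumption that it mirrors the spirit (not necessarily the exact code) of GitLab's NextInstanceOf support module:

  module NextInstanceOf
    # Wraps Klass.new so that instances created while the stub is active are
    # yielded to the caller for per-instance stubbing.
    def allow_next_instance_of(klass)
      allow(klass).to receive(:new).and_wrap_original do |original, *args, &block|
        original.call(*args, &block).tap { |instance| yield(instance) }
      end
    end
  end

  # Usage, as in the specs above:
  #   allow_next_instance_of(::Fogbugz::Interface) do |instance|
  #     allow(instance).to receive(:command).with(:listPeople).and_return(users)
  #   end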

View File

@@ -0,0 +1,73 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::FogbugzImport::Importer do
let(:project) { create(:project_empty_repo) }
let(:importer) { described_class.new(project) }
let(:repo) do
instance_double(Gitlab::FogbugzImport::Repository,
safe_name: 'vim',
path: 'vim',
raw_data: '')
end
let(:import_data) { { 'repo' => repo } }
let(:credentials) do
{
'fb_session' => {
'uri' => 'https://testing.fogbugz.com',
'token' => 'token'
}
}
end
let(:closed_bug) do
{
fOpen: 'false',
sTitle: 'Closed bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:opened_bug) do
{
fOpen: 'true',
sTitle: 'Opened bug',
sLatestTextSummary: "",
dtOpened: Time.now.to_s,
dtLastUpdated: Time.now.to_s,
events: { event: [] }
}.with_indifferent_access
end
let(:fogbugz_bugs) { [opened_bug, closed_bug] }
before do
project.create_import_data(data: import_data, credentials: credentials)
allow_any_instance_of(::Fogbugz::Interface).to receive(:command).with(:listCategories).and_return([])
allow_any_instance_of(Gitlab::FogbugzImport::Client).to receive(:cases).and_return(fogbugz_bugs)
end
it 'imports bugs' do
expect { importer.execute }.to change { Issue.count }.by(2)
end
it 'imports opened bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(opened_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:opened])
end
it 'imports closed bugs' do
importer.execute
issue = Issue.where(project_id: project.id).find_by_title(closed_bug[:sTitle])
expect(issue.state_id).to eq(Issue.available_states[:closed])
end
end

View File

@@ -134,7 +134,9 @@ describe Gitlab::Git::Blob, :seed_helper do
   describe '.find with Rugged enabled', :enable_rugged do
     it 'calls out to the Rugged implementation' do
-      allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      allow_next_instance_of(Rugged) do |instance|
+        allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      end

       described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg')
     end

View File

@@ -176,7 +176,9 @@ describe Gitlab::Git::Commit, :seed_helper do
   describe '.find with Rugged enabled', :enable_rugged do
     it 'calls out to the Rugged implementation' do
-      allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      allow_next_instance_of(Rugged) do |instance|
+        allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      end

       described_class.find(repository, SeedRepo::Commit::ID)
     end
@@ -438,7 +440,9 @@ describe Gitlab::Git::Commit, :seed_helper do
     it_should_behave_like '.batch_by_oid'

     it 'calls out to the Rugged implementation' do
-      allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      allow_next_instance_of(Rugged) do |instance|
+        allow(instance).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+      end

       described_class.batch_by_oid(repository, [SeedRepo::Commit::ID])
     end

View File

@@ -145,7 +145,9 @@ describe Gitlab::Git::Tree, :seed_helper do
   describe '.where with Rugged enabled', :enable_rugged do
     it 'calls out to the Rugged implementation' do
-      allow_any_instance_of(Rugged).to receive(:lookup).with(SeedRepo::Commit::ID)
+      allow_next_instance_of(Rugged) do |instance|
+        allow(instance).to receive(:lookup).with(SeedRepo::Commit::ID)
+      end

       described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
     end

View File

@@ -730,7 +730,9 @@ describe Gitlab::GitAccess do
     it 'checks LFS integrity only for first change' do
       allow(project).to receive(:lfs_enabled?).and_return(true)

-      expect_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).exactly(1).times
+      expect_next_instance_of(Gitlab::Checks::LfsIntegrity) do |instance|
+        expect(instance).to receive(:objects_missing?).exactly(1).times
+      end

       push_access_check
     end

View File

@@ -10,10 +10,11 @@ describe Gitlab::GitalyClient::CleanupService do
   describe '#apply_bfg_object_map_stream' do
     it 'sends an apply_bfg_object_map_stream message' do
-      expect_any_instance_of(Gitaly::CleanupService::Stub)
-        .to receive(:apply_bfg_object_map_stream)
-        .with(kind_of(Enumerator), kind_of(Hash))
-        .and_return([])
+      expect_next_instance_of(Gitaly::CleanupService::Stub) do |instance|
+        expect(instance).to receive(:apply_bfg_object_map_stream)
+          .with(kind_of(Enumerator), kind_of(Hash))
+          .and_return([])
+      end

       client.apply_bfg_object_map_stream(StringIO.new)
     end

View File

@@ -55,7 +55,9 @@ describe Gitlab::GitalyClient do
     it 'returns an empty string when the storage is not found in the response' do
       response = double("response")
       allow(response).to receive(:storage_statuses).and_return([])
-      allow_any_instance_of(Gitlab::GitalyClient::ServerService).to receive(:info).and_return(response)
+      allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+        allow(instance).to receive(:info).and_return(response)
+      end

       expect(described_class.filesystem_id('default')).to eq(nil)
     end

View File

@@ -144,9 +144,9 @@ describe Gitlab::GithubImport::Importer::DiffNoteImporter do
   describe '#find_merge_request_id' do
     it 'returns a merge request ID' do
-      expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
-        .to receive(:database_id)
-        .and_return(10)
+      expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
+        expect(instance).to receive(:database_id).and_return(10)
+      end

       expect(importer.find_merge_request_id).to eq(10)
     end

View File

@@ -74,9 +74,9 @@ describe Gitlab::GithubImport::Importer::LabelLinksImporter do
   describe '#find_target_id' do
     it 'returns the ID of the issuable to create the label link for' do
-      expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
-        .to receive(:database_id)
-        .and_return(10)
+      expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
+        expect(instance).to receive(:database_id).and_return(10)
+      end

       expect(importer.find_target_id).to eq(10)
     end

View File

@@ -50,8 +50,9 @@ describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache do
   describe '#build_labels_cache' do
     it 'builds the labels cache' do
-      expect_any_instance_of(Gitlab::GithubImport::LabelFinder)
-        .to receive(:build_cache)
+      expect_next_instance_of(Gitlab::GithubImport::LabelFinder) do |instance|
+        expect(instance).to receive(:build_cache)
+      end

       importer.build_labels_cache
     end

View File

@@ -80,8 +80,9 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache do
   describe '#build_milestones_cache' do
     it 'builds the milestones cache' do
-      expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder)
-        .to receive(:build_cache)
+      expect_next_instance_of(Gitlab::GithubImport::MilestoneFinder) do |instance|
+        expect(instance).to receive(:build_cache)
+      end

       importer.build_milestones_cache
     end

View File

@@ -143,9 +143,9 @@ describe Gitlab::GithubImport::Importer::NoteImporter do
   describe '#find_noteable_id' do
     it 'returns the ID of the noteable' do
-      expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
-        .to receive(:database_id)
-        .and_return(10)
+      expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance|
+        expect(instance).to receive(:database_id).and_return(10)
+      end

       expect(importer.find_noteable_id).to eq(10)
     end

View File

@@ -9,8 +9,9 @@ describe Gitlab::GithubImport::SequentialImporter do
       project = double(:project, id: 1, repository: repository)
       importer = described_class.new(project, token: 'foo')

-      expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
-        .to receive(:execute)
+      expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+        expect(instance).to receive(:execute)
+      end

       described_class::SEQUENTIAL_IMPORTERS.each do |klass|
         instance = double(:instance)

View File

@@ -21,18 +21,24 @@ describe Gitlab::GitlabImport::Client do
     it 'uses membership and simple flags' do
       stub_request('/api/v4/projects?membership=true&page=1&per_page=100&simple=true')
-      expect_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
+      expect_next_instance_of(OAuth2::Response) do |instance|
+        expect(instance).to receive(:parsed).and_return([])
+      end

       expect(client.projects.to_a).to eq []
     end

     shared_examples 'pagination params' do
       before do
-        allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return([])
+        allow_next_instance_of(OAuth2::Response) do |instance|
+          allow(instance).to receive(:parsed).and_return([])
+        end
       end

       it 'allows page_limit param' do
-        allow_any_instance_of(OAuth2::Response).to receive(:parsed).and_return(element_list)
+        allow_next_instance_of(OAuth2::Response) do |instance|
+          allow(instance).to receive(:parsed).and_return(element_list)
+        end

         expect(client).to receive(:lazy_page_iterator).with(hash_including(page_limit: 2)).and_call_original

View File

@@ -109,7 +109,9 @@ describe Gitlab::HttpIO do
     end

     it 'calls get_chunk only once' do
-      expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
+      expect_next_instance_of(Net::HTTP) do |instance|
+        expect(instance).to receive(:request).once.and_call_original
+      end

       http_io.each_line { |line| }
     end

View File

@@ -43,7 +43,9 @@ describe Gitlab::RequestContext do
     let(:ip) { '192.168.1.11' }

     before do
-      allow_any_instance_of(Rack::Request).to receive(:ip).and_return(ip)
+      allow_next_instance_of(Rack::Request) do |instance|
+        allow(instance).to receive(:ip).and_return(ip)
+      end

       described_class.new(app).call(env)
     end

View File

@@ -80,6 +80,17 @@ describe Commit do
       expect(commit.author).to eq(user)
     end

+    context 'with a user with an unconfirmed e-mail' do
+      before do
+        user = create(:user)
+        create(:email, user: user, email: commit.author_email)
+      end
+
+      it 'returns no user' do
+        expect(commit.author).to be_nil
+      end
+    end
+
     context 'using eager loading' do
       let!(:alice) { create(:user, email: 'alice@example.com') }
       let!(:bob) { create(:user, email: 'hunter2@example.com') }
@@ -115,7 +126,7 @@ describe Commit do
       let!(:commits) { [alice_commit, bob_commit, eve_commit, jeff_commit] }

       before do
-        create(:email, user: bob, email: 'bob@example.com')
+        create(:email, :confirmed, user: bob, email: 'bob@example.com')
       end

       it 'executes only two SQL queries' do
@@ -179,6 +190,32 @@ describe Commit do
     end
   end

+  describe '#committer' do
+    context 'with a confirmed e-mail' do
+      it 'returns the user' do
+        user = create(:user, email: commit.committer_email)
+
+        expect(commit.committer).to eq(user)
+      end
+    end
+
+    context 'with an unconfirmed e-mail' do
+      let(:user) { create(:user) }
+
+      before do
+        create(:email, user: user, email: commit.committer_email)
+      end
+
+      it 'returns no user' do
+        expect(commit.committer).to be_nil
+      end
+
+      it 'returns the user' do
+        expect(commit.committer(confirmed: false)).to eq(user)
+      end
+    end
+  end
+
   describe '#to_reference' do
     let(:project) { create(:project, :repository, path: 'sample-project') }