Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-09-10 15:11:12 +00:00
parent 8fd149139d
commit 6a6824a5ce
36 changed files with 364 additions and 68 deletions

View File

@@ -6,6 +6,7 @@ import {
PACKAGE_TYPE_CONAN,
PACKAGE_TYPE_MAVEN,
PACKAGE_TYPE_COMPOSER,
+ PACKAGE_TYPE_PYPI,
} from '~/packages_and_registries/package_registry/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';

@@ -22,6 +23,7 @@ export default {
composerJson: s__(
'PackageRegistry|Composer.json with license: %{license} and version: %{version}',
),
+ requiredPython: s__('PackageRegistry|Required Python: %{pythonVersion}'),
},
components: {
DetailsRow,

@@ -43,6 +45,7 @@ export default {
PACKAGE_TYPE_CONAN,
PACKAGE_TYPE_MAVEN,
PACKAGE_TYPE_COMPOSER,
+ PACKAGE_TYPE_PYPI,
].includes(this.packageEntity.packageType) && this.packageEntity.metadata
);
},

@@ -58,6 +61,9 @@ export default {
showComposerMetadata() {
return this.packageEntity.packageType === PACKAGE_TYPE_COMPOSER;
},
+ showPypiMetadata() {
+ return this.packageEntity.packageType === PACKAGE_TYPE_PYPI;
+ },
},
};
</script>

@@ -141,6 +147,19 @@ export default {
</gl-sprintf>
</details-row>
</template>
+ <details-row
+ v-else-if="showPypiMetadata"
+ icon="information-o"
+ padding="gl-p-4"
+ data-testid="pypi-required-python"
+ >
+ <gl-sprintf :message="$options.i18n.requiredPython">
+ <template #pythonVersion>
+ <strong>{{ packageEntity.metadata.requiredPython }}</strong>
+ </template>
+ </gl-sprintf>
+ </details-row>
</div>
</div>
</template>

View File

@@ -80,6 +80,8 @@ class WebHook < ApplicationRecord
end

def backoff!
+ return if backoff_count >= MAX_FAILURES && disabled_until && disabled_until > Time.current
+
assign_attributes(disabled_until: next_backoff.from_now, backoff_count: backoff_count.succ.clamp(0, MAX_FAILURES))
save(validate: false)
end
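The new guard makes `backoff!` a no-op once a hook has already reached `MAX_FAILURES` and is still inside its disabled window, so neither `backoff_count` nor `disabled_until` keeps growing on every failure. A minimal, self-contained sketch of that behaviour (a toy class, not the real `WebHook` model; the constant value and the `next_backoff` formula here are invented for illustration):

```ruby
# Toy illustration of the backoff guard; MAX_FAILURES and next_backoff are
# stand-ins, not the values used by the actual WebHook model.
class ToyHook
  MAX_FAILURES = 4

  attr_reader :backoff_count, :disabled_until

  def initialize
    @backoff_count = 0
    @disabled_until = nil
  end

  def backoff!
    # Once the failure ceiling is reached and the hook is still disabled,
    # do not push disabled_until further out or touch the counter.
    return if backoff_count >= MAX_FAILURES && disabled_until && disabled_until > Time.now

    @backoff_count = [backoff_count + 1, MAX_FAILURES].min
    @disabled_until = Time.now + next_backoff
  end

  private

  # Simple exponential backoff in seconds.
  def next_backoff
    60 * (2**backoff_count)
  end
end

hook = ToyHook.new
10.times { hook.backoff! }
puts hook.backoff_count # => 4; calls 5..10 hit the guard and change nothing
```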

View File

@@ -34,6 +34,14 @@ class WorkItem::Type < ApplicationRecord
validates :name, length: { maximum: 255 }
validates :icon_name, length: { maximum: 255 }

+ def self.default_by_type(type)
+ find_by(namespace_id: nil, base_type: type)
+ end
+
+ def self.default_issue_type
+ default_by_type(:issue)
+ end
+
private

def strip_whitespace

View File

@@ -34,6 +34,13 @@ module Issues
private

+ def find_work_item_type_id(issue_type)
+ work_item_type = WorkItem::Type.default_by_type(issue_type)
+ work_item_type ||= WorkItem::Type.default_issue_type
+
+ work_item_type.id
+ end
+
def filter_params(issue)
super
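Together with the `WorkItem::Type.default_by_type` and `default_issue_type` class methods added above, this helper resolves an `issue_type` to the id of the matching default (namespace-less) work item type, falling back to the default issue type when no row matches. A hedged usage sketch, assuming the base types have been seeded:

```ruby
# Rough sketch of the lookup the new helpers perform (assumes seeded base types).
WorkItem::Type.default_by_type(:incident) # => the default incident type record, or nil if not seeded
WorkItem::Type.default_issue_type         # => the default issue type record

# find_work_item_type_id mirrors this fallback chain:
WorkItem::Type.default_by_type(:incident)&.id || WorkItem::Type.default_issue_type.id
```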

View File

@@ -6,6 +6,7 @@ module Issues
def execute
filter_resolve_discussion_params

@issue = project.issues.new(issue_params).tap do |issue|
ensure_milestone_available(issue)
end

@@ -60,6 +61,13 @@ module Issues
def issue_params
@issue_params ||= build_issue_params
+
+ # If :issue_type is nil then params[:issue_type] was either nil
+ # or not permitted. Either way, the :issue_type will default
+ # to the column default of `issue`. And that means we need to
+ # ensure the work_item_type_id is set
+ @issue_params[:work_item_type_id] = get_work_item_type_id(@issue_params[:issue_type])
+
+ @issue_params
end

private

@@ -81,6 +89,11 @@ module Issues
{ author: current_user }
.merge(issue_params_with_info_from_discussions)
.merge(allowed_issue_params)
+ .with_indifferent_access
+ end
+
+ def get_work_item_type_id(issue_type = :issue)
+ find_work_item_type_id(issue_type)
end
end
end

View File

@@ -26,6 +26,8 @@ module Issues
end

def before_update(issue, skip_spam_check: false)
+ change_work_item_type(issue)
+
return if skip_spam_check

Spam::SpamActionService.new(

@@ -36,6 +38,14 @@ module Issues
).execute
end

+ def change_work_item_type(issue)
+ return unless issue.changed_attributes['issue_type']
+
+ type_id = find_work_item_type_id(issue.issue_type)
+ issue.work_item_type_id = type_id
+ end
+
def handle_changes(issue, options)
super
old_associations = options.fetch(:old_associations, {})
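`change_work_item_type` relies on ActiveRecord dirty tracking (`changed_attributes`) so the work item type is only re-resolved when `issue_type` was actually modified in the current update. A rough, self-contained illustration of that guard using plain `ActiveModel::Dirty` (a toy class with a hard-coded type table, not the real `Issue` model or service):

```ruby
require 'active_model'

class ToyIssue
  include ActiveModel::Dirty

  define_attribute_methods :issue_type

  attr_reader :issue_type, :work_item_type_id

  # Hypothetical stand-in for the WorkItem::Type lookups.
  TYPE_IDS = { 'issue' => 1, 'incident' => 2 }.freeze

  def initialize
    @issue_type = 'issue'
    @work_item_type_id = TYPE_IDS['issue']
  end

  def issue_type=(value)
    issue_type_will_change! unless value == @issue_type
    @issue_type = value
  end

  def before_update
    # Mirrors the service: skip the lookup when issue_type was not touched.
    return unless changed_attributes['issue_type']

    @work_item_type_id = TYPE_IDS.fetch(issue_type, TYPE_IDS['issue'])
  end
end

issue = ToyIssue.new
issue.issue_type = 'incident'
issue.before_update
issue.work_item_type_id # => 2
```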

View File

@@ -501,6 +501,79 @@ two other clusters of nodes supporting a Geo **secondary** site. One for the
main database and the other for the tracking database. For more information,
see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).

+ ### Changing the replication password
+
+ To change the password for the [replication user](https://wiki.postgresql.org/wiki/Streaming_Replication)
+ when using Omnibus-managed PostgreSQL instances:
+
+ On the GitLab Geo **primary** server:
+
+ 1. The default value for the replication user is `gitlab_replicator`, but if you've set a custom replication
+ user in your `/etc/gitlab/gitlab.rb` under the `postgresql['sql_replication_user']` setting, make sure to
+ adapt the following instructions for your own user.
+
+ Generate an MD5 hash of the desired password:
+
+ ```shell
+ sudo gitlab-ctl pg-password-md5 gitlab_replicator
+ # Enter password: <your_password_here>
+ # Confirm password: <your_password_here>
+ # 950233c0dfc2f39c64cf30457c3b7f1e
+ ```
+
+ Edit `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+ # Fill with the hash generated by `gitlab-ctl pg-password-md5 gitlab_replicator`
+ postgresql['sql_replication_password'] = '<md5_hash_of_your_password>'
+ ```
+
+ 1. Save the file and reconfigure GitLab to change the replication user's password in PostgreSQL:
+
+ ```shell
+ sudo gitlab-ctl reconfigure
+ ```
+
+ 1. Restart PostgreSQL for the replication password change to take effect:
+
+ ```shell
+ sudo gitlab-ctl restart postgresql
+ ```
+
+ Until the password is updated on any **secondary** servers, the [PostgreSQL log](../../logs.md#postgresql-logs) on
+ the secondaries will report the following error message:
+
+ ```console
+ FATAL: could not connect to the primary server: FATAL: password authentication failed for user "gitlab_replicator"
+ ```
+
+ On all GitLab Geo **secondary** servers:
+
+ 1. The first step isn't necessary from a configuration perspective, since the hashed `'sql_replication_password'`
+ is not used on the GitLab Geo **secondary**. However, in the event that the **secondary** needs to be promoted
+ to the GitLab Geo **primary**, make sure to match the `'sql_replication_password'` in the secondary
+ server configuration.
+
+ Edit `/etc/gitlab/gitlab.rb`:
+
+ ```ruby
+ # Fill with the hash generated by `gitlab-ctl pg-password-md5 gitlab_replicator` on the Geo primary
+ postgresql['sql_replication_password'] = '<md5_hash_of_your_password>'
+ ```
+
+ 1. During the initial replication setup, the `gitlab-ctl replicate-geo-database` command writes the plaintext
+ password for the replication user account to two locations:
+
+ - `gitlab-geo.conf`: Used by the PostgreSQL replication process, written to the PostgreSQL data
+ directory, by default at `/var/opt/gitlab/postgresql/data/gitlab-geo.conf`.
+ - `.pgpass`: Used by the `gitlab-psql` user, located by default at `/var/opt/gitlab/postgresql/.pgpass`.
+
+ Update the plaintext password in both of these files, and restart PostgreSQL:
+
+ ```shell
+ sudo gitlab-ctl restart postgresql
+ ```
+
## Multi-node database replication

In GitLab 14.0, Patroni replaced `repmgr` as the supported

View File

@@ -790,6 +790,28 @@ translate correctly if you extract individual words from the sentence.
When in doubt, try to follow the best practices described in this [Mozilla Developer documentation](https://developer.mozilla.org/en-US/docs/Mozilla/Localization/Localization_content_best_practices#Splitting).

+ ### Always pass string literals to the translation helpers
+
+ The `bin/rake gettext:regenerate` script parses the codebase and extracts all the strings from the
+ [translation helpers](#preparing-a-page-for-translation) ready to be translated.
+
+ The script cannot resolve the strings if they are passed as variables or function calls. Therefore,
+ make sure to always pass string literals to the helpers.
+
+ ```javascript
+ // Good
+ __('Some label');
+ s__('Namespace', 'Label');
+ s__('Namespace|Label');
+ n__('%d apple', '%d apples', appleCount);
+
+ // Bad
+ __(LABEL);
+ s__(getLabel());
+ s__(NAMESPACE, LABEL);
+ n__(LABEL_SINGULAR, LABEL_PLURAL, appleCount);
+ ```
+
## Updating the PO files with the new content

Now that the new content is marked for translation, run this command to update the

View File

@@ -902,25 +902,20 @@ Elasticsearch::Transport::Transport::Errors::BadRequest([400] {
This is because we changed the index mapping in GitLab 8.12 and the old indexes should be removed and built from scratch again,
see details in the [update guide](../update/upgrading_from_source.md).

- - Exception `Elasticsearch::Transport::Transport::Errors::BadRequest`
+ ### `Elasticsearch::Transport::Transport::Errors::BadRequest`

If you have this exception (just like in the case above but the actual message is different) please check if you have the correct Elasticsearch version and you met the other [requirements](#system-requirements).
There is also an easy way to check it automatically with `sudo gitlab-rake gitlab:check` command.

- - Exception `Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge`
+ ### `Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge`

```plaintext
[413] {"Message":"Request size exceeded 10485760 bytes"}
```

- This exception is seen when your Elasticsearch cluster is configured to reject
- requests above a certain size (10MiB in this case). This corresponds to the
- `http.max_content_length` setting in `elasticsearch.yml`. Increase it to a
- larger size and restart your Elasticsearch cluster.
+ This exception is seen when your Elasticsearch cluster is configured to reject requests above a certain size (10MiB in this case). This corresponds to the `http.max_content_length` setting in `elasticsearch.yml`. Increase it to a larger size and restart your Elasticsearch cluster.

- AWS has [fixed limits](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html)
- for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of
- the underlying instance.
+ AWS has [fixed limits](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html) for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of the underlying instance.

### My single node Elasticsearch cluster status never goes from `yellow` to `green` even though everything seems to be running properly

View File

@@ -11,10 +11,10 @@ on Atlassian cloud. To create the API token:
1. Sign in to [`id.atlassian.com`](https://id.atlassian.com/manage-profile/security/api-tokens)
with your email address. Use an account with *write* access to Jira projects.
- 1. Go to **Settings > API tokens**.
+ 1. Go to **Settings > Atlassian account settings > Security > Create and manage API tokens**.
1. Select **Create API token** to display a modal window with an API token.
- 1. To copy the API token, select **Copy to clipboard**, or select **View** and write
- down the new API token. You need this value when you
+ 1. In the dialog, enter a label for your token and select **Create**.
+ 1. To copy the API token, select **Copy**, then paste the token somewhere safe. You need this value when you
[configure GitLab](configure.md).

You need the newly created token, and the email

View File

@@ -6,7 +6,7 @@ module Gitlab
# The name of the Gitlab::SafeRequestStore cache key.
CACHE_KEY = :issuables_count_for_state
# The expiration time for the Rails cache.
- CACHE_EXPIRES_IN = 10.minutes
+ CACHE_EXPIRES_IN = 1.hour

THRESHOLD = 1000

# The state values that can be safely casted to a Symbol.

View File

@@ -20,6 +20,10 @@ module Gitlab
def self.dev_url
'https://dev.gitlab.org'
end
+
+ def self.registry_prefix
+ 'registry.gitlab.com'
+ end
end
end

View File

@@ -24043,6 +24043,9 @@ msgstr ""
msgid "PackageRegistry|Remove package"
msgstr ""

+ msgid "PackageRegistry|Required Python: %{pythonVersion}"
+ msgstr ""
+
msgid "PackageRegistry|RubyGems"
msgstr ""

View File

@@ -428,17 +428,21 @@ RSpec.describe Boards::IssuesController do
describe 'POST create' do
context 'with valid params' do
- it 'returns a successful 200 response' do
+ before do
create_issue user: user, board: board, list: list1, title: 'New issue'
+ end
+
+ it 'returns a successful 200 response' do
expect(response).to have_gitlab_http_status(:ok)
end

it 'returns the created issue' do
- create_issue user: user, board: board, list: list1, title: 'New issue'
expect(response).to match_response_schema('entities/issue_board')
end

+ it 'sets the default work_item_type' do
+ expect(Issue.last.work_item_type.base_type).to eq('issue')
+ end
end

context 'with invalid params' do

View File

@@ -1176,12 +1176,22 @@ RSpec.describe Projects::IssuesController do
project.issues.first
end

+ context 'when creating an incident' do
+ it 'sets the correct issue_type' do
+ issue = post_new_issue(issue_type: 'incident')
+
+ expect(issue.issue_type).to eq('incident')
+ expect(issue.work_item_type.base_type).to eq('incident')
+ end
+ end
+
it 'creates the issue successfully', :aggregate_failures do
issue = post_new_issue

expect(issue).to be_a(Issue)
expect(issue.persisted?).to eq(true)
expect(issue.issue_type).to eq('issue')
+ expect(issue.work_item_type.base_type).to eq('issue')
end

context 'resolving discussions in MergeRequest' do

View File

@@ -8,6 +8,7 @@ FactoryBot.define do
updated_by { author }
relative_position { RelativePositioning::START_POSITION }
issue_type { :issue }
+ association :work_item_type, :default

trait :confidential do
confidential { true }

@@ -59,6 +60,7 @@ FactoryBot.define do
factory :incident do
issue_type { :incident }
+ association :work_item_type, :default, :incident
end
end
end

View File

@@ -8,6 +8,17 @@ FactoryBot.define do
base_type { WorkItem::Type.base_types[:issue] }
icon_name { 'issue-type-issue' }

+ initialize_with do
+ type_base_attributes = attributes.with_indifferent_access.slice(:base_type, :namespace, :namespace_id)
+
+ # Expect base_types to exist on the DB
+ if type_base_attributes.slice(:namespace, :namespace_id).compact.empty?
+ WorkItem::Type.find_or_initialize_by(type_base_attributes).tap { |type| type.assign_attributes(attributes) }
+ else
+ WorkItem::Type.new(attributes)
+ end
+ end
+
trait :default do
namespace { nil }
end

View File

@@ -6,6 +6,7 @@ import {
nugetMetadata,
packageData,
composerMetadata,
+ pypiMetadata,
} from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
import {

@@ -14,6 +15,7 @@ import {
PACKAGE_TYPE_MAVEN,
PACKAGE_TYPE_NPM,
PACKAGE_TYPE_COMPOSER,
+ PACKAGE_TYPE_PYPI,
} from '~/packages_and_registries/package_registry/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';

@@ -22,6 +24,7 @@ const mavenPackage = { packageType: PACKAGE_TYPE_MAVEN, metadata: mavenMetadata(
const conanPackage = { packageType: PACKAGE_TYPE_CONAN, metadata: conanMetadata() };
const nugetPackage = { packageType: PACKAGE_TYPE_NUGET, metadata: nugetMetadata() };
const composerPackage = { packageType: PACKAGE_TYPE_COMPOSER, metadata: composerMetadata() };
+ const pypiPackage = { packageType: PACKAGE_TYPE_PYPI, metadata: pypiMetadata() };
const npmPackage = { packageType: PACKAGE_TYPE_NPM, metadata: {} };

describe('Package Additional Metadata', () => {

@@ -58,6 +61,7 @@ describe('Package Additional Metadata', () => {
const findComposerTargetSha = () => wrapper.findByTestId('composer-target-sha');
const findComposerTargetShaCopyButton = () => wrapper.findComponent(ClipboardButton);
const findComposerJson = () => wrapper.findByTestId('composer-json');
+ const findPypiRequiredPython = () => wrapper.findByTestId('pypi-required-python');

it('has the correct title', () => {
mountComponent();

@@ -74,6 +78,7 @@ describe('Package Additional Metadata', () => {
${conanPackage} | ${true} | ${PACKAGE_TYPE_CONAN}
${nugetPackage} | ${true} | ${PACKAGE_TYPE_NUGET}
${composerPackage} | ${true} | ${PACKAGE_TYPE_COMPOSER}
+ ${pypiPackage} | ${true} | ${PACKAGE_TYPE_PYPI}
${npmPackage} | ${false} | ${PACKAGE_TYPE_NPM}
`(
`It is $visible that the component is visible when the package is $packageType`,

@@ -160,4 +165,20 @@ describe('Package Additional Metadata', () => {
});
});
});
+
+ describe('pypi metadata', () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: pypiPackage });
+ });
+
+ it.each`
+ name | finderFunction | text | icon
+ ${'pypi-required-python'} | ${findPypiRequiredPython} | ${'Required Python: 1.0.0'} | ${'information-o'}
+ `('$name element', ({ finderFunction, text, icon }) => {
+ const element = finderFunction();
+
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ });
+ });
});

View File

@@ -133,7 +133,7 @@ export const composerMetadata = () => ({
},
});

- export const pypyMetadata = () => ({
+ export const pypiMetadata = () => ({
requiredPython: '1.0.0',
});

@@ -157,7 +157,7 @@ export const packageDetailsQuery = (extendPackage) => ({
metadata: {
...conanMetadata(),
...composerMetadata(),
- ...pypyMetadata(),
+ ...pypiMetadata(),
...mavenMetadata(),
...nugetMetadata(),
},

View File

@@ -71,7 +71,7 @@ RSpec.describe Gitlab::IssuablesCountForState do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }

- let(:cache_options) { { expires_in: 10.minutes } }
+ let(:cache_options) { { expires_in: 1.hour } }
let(:cache_key) { ['group', group.id, 'issues'] }
let(:threshold) { described_class::THRESHOLD }
let(:states_count) { { opened: 1, closed: 1, all: 2 } }

View File

@@ -6,7 +6,17 @@ require_migration!('create_base_work_item_types')
RSpec.describe CreateBaseWorkItemTypes, :migration do
let!(:work_item_types) { table(:work_item_types) }

+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ WorkItem::Type.delete_all
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ end
+
it 'creates default data' do
+ # Need to delete all as base types are seeded before entire test suite
+ WorkItem::Type.delete_all
+
reversible_migration do |migration|
migration.before -> {
# Depending on whether the migration has been run before,

View File

@@ -6,8 +6,18 @@ require_migration!('upsert_base_work_item_types')
RSpec.describe UpsertBaseWorkItemTypes, :migration do
let!(:work_item_types) { table(:work_item_types) }

+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ WorkItem::Type.delete_all
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ end
+
context 'when no default types exist' do
it 'creates default data' do
+ # Need to delete all as base types are seeded before entire test suite
+ WorkItem::Type.delete_all
+
expect(work_item_types.count).to eq(0)

reversible_migration do |migration|

@@ -26,10 +36,6 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do
end

context 'when default types already exist' do
- before do
- Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
- end
-
it 'does not create default types again' do
expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values)

View File

@@ -10,7 +10,11 @@ RSpec.describe WebHook do
let(:hook) { build(:project_hook, project: project) }

around do |example|
- freeze_time { example.run }
+ if example.metadata[:skip_freeze_time]
+ example.run
+ else
+ freeze_time { example.run }
+ end
end

describe 'associations' do

@@ -326,10 +330,28 @@ RSpec.describe WebHook do
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end

- it 'does not let the backoff count exceed the maximum failure count' do
- hook.backoff_count = described_class::MAX_FAILURES
- expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ context 'when we have backed off MAX_FAILURES times' do
+ before do
+ stub_const("#{described_class}::MAX_FAILURES", 5)
+ 5.times { hook.backoff! }
+ end
+
+ it 'does not let the backoff count exceed the maximum failure count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
+
+ it 'does not change disabled_until', :skip_freeze_time do
+ travel_to(hook.disabled_until - 1.minute) do
+ expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ end
+ end
+
+ it 'changes disabled_until when it has elapsed', :skip_freeze_time do
+ travel_to(hook.disabled_until + 1.minute) do
+ expect { hook.backoff! }.to change { hook.disabled_until }
+ expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ end
+ end
end

include_examples 'is tolerant of invalid records' do

View File

@@ -19,8 +19,10 @@ RSpec.describe WorkItem::Type do
it 'deletes type but not unrelated issues' do
type = create(:work_item_type)

+ expect(WorkItem::Type.count).to eq(5)
+
expect { type.destroy! }.not_to change(Issue, :count)
- expect(WorkItem::Type.count).to eq 0
+ expect(WorkItem::Type.count).to eq(4)
end
end

@@ -28,7 +30,7 @@ RSpec.describe WorkItem::Type do
type = create(:work_item_type, work_items: [work_item])

expect { type.destroy! }.to raise_error(ActiveRecord::InvalidForeignKey)
- expect(Issue.count).to eq 1
+ expect(Issue.count).to eq(1)
end
end

View File

@@ -12,6 +12,6 @@ RSpec.configure do |config|
end

config.after(:all) do
- delete_from_all_tables!
+ delete_from_all_tables!(except: deletion_except_tables)
end
end

View File

@@ -39,11 +39,14 @@ RSpec.describe 'Create an issue' do
end

it 'creates the issue' do
- post_graphql_mutation(mutation, current_user: current_user)
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(Issue, :count).by(1)

expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['issue']).to include(input)
expect(mutation_response['issue']).to include('discussionLocked' => true)
+ expect(Issue.last.work_item_type.base_type).to eq('issue')
end
end
end

View File

@@ -44,6 +44,19 @@ RSpec.describe 'Update of an existing issue' do
expect(mutation_response['issue']).to include('discussionLocked' => true)
end

+ context 'when issue_type is updated' do
+ let(:input) { { 'iid' => issue.iid.to_s, 'type' => 'INCIDENT' } }
+
+ it 'updates issue_type and work_item_type' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ issue.reload
+ end.to change { issue.work_item_type.base_type }.from('issue').to('incident').and(
+ change(issue, :issue_type).from('issue').to('incident')
+ )
+ end
+ end
+
context 'setting labels' do
let(:mutation) do
graphql_mutation(:update_issue, input_params) do

View File

@@ -3,6 +3,8 @@
require 'spec_helper'

RSpec.describe Issues::BuildService do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }

@@ -144,6 +146,8 @@ RSpec.describe Issues::BuildService do
issue = build_issue(milestone_id: milestone.id)

expect(issue.milestone).to eq(milestone)
+ expect(issue.issue_type).to eq('issue')
+ expect(issue.work_item_type.base_type).to eq('issue')
end

it 'sets milestone to nil if it is not available for the project' do

@@ -152,6 +156,15 @@ RSpec.describe Issues::BuildService do
expect(issue.milestone).to be_nil
end

+ context 'when issue_type is incident' do
+ it 'sets the correct issue type' do
+ issue = build_issue(issue_type: 'incident')
+
+ expect(issue.issue_type).to eq('incident')
+ expect(issue.work_item_type.base_type).to eq('incident')
+ end
+ end
end

context 'as guest' do

@@ -165,22 +178,13 @@ RSpec.describe Issues::BuildService do
end

context 'setting issue type' do
- it 'defaults to issue if issue_type not given' do
- issue = build_issue
- expect(issue).to be_issue
- end
- it 'sets issue' do
- issue = build_issue(issue_type: 'issue')
- expect(issue).to be_issue
- end
- it 'sets incident' do
- issue = build_issue(issue_type: 'incident')
- expect(issue).to be_incident
- end
+ shared_examples 'builds an issue' do
+ specify do
+ issue = build_issue(issue_type: issue_type)
+
+ expect(issue.issue_type).to eq(resulting_issue_type)
+ expect(issue.work_item_type_id).to eq(work_item_type_id)
+ end
+ end

it 'cannot set invalid issue type' do

@@ -188,6 +192,24 @@ RSpec.describe Issues::BuildService do
expect(issue).to be_issue
end

+ context 'with a corresponding WorkItem::Type' do
+ let_it_be(:type_issue_id) { WorkItem::Type.default_issue_type.id }
+ let_it_be(:type_incident_id) { WorkItem::Type.default_by_type(:incident).id }
+
+ where(:issue_type, :work_item_type_id, :resulting_issue_type) do
+ nil | ref(:type_issue_id) | 'issue'
+ 'issue' | ref(:type_issue_id) | 'issue'
+ 'incident' | ref(:type_incident_id) | 'incident'
+ 'test_case' | ref(:type_issue_id) | 'issue' # update once support for test_case is enabled
+ 'requirement' | ref(:type_issue_id) | 'issue' # update once support for requirement is enabled
+ 'invalid' | ref(:type_issue_id) | 'issue'
+ end
+
+ with_them do
+ it_behaves_like 'builds an issue'
+ end
+ end
end
end
end

View File

@@ -43,10 +43,11 @@ RSpec.describe Issues::CreateService do
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
- expect(issue.assignees).to eq [assignee]
- expect(issue.labels).to match_array labels
- expect(issue.milestone).to eq milestone
- expect(issue.due_date).to eq Date.tomorrow
+ expect(issue.assignees).to eq([assignee])
+ expect(issue.labels).to match_array(labels)
+ expect(issue.milestone).to eq(milestone)
+ expect(issue.due_date).to eq(Date.tomorrow)
+ expect(issue.work_item_type.base_type).to eq('issue')
end

context 'when skip_system_notes is true' do

View File

@@ -228,15 +228,19 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'from incident to issue' do
let(:issue) { create(:incident, project: project) }

+ it 'changed from an incident to an issue type' do
+ expect { update_issue(issue_type: 'issue') }
+ .to change(issue, :issue_type).from('incident').to('issue')
+ .and(change { issue.work_item_type.base_type }.from('incident').to('issue'))
+ end
+
context 'for an incident with multiple labels' do
let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }

- before do
- update_issue(issue_type: 'issue')
- end
-
it 'removes an `incident` label if one exists on the incident' do
- expect(issue.labels).to eq([label_2])
+ expect { update_issue(issue_type: 'issue') }.to change(issue, :label_ids)
+ .from(containing_exactly(label_1.id, label_2.id))
+ .to([label_2.id])
end
end

@@ -244,12 +248,10 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }
let(:params) { { label_ids: [label_1.id, label_2.id], remove_label_ids: [] } }

- before do
- update_issue(issue_type: 'issue')
- end
-
it 'adds an incident label id to remove_label_ids for it to be removed' do
- expect(issue.label_ids).to contain_exactly(label_2.id)
+ expect { update_issue(issue_type: 'issue') }.to change(issue, :label_ids)
+ .from(containing_exactly(label_1.id, label_2.id))
+ .to([label_2.id])
end
end
end

View File

@@ -220,6 +220,8 @@ RSpec.configure do |config|
# Enable all features by default for testing
# Reset any changes in after hook.
stub_all_feature_flags
+
+ TestEnv.seed_db
end

config.after(:all) do

View File

@@ -14,7 +14,7 @@ RSpec.configure do |config|
end

config.append_after(:context, :migration) do
- delete_from_all_tables!
+ delete_from_all_tables!(except: ['work_item_types'])

# Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
# And since:

@@ -61,7 +61,7 @@ RSpec.configure do |config|
example.run

- delete_from_all_tables!
+ delete_from_all_tables!(except: ['work_item_types'])

self.class.use_transactional_tests = true
end

View File

@@ -12,7 +12,7 @@ module DbCleaner
end

def deletion_except_tables
- []
+ ['work_item_types']
end

def setup_database_cleaner

View File

@@ -452,6 +452,10 @@ module TestEnv
example_group
end

+ def seed_db
+ Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
+ end
+
private

# These are directories that should be preserved at cleanup time
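The spec-support changes above work as a set: `TestEnv.seed_db` imports the base work item types, the `work_item_types` table is excluded from table cleanup, and the migration specs that wipe it re-import the types afterwards. A condensed sketch of the arrangement (not the literal GitLab `spec_helper.rb`; the hook placement is approximated):

```ruby
# Approximate sketch of how the seeding and cleanup pieces fit together.
RSpec.configure do |config|
  config.before(:suite) do
    # Seed the default WorkItem::Type rows once for the whole suite.
    Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import
  end

  config.after(:all) do
    # Clean every table except the seeded work_item_types rows.
    delete_from_all_tables!(except: ['work_item_types'])
  end
end
```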

View File

@@ -13,6 +13,7 @@
RSpec.shared_examples 'incident issue' do
it 'has incident as issue type' do
expect(issue.issue_type).to eq('incident')
+ expect(issue.work_item_type.base_type).to eq('incident')
end
end

@@ -41,6 +42,7 @@ RSpec.shared_examples 'not an incident issue' do
it 'has not incident as issue type' do
expect(issue.issue_type).not_to eq('incident')
+ expect(issue.work_item_type.base_type).not_to eq('incident')
end

it 'has not an incident label' do

View File

@@ -2,6 +2,9 @@
RSpec.shared_examples 'work item base types importer' do
it 'creates all base work item types' do
+ # Fixtures need to run on a pristine DB, but the test suite preloads the base types before(:suite)
+ WorkItem::Type.delete_all
+
expect { subject }.to change(WorkItem::Type, :count).from(0).to(WorkItem::Type::BASE_TYPES.count)
end
end