Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-09-28 15:11:30 +00:00
parent 24e5ef9b1a
commit 4abacac9af
51 changed files with 467 additions and 229 deletions

View File

@ -41,6 +41,11 @@ export default {
type: Object,
required: true,
},
hideLineNumbers: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
viewer() {
@ -80,6 +85,7 @@ export default {
:is-raw-content="isRawContent"
:file-name="blob.name"
:type="activeViewer.fileType"
:hide-line-numbers="hideLineNumbers"
data-qa-selector="file_content"
/>
</template>

View File

@ -48,7 +48,12 @@ export default (elements) => {
Vue.use(VueApollo);
const apolloProvider = new VueApollo({
defaultClient: createDefaultClient(),
defaultClient: createDefaultClient(
{},
{
assumeImmutableResults: true,
},
),
});
const listenerAddedAttr = 'data-mr-listener-added';

View File

@ -42,9 +42,6 @@ export default {
this.switchViewer(
this.hasRichViewer && !window.location.hash ? RICH_BLOB_VIEWER : SIMPLE_BLOB_VIEWER,
);
if (this.hasRichViewer && !this.blobViewer) {
this.loadLegacyViewer();
}
},
error() {
this.displayError();
@ -69,6 +66,7 @@ export default {
data() {
return {
legacyRichViewer: null,
legacySimpleViewer: null,
isBinary: false,
isLoadingLegacyViewer: false,
activeViewerType: SIMPLE_BLOB_VIEWER,
@ -115,7 +113,7 @@ export default {
return isLoggedIn();
},
isLoading() {
return this.$apollo.queries.project.loading || this.isLoadingLegacyViewer;
return this.$apollo.queries.project.loading;
},
isBinaryFileType() {
return this.isBinary || this.blobInfo.simpleViewer?.fileType !== 'text';
@ -153,22 +151,41 @@ export default {
},
},
methods: {
loadLegacyViewer() {
loadLegacyViewer(type) {
if (this.legacyViewerLoaded(type)) {
return;
}
this.isLoadingLegacyViewer = true;
axios
.get(`${this.blobInfo.webPath}?format=json&viewer=rich`)
.get(`${this.blobInfo.webPath}?format=json&viewer=${type}`)
.then(({ data: { html, binary } }) => {
this.legacyRichViewer = html;
if (type === 'simple') {
this.legacySimpleViewer = html;
} else {
this.legacyRichViewer = html;
}
this.isBinary = binary;
this.isLoadingLegacyViewer = false;
})
.catch(() => this.displayError());
},
legacyViewerLoaded(type) {
return (
(type === SIMPLE_BLOB_VIEWER && this.legacySimpleViewer) ||
(type === RICH_BLOB_VIEWER && this.legacyRichViewer)
);
},
displayError() {
createFlash({ message: __('An error occurred while loading the file. Please try again.') });
},
switchViewer(newViewer) {
this.activeViewerType = newViewer || SIMPLE_BLOB_VIEWER;
if (!this.blobViewer) {
this.loadLegacyViewer(this.activeViewerType);
}
},
},
};
@ -210,10 +227,11 @@ export default {
v-if="!blobViewer"
:rich-viewer="legacyRichViewer"
:blob="blobInfo"
:content="blobInfo.rawTextBlob"
:content="legacySimpleViewer"
:is-raw-content="true"
:active-viewer="viewer"
:loading="false"
:hide-line-numbers="true"
:loading="isLoadingLegacyViewer"
/>
<component :is="blobViewer" v-else v-bind="viewerProps" class="blob-viewer" />
</div>

View File

@ -3,7 +3,9 @@ export const loadViewer = (type) => {
case 'empty':
return () => import(/* webpackChunkName: 'blob_empty_viewer' */ './empty_viewer.vue');
case 'text':
return () => import(/* webpackChunkName: 'blob_text_viewer' */ './text_viewer.vue');
return gon.features.refactorTextViewer
? () => import(/* webpackChunkName: 'blob_text_viewer' */ './text_viewer.vue')
: null;
case 'download':
return () => import(/* webpackChunkName: 'blob_download_viewer' */ './download_viewer.vue');
case 'image':

View File

@ -140,9 +140,7 @@ export default {
<div
class="gl-mt-2 gl-mb-2 align-content-around align-items-start flex-wrap align-self-center d-flex"
>
<div class="gl-mr-4">
{{ data.text }}
</div>
<div v-safe-html="data.text" class="gl-mr-4"></div>
<div v-if="data.link">
<gl-link :href="data.link.href">{{ data.link.text }}</gl-link>
</div>

View File

@ -27,6 +27,11 @@ export default {
required: false,
default: '',
},
hideLineNumbers: {
type: Boolean,
required: false,
default: false,
},
},
mounted() {
eventHub.$emit(SNIPPET_MEASURE_BLOBS_CONTENT);

View File

@ -8,8 +8,6 @@ export default {
name: 'SimpleViewer',
components: {
GlIcon,
SourceEditor: () =>
import(/* webpackChunkName: 'SourceEditor' */ '~/vue_shared/components/source_editor.vue'),
},
mixins: [ViewerMixin, glFeatureFlagsMixin()],
inject: ['blobHash'],
@ -22,9 +20,6 @@ export default {
lineNumbers() {
return this.content.split('\n').length;
},
refactorBlobViewerEnabled() {
return this.glFeatures.refactorBlobViewer;
},
},
mounted() {
const { hash } = window.location;
@ -52,14 +47,8 @@ export default {
</script>
<template>
<div>
<source-editor
v-if="isRawContent && refactorBlobViewerEnabled"
:value="content"
:file-name="fileName"
:editor-options="{ readOnly: true }"
/>
<div v-else class="file-content code js-syntax-highlight" :class="$options.userColorScheme">
<div class="line-numbers">
<div class="file-content code js-syntax-highlight" :class="$options.userColorScheme">
<div v-if="!hideLineNumbers" class="line-numbers">
<a
v-for="line in lineNumbers"
:id="`L${line}`"

View File

@ -43,6 +43,7 @@ class Projects::BlobController < Projects::ApplicationController
before_action do
push_frontend_feature_flag(:refactor_blob_viewer, @project, default_enabled: :yaml)
push_frontend_feature_flag(:refactor_text_viewer, @project, default_enabled: :yaml)
push_frontend_feature_flag(:consolidated_edit_button, @project, default_enabled: :yaml)
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
end

View File

@ -33,6 +33,7 @@ class ProjectsController < Projects::ApplicationController
before_action do
push_frontend_feature_flag(:refactor_blob_viewer, @project, default_enabled: :yaml)
push_frontend_feature_flag(:refactor_text_viewer, @project, default_enabled: :yaml)
push_frontend_feature_flag(:increase_page_size_exponentially, @project, default_enabled: :yaml)
push_frontend_feature_flag(:paginated_tree_graphql_query, @project, default_enabled: :yaml)
end

View File

@ -178,7 +178,13 @@ class Member < ApplicationRecord
after_destroy :post_destroy_hook, unless: :pending?, if: :hook_prerequisites_met?
after_save :log_invitation_token_cleanup
after_commit :refresh_member_authorized_projects, unless: :importing?
after_commit on: [:create, :update], unless: :importing? do
refresh_member_authorized_projects(blocking: true)
end
after_commit on: [:destroy], unless: :importing? do
refresh_member_authorized_projects(blocking: Feature.disabled?(:member_destroy_async_auth_refresh, type: :ops))
end
default_value_for :notification_level, NotificationSetting.levels[:global]
@ -395,8 +401,8 @@ class Member < ApplicationRecord
# transaction has been committed, resulting in the job either throwing an
# error or not doing any meaningful work.
# rubocop: disable CodeReuse/ServiceClass
def refresh_member_authorized_projects
UserProjectAccessChangedService.new(user_id).execute
def refresh_member_authorized_projects(blocking:)
UserProjectAccessChangedService.new(user_id).execute(blocking: blocking)
end
# rubocop: enable CodeReuse/ServiceClass

View File

@ -50,8 +50,10 @@ class GroupMember < Member
{ group: group }
end
private
override :refresh_member_authorized_projects
def refresh_member_authorized_projects
def refresh_member_authorized_projects(blocking:)
# Here, `destroyed_by_association` will be present if the
# GroupMember is being destroyed due to the `dependent: :destroy`
# callback on Group. In this case, there is no need to refresh the
@ -63,8 +65,6 @@ class GroupMember < Member
super
end
private
def access_level_inclusion
return if access_level.in?(Gitlab::Access.all_values)

View File

@ -90,24 +90,28 @@ class ProjectMember < Member
{ project: project }
end
private
override :refresh_member_authorized_projects
def refresh_member_authorized_projects
def refresh_member_authorized_projects(blocking:)
return super unless Feature.enabled?(:specialized_service_for_project_member_auth_refresh)
return unless user
# rubocop:disable CodeReuse/ServiceClass
AuthorizedProjectUpdate::ProjectRecalculatePerUserService.new(project, user).execute
if blocking
AuthorizedProjectUpdate::ProjectRecalculatePerUserService.new(project, user).execute
else
AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker.perform_async(project.id, user.id)
end
# Until we compare the inconsistency rates of the new, specialized service and
# the old approach, we still run AuthorizedProjectsWorker
# but with some delay and lower urgency as a safety net.
UserProjectAccessChangedService.new(user_id)
.execute(blocking: false, priority: UserProjectAccessChangedService::LOW_PRIORITY)
.execute(blocking: false, priority: UserProjectAccessChangedService::LOW_PRIORITY)
# rubocop:enable CodeReuse/ServiceClass
end
private
def send_invite
run_after_commit_or_now { notification_service.invite_project_member(self, @raw_invite_token) }

View File

@ -318,6 +318,7 @@ class User < ApplicationRecord
delegate :webauthn_xid, :webauthn_xid=, to: :user_detail, allow_nil: true
delegate :pronouns, :pronouns=, to: :user_detail, allow_nil: true
delegate :pronunciation, :pronunciation=, to: :user_detail, allow_nil: true
delegate :registration_objective, :registration_objective=, to: :user_detail, allow_nil: true
accepts_nested_attributes_for :user_preference, update_only: true
accepts_nested_attributes_for :user_detail, update_only: true

View File

@ -3,8 +3,11 @@
class UserDetail < ApplicationRecord
extend ::Gitlab::Utils::Override
include IgnorableColumns
ignore_columns %i[bio_html cached_markdown_version], remove_with: '13.6', remove_after: '2021-10-22'
REGISTRATION_OBJECTIVE_PAIRS = { basics: 0, move_repository: 1, code_storage: 2, exploring: 3, ci: 4, other: 5, joining_team: 6 }.freeze
belongs_to :user
validates :pronouns, length: { maximum: 50 }
@ -14,6 +17,8 @@ class UserDetail < ApplicationRecord
before_save :prevent_nil_bio
enum registration_objective: REGISTRATION_OBJECTIVE_PAIRS, _suffix: true
private
def prevent_nil_bio

View File

@ -30,6 +30,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: authorized_project_update:authorized_project_update_project_recalculate_per_user
:worker_name: AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker
:feature_category: :authentication_and_authorization
:has_external_dependencies:
:urgency: :high
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: authorized_project_update:authorized_project_update_user_refresh_from_replica
:worker_name: AuthorizedProjectUpdate::UserRefreshFromReplicaWorker
:feature_category: :authentication_and_authorization

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module AuthorizedProjectUpdate
class ProjectRecalculatePerUserWorker < ProjectRecalculateWorker
data_consistency :always
feature_category :authentication_and_authorization
urgency :high
queue_namespace :authorized_project_update
deduplicate :until_executing, including_scheduled: true
idempotent!
def perform(project_id, user_id)
project = Project.find_by_id(project_id)
user = User.find_by_id(user_id)
return unless project && user
in_lock(lock_key(project), ttl: 10.seconds) do
AuthorizedProjectUpdate::ProjectRecalculatePerUserService.new(project, user).execute
end
end
end
end

View File

@ -26,7 +26,9 @@ module AuthorizedProjectUpdate
private
def lock_key(project)
"#{self.class.name.underscore}/projects/#{project.id}"
# The self.class.name.underscore value is hardcoded here as the prefix, so that the same
# lock_key for this superclass will be used by the ProjectRecalculatePerUserWorker subclass.
"authorized_project_update/project_recalculate_worker/projects/#{project.id}"
end
end
end

View File

@ -0,0 +1,8 @@
---
name: refactor_text_viewer
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70909
rollout_issue_url:
milestone: '14.4'
type: development
group: 'group::source code'
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: member_destroy_async_auth_refresh
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66424
rollout_issue_url:
milestone: '14.4'
type: ops
group: group::access
default_enabled: false

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
class AddRegistrationObjectiveToUserDetail < Gitlab::Database::Migration[1.0]
def change
add_column :user_details, :registration_objective, :smallint
end
end

View File

@ -0,0 +1 @@
9204c844b22ad0d3a938ed908377c8baacdda038725a5cf105e4b11841c1ae21

View File

@ -19764,6 +19764,7 @@ CREATE TABLE user_details (
provisioned_by_group_id bigint,
pronouns text,
pronunciation text,
registration_objective smallint,
CONSTRAINT check_245664af82 CHECK ((char_length(webauthn_xid) <= 100)),
CONSTRAINT check_b132136b01 CHECK ((char_length(other_role) <= 100)),
CONSTRAINT check_eeeaf8d4f0 CHECK ((char_length(pronouns) <= 50)),

View File

@ -379,6 +379,18 @@ Some [known database inconsistency issues](#known-issues) exist in Gitaly Cluste
remain on your current service for now. We can adjust the date for
[NFS support removal](#nfs-deprecation-notice) if this applies to you.
### Migrate off Gitaly Cluster
If you have repositories stored on a Gitaly Cluster, but you'd like to migrate
them back to direct Gitaly storage:
1. Create and configure a new
[Gitaly server](configure_gitaly.md#run-gitaly-on-its-own-server).
1. [Move the repositories](../operations/moving_repositories.md#move-repositories)
to the newly created storage. You can move them by shard or by group, which
gives you the opportunity to spread them over multiple Gitaly servers.
## Monitor Gitaly and Gitaly Cluster
You can use the available logs and [Prometheus metrics](../monitoring/prometheus/index.md) to

View File

@ -141,7 +141,7 @@ at GitLab so far:
- Their availability:
- No "OOO"/"PTO"/"Parental Leave" in their GitLab or Slack status.
- No `:red_circle:`/`:palm_tree:`/`:beach:`/`:beach_umbrella:`/`:beach_with_umbrella:` emojis in GitLab or Slack status.
- (Experimental) Their timezone: people for which the local hour is between
- (Experimental) Their time zone: people for whom the local hour is between
6 AM and 2 PM are eligible to be picked. This is to ensure they have a good
chance to get to perform a review during their current work day. The experimentation is tracked in
[this issue](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/563)

View File

@ -50,6 +50,6 @@ Some additional information is included at the bottom of the comment:
| Result | Description |
|----------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Migrations pending on GitLab.com | A summary of migrations not deployed yet to GitLab.com. This info is useful when testing a migration that was merged but not deployed yet. |
| Migrations pending on GitLab.com | A summary of migrations not deployed yet to GitLab.com. This information is useful when testing a migration that was merged but not deployed yet. |
| Clone details | A link to the `Postgres.ai` thin clone created for this testing pipeline, along with information about its expiry. This can be used to further explore the results of running the migration. Only accessible by database maintainers or with an access request. |
| Artifacts | A link to the pipeline's artifacts. Full query logs for each migration (ending in `.log`) are available there and only accessible by database maintainers or with an access request. |

View File

@ -8,17 +8,21 @@ info: To determine the technical writer assigned to the Stage/Group associated w
This document gives a few examples of the usage of database transactions in application code.
For further reference please check PostgreSQL documentation about [transactions](https://www.postgresql.org/docs/current/tutorial-transactions.html).
For further reference, check the PostgreSQL documentation about [transactions](https://www.postgresql.org/docs/current/tutorial-transactions.html).
## Database decomposition and sharding
The [sharding group](https://about.gitlab.com/handbook/engineering/development/enablement/sharding/) plans to split the main GitLab database and move some of the database tables to other database servers.
The [sharding group](https://about.gitlab.com/handbook/engineering/development/enablement/sharding/) plans
to split the main GitLab database and move some of the database tables to other database servers.
The group will start decomposing the `ci_*` related database tables first. To maintain the current application development experience, tooling and static analyzers will be added to the codebase to ensure correct data access and data modification methods. By using the correct form for defining database transactions, we can save significant refactoring work in the future.
We'll start decomposing the `ci_*`-related database tables first. To maintain the current application
development experience, we'll add tooling and static analyzers to the codebase to ensure correct
data access and data modification methods. By using the correct form for defining database transactions,
we can save significant refactoring work in the future.
## The transaction block
The `ActiveRecord` library provides a convenient way to group database statements into a transaction.
The `ActiveRecord` library provides a convenient way to group database statements into a transaction:
```ruby
issue = Issue.find(10)
@ -30,16 +34,19 @@ ApplicationRecord.transaction do
end
```
This transaction involves two database tables, in case of an error, each `UPDATE` statement will be rolled back to the previous, consistent state.
This transaction involves two database tables. In case of an error, each `UPDATE`
statement rolls back to the previous consistent state.
NOTE:
Avoid referencing the `ActiveRecord::Base` class and use `ApplicationRecord` instead.
## Transaction and database locks
When a transaction block is opened, the database will try to acquire the necessary locks on the resources. The type of locks will depend on the actual database statements.
When a transaction block is opened, the database tries to acquire the necessary
locks on the resources. The type of locks depends on the actual database statements.
Consider a concurrent update scenario where the following code is executed at the same time from two different processes:
Consider a concurrent update scenario where the following code is executed at the
same time from two different processes:
```ruby
issue = Issue.find(10)
@ -51,15 +58,22 @@ ApplicationRecord.transaction do
end
```
The database will try to acquire the `FOR UPDATE` lock for the referenced `issue` and `project` records. In our case, we have two competing transactions for these locks, one of them will successfully acquire them. The other transaction will have to wait in the lock queue until the first transaction finishes. The execution of the second transaction is blocked at this point.
The database tries to acquire the `FOR UPDATE` lock for the referenced `issue` and
`project` records. In our case, we have two competing transactions for these locks,
and only one of them will successfully acquire them. The other transaction will have
to wait in the lock queue until the first transaction finishes. The execution of the
second transaction is blocked at this point.
## Transaction speed
To prevent lock contention and maintain stable application performance, the transaction block should finish as fast as possible. When a transaction acquires locks, it will hold on to them until the transaction finishes.
To prevent lock contention and maintain stable application performance, the transaction
block should finish as fast as possible. When a transaction acquires locks, it holds
on to them until the transaction finishes.
Apart from application performance, long-running transactions can also affect the application upgrade processes by blocking database migrations.
Apart from application performance, long-running transactions can also affect application
upgrade processes by blocking database migrations.
### Dangerous example: 3rd party API calls
### Dangerous example: third-party API calls
Consider the following example:
@ -73,20 +87,29 @@ Member.transaction do
end
```
Here, we ensure that the `notification_email_sent` column is updated only when the `send_notification_email` method succeeds. The `send_notification_email` method executes a network request to an email sending service. If the underlying infrastructure does not specify timeouts or the network call takes too long time, the database transaction will stay open.
Here, we ensure that the `notification_email_sent` column is updated only when the
`send_notification_email` method succeeds. The `send_notification_email` method
executes a network request to an email sending service. If the underlying infrastructure
does not specify timeouts or the network call takes too long, the database transaction
stays open.
Ideally, a transaction should only contain database statements.
Avoid doing the following in a `transaction` block:
- External network requests such as: triggering Sidekiq jobs, sending emails, HTTP API calls and running database statements using a different connection.
- External network requests such as:
- Triggering Sidekiq jobs.
- Sending emails.
- HTTP API calls.
- Running database statements using a different connection.
- File system operations.
- Long, CPU-intensive computation.
- Calling `sleep(n)`.
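The dangerous example above can be restructured so that only database statements remain inside a transaction. A minimal sketch (not part of this change; `send_notification_email` and `notification_email_sent` are the names from the example, the record lookup is hypothetical):
```ruby
member = Member.find(5)

# Run the slow network call outside of any transaction so it cannot hold
# database locks open while the email service responds.
email_sent = member.send_notification_email

# Keep the database work to a single short statement, or a short
# Member.transaction block if several rows must change together.
member.update!(notification_email_sent: true) if email_sent
```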
## Explicit model referencing
If a transaction modifies records from the same database table, it's advised to use the `Model.transaction` block:
If a transaction modifies records from the same database table, we advise using the
`Model.transaction` block:
```ruby
build_1 = Ci::Build.find(1)
@ -98,7 +121,8 @@ Ci::Build.transaction do
end
```
The transaction above will use the same database connection for the transaction as the models in the `transaction` block. In a multi-database environment the following example would be dangerous:
The transaction above uses the same database connection for the transaction as the models
in the `transaction` block. In a multi-database environment, the following example is dangerous:
```ruby
# `ci_builds` table is located on another database
@ -114,4 +138,6 @@ ActiveRecord::Base.transaction do
end
```
The `ActiveRecord::Base` class uses a different database connection than the `Ci::Build` records. The two statements in the transaction block will not be part of the transaction and will not be rolled back in case something goes wrong. They act as 3rd part calls.
The `ActiveRecord::Base` class uses a different database connection than the `Ci::Build` records.
The two statements in the transaction block are not part of the transaction and are not
rolled back in case something goes wrong. They act as third-party calls.

View File

@ -879,7 +879,7 @@ See the [text data type](database/strings_and_the_text_data_type.md) style guide
## Timestamp column type
By default, Rails uses the `timestamp` data type that stores timestamp data
without timezone information. The `timestamp` data type is used by calling
without time zone information. The `timestamp` data type is used by calling
either the `add_timestamps` or the `timestamps` method.
Also, Rails converts the `:datetime` data type to the `timestamp` one.
@ -904,15 +904,15 @@ end
```
Instead of using these methods, one should use the following methods to store
timestamps with timezones:
timestamps with time zones:
- `add_timestamps_with_timezone`
- `timestamps_with_timezone`
- `datetime_with_timezone`
This ensures all timestamps have a time zone specified. This, in turn, means
existing timestamps don't suddenly use a different timezone when the system's
timezone changes. It also makes it very clear which timezone was used in the
existing timestamps don't suddenly use a different time zone when the system's
time zone changes. It also makes it very clear which time zone was used in the
first place.
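As an illustration only (not part of this change), a minimal migration sketch using the timezone-aware helper, with a hypothetical `widgets` table:
```ruby
# frozen_string_literal: true

# Hypothetical example: adds timezone-aware created_at/updated_at columns
# via the helper instead of the plain `timestamps` method.
class AddTimestampsToWidgets < Gitlab::Database::Migration[1.0]
  def change
    add_timestamps_with_timezone :widgets
  end
end
```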
## Storing JSON in database

View File

@ -25,7 +25,7 @@ module Backup
args += ['-parallel', @parallel.to_s] if @parallel
args += ['-parallel-storage', @parallel_storage.to_s] if @parallel_storage
@stdin, stdout, @thread = Open3.popen2(ENV, bin_path, command, '-path', backup_repos_path, *args)
@stdin, stdout, @thread = Open3.popen2(build_env, bin_path, command, '-path', backup_repos_path, *args)
@out_reader = Thread.new do
IO.copy_stream(stdout, @progress)
@ -63,6 +63,13 @@ module Backup
private
def build_env
{
'SSL_CERT_FILE' => OpenSSL::X509::DEFAULT_CERT_FILE,
'SSL_CERT_DIR' => OpenSSL::X509::DEFAULT_CERT_DIR
}.merge(ENV)
end
def started?
@thread.present?
end

View File

@ -7273,9 +7273,6 @@ msgstr ""
msgid "ClusterAgents|Last contact"
msgstr ""
msgid "ClusterAgents|Last used"
msgstr ""
msgid "ClusterAgents|Learn how to create an agent access token"
msgstr ""

View File

@ -239,7 +239,7 @@ RSpec.describe Projects::BranchesController do
end
end
context 'without issue feature access' do
context 'without issue feature access', :sidekiq_inline do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)

View File

@ -409,7 +409,7 @@ RSpec.describe Projects::CompareController do
end
end
context 'when the user does not have access to the project' do
context 'when the user does not have access to the project', :sidekiq_inline do
before do
project.team.truncate
project.update!(visibility: 'private')

View File

@ -159,8 +159,13 @@ describe('Blob content viewer component', () => {
const findBlobContent = () => wrapper.findComponent(BlobContent);
const findBlobButtonGroup = () => wrapper.findComponent(BlobButtonGroup);
beforeEach(() => {
gon.features = { refactorTextViewer: true };
});
afterEach(() => {
wrapper.destroy();
mockAxios.reset();
});
it('renders a GlLoadingIcon component', () => {
@ -183,7 +188,6 @@ describe('Blob content viewer component', () => {
it('renders a BlobContent component', () => {
expect(findBlobContent().props('loading')).toEqual(false);
expect(findBlobContent().props('content')).toEqual('raw content');
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'text',
@ -192,6 +196,16 @@ describe('Blob content viewer component', () => {
renderError: null,
});
});
describe('legacy viewers', () => {
it('loads a legacy viewer when a viewer component is not available', async () => {
createComponentWithApollo({ blobs: { ...simpleMockData, fileType: 'unknown' } });
await waitForPromises();
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=simple');
});
});
});
describe('rich viewer', () => {
@ -210,7 +224,6 @@ describe('Blob content viewer component', () => {
it('renders a BlobContent component', () => {
expect(findBlobContent().props('loading')).toEqual(false);
expect(findBlobContent().props('content')).toEqual('raw content');
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'markup',
@ -241,18 +254,12 @@ describe('Blob content viewer component', () => {
});
describe('legacy viewers', () => {
it('does not load a legacy viewer when a rich viewer is not available', async () => {
createComponentWithApollo({ blobs: simpleMockData });
await waitForPromises();
expect(mockAxios.history.get).toHaveLength(0);
});
it('loads a legacy viewer when a rich viewer is available', async () => {
createComponentWithApollo({ blobs: richMockData });
it('loads a legacy viewer when a viewer component is not available', async () => {
createComponentWithApollo({ blobs: { ...richMockData, fileType: 'unknown' } });
await waitForPromises();
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=rich');
});
});

View File

@ -1,25 +1,16 @@
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
import SourceEditor from '~/vue_shared/components/source_editor.vue';
describe('Blob Simple Viewer component', () => {
let wrapper;
const contentMock = `<span id="LC1">First</span>\n<span id="LC2">Second</span>\n<span id="LC3">Third</span>`;
const blobHash = 'foo-bar';
function createComponent(
content = contentMock,
isRawContent = false,
isRefactorFlagEnabled = false,
) {
function createComponent(content = contentMock, isRawContent = false) {
wrapper = shallowMount(SimpleViewer, {
provide: {
blobHash,
glFeatures: {
refactorBlobViewer: isRefactorFlagEnabled,
},
},
propsData: {
content,
@ -94,32 +85,4 @@ describe('Blob Simple Viewer component', () => {
});
});
});
describe('Vue refactoring to use Source Editor', () => {
const findSourceEditor = () => wrapper.find(SourceEditor);
it.each`
doesRender | condition | isRawContent | isRefactorFlagEnabled
${'Does not'} | ${'rawContent is not specified'} | ${false} | ${true}
${'Does not'} | ${'feature flag is disabled is not specified'} | ${true} | ${false}
${'Does not'} | ${'both, the FF and rawContent are not specified'} | ${false} | ${false}
${'Does'} | ${'both, the FF and rawContent are specified'} | ${true} | ${true}
`(
'$doesRender render Source Editor component in readonly mode when $condition',
async ({ isRawContent, isRefactorFlagEnabled } = {}) => {
createComponent('raw content', isRawContent, isRefactorFlagEnabled);
await waitForPromises();
if (isRawContent && isRefactorFlagEnabled) {
expect(findSourceEditor().exists()).toBe(true);
expect(findSourceEditor().props('value')).toBe('raw content');
expect(findSourceEditor().props('fileName')).toBe('test.js');
expect(findSourceEditor().props('editorOptions')).toEqual({ readOnly: true });
} else {
expect(findSourceEditor().exists()).toBe(false);
}
},
);
});
});

View File

@ -187,7 +187,7 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(analyzer['enabled']).to eq(true)
end
context "with guest user" do
context 'with guest user' do
before do
project.add_guest(user)
end
@ -195,7 +195,7 @@ RSpec.describe GitlabSchema.types['Project'] do
context 'when project is private' do
let(:project) { create(:project, :private, :repository) }
it "returns no configuration" do
it 'returns no configuration' do
secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
expect(secure_analyzers_prefix).to be_nil
end
@ -215,7 +215,7 @@ RSpec.describe GitlabSchema.types['Project'] do
end
end
context "with non-member user" do
context 'with non-member user', :sidekiq_inline do
before do
project.team.truncate
end
@ -223,7 +223,7 @@ RSpec.describe GitlabSchema.types['Project'] do
context 'when project is private' do
let(:project) { create(:project, :private, :repository) }
it "returns no configuration" do
it 'returns no configuration' do
secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
expect(secure_analyzers_prefix).to be_nil
end
@ -241,7 +241,7 @@ RSpec.describe GitlabSchema.types['Project'] do
end
context 'when repository is accessible only by team members' do
it "returns no configuration" do
it 'returns no configuration' do
project.project_feature.update!(
merge_requests_access_level: ProjectFeature::DISABLED,
builds_access_level: ProjectFeature::DISABLED,

View File

@ -5,12 +5,20 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
let(:parallel) { nil }
let(:parallel_storage) { nil }
let(:progress) do
Tempfile.new('progress').tap do |progress|
progress.unlink
end
end
let(:expected_env) do
{
'SSL_CERT_FILE' => OpenSSL::X509::DEFAULT_CERT_FILE,
'SSL_CERT_DIR' => OpenSSL::X509::DEFAULT_CERT_DIR
}.merge(ENV)
end
after do
progress.close
end
@ -32,7 +40,7 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.owner)
expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything).and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@ -53,7 +61,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
subject.start(:create)
subject.wait
@ -64,7 +72,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel_storage) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(ENV, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:create)
subject.wait
@ -90,6 +98,26 @@ RSpec.describe Backup::GitalyBackup do
it_behaves_like 'creates a repository backup'
end
context 'custom SSL envs set' do
let(:ssl_env) do
{
'SSL_CERT_FILE' => '/some/cert/file',
'SSL_CERT_DIR' => '/some/cert'
}
end
before do
stub_const('ENV', ssl_env)
end
it 'passes through SSL envs' do
expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create)
subject.wait
end
end
end
context 'restore' do
@ -109,7 +137,7 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything).and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
@ -132,7 +160,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
subject.start(:restore)
subject.wait
@ -143,7 +171,7 @@ RSpec.describe Backup::GitalyBackup do
let(:parallel_storage) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:restore)
subject.wait

View File

@ -98,7 +98,7 @@ RSpec.describe Gitlab::Middleware::Go do
end
end
context 'without access to the project' do
context 'without access to the project', :sidekiq_inline do
before do
project.team.find_member(current_user).destroy
end

View File

@ -95,7 +95,7 @@ RSpec.describe Gitlab::SlashCommands::IssueMove, service: true do
end
end
context 'when the user cannot see the target project' do
context 'when the user cannot see the target project', :sidekiq_inline do
it 'returns not found' do
message = "issue move #{issue.iid} #{other_project.full_path}"
other_project.team.truncate

View File

@ -7,11 +7,11 @@ RSpec.describe Member do
using RSpec::Parameterized::TableSyntax
describe "Associations" do
describe 'Associations' do
it { is_expected.to belong_to(:user) }
end
describe "Validation" do
describe 'Validation' do
subject { described_class.new(access_level: Member::GUEST) }
it { is_expected.to validate_presence_of(:user) }
@ -28,7 +28,7 @@ RSpec.describe Member do
subject { build(:project_member) }
end
context "when an invite email is provided" do
context 'when an invite email is provided' do
let_it_be(:project) { create(:project) }
let(:member) { build(:project_member, source: project, invite_email: "user@example.com", user: nil) }
@ -37,29 +37,29 @@ RSpec.describe Member do
expect(member).to be_valid
end
it "requires a valid invite email" do
it 'requires a valid invite email' do
member.invite_email = "nope"
expect(member).not_to be_valid
end
it "requires a unique invite email scoped to this source" do
it 'requires a unique invite email scoped to this source' do
create(:project_member, source: member.source, invite_email: member.invite_email)
expect(member).not_to be_valid
end
end
context "when an invite email is not provided" do
context 'when an invite email is not provided' do
let(:member) { build(:project_member) }
it "requires a user" do
it 'requires a user' do
member.user = nil
expect(member).not_to be_valid
end
it "is valid otherwise" do
it 'is valid otherwise' do
expect(member).to be_valid
end
end
@ -107,13 +107,13 @@ RSpec.describe Member do
end
end
context "when a child member inherits its access level" do
context 'when a child member inherits its access level' do
let(:user) { create(:user) }
let(:member) { create(:group_member, :developer, user: user) }
let(:child_group) { create(:group, parent: member.group) }
let(:child_member) { build(:group_member, group: child_group, user: user) }
it "requires a higher level" do
it 'requires a higher level' do
child_member.access_level = GroupMember::REPORTER
child_member.validate
@ -123,7 +123,7 @@ RSpec.describe Member do
# Membership in a subgroup confers certain access rights, such as being
# able to merge or push code to protected branches.
it "is valid with an equal level" do
it 'is valid with an equal level' do
child_member.access_level = GroupMember::DEVELOPER
child_member.validate
@ -131,7 +131,7 @@ RSpec.describe Member do
expect(child_member).to be_valid
end
it "is valid with a higher level" do
it 'is valid with a higher level' do
child_member.access_level = GroupMember::MAINTAINER
child_member.validate
@ -538,7 +538,7 @@ RSpec.describe Member do
end
end
describe "Delegate methods" do
describe 'Delegate methods' do
it { is_expected.to respond_to(:user_name) }
it { is_expected.to respond_to(:user_email) }
end
@ -608,29 +608,29 @@ RSpec.describe Member do
end
end
describe "#accept_invite!" do
describe '#accept_invite!' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
let(:user) { create(:user) }
it "resets the invite token" do
it 'resets the invite token' do
member.accept_invite!(user)
expect(member.invite_token).to be_nil
end
it "sets the invite accepted timestamp" do
it 'sets the invite accepted timestamp' do
member.accept_invite!(user)
expect(member.invite_accepted_at).not_to be_nil
end
it "sets the user" do
it 'sets the user' do
member.accept_invite!(user)
expect(member.user).to eq(user)
end
it "calls #after_accept_invite" do
it 'calls #after_accept_invite' do
expect(member).to receive(:after_accept_invite)
member.accept_invite!(user)
@ -657,26 +657,26 @@ RSpec.describe Member do
end
end
describe "#decline_invite!" do
describe '#decline_invite!' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
it "destroys the member" do
it 'destroys the member' do
member.decline_invite!
expect(member).to be_destroyed
end
it "calls #after_decline_invite" do
it 'calls #after_decline_invite' do
expect(member).to receive(:after_decline_invite)
member.decline_invite!
end
end
describe "#generate_invite_token" do
describe '#generate_invite_token' do
let!(:member) { create(:project_member, invite_email: "user@example.com", user: nil) }
it "sets the invite token" do
it 'sets the invite token' do
expect { member.generate_invite_token }.to change { member.invite_token }
end
end
@ -684,12 +684,12 @@ RSpec.describe Member do
describe 'generate invite token on create' do
let!(:member) { build(:project_member, invite_email: "user@example.com") }
it "sets the invite token" do
it 'sets the invite token' do
expect { member.save! }.to change { member.invite_token }.to(kind_of(String))
end
context 'when invite was already accepted' do
it "does not set invite token" do
it 'does not set invite token' do
member.invite_accepted_at = 1.day.ago
expect { member.save! }.not_to change { member.invite_token }.from(nil)
@ -744,7 +744,7 @@ RSpec.describe Member do
end
end
describe "#invite_to_unknown_user?" do
describe '#invite_to_unknown_user?' do
subject { member.invite_to_unknown_user? }
let(:member) { create(:project_member, invite_email: "user@example.com", invite_token: '1234', user: user) }
@ -762,7 +762,7 @@ RSpec.describe Member do
end
end
describe "destroying a record", :delete do
describe 'destroying a record', :delete, :sidekiq_inline do
it "refreshes user's authorized projects" do
project = create(:project, :private)
user = create(:user)

View File

@ -244,12 +244,32 @@ RSpec.describe ProjectMember do
project.add_user(user, Gitlab::Access::GUEST)
end
it 'changes access level' do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
context 'when :member_destroy_async_auth_refresh feature flag is enabled' do
it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end
it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker to recalculate authorizations' do
expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:perform_async).with(project.id, user.id)
action
end
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
end
it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
context 'when :member_destroy_async_auth_refresh feature flag is disabled' do
before do
stub_feature_flags(member_destroy_async_auth_refresh: false)
end
it 'changes access level' do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end
it_behaves_like 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService to recalculate authorizations'
it_behaves_like 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations'
end
end
context 'when the feature flag `specialized_service_for_project_member_auth_refresh` is disabled' do
@ -298,7 +318,7 @@ RSpec.describe ProjectMember do
project.add_user(user, Gitlab::Access::GUEST)
end
it 'changes access level' do
it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project) }.from(true).to(false)
end

View File

@ -11,6 +11,8 @@ RSpec.describe NamespaceSetting, type: :model do
it { is_expected.to belong_to(:namespace) }
end
it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
describe "validations" do
describe "#default_branch_name_content" do
let_it_be(:group) { create(:group) }

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe UserDetail do
it { is_expected.to belong_to(:user) }
it { is_expected.to define_enum_for(:registration_objective).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other, :joining_team]).with_suffix }
describe 'validations' do
describe '#job_title' do

View File

@ -79,6 +79,9 @@ RSpec.describe User do
it { is_expected.to delegate_method(:bio).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:bio=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:registration_objective).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:registration_objective=).to(:user_detail).with_arguments(:args).allow_nil }
end
describe 'associations' do
@ -123,7 +126,7 @@ RSpec.describe User do
it { is_expected.to have_many(:callouts).class_name('UserCallout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
describe "#user_detail" do
describe '#user_detail' do
it 'does not persist `user_detail` by default' do
expect(create(:user).user_detail).not_to be_persisted
end
@ -160,25 +163,25 @@ RSpec.describe User do
end
end
describe "#abuse_report" do
describe '#abuse_report' do
let(:current_user) { create(:user) }
let(:other_user) { create(:user) }
it { is_expected.to have_one(:abuse_report) }
it "refers to the abuse report whose user_id is the current user" do
it 'refers to the abuse report whose user_id is the current user' do
abuse_report = create(:abuse_report, reporter: other_user, user: current_user)
expect(current_user.abuse_report).to eq(abuse_report)
end
it "does not refer to the abuse report whose reporter_id is the current user" do
it 'does not refer to the abuse report whose reporter_id is the current user' do
create(:abuse_report, reporter: current_user, user: other_user)
expect(current_user.abuse_report).to be_nil
end
it "does not update the user_id of an abuse report when the user is updated" do
it 'does not update the user_id of an abuse report when the user is updated' do
abuse_report = create(:abuse_report, reporter: current_user, user: other_user)
current_user.block
@ -716,7 +719,7 @@ RSpec.describe User do
end
end
describe "scopes" do
describe 'scopes' do
context 'blocked users' do
let_it_be(:active_user) { create(:user) }
let_it_be(:blocked_user) { create(:user, :blocked) }
@ -754,8 +757,8 @@ RSpec.describe User do
end
end
describe ".with_two_factor" do
it "returns users with 2fa enabled via OTP" do
describe '.with_two_factor' do
it 'returns users with 2fa enabled via OTP' do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@ -764,8 +767,8 @@ RSpec.describe User do
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
shared_examples "returns the right users" do |trait|
it "returns users with 2fa enabled via hardware token" do
shared_examples 'returns the right users' do |trait|
it 'returns users with 2fa enabled via hardware token' do
user_with_2fa = create(:user, trait)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@ -774,7 +777,7 @@ RSpec.describe User do
expect(users_with_two_factor).not_to include(user_without_2fa.id)
end
it "returns users with 2fa enabled via OTP and hardware token" do
it 'returns users with 2fa enabled via OTP and hardware token' do
user_with_2fa = create(:user, :two_factor_via_otp, trait)
user_without_2fa = create(:user)
users_with_two_factor = described_class.with_two_factor.pluck(:id)
@ -792,17 +795,17 @@ RSpec.describe User do
end
end
describe "and U2F" do
describe 'and U2F' do
it_behaves_like "returns the right users", :two_factor_via_u2f
end
describe "and WebAuthn" do
describe 'and WebAuthn' do
it_behaves_like "returns the right users", :two_factor_via_webauthn
end
end
describe ".without_two_factor" do
it "excludes users with 2fa enabled via OTP" do
describe '.without_two_factor' do
it 'excludes users with 2fa enabled via OTP' do
user_with_2fa = create(:user, :two_factor_via_otp)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@ -811,8 +814,8 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
describe "and u2f" do
it "excludes users with 2fa enabled via U2F" do
describe 'and u2f' do
it 'excludes users with 2fa enabled via U2F' do
user_with_2fa = create(:user, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@ -821,7 +824,7 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
it "excludes users with 2fa enabled via OTP and U2F" do
it 'excludes users with 2fa enabled via OTP and U2F' do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@ -831,8 +834,8 @@ RSpec.describe User do
end
end
describe "and webauthn" do
it "excludes users with 2fa enabled via WebAuthn" do
describe 'and webauthn' do
it 'excludes users with 2fa enabled via WebAuthn' do
user_with_2fa = create(:user, :two_factor_via_webauthn)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@ -841,7 +844,7 @@ RSpec.describe User do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
it "excludes users with 2fa enabled via OTP and WebAuthn" do
it 'excludes users with 2fa enabled via OTP and WebAuthn' do
user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_webauthn)
user_without_2fa = create(:user)
users_without_two_factor = described_class.without_two_factor.pluck(:id)
@ -1074,7 +1077,7 @@ RSpec.describe User do
end
end
describe "Respond to" do
describe 'Respond to' do
it { is_expected.to respond_to(:admin?) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:external?) }
@ -1096,7 +1099,7 @@ RSpec.describe User do
let(:user) { create(:user) }
let(:external_user) { create(:user, external: true) }
it "sets other properties as well" do
it 'sets other properties as well' do
expect(external_user.can_create_team).to be_falsey
expect(external_user.can_create_group).to be_falsey
expect(external_user.projects_limit).to be 0
@ -1515,7 +1518,7 @@ RSpec.describe User do
end
describe '#generate_password' do
it "does not generate password by default" do
it 'does not generate password by default' do
user = create(:user, password: 'abcdefghe')
expect(user.password).to eq('abcdefghe')
@ -1883,14 +1886,14 @@ RSpec.describe User do
describe 'deactivating a user' do
let(:user) { create(:user, name: 'John Smith') }
context "an active user" do
it "can be deactivated" do
context 'an active user' do
it 'can be deactivated' do
user.deactivate
expect(user.deactivated?).to be_truthy
end
context "when user deactivation emails are disabled" do
context 'when user deactivation emails are disabled' do
before do
stub_application_setting(user_deactivation_emails_enabled: false)
end
@ -1901,7 +1904,7 @@ RSpec.describe User do
end
end
context "when user deactivation emails are enabled" do
context 'when user deactivation emails are enabled' do
it 'sends deactivated user an email' do
expect_next_instance_of(NotificationService) do |notification|
allow(notification).to receive(:user_deactivated).with(user.name, user.notification_email_or_default)
@ -1912,12 +1915,12 @@ RSpec.describe User do
end
end
context "a user who is blocked" do
context 'a user who is blocked' do
before do
user.block
end
it "cannot be deactivated" do
it 'cannot be deactivated' do
user.deactivate
expect(user.reload.deactivated?).to be_falsy
@ -2084,7 +2087,7 @@ RSpec.describe User do
describe 'with defaults' do
let(:user) { described_class.new }
it "applies defaults to user" do
it 'applies defaults to user' do
expect(user.projects_limit).to eq(Gitlab.config.gitlab.default_projects_limit)
expect(user.can_create_group).to eq(Gitlab.config.gitlab.default_can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
@ -2096,7 +2099,7 @@ RSpec.describe User do
describe 'with default overrides' do
let(:user) { described_class.new(projects_limit: 123, can_create_group: false, can_create_team: true) }
it "applies defaults to user" do
it 'applies defaults to user' do
expect(user.projects_limit).to eq(123)
expect(user.can_create_group).to be_falsey
expect(user.theme_id).to eq(1)
@ -2115,7 +2118,7 @@ RSpec.describe User do
stub_application_setting(user_default_external: true)
end
it "creates external user by default" do
it 'creates external user by default' do
user = create(:user)
expect(user.external).to be_truthy
@ -2124,7 +2127,7 @@ RSpec.describe User do
end
describe 'with default overrides' do
it "creates a non-external user" do
it 'creates a non-external user' do
user = create(:user, external: false)
expect(user.external).to be_falsey
@ -2140,7 +2143,7 @@ RSpec.describe User do
}
protocol_and_expectation.each do |protocol, expected|
it "has correct require_ssh_key?" do
it 'has correct require_ssh_key?' do
stub_application_setting(enabled_git_access_protocol: protocol)
user = build(:user)
@ -2624,7 +2627,7 @@ RSpec.describe User do
describe 'all_ssh_keys' do
it { is_expected.to have_many(:keys).dependent(:destroy) }
it "has all ssh keys" do
it 'has all ssh keys' do
user = create :user
key = create :key, key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD33bWLBxu48Sev9Fert1yzEO4WGcWglWF7K/AwblIUFselOt/QdOL9DSjpQGxLagO1s9wl53STIO8qGS4Ms0EJZyIXOEFMjFJ5xmjSy+S37By4sG7SsltQEHMxtbtFOaW5LV2wCrX+rUsRNqLMamZjgjcPO0/EgGCXIGMAYW4O7cwGZdXWYIhQ1Vwy+CsVMDdPkPgBXqK7nR/ey8KMs8ho5fMNgB5hBw/AL9fNGhRw3QTD6Q12Nkhl4VZES2EsZqlpNnJttnPdp847DUsT6yuLRlfiQfz5Cn9ysHFdXObMN5VYIiPFwHeYCZp1X2S4fDZooRE8uOLTfxWHPXwrhqSH", user_id: user.id
@ -2660,10 +2663,10 @@ RSpec.describe User do
end
end
describe "#clear_avatar_caches" do
describe '#clear_avatar_caches' do
let(:user) { create(:user) }
it "clears the avatar cache when saving" do
it 'clears the avatar cache when saving' do
allow(user).to receive(:avatar_changed?).and_return(true)
expect(Gitlab::AvatarCache).to receive(:delete_by_email).with(*user.verified_emails)
@ -3189,7 +3192,7 @@ RSpec.describe User do
end
end
describe "#last_active_at" do
describe '#last_active_at' do
let(:last_activity_on) { 5.days.ago.to_date }
let(:current_sign_in_at) { 8.days.ago }
@ -3227,7 +3230,7 @@ RSpec.describe User do
end
end
describe "#can_be_deactivated?" do
describe '#can_be_deactivated?' do
let(:activity) { {} }
let(:user) { create(:user, name: 'John Smith', **activity) }
let(:day_within_minium_inactive_days_threshold) { User::MINIMUM_INACTIVE_DAYS.pred.days.ago }
@ -3245,7 +3248,7 @@ RSpec.describe User do
end
end
context "a user who is not active" do
context 'a user who is not active' do
before do
user.block
end
@ -3286,7 +3289,7 @@ RSpec.describe User do
end
end
describe "#contributed_projects" do
describe '#contributed_projects' do
subject { create(:user) }
let!(:project1) { create(:project) }
@ -3301,11 +3304,11 @@ RSpec.describe User do
project2.add_maintainer(subject)
end
it "includes IDs for projects the user has pushed to" do
it 'includes IDs for projects the user has pushed to' do
expect(subject.contributed_projects).to include(project1)
end
it "includes IDs for projects the user has had merge requests merged into" do
it 'includes IDs for projects the user has had merge requests merged into' do
expect(subject.contributed_projects).to include(project3)
end
@ -3399,7 +3402,7 @@ RSpec.describe User do
end
end
describe "#recent_push" do
describe '#recent_push' do
let(:user) { build(:user) }
let(:project) { build(:project) }
let(:event) { build(:push_event) }
@ -3563,7 +3566,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
it "includes personal projects user has been given access to" do
it 'includes personal projects user has been given access to' do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
@ -3573,7 +3576,7 @@ RSpec.describe User do
expect(user2.authorized_projects).to include(project)
end
it "includes projects of groups user has been added to" do
it 'includes projects of groups user has been added to' do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
@ -3583,7 +3586,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
it "does not include projects of groups user has been removed from" do
it 'does not include projects of groups user has been removed from', :sidekiq_inline do
group = create(:group)
project = create(:project, group: group)
user = create(:user)
@ -3608,7 +3611,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
end
it "does not include destroyed projects user had access to" do
it 'does not include destroyed projects user had access to' do
user1 = create(:user)
user2 = create(:user)
project = create(:project, :private, namespace: user1.namespace)
@ -3622,7 +3625,7 @@ RSpec.describe User do
expect(user2.authorized_projects).not_to include(project)
end
it "does not include projects of destroyed groups user had access to" do
it 'does not include projects of destroyed groups user had access to' do
group = create(:group)
project = create(:project, namespace: group)
user = create(:user)
@ -4175,7 +4178,7 @@ RSpec.describe User do
expect(user.admin).to be true
end
it "accepts string values in addition to symbols" do
it 'accepts string values in addition to symbols' do
user.access_level = 'admin'
expect(user.access_level).to eq(:admin)
@ -4256,7 +4259,7 @@ RSpec.describe User do
expect(ghost.user_type).to eq 'ghost'
end
it "does not create a second ghost user if one is already present" do
it 'does not create a second ghost user if one is already present' do
expect do
described_class.ghost
described_class.ghost
@ -4265,7 +4268,7 @@ RSpec.describe User do
end
context "when a regular user exists with the username 'ghost'" do
it "creates a ghost user with a non-conflicting username" do
it 'creates a ghost user with a non-conflicting username' do
create(:user, username: 'ghost')
ghost = described_class.ghost
@ -4275,7 +4278,7 @@ RSpec.describe User do
end
context "when a regular user exists with the email 'ghost@example.com'" do
it "creates a ghost user with a non-conflicting email" do
it 'creates a ghost user with a non-conflicting email' do
create(:user, email: 'ghost@example.com')
ghost = described_class.ghost
@ -4755,7 +4758,7 @@ RSpec.describe User do
it { is_expected.to be true }
end
context 'when email and username aren\'t changed' do
context "when email and username aren't changed" do
before do
user.name = 'new_name'
end
@ -5066,26 +5069,26 @@ RSpec.describe User do
subject { user.required_terms_not_accepted? }
context "when terms are not enforced" do
context 'when terms are not enforced' do
it { is_expected.to be_falsey }
end
context "when terms are enforced" do
context 'when terms are enforced' do
before do
enforce_terms
end
it "is not accepted by the user" do
it 'is not accepted by the user' do
expect(subject).to be_truthy
end
it "is accepted by the user" do
it 'is accepted by the user' do
accept_terms(user)
expect(subject).to be_falsey
end
it "auto accepts the term for project bots" do
it 'auto accepts the term for project bots' do
expect(project_bot.required_terms_not_accepted?).to be_falsey
end
end

View File

@ -38,7 +38,7 @@ RSpec.describe API::PackageFiles do
expect(response).to have_gitlab_http_status(:not_found)
end
it 'returns 404 for a user without access to the project' do
it 'returns 404 for a user without access to the project', :sidekiq_inline do
project.team.truncate
get api(url, user)

View File

@ -275,7 +275,7 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when is merged' do
context 'when is merged', :sidekiq_inline do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }

View File

@ -17,7 +17,7 @@ RSpec.describe MergeRequests::AssignIssuesService do
expect(service.assignable_issues.map(&:id)).to include(issue.id)
end
it 'ignores issues the user cannot update assignee on' do
it 'ignores issues the user cannot update assignee on', :sidekiq_inline do
project.team.truncate
expect(service.assignable_issues).to be_empty

View File

@ -440,7 +440,7 @@ RSpec.describe MergeRequests::BuildService do
expect(merge_request.title).to eq('Closes #1234 Second commit')
end
it 'adds the remaining lines of the first multi-line commit message as the description' do
it 'adds the remaining lines of the first multi-line commit message as the description', :sidekiq_inline do
expect(merge_request.description).to eq('Create the app')
end
end

View File

@ -701,7 +701,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
it 'records an error' do
it 'records an error', :sidekiq_inline do
Members::DestroyService.new(user1).execute(ProjectMember.find_by!(user_id: user1.id))
service.execute

View File

@ -47,7 +47,7 @@ RSpec.describe Notes::QuickActionsService do
let(:note_text) { "/relate #{other_issue.to_reference}" }
let(:note) { create(:note_on_issue, noteable: issue, project: project, note: note_text) }
context 'user cannot relate issues' do
context 'user cannot relate issues', :sidekiq_inline do
before do
project.team.find_member(maintainer.id).destroy!
project.update!(visibility: Gitlab::VisibilityLevel::PUBLIC)

View File

@ -3155,7 +3155,7 @@ RSpec.describe NotificationService, :mailer do
notification.pipeline_finished(pipeline)
end
it 'does not send emails' do
it 'does not send emails', :sidekiq_inline do
should_not_email_anyone
end
end

View File

@ -26,7 +26,7 @@ RSpec.describe Projects::MoveAccessService do
describe '#execute' do
shared_examples 'move the accesses' do
it do
it 'moves the accesses', :sidekiq_inline do
expect(project_with_access.project_members.count).to eq 4
expect(project_with_access.project_group_links.count).to eq 3
expect(project_with_access.authorized_users.count).to eq 4

View File

@ -0,0 +1,70 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker do
include ExclusiveLeaseHelpers
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
subject(:worker) { described_class.new }
include_examples 'an idempotent worker' do
let(:job_args) { [project.id, user.id] }
it 'does not change authorizations when run twice' do
project.add_developer(user)
user.project_authorizations.delete_all
expect { worker.perform(project.id, user.id) }.to change { project.project_authorizations.reload.size }.by(1)
expect { worker.perform(project.id, user.id) }.not_to change { project.project_authorizations.reload.size }
end
end
describe '#perform' do
it 'does not fail if the project does not exist' do
expect do
worker.perform(non_existing_record_id, user.id)
end.not_to raise_error
end
it 'does not fail if the user does not exist' do
expect do
worker.perform(project.id, non_existing_record_id)
end.not_to raise_error
end
it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserService' do
expect_next_instance_of(AuthorizedProjectUpdate::ProjectRecalculatePerUserService, project, user) do |service|
expect(service).to receive(:execute)
end
worker.perform(project.id, user.id)
end
context 'exclusive lease' do
let(:lock_key) { "#{described_class.superclass.name.underscore}/projects/#{project.id}" }
let(:timeout) { 10.seconds }
context 'when exclusive lease has not been taken' do
it 'obtains a new exclusive lease' do
expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
worker.perform(project.id, user.id)
end
end
context 'when exclusive lease has already been taken' do
before do
stub_exclusive_lease_taken(lock_key, timeout: timeout)
end
it 'raises an error' do
expect { worker.perform(project.id, user.id) }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
end
end
end
end
end