Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-09-04 18:08:48 +00:00
parent adf76f8f1d
commit b355535770
59 changed files with 1080 additions and 345 deletions

View file

@ -246,6 +246,13 @@ Gitlab/Json:
- 'lib/quality/**/*'
- 'lib/gitlab/danger/**/*'
Gitlab/AvoidUploadedFileFromParams:
Enabled: true
Exclude:
- 'lib/gitlab/middleware/multipart.rb'
- 'spec/**/*'
- 'ee/spec/**/*'
GitlabSecurity/PublicSend:
Enabled: true
Exclude:

View file

@ -8,6 +8,8 @@ import { extractDiscussions, extractParticipants } from '../utils/design_managem
import { ACTIVE_DISCUSSION_SOURCE_TYPES } from '../constants';
import DesignDiscussion from './design_notes/design_discussion.vue';
import Participants from '~/sidebar/components/participants/participants.vue';
import TodoButton from '~/vue_shared/components/todo_button.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
components: {
@ -16,7 +18,9 @@ export default {
GlCollapse,
GlButton,
GlPopover,
TodoButton,
},
mixins: [glFeatureFlagsMixin()],
props: {
design: {
type: Object,
@ -59,6 +63,14 @@ export default {
resolvedCommentsToggleIcon() {
return this.resolvedDiscussionsExpanded ? 'chevron-down' : 'chevron-right';
},
showTodoButton() {
return this.glFeatures.designManagementTodoButton;
},
sidebarWrapperClass() {
return {
'gl-pt-0': this.showTodoButton,
};
},
},
watch: {
isResolvedCommentsPopoverHidden(newVal) {
@ -101,7 +113,14 @@ export default {
</script>
<template>
<div class="image-notes" @click="handleSidebarClick">
<div class="image-notes" :class="sidebarWrapperClass" @click="handleSidebarClick">
<div
v-if="showTodoButton"
class="gl-py-4 gl-mb-4 gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
>
<span>{{ __('To-Do') }}</span>
<todo-button issuable-type="design" :issuable-id="design.iid" />
</div>
<h2 class="gl-font-weight-bold gl-mt-0">
{{ issue.title }}
</h2>

View file

@ -1,8 +1,7 @@
<script>
/* eslint-disable vue/no-v-html */
import { mapActions, mapGetters, mapState } from 'vuex';
import $ from 'jquery';
import { GlTooltipDirective } from '@gitlab/ui';
import { GlTooltipDirective, GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
import DiffTableCell from './diff_table_cell.vue';
import {
MATCH_LINE_TYPE,
@ -22,6 +21,7 @@ export default {
},
directives: {
GlTooltip: GlTooltipDirective,
SafeHtml,
},
props: {
fileHash: {
@ -161,10 +161,10 @@ export default {
<td :class="parallelViewLeftLineType" class="line-coverage left-side"></td>
<td
:id="line.left.line_code"
v-safe-html="line.left.rich_text"
:class="parallelViewLeftLineType"
class="line_content with-coverage parallel left-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.left.rich_text"
></td>
</template>
<template v-else>
@ -193,6 +193,7 @@ export default {
></td>
<td
:id="line.right.line_code"
v-safe-html="line.right.rich_text"
:class="[
line.right.type,
{
@ -201,7 +202,6 @@ export default {
]"
class="line_content with-coverage parallel right-side"
@mousedown="handleParallelLineMouseDown"
v-html="line.right.rich_text"
></td>
</template>
<template v-else>

View file

@ -61,7 +61,7 @@ export default {
</script>
<template>
<list-item data-qa-selector="packages-row">
<list-item data-qa-selector="package_row">
<template #left-primary>
<div class="gl-display-flex gl-align-items-center gl-mr-3">
<gl-link :href="packageLink" class="gl-text-body" data-qa-selector="package_link">

View file

@ -22,10 +22,10 @@ export default {
strings: {
alertTitle: __('You are about to permanently delete this project'),
alertBody: __(
'Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its respositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc.',
'Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc.',
),
modalBody: __(
"This action cannot be undone. You will lose the project's respository and all conent: issues, merge requests, etc.",
"This action cannot be undone. You will lose the project's repository and all content: issues, merge requests, etc.",
),
},
};

View file

@ -1,8 +1,9 @@
<script>
/* eslint-disable vue/no-v-html */
import { initial, first, last } from 'lodash';
import { GlSafeHtmlDirective as SafeHtml } from '@gitlab/ui';
export default {
directives: { SafeHtml },
props: {
crumbs: {
type: Array,
@ -42,14 +43,14 @@ export default {
<li
v-for="(crumb, index) in rootCrumbs"
:key="index"
v-safe-html="crumb.innerHTML"
:class="crumb.className"
v-html="crumb.innerHTML"
></li>
<li v-if="!isRootRoute">
<router-link ref="rootRouteLink" :to="rootRoute.path">
{{ rootRoute.meta.nameGenerator(rootRoute) }}
</router-link>
<component :is="divider.tagName" :class="divider.classList" v-html="divider.innerHTML" />
<component :is="divider.tagName" v-safe-html="divider.innerHTML" :class="divider.classList" />
</li>
<li>
<component :is="lastCrumb.tagName" ref="lastCrumb" :class="lastCrumb.className">

View file

@ -49,6 +49,7 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:vue_issuable_sidebar, project.group)
push_frontend_feature_flag(:tribute_autocomplete, @project)
push_frontend_feature_flag(:vue_issuables_list, project)
push_frontend_feature_flag(:design_management_todo_button, project)
end
before_action only: :show do

View file

@ -22,10 +22,11 @@ module Pages
end
def source
{
type: 'file',
path: File.join(project.full_path, 'public/')
}
if artifacts_archive && !artifacts_archive.file_storage?
zip_source
else
file_source
end
end
def prefix
@ -39,5 +40,28 @@ module Pages
private
attr_reader :project, :trim_prefix, :domain
def artifacts_archive
return unless Feature.enabled?(:pages_artifacts_archive, project)
# Using build artifacts is a temporary solution for quick testing
# in the production environment; we'll replace this with proper
# `pages_deployments` later
project.pages_metadatum.artifacts_archive&.file
end
def zip_source
{
type: 'zip',
path: artifacts_archive.url(expire_at: 1.day.from_now)
}
end
def file_source
{
type: 'file',
path: File.join(project.full_path, 'public/')
}
end
end
end
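Because `artifacts_archive` is gated by the `pages_artifacts_archive` feature flag, the new zip source only takes effect once the flag is enabled for a project. A minimal Rails-console sketch of toggling it per project (the `Feature.enable`/`Feature.disable` calls follow GitLab's usual feature-flag API; the project lookup is illustrative):

```ruby
# Illustrative console usage; the project path is a placeholder.
project = Project.find_by_full_path('group/project')

Feature.enable(:pages_artifacts_archive, project)   # serve the zip source when an archive exists
Feature.disable(:pages_artifacts_archive, project)  # fall back to the public/ file source
```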

View file

@ -27,7 +27,7 @@ module Users
def inject_validators
class << @user
validates :role, presence: true
validates :setup_for_company, inclusion: { in: [true, false], message: :blank }
validates :setup_for_company, inclusion: { in: [true, false], message: :blank } if Gitlab.com?
end
end
end

View file

@ -27,10 +27,10 @@
.nav-controls
- if @todos.any?(&:pending?)
.gl-mr-3
= link_to destroy_all_dashboard_todos_path(todos_filter_params), class: 'btn btn-loading d-flex align-items-center js-todos-mark-all', method: :delete, data: { href: destroy_all_dashboard_todos_path(todos_filter_params) } do
= link_to destroy_all_dashboard_todos_path(todos_filter_params), class: 'btn btn-loading align-items-center js-todos-mark-all', method: :delete, data: { href: destroy_all_dashboard_todos_path(todos_filter_params) } do
Mark all as done
%span.spinner.ml-1
= link_to bulk_restore_dashboard_todos_path, class: 'btn btn-loading d-flex align-items-center js-todos-undo-all hidden', method: :patch , data: { href: bulk_restore_dashboard_todos_path(todos_filter_params) } do
= link_to bulk_restore_dashboard_todos_path, class: 'btn btn-loading align-items-center js-todos-undo-all hidden', method: :patch , data: { href: bulk_restore_dashboard_todos_path(todos_filter_params) } do
Undo mark all as done
%span.spinner.ml-1

View file

@ -0,0 +1,4 @@
#error_explanation
.alert.alert-danger
- @feature_flag.errors.full_messages.each do |message|
%p= message

View file

@ -0,0 +1,14 @@
- @gfm_form = Feature.enabled?(:feature_flags_issue_links, @project, default_enabled: true)
- add_to_breadcrumbs s_('FeatureFlags|Feature Flags'), project_feature_flags_path(@project)
- breadcrumb_title @feature_flag.name
- page_title s_('FeatureFlags|Edit Feature Flag')
#js-edit-feature-flag{ data: { endpoint: project_feature_flag_path(@project, @feature_flag),
project_id: @project.id,
feature_flags_path: project_feature_flags_path(@project),
environments_endpoint: search_project_environments_path(@project, format: :json),
user_callouts_path: user_callouts_path,
user_callout_id: UserCalloutsHelper::FEATURE_FLAGS_NEW_VERISION,
show_user_callout: show_feature_flags_new_version?.to_s,
feature_flag_issues_endpoint: feature_flag_issues_links_endpoint(@project, @feature_flag, current_user) } }

View file

@ -0,0 +1,15 @@
- page_title s_('FeatureFlags|Feature Flags')
#feature-flags-vue{ data: { endpoint: project_feature_flags_path(@project, format: :json),
"project-id" => @project.id,
"project-name" => @project.name,
"error-state-svg-path" => image_path('illustrations/feature_flag.svg'),
"feature-flags-help-page-path" => help_page_path("operations/feature_flags"),
"feature-flags-client-libraries-help-page-path" => help_page_path("operations/feature_flags", anchor: "choose-a-client-library"),
"feature-flags-client-example-help-page-path" => help_page_path("operations/feature_flags", anchor: "golang-application-example"),
"unleash-api-url" => (unleash_api_url(@project) if can?(current_user, :admin_feature_flag, @project)),
"unleash-api-instance-id" => (unleash_api_instance_id(@project) if can?(current_user, :admin_feature_flag, @project)),
"can-user-admin-feature-flag" => can?(current_user, :admin_feature_flag, @project),
"new-feature-flag-path" => can?(current_user, :create_feature_flag, @project) ? new_project_feature_flag_path(@project): nil,
"rotate-instance-id-path" => can?(current_user, :admin_feature_flags_client, @project) ? reset_token_project_feature_flags_client_path(@project, format: :json) : nil,
"new-user-list-path" => can?(current_user, :admin_feature_flags_user_lists, @project) ? new_project_feature_flags_user_list_path(@project) : nil } }

View file

@ -0,0 +1,12 @@
- @breadcrumb_link = new_project_feature_flag_path(@project)
- add_to_breadcrumbs s_('FeatureFlags|Feature Flags'), project_feature_flags_path(@project)
- breadcrumb_title s_('FeatureFlags|New')
- page_title s_('FeatureFlags|New Feature Flag')
#js-new-feature-flag{ data: { endpoint: project_feature_flags_path(@project, format: :json),
feature_flags_path: project_feature_flags_path(@project),
environments_endpoint: search_project_environments_path(@project, format: :json),
user_callouts_path: user_callouts_path,
user_callout_id: UserCalloutsHelper::FEATURE_FLAGS_NEW_VERISION,
show_user_callout: show_feature_flags_new_version?.to_s,
project_id: @project.id } }

View file

@ -0,0 +1,7 @@
- add_to_breadcrumbs s_('FeatureFlags|Feature Flags'), project_feature_flags_path(@project)
- breadcrumb_title s_('FeatureFlags|Edit User List')
- page_title s_('FeatureFlags|Edit User List')
#js-edit-user-list{ data: { 'user-lists-docs-path' => help_page_path('operations/feature_flags.md', anchor: 'user-list'),
'user-list-iid' => @user_list.iid,
'project-id' => @project.id } }

View file

@ -0,0 +1,8 @@
- @breadcrumb_link = new_project_feature_flags_user_list_path(@project)
- add_to_breadcrumbs s_('FeatureFlags|Feature Flags'), project_feature_flags_path(@project)
- breadcrumb_title s_('FeatureFlags|New User List')
- page_title s_('FeatureFlags|New User List')
#js-new-user-list{ data: { 'user-lists-docs-path' => help_page_path('operations/feature_flags.md', anchor: 'user-list'),
'feature-flags-path' => project_feature_flags_path(@project),
'project-id' => @project.id } }

View file

@ -0,0 +1,7 @@
- add_to_breadcrumbs s_('FeatureFlags|Feature Flags'), project_feature_flags_path(@project)
- breadcrumb_title s_('FeatureFlags|List details')
- page_title s_('FeatureFlags|Feature Flag User List Details')
#js-edit-user-list{ data: { project_id: @project.id,
user_list_iid: @user_list.iid,
empty_state_path: image_path('illustrations/feature_flag.svg') } }

View file

@ -1,60 +0,0 @@
- sort_value = @sort
- sort_title = packages_sort_option_title(sort_value)
- if @packages.any?
.d-flex.justify-content-end
.dropdown.inline.gl-mt-3.gl-mb-3.package-sort-dropdown
.btn-group{ role: 'group' }
.btn-group{ role: 'group' }
%button.dropdown-menu-toggle{ type: 'button', data: { toggle: 'dropdown', display: 'static' }, class: 'btn btn-default' }
= sort_title
= icon('chevron-down')
%ul.dropdown-menu.dropdown-menu-right.dropdown-menu-selectable.dropdown-menu-sort
%li
= sortable_item(sort_title_created_date, package_sort_path(sort: sort_value_recently_created), sort_title)
= sortable_item(sort_title_name, package_sort_path(sort: sort_value_name_desc), sort_title)
= sortable_item(sort_title_version, package_sort_path(sort: sort_value_version_desc), sort_title)
= sortable_item(sort_title_type, package_sort_path(sort: sort_value_type_desc), sort_title)
= packages_sort_direction_button(sort_value)
.table-holder
.gl-responsive-table-row.table-row-header.bg-secondary-50.px-2.border-top{ role: 'row' }
.table-section.section-30{ role: 'rowheader' }
= _('Name')
.table-section.section-20{ role: 'rowheader' }
= _('Version')
.table-section.section-20{ role: 'rowheader' }
= _('Type')
.table-section.section-20{ role: 'rowheader' }
= _('Created')
.table-section.section-10{ role: 'rowheader' }
- @packages.each do |package|
.gl-responsive-table-row.package-row.px-2{ data: { qa_selector: "package_row" } }
.table-section.section-30
.table-mobile-header{ role: "rowheader" }= _("Name")
.table-mobile-content.flex-truncate-parent
= link_to package.name, project_package_path(@project, package), class: 'flex-truncate-child', data: { qa_selector: "package_link" }
.table-section.section-20
.table-mobile-header{ role: "rowheader" }= _("Version")
.table-mobile-content
= package.version
.table-section.section-20
.table-mobile-header{ role: "rowheader" }= _("Type")
.table-mobile-content
= package.package_type
.table-section.section-20
.table-mobile-header{ role: "rowheader" }= _("Created")
.table-mobile-content
= time_ago_with_tooltip(package.created_at)
.table-section.section-10
.table-mobile-header{ role: "rowheader" }
.table-mobile-content
- if can_destroy_package
.float-right
= link_to project_package_path(@project, package), method: :delete, data: { confirm: _("Are you sure?") }, class: "btn btn-grouped btn-remove", title: _('Delete Package') do
= sprite_icon('remove')
= paginate @packages, theme: "gitlab"
- else
.row.empty-state
.col-12
= render 'shared/packages/no_packages'

View file

@ -1,22 +1,26 @@
- content_for(:page_title, _('Welcome to GitLab %{name}!') % { name: current_user.name })
.text-center.mb-3
= html_escape(_('In order to tailor your experience with GitLab we%{br_tag}would like to know a bit more about you.')) % { br_tag: '<br/>'.html_safe }
.signup-box.p-3.mb-2
.signup-body
= form_for(current_user, url: users_sign_up_update_registration_path, html: { class: 'new_new_user gl-show-field-errors', 'aria-live' => 'assertive' }) do |f|
.devise-errors.mt-0
= render 'devise/shared/error_messages', resource: current_user
.form-group
= f.label :role, _('Role'), class: 'label-bold'
= f.select :role, ::User.roles.keys.map { |role| [role.titleize, role] }, {}, class: 'form-control'
.form-group
= f.label :setup_for_company, _('Are you setting up GitLab for a company?'), class: 'label-bold'
.d-flex.justify-content-center
.w-25
= f.radio_button :setup_for_company, true
= f.label :setup_for_company, _('Yes'), value: 'true'
.w-25
= f.radio_button :setup_for_company, false
= f.label :setup_for_company, _('No'), value: 'false'
.submit-container.mt-3
= f.submit _('Get started!'), class: 'btn-register btn btn-block mb-0 p-2'
- page_title _('Your profile')
.row.gl-flex-grow-1.gl-bg-gray-10
.d-flex.gl-flex-direction-column.gl-align-items-center.gl-w-full.gl-p-5
.edit-profile.login-page.d-flex.flex-column.gl-align-items-center.pt-lg-3
= render_if_exists "registrations/welcome/progress_bar"
%h2.gl-text-center= html_escape(_('Welcome to GitLab%{br_tag}%{name}!')) % { name: html_escape(current_user.first_name), br_tag: '<br/>'.html_safe }
%p
.gl-text-center= html_escape(_('In order to personalize your experience with GitLab%{br_tag}we would like to know a bit more about you.')) % { br_tag: '<br/>'.html_safe }
= form_for(current_user, url: users_sign_up_update_registration_path, html: { class: 'card gl-w-full! gl-p-5', 'aria-live' => 'assertive' }) do |f|
.devise-errors
= render 'devise/shared/error_messages', resource: current_user
.row
.form-group.col-sm-12
= f.label :role, _('Role'), class: 'label-bold'
= f.select :role, ::User.roles.keys.map { |role| [role.titleize, role] }, {}, class: 'form-control', autofocus: true
.form-text.gl-text-gray-500.gl-mt-3= _('This will help us personalize your onboarding experience.')
= render_if_exists "registrations/welcome/setup_for_company", f: f
.row
.form-group.col-sm-12.gl-mb-0
- if partial_exists? "registrations/welcome/button"
= render "registrations/welcome/button"
- else
= f.submit _('Get started!'), class: 'btn-register btn btn-block gl-mb-0 gl-p-3'

View file

@ -0,0 +1,5 @@
---
title: Replace v-html with v-safe-html in parallel_diff_table_row.vue
merge_request: 41206
author: Kev @KevSlashNull
type: other

View file

@ -0,0 +1,5 @@
---
title: Replace v-html with v-safe-html in registry_breadcrumb.vue
merge_request: 41207
author: Kev @KevSlashNull
type: other

View file

@ -0,0 +1,5 @@
---
title: Corrected some spelling mistakes in the project deletion confirmation modal
merge_request: 41576
author:
type: changed

View file

@ -0,0 +1,5 @@
---
title: Center text on Mark all as done button on To-Do List
merge_request: 41269
author: Takuya Noguchi
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Update Secret-Detection template to use commits file
merge_request: 41364
author:
type: changed

View file

@ -0,0 +1,7 @@
---
name: design_management_todo_button
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39935
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/245074
group: group::knowledge
type: development
default_enabled: false

View file

@ -0,0 +1,7 @@
---
name: pages_artifacts_archive
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/40361
rollout_issue_url:
group: group::release management
type: development
default_enabled: false

View file

@ -1,13 +1,15 @@
# frozen_string_literal: true
class EnableBtreeGistExtension < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
execute 'CREATE EXTENSION IF NOT EXISTS btree_gist'
create_extension :btree_gist
end
def down
execute 'DROP EXTENSION IF EXISTS btree_gist'
drop_extension :btree_gist
end
end

View file

@ -245,6 +245,18 @@ p.each do |project|
end
```
### Incorrect repository statistics shown in the GUI
After [reducing a repository size with third-party tools](../../user/project/repository/reducing_the_repo_size_using_git.md),
the displayed size or commit count may still be out of date. To force an update, do:
```ruby
p = Project.find_by_full_path('<namespace>/<project>')
pp p.statistics
p.statistics.refresh!
pp p.statistics # compare with earlier values
```
## Wikis
### Recreate

View file

@ -71,8 +71,8 @@ runs by enabling the [Skip outdated deployment jobs](../pipelines/settings.md#sk
Example of a problematic pipeline flow **before** enabling Skip outdated deployment jobs:
1. Pipeline-A is created on the master branch.
1. Later, Pipeline-B is created on the master branch (with a newer commit SHA).
1. Pipeline-A is created on the `master` branch.
1. Later, Pipeline-B is created on the `master` branch (with a newer commit SHA).
1. The `deploy` job in Pipeline-B finishes first, and deploys the newer code.
1. The `deploy` job in Pipeline-A finished later, and deploys the older code, **overwriting** the newer (latest) deployment.

View file

@ -176,7 +176,7 @@ the contribution acceptance criteria below:
exposing a bug in existing code). Every new class should have corresponding
unit tests, even if the class is exercised at a higher level, such as a feature test.
- If a failing CI build seems to be unrelated to your contribution, you can try
restarting the failing CI job, rebasing from master to bring in updates that
restarting the failing CI job, rebasing from `master` to bring in updates that
may resolve the failure, or if it has not been fixed yet, ask a developer to
help you fix the test.
1. The MR initially contains a few logically organized commits.

View file

@ -0,0 +1,76 @@
---
last_updated: 2020-09-01
---
# Managing PostgreSQL extensions
This guide documents how to manage PostgreSQL extensions for installations with an external
PostgreSQL database.
GitLab requires certain extensions to be installed into the GitLab database. For example,
GitLab relies on `pg_trgm` and the `btree_gist` extensions.
In order to install extensions, PostgreSQL requires the user to have superuser privileges.
Typically, the GitLab database user is not a superuser. Therefore, regular database migrations
cannot be used to install extensions; instead, extensions have to be installed manually
prior to upgrading GitLab to a newer version.
## Installing PostgreSQL extensions manually
To install a PostgreSQL extension, follow this procedure:
1. Connect to the GitLab PostgreSQL database using a superuser, for example:
```shell
sudo gitlab-psql -d gitlabhq_production
```
1. Install the extension (`btree_gist` in this example) using [`CREATE EXTENSION`](https://www.postgresql.org/docs/11/sql-createextension.html):
```sql
CREATE EXTENSION IF NOT EXISTS btree_gist
```
1. Verify installed extensions:
```shell
gitlabhq_production=# \dx
List of installed extensions
Name | Version | Schema | Description
------------+---------+------------+-------------------------------------------------------------------
btree_gist | 1.5 | public | support for indexing common datatypes in GiST
pg_trgm | 1.4 | public | text similarity measurement and index searching based on trigrams
plpgsql | 1.0 | pg_catalog | PL/pgSQL procedural language
(3 rows)
```
On some systems you may need to install an additional package (for example,
`postgresql-contrib`) for certain extensions to become available.
## A typical migration failure scenario
The following is an example of a situation when the extension hasn't been installed before running migrations.
In this scenario, the database migration fails to create the extension `btree_gist` because of insufficient
privileges.
```shell
== 20200515152649 EnableBtreeGistExtension: migrating =========================
-- execute("CREATE EXTENSION IF NOT EXISTS btree_gist")
GitLab requires the PostgreSQL extension 'btree_gist' installed in database 'gitlabhq_production', but
the database user is not allowed to install the extension.
You can either install the extension manually using a database superuser:
CREATE EXTENSION IF NOT EXISTS btree_gist
Or, you can solve this by logging in to the GitLab database (gitlabhq_production) using a superuser and running:
ALTER regular WITH SUPERUSER
This query will grant the user superuser permissions, ensuring any database extensions
can be installed through migrations.
```
To recover from this situation, install the extension manually using a superuser, then retry the database
migration (or GitLab upgrade).

View file

@ -143,11 +143,8 @@ GitLab version | Minimum PostgreSQL version
12.10 | 11
13.0 | 11
You must also ensure the `pg_trgm` and `btree_gist` extensions are loaded into every
GitLab database. These extensions [can be enabled](https://www.postgresql.org/docs/11/sql-createextension.html) using a PostgreSQL super user.
On some systems you may need to install an additional package (for example,
`postgresql-contrib`) for this extension to become available.
You must also ensure the `pg_trgm` and `btree_gist` extensions are [loaded into every
GitLab database](postgresql_extensions.html).
NOTE: **Note:**
Support for [PostgreSQL 9.6 and 10 has been removed in GitLab 13.0](https://about.gitlab.com/releases/2020/05/22/gitlab-13-0-released/#postgresql-11-is-now-the-minimum-required-version-to-install-gitlab) so that GitLab can benefit from PostgreSQL 11 improvements, such as partitioning. For the schedule of transitioning to PostgreSQL 12, see [the related epic](https://gitlab.com/groups/gitlab-org/-/epics/2184).

View file

@ -310,3 +310,4 @@ for more information.
- [Restoring from backup after a failed upgrade](restore_after_failure.md)
- [Upgrading PostgreSQL Using Slony](upgrading_postgresql_using_slony.md), for
upgrading a PostgreSQL database with minimal downtime.
- [Managing PostgreSQL extensions](../install/postgresql_extensions.md)

View file

@ -0,0 +1,170 @@
---
stage: Secure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference
---
# Secure and Defend terminology
This terminology list for GitLab Secure and Defend aims to:
- Promote a ubiquitous language for discussing application security.
- Improve the effectiveness of communication regarding GitLab's application security features.
- Get new contributors up to speed faster.
NOTE: **Note:**
This document defines application security terms in the specific context of GitLab's Secure and
Defend products. Terms may therefore have different meanings outside of GitLab Secure and Defend.
## Terms
### Analyzer
Software that performs a scan. The scan analyzes an attack surface for vulnerabilities and produces
a report containing findings. Reports adhere to the [Secure report format](#secure-report-format).
Analyzers integrate into GitLab using a CI job. The report produced by the analyzer is published as
an artifact once the job is complete. GitLab ingests this report, allowing users to visualize and
manage found vulnerabilities. For more information, see [Security Scanner Integration](../../../development/integrations/secure.md).
Many GitLab analyzers follow a standard approach using Docker to run a wrapped scanner. For example,
the Docker image `bandit-sast` is an analyzer that wraps the scanner `Bandit`. You can optionally
use the [Common library](https://gitlab.com/gitlab-org/security-products/analyzers/common)
to assist in building an Analyzer.
### Attack surface
The different places in an application that are vulnerable to attack. Secure products discover and
search the attack surface during scans. Each product defines the attack surface differently. For
example, SAST uses files and line numbers, and DAST uses URLs.
### CVE
Common Vulnerabilities and Exposures (CVE®) is a list of common identifiers for publicly known
cybersecurity vulnerabilities. The list is managed by the [Mitre Corporation](https://cve.mitre.org/).
### CVSS
The Common Vulnerability Scoring System (CVSS) is a free and open industry standard for assessing
the severity of computer system security vulnerabilities.
### CWE
Common Weakness Enumeration (CWE™) is a community-developed list of common software and hardware
weakness types that have security ramifications. Weaknesses are flaws, faults, bugs,
vulnerabilities, or other errors in software or hardware implementation, code, design, or
architecture. If left unaddressed, weaknesses could result in systems, networks, or hardware being
vulnerable to attack. The CWE List and associated classification taxonomy serve as a language that
you can use to identify and describe these weaknesses in terms of CWEs.
### Duplicate finding
A legitimate finding that is reported multiple times. This can occur when different scanners
discover the same finding, or when a single scan inadvertently reports the same finding more than
once.
### False positive
A finding that doesn't exist but is incorrectly reported as existing.
### Feedback
Feedback the user provides about a finding. Types of feedback include dismissal, creating an issue,
or creating a merge request.
### Finding
An asset that has the potential to be vulnerable, identified within a project by an analyzer. Assets
include but are not restricted to source code, binary packages, containers, dependencies, networks,
applications, and infrastructure.
### Insignificant finding
A legitimate finding that a particular customer doesn't care about.
### Location fingerprint
A finding's location fingerprint is a text value that's unique for each location on the attack
surface. Each Secure product defines this according to its type of attack surface. For example, SAST
incorporates file path and line number.
### Pipeline Security tab
A page that displays findings discovered in the associated CI pipeline.
### Primary identifier
A finding's primary identifier is a value unique to that finding. The external type and external ID
of the finding's [first identifier](https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/blob/v2.4.0-rc1/dist/sast-report-format.json#L228)
combine to create the value.
Examples of primary identifiers include ZAP's `PluginID`, or `CVE` for Klar. Note that the
identifier must be stable. Subsequent scans must return the same value for the same finding, even if
the location has slightly changed.
### Report finding
A [finding](#finding) that only exists in a report produced by an analyzer, and is yet to be
persisted to the database. The report finding becomes a [vulnerability finding](#vulnerability-finding)
once it's imported into the database.
### Scan type (report type)
The type of scan. This must be one of the following:
- `container_scanning`
- `dependency_scanning`
- `dast`
- `sast`
### Scanner
Software that can scan for vulnerabilities. The resulting scan report is typically not in the
[Secure report format](#secure-report-format). Examples include ESLint, Klar, and ZAP.
### Secure product
A group of features related to a specific area of application security with first-class support by
GitLab. Products include Container Scanning, Dependency Scanning, Dynamic Application Security
Testing (DAST), Secret Detection, Static Application Security Testing (SAST), and Fuzz Testing. Each
of these products typically includes one or more analyzers.
### Secure report format
A standard report format that Secure products comply with when creating JSON reports. The format is described by a
[JSON schema](https://gitlab.com/gitlab-org/security-products/security-report-schemas).
### Security Dashboard
Provides an overview of all the vulnerabilities for a project, group, or GitLab instance.
Vulnerabilities are only created from findings discovered on the project's default branch.
### Vendor
The party maintaining an analyzer. As such, a vendor is responsible for integrating a scanner into
GitLab and keeping it compatible as they evolve. A vendor isn't necessarily the author or maintainer
of the scanner, as in the case of using an open core or OSS project as a base solution of an
offering. For scanners included as part of a GitLab distribution or GitLab subscription, the vendor
is listed as GitLab.
### Vulnerability
A flaw that has a negative impact on the security of its environment. Vulnerabilities describe the
error or weakness, and don't describe where the error is located (see [finding](#finding)).
Each vulnerability maps to a unique finding.
### Vulnerability finding
When a [report finding](#report-finding) is stored to the database, it becomes a vulnerability
[finding](#finding).
### Vulnerability tracking
Deals with the responsibility of matching findings across scans so that a finding's life cycle can
be understood. Engineers and security teams use this information to decide whether to merge code
changes, and to see unresolved findings and when they were introduced. Vulnerabilities are tracked
by comparing the location fingerprint, primary identifier, and report type.
### Vulnerability occurrence
Deprecated, see [finding](#finding).

View file

@ -23,7 +23,7 @@ several different ways:
- [Create issue](#creating-an-issue-for-a-vulnerability) - Create a new issue with the
title and description pre-populated with information from the vulnerability report.
By default, such issues are [confidential](../../project/issues/confidential_issues.md).
- [Solution](#automatic-remediation-solutions-for-vulnerabilities) - For some vulnerabilities,
- [Solution](#automatic-remediation-for-vulnerabilities) - For some vulnerabilities,
a solution is provided for how to fix the vulnerability.
## Changing vulnerability status
@ -47,28 +47,7 @@ project the vulnerability came from, and pre-populates it with useful informatio
the vulnerability report. After the issue is created, GitLab redirects you to the
issue page so you can edit, assign, or comment on the issue.
## Automatic remediation solutions for vulnerabilities
## Automatic remediation for vulnerabilities
You can fix some vulnerabilities by applying the solution that GitLab automatically
generates for you. GitLab supports the following scanners:
- [Dependency Scanning](../dependency_scanning/index.md): Automatic Patch creation
is only available for Node.js projects managed with `yarn`.
- [Container Scanning](../container_scanning/index.md).
When an automatic solution is available, the button in the header will show "Resolve with merge request":
![Resolve with Merge Request button](img/vulnerability_page_merge_request_button_v13_1.png)
Selecting the button will create a merge request with the automatic solution.
### Manually applying a suggested patch
To manually apply the patch that was generated by GitLab for a vulnerability, select the dropdown arrow on the "Resolve
with merge request" button, then select the "Download patch to resolve" option:
![Resolve with Merge Request button dropdown](img/vulnerability_page_merge_request_button_dropdown_v13_1.png)
This will change the button text to "Download patch to resolve". Click on it to download the patch:
![Download patch button](img/vulnerability_page_download_patch_button_v13_1.png)
generates for you. [Read more about the automatic remediation for vulnerabilities feature](../index.md#solutions-for-vulnerabilities-auto-remediation).

View file

@ -303,14 +303,9 @@ This process is not suitable for removing sensitive data like password or keys f
Information about commits, including file content, is cached in the database, and will remain
visible even after they have been removed from the repository.
<!-- ## Troubleshooting
## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
### Incorrect repository statistics shown in the GUI
Each scenario can be a third-level heading, e.g. `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->
If the displayed size or commit number is different from the exported `.tar.gz` or local repository,
you can ask a GitLab administrator to [force an update](../../../administration/troubleshooting/gitlab_rails_cheat_sheet.md#incorrect-repository-statistics-shown-in-the-gui).

View file

@ -8,6 +8,14 @@ module Backup
attr_reader :progress
attr_reader :config, :db_file_name
IGNORED_ERRORS = [
# Ignore the DROP errors; recent database dumps will use --if-exists with pg_dump
/does not exist$/,
# User may not have permissions to drop extensions or schemas
/must be owner of/
].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
def initialize(progress, filename: nil)
@progress = progress
@config = YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))[Rails.env]
@ -49,6 +57,8 @@ module Backup
end
report_success(success)
progress.flush
raise Backup::Error, 'Backup failed' unless success
end
@ -83,6 +93,10 @@ module Backup
protected
def ignore_error?(line)
IGNORED_ERRORS_REGEXP.match?(line)
end
def execute_and_track_errors(cmd, decompress_rd)
errors = []
@ -97,8 +111,7 @@ module Backup
err_reader = Thread.new do
until (raw_line = stderr.gets).nil?
warn(raw_line)
# Recent database dumps will use --if-exists with pg_dump
errors << raw_line unless raw_line =~ /does not exist$/
errors << raw_line unless ignore_error?(raw_line)
end
end
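The ignore rules above are combined with `Regexp.union`, so a restore error line is dropped as noise when it matches either pattern. A small standalone sketch of that filtering (the sample stderr lines are made up for illustration):

```ruby
# Mirrors the IGNORED_ERRORS / ignore_error? logic above; sample lines are illustrative.
ignored_errors = [
  /does not exist$/,     # DROP errors; recent dumps use --if-exists with pg_dump
  /must be owner of/     # user may lack permission to drop extensions or schemas
]
ignored_errors_regexp = Regexp.union(ignored_errors)

stderr_lines = [
  'ERROR:  table "projects" does not exist',
  'ERROR:  must be owner of extension pg_trgm',
  'ERROR:  out of shared memory'
]

errors = stderr_lines.reject { |line| ignored_errors_regexp.match?(line) }
# => ["ERROR:  out of shared memory"]
```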

View file

@ -35,6 +35,7 @@ secret_detection:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
script:
- git fetch origin $CI_DEFAULT_BRANCH $CI_BUILD_REF_NAME
- export SECRET_DETECTION_COMMIT_TO=$(git log --left-right --cherry-pick --pretty=format:"%H" refs/remotes/origin/$CI_DEFAULT_BRANCH...refs/remotes/origin/$CI_BUILD_REF_NAME | tail -n 1)
- export SECRET_DETECTION_COMMIT_FROM=$CI_COMMIT_SHA
- git log --left-right --cherry-pick --pretty=format:"%H" refs/remotes/origin/$CI_DEFAULT_BRANCH...refs/remotes/origin/$CI_BUILD_REF_NAME > "$CI_COMMIT_SHA"_commit_list.txt
- export SECRET_DETECTION_COMMITS_FILE="$CI_COMMIT_SHA"_commit_list.txt
- /analyzer run
- rm "$CI_COMMIT_SHA"_commit_list.txt

View file

@ -1212,6 +1212,63 @@ into similar problems in the future (e.g. when new tables are created).
)
end
def create_extension(extension)
execute('CREATE EXTENSION IF NOT EXISTS %s' % extension)
rescue ActiveRecord::StatementInvalid => e
dbname = Database.database_name
user = Database.username
warn(<<~MSG) if e.to_s =~ /permission denied/
GitLab requires the PostgreSQL extension '#{extension}' installed in database '#{dbname}', but
the database user is not allowed to install the extension.
You can either install the extension manually using a database superuser:
CREATE EXTENSION IF NOT EXISTS #{extension}
Or, you can solve this by logging in to the GitLab
database (#{dbname}) using a superuser and running:
ALTER #{user} WITH SUPERUSER
This query will grant the user superuser permissions, ensuring any database extensions
can be installed through migrations.
For more information, refer to https://docs.gitlab.com/ee/install/postgresql_extensions.html.
MSG
raise
end
def drop_extension(extension)
execute('DROP EXTENSION IF EXISTS %s' % extension)
rescue ActiveRecord::StatementInvalid => e
dbname = Database.database_name
user = Database.username
warn(<<~MSG) if e.to_s =~ /permission denied/
This migration attempts to drop the PostgreSQL extension '#{extension}'
installed in database '#{dbname}', but the database user is not allowed
to drop the extension.
You can either drop the extension manually using a database superuser:
DROP EXTENSION IF EXISTS #{extension}
Or, you can solve this by logging in to the GitLab
database (#{dbname}) using a superuser and running:
ALTER #{user} WITH SUPERUSER
This query will grant the user superuser permissions, ensuring any database extensions
can be dropped through migrations.
For more information, refer to https://docs.gitlab.com/ee/install/postgresql_extensions.html.
MSG
raise
end
private
def validate_check_constraint_name!(constraint_name)
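With `create_extension` and `drop_extension` available in `Gitlab::Database::MigrationHelpers`, future extension migrations can follow the same shape as the `EnableBtreeGistExtension` migration earlier in this commit. A sketch under that assumption (the migration class name is hypothetical; `pg_trgm` is another extension GitLab requires):

```ruby
# frozen_string_literal: true

# Hypothetical example migration; pg_trgm is already enabled in GitLab, this
# only illustrates how the new helpers would be used.
class EnablePgTrgmExtension < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def up
    create_extension :pg_trgm
  end

  def down
    drop_extension :pg_trgm
  end
end
```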

View file

@ -28,7 +28,7 @@ module Gitlab
# We are using 'Marginalia::SidekiqInstrumentation' which does not support 'ActiveJob::Base'.
# Gitlab also uses 'ActionMailer::MailDeliveryJob' which inherits from ActiveJob::Base.
# So below condition is used to return metadata for such jobs.
if job.is_a?(ActionMailer::MailDeliveryJob) || job.is_a?(ActionMailer::DeliveryJob)
if job.is_a?(ActionMailer::MailDeliveryJob)
{
"class" => job.arguments.first,
"jid" => job.job_id

View file

@ -39,6 +39,7 @@ module Gitlab
.merge(analytics_unique_visits_data)
.merge(compliance_unique_visits_data)
.merge(search_unique_visits_data)
.merge(redis_hll_counters)
end
end
@ -618,6 +619,10 @@ module Gitlab
{}
end
def redis_hll_counters
{ redis_hll_counters: ::Gitlab::UsageDataCounters::HLLRedisCounter.unique_events_data }
end
def analytics_unique_visits_data
results = ::Gitlab::Analytics::UniqueVisits.analytics_events.each_with_object({}) do |target, hash|
hash[target] = redis_usage_data { unique_visit_service.unique_visits_for(targets: target) }

View file

@ -3,6 +3,8 @@
module Gitlab
module UsageDataCounters
module HLLRedisCounter
include Gitlab::Utils::UsageData
DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH = 6.weeks
DEFAULT_DAILY_KEY_EXPIRY_LENGTH = 29.days
DEFAULT_REDIS_SLOT = ''.freeze
@ -53,14 +55,44 @@ module Gitlab
Gitlab::Redis::HLL.count(keys: keys)
end
def categories
@categories ||= known_events.map { |event| event[:category] }.uniq
end
# @param category [String] the category name
# @return [Array<String>] list of event names for given category
def events_for_category(category)
known_events.select { |event| event[:category] == category.to_s }.map { |event| event[:name] }
end
def unique_events_data
categories.each_with_object({}) do |category, category_results|
events_names = events_for_category(category)
event_results = events_names.each_with_object({}) do |event, hash|
hash[event] = unique_events(event_names: event, start_date: 7.days.ago.to_date, end_date: Date.current)
end
if eligible_for_totals?(events_names)
event_results["#{category}_total_unique_counts_weekly"] = unique_events(event_names: events_names, start_date: 7.days.ago.to_date, end_date: Date.current)
event_results["#{category}_total_unique_counts_monthly"] = unique_events(event_names: events_names, start_date: 4.weeks.ago.to_date, end_date: Date.current)
end
category_results["#{category}"] = event_results
end
end
private
# Totals are only added for events that share the same Redis slot, category, and aggregation level,
# and only when there is more than one event
def eligible_for_totals?(events_names)
return false if events_names.size <= 1
events = events_for(events_names)
events_in_same_slot?(events) && events_in_same_category?(events) && events_same_aggregation?(events)
end
def keys_for_aggregation(aggregation, events:, start_date:, end_date:)
if aggregation.to_sym == :daily
daily_redis_keys(events: events, start_date: start_date, end_date: end_date)
@ -78,8 +110,11 @@ module Gitlab
end
def events_in_same_slot?(events)
# with a single event there is only one redis_slot to check
return true if events.size == 1
slot = events.first[:redis_slot]
events.all? { |event| event[:redis_slot] == slot }
events.all? { |event| event[:redis_slot].present? && event[:redis_slot] == slot }
end
def events_in_same_category?(events)
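Taken together with the `usage_data.rb` change above, `unique_events_data` groups weekly counts per category and only appends the `*_total_unique_counts_weekly`/`*_total_unique_counts_monthly` totals when a category's events share a Redis slot and aggregation. A hypothetical sketch of the resulting shape (category names, event names, and counts are invented):

```ruby
# Hypothetical output shape only; real categories, events, and counts will differ.
Gitlab::UsageDataCounters::HLLRedisCounter.unique_events_data
# => {
#      "some_category" => {
#        "event_a" => 12,
#        "event_b" => 7,
#        # added only when the events share redis_slot, category, and aggregation:
#        "some_category_total_unique_counts_weekly"  => 15,
#        "some_category_total_unique_counts_monthly" => 31
#      },
#      "another_category" => { "event_c" => 4 }
#    }
```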

View file

@ -3214,9 +3214,6 @@ msgstr ""
msgid "Are you ABSOLUTELY SURE you wish to delete this project?"
msgstr ""
msgid "Are you setting up GitLab for a company?"
msgstr ""
msgid "Are you sure that you want to archive this project?"
msgstr ""
@ -8043,9 +8040,6 @@ msgstr ""
msgid "Delete Comment"
msgstr ""
msgid "Delete Package"
msgstr ""
msgid "Delete Snippet"
msgstr ""
@ -13136,9 +13130,6 @@ msgstr ""
msgid "In order to personalize your experience with GitLab%{br_tag}we would like to know a bit more about you."
msgstr ""
msgid "In order to tailor your experience with GitLab we%{br_tag}would like to know a bit more about you."
msgstr ""
msgid "In progress"
msgstr ""
@ -17237,7 +17228,7 @@ msgstr ""
msgid "OnDemandScans|Use existing site profile"
msgstr ""
msgid "Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its respositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc."
msgid "Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc."
msgstr ""
msgid "Once a project is permanently deleted it cannot be recovered. You will lose this project's repository and all content: issues, merge requests etc."
@ -20002,6 +19993,9 @@ msgstr ""
msgid "Promoted issue to an epic."
msgstr ""
msgid "Promotion is not supported."
msgstr ""
msgid "Promotions|Burndown Charts are visual representations of the progress of completing a milestone. At a glance, you see the current state for the completion a given milestone. Without them, you would have to organize the data from the milestone and plot it yourself to have the same sense of progress."
msgstr ""
@ -25244,7 +25238,7 @@ msgstr ""
msgid "This action can lead to data loss. To prevent accidental actions we ask you to confirm your intention."
msgstr ""
msgid "This action cannot be undone. You will lose the project's respository and all conent: issues, merge requests, etc."
msgid "This action cannot be undone. You will lose the project's repository and all content: issues, merge requests, etc."
msgstr ""
msgid "This action will %{strongOpen}permanently delete%{strongClose} %{codeOpen}%{project}%{codeClose} %{strongOpen}immediately%{strongClose}, including its repositories and all content: issues, merge requests, etc."
@ -27832,7 +27826,7 @@ msgstr ""
msgid "We sent you an email with reset password instructions"
msgstr ""
msgid "We tried to automatically renew your %{strong}%{plan_name}%{strong_close} subscription for %{strong}%{namespace_name}%{strong_close} on %{expires_on} but something went wrong so your subscription was downgraded to the free plan. Don't worry, your data is safe. We suggest you check your payment method and get in touch with our support team (%{support_link}). They'll gladly help with your subscription renewal."
msgid "We tried to automatically renew your subscription for %{strong}%{namespace_name}%{strong_close} on %{expires_on} but something went wrong so your subscription was downgraded to the free plan. Don't worry, your data is safe. We suggest you check your payment method and get in touch with our support team (%{support_link}). They'll gladly help with your subscription renewal."
msgstr ""
msgid "We want to be sure it is you, please confirm you are not a robot."
@ -27982,15 +27976,12 @@ msgstr ""
msgid "Welcome to GitLab"
msgstr ""
msgid "Welcome to GitLab %{name}!"
msgid "Welcome to GitLab%{br_tag}%{name}!"
msgstr ""
msgid "Welcome to GitLab, %{first_name}!"
msgstr ""
msgid "Welcome to GitLab.com%{br_tag}@%{name}!"
msgstr ""
msgid "Welcome to the guided GitLab tour"
msgstr ""
@ -28518,10 +28509,10 @@ msgstr ""
msgid "You could not create a new trigger."
msgstr ""
msgid "You didn't renew your %{strong}%{plan_name}%{strong_close} subscription for %{strong}%{namespace_name}%{strong_close} so it was downgraded to the free plan."
msgid "You didn't renew your subscription for %{strong}%{namespace_name}%{strong_close} so it was downgraded to the free plan."
msgstr ""
msgid "You didn't renew your %{strong}%{plan_name}%{strong_close} subscription so it was downgraded to the GitLab Core Plan."
msgid "You didn't renew your subscription so it was downgraded to the GitLab Core Plan."
msgstr ""
msgid "You do not have an active license"

View file

@ -5,7 +5,7 @@ module QA
module Project
module Packages
class Index < QA::Page::Base
view 'app/views/projects/packages/packages/_legacy_package_list.html.haml' do
view 'app/assets/javascripts/packages/shared/components/package_list_row.vue' do
element :package_row
element :package_link
end

View file

@ -0,0 +1,51 @@
# frozen_string_literal: true
module RuboCop
module Cop
module Gitlab
# This cop checks for `UploadedFile.from_params` usage.
# See https://docs.gitlab.com/ee/development/uploads.html#how-to-add-a-new-upload-route
#
# @example
#
# # bad
# class MyAwfulApi < Grape::API::Instance
# params do
# optional 'file.path', type: String
# optional 'file.name', type: String
# optional 'file.type', type: String
# optional 'file.size', type: Integer
# optional 'file.md5', type: String
# optional 'file.sha1', type: String
# optional 'file.sha256', type: String
# end
# put '/files' do
# uploaded_file = UploadedFile.from_params(params, :file, FileUploader.workhorse_local_upload_path)
# end
# end
#
# # good
# class MyMuchBetterApi < Grape::API::Instance
# params do
# requires :file, type: ::API::Validations::Types::WorkhorseFile
# end
# put '/files' do
# uploaded_file = declared_params[:file]
# end
# end
class AvoidUploadedFileFromParams < RuboCop::Cop::Cop
MSG = 'Use the `UploadedFile` set by `multipart.rb` instead of calling `UploadedFile.from_params` directly. See https://docs.gitlab.com/ee/development/uploads.html#how-to-add-a-new-upload-route'
def_node_matcher :calling_uploaded_file_from_params?, <<~PATTERN
(send (const nil? :UploadedFile) :from_params ...)
PATTERN
def on_send(node)
return unless calling_uploaded_file_from_params?(node)
add_offense(node, location: :expression)
end
end
end
end
end

View file

@ -485,8 +485,8 @@ RSpec.describe 'With experimental flow' do
it_behaves_like 'Signup name validation', 'new_user_first_name', 127
it_behaves_like 'Signup name validation', 'new_user_last_name', 127
describe 'when role is required' do
it 'after registering, it redirects to step 2 of the signup process, sets the name and role and then redirects to the original requested url' do
context 'when role is required' do
it 'redirects to step 2 of the signup process, sets the role and redirects back' do
new_user = build_stubbed(:user)
visit new_user_registration_path
fill_in 'new_user_first_name', with: new_user.first_name
@ -500,12 +500,11 @@ RSpec.describe 'With experimental flow' do
expect(page).to have_current_path(users_sign_up_welcome_path)
select 'Software Developer', from: 'user_role'
choose 'user_setup_for_company_true'
click_button 'Get started!'
new_user = User.find_by_username(new_user.username)
expect(new_user.software_developer_role?).to be_truthy
expect(new_user.setup_for_company).to be_truthy
expect(new_user.setup_for_company).to be_nil
expect(page).to have_current_path(new_project_path)
end
end
@ -521,14 +520,13 @@ RSpec.describe 'With experimental flow' do
it 'terms are checked by default' do
new_user = build_stubbed(:user)
visit new_user_registration_path
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
visit new_user_registration_path
fill_in 'new_user_first_name', with: new_user.first_name
fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
fill_in 'new_user_password', with: new_user.password
click_button 'Register'
expect(current_path).to eq users_sign_up_welcome_path

View file

@ -6,6 +6,7 @@ import Participants from '~/sidebar/components/participants/participants.vue';
import DesignDiscussion from '~/design_management/components/design_notes/design_discussion.vue';
import design from '../mock_data/design';
import updateActiveDiscussionMutation from '~/design_management/graphql/mutations/update_active_discussion.mutation.graphql';
import TodoButton from '~/vue_shared/components/todo_button.vue';
const scrollIntoViewMock = jest.fn();
HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
@ -42,7 +43,7 @@ describe('Design management design sidebar component', () => {
const findNewDiscussionDisclaimer = () =>
wrapper.find('[data-testid="new-discussion-disclaimer"]');
function createComponent(props = {}) {
function createComponent(props = {}, { enableTodoButton } = {}) {
wrapper = shallowMount(DesignSidebar, {
propsData: {
design,
@ -57,6 +58,9 @@ describe('Design management design sidebar component', () => {
},
},
stubs: { GlPopover },
provide: {
glFeatures: { designManagementTodoButton: enableTodoButton },
},
});
}
@ -241,4 +245,23 @@ describe('Design management design sidebar component', () => {
expect(Cookies.set).toHaveBeenCalledWith(cookieKey, 'true', { expires: 365 * 10 });
});
});
it('does not render To-Do button by default', () => {
createComponent();
expect(wrapper.find(TodoButton).exists()).toBe(false);
});
describe('when `design_management_todo_button` feature flag is enabled', () => {
beforeEach(() => {
createComponent({}, { enableTodoButton: true });
});
it('renders sidebar root element with no top padding', () => {
expect(wrapper.classes()).toContain('gl-pt-0');
});
it('renders todo_button component', () => {
expect(wrapper.find(TodoButton).exists()).toBe(true);
});
});
});

View file

@ -32,6 +32,8 @@ exports[`Design management design index page renders design index 1`] = `
<div
class="image-notes"
>
<!---->
<h2
class="gl-font-weight-bold gl-mt-0"
>
@ -179,6 +181,8 @@ exports[`Design management design index page with error GlAlert is rendered in c
<div
class="image-notes"
>
<!---->
<h2
class="gl-font-weight-bold gl-mt-0"
>

View file

@ -3,7 +3,7 @@
exports[`packages_list_row renders 1`] = `
<div
class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100"
data-qa-selector="packages-row"
data-qa-selector="package_row"
>
<div
class="gl-display-flex gl-align-items-center gl-py-5"

View file

@ -51,12 +51,12 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
variant="danger"
>
<gl-sprintf-stub
message="Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its respositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc."
message="Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests etc."
/>
</gl-alert-stub>
<p>
This action cannot be undone. You will lose the project's respository and all conent: issues, merge requests, etc.
This action cannot be undone. You will lose the project's repository and all content: issues, merge requests, etc.
</p>
<p

View file

@ -38,7 +38,7 @@ RSpec.describe Backup::Database do
context 'when the restore command prints errors' do
let(:visible_error) { "This is a test error\n" }
let(:noise) { "Table projects does not exist\n" }
let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors' do

View file

@ -2329,4 +2329,56 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
end
describe '#create_extension' do
subject { model.create_extension(extension) }
let(:extension) { :btree_gist }
it 'executes CREATE EXTENSION statement' do
expect(model).to receive(:execute).with(/CREATE EXTENSION IF NOT EXISTS #{extension}/)
subject
end
context 'without proper permissions' do
before do
allow(model).to receive(:execute).with(/CREATE EXTENSION IF NOT EXISTS #{extension}/).and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
end
it 'raises the exception' do
expect { subject }.to raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
end
it 'prints an error message' do
expect { subject }.to output(/user is not allowed/).to_stderr.and raise_error
end
end
end
describe '#drop_extension' do
subject { model.drop_extension(extension) }
let(:extension) { 'btree_gist' }
it 'executes DROP EXTENSION statement' do
expect(model).to receive(:execute).with(/DROP EXTENSION IF EXISTS #{extension}/)
subject
end
context 'without proper permissions' do
before do
allow(model).to receive(:execute).with(/DROP EXTENSION IF EXISTS #{extension}/).and_raise(ActiveRecord::StatementInvalid, 'InsufficientPrivilege: permission denied')
end
it 'raises the exception' do
expect { subject }.to raise_error(ActiveRecord::StatementInvalid, /InsufficientPrivilege/)
end
it 'prints an error message' do
expect { subject }.to output(/user is not allowed/).to_stderr.and raise_error
end
end
end
end

View file

@ -8,31 +8,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
let(:weekly_event) { 'g_analytics_contribution' }
let(:daily_event) { 'g_analytics_search' }
let(:analytics_slot_event) { 'g_analytics_contribution' }
let(:compliance_slot_event) { 'g_compliance_dashboard' }
let(:category_analytics) { 'g_analytics_search' }
let(:category_productivity) { 'g_analytics_productivity' }
let(:no_slot) { 'no_slot' }
let(:different_aggregation) { 'different_aggregation' }
let(:custom_daily_event) { 'g_analytics_custom' }
let(:known_events) do
[
{ name: weekly_event, redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "weekly" },
{ name: daily_event, redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "daily" },
{ name: category_productivity, redis_slot: "analytics", category: "productivity", aggregation: "weekly" },
{ name: compliance_slot_event, redis_slot: "compliance", category: "compliance", aggregation: "weekly" },
{ name: no_slot, category: "global", aggregation: "daily" },
{ name: different_aggregation, category: "global", aggregation: "monthly" }
].map(&:with_indifferent_access)
end
before do
allow(described_class).to receive(:known_events).and_return(known_events)
end
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year
@@ -43,144 +18,223 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
Timecop.freeze(reference_time) { example.run }
end
describe '.events_for_category' do
it 'gets the event names for given category' do
expect(described_class.events_for_category(:analytics)).to contain_exactly(weekly_event, daily_event)
describe '.categories' do
it 'gets all unique category names' do
expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search')
end
end
describe '.track_event' do
it "raise error if metrics don't have same aggregation" do
expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
describe 'known_events' do
let(:weekly_event) { 'g_analytics_contribution' }
let(:daily_event) { 'g_analytics_search' }
let(:analytics_slot_event) { 'g_analytics_contribution' }
let(:compliance_slot_event) { 'g_compliance_dashboard' }
let(:category_analytics_event) { 'g_analytics_search' }
let(:category_productivity_event) { 'g_analytics_productivity' }
let(:no_slot) { 'no_slot' }
let(:different_aggregation) { 'different_aggregation' }
let(:custom_daily_event) { 'g_analytics_custom' }
let(:global_category) { 'global' }
let(:compliance_category) { 'compliance' }
let(:productivity_category) { 'productivity' }
let(:analytics_category) { 'analytics' }
let(:known_events) do
[
{ name: weekly_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "weekly" },
{ name: daily_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "daily" },
{ name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" },
{ name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" },
{ name: no_slot, category: global_category, aggregation: "daily" },
{ name: different_aggregation, category: global_category, aggregation: "monthly" }
].map(&:with_indifferent_access)
end
it 'raise error if metrics of unknown aggregation' do
expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
context 'for weekly events' do
it 'sets the keys in Redis to expire automatically after the given expiry time' do
described_class.track_event(entity1, "g_analytics_contribution")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
end
end
end
it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
described_class.track_event(entity1, "g_compliance_dashboard")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks)
end
end
end
end
context 'for daily events' do
it 'sets the keys in Redis to expire after the given expiry time' do
described_class.track_event(entity1, "g_analytics_search")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-g_{analytics}_search").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(84.days)
end
end
end
it 'sets the keys in Redis to expire after 29 days by default' do
described_class.track_event(entity1, "no_slot")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-{no_slot}").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(29.days)
end
end
end
end
end
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
described_class.track_event(entity1, weekly_event, Date.current)
described_class.track_event(entity2, weekly_event, Date.current)
# Events last week
described_class.track_event(entity1, weekly_event, 2.days.ago)
described_class.track_event(entity1, weekly_event, 2.days.ago)
described_class.track_event(entity1, no_slot, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, weekly_event, 2.weeks.ago)
# Events 4 weeks ago
described_class.track_event(entity3, weekly_event, 4.weeks.ago)
described_class.track_event(entity4, weekly_event, 29.days.ago)
# events in current day should be counted in daily aggregation
described_class.track_event(entity1, daily_event, Date.current)
described_class.track_event(entity2, daily_event, Date.current)
# Events last week
described_class.track_event(entity1, daily_event, 2.days.ago)
described_class.track_event(entity1, daily_event, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, daily_event, 14.days.ago)
# Events 4 weeks ago
described_class.track_event(entity3, daily_event, 28.days.ago)
described_class.track_event(entity4, daily_event, 29.days.ago)
allow(described_class).to receive(:known_events).and_return(known_events)
end
it 'raise error if metrics are not in the same slot' do
expect { described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
describe '.events_for_category' do
it 'gets the event names for given category' do
expect(described_class.events_for_category(:analytics)).to contain_exactly(weekly_event, daily_event)
end
end
it 'raise error if metrics are not in the same category' do
expect { described_class.unique_events(event_names: [category_analytics, category_productivity], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
describe '.track_event' do
it "raise error if metrics don't have same aggregation" do
expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
end
it 'raise error if metrics of unknown aggregation' do
expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
context 'for weekly events' do
it 'sets the keys in Redis to expire automatically after the given expiry time' do
described_class.track_event(entity1, "g_analytics_contribution")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
end
end
end
it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
described_class.track_event(entity1, "g_compliance_dashboard")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks)
end
end
end
end
context 'for daily events' do
it 'sets the keys in Redis to expire after the given expiry time' do
described_class.track_event(entity1, "g_analytics_search")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-g_{analytics}_search").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(84.days)
end
end
end
it 'sets the keys in Redis to expire after 29 days by default' do
described_class.track_event(entity1, "no_slot")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "*-{no_slot}").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(29.days)
end
end
end
end
end
it "raise error if metrics don't have same aggregation" do
expect { described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
described_class.track_event(entity1, weekly_event, Date.current)
described_class.track_event(entity2, weekly_event, Date.current)
# Events last week
described_class.track_event(entity1, weekly_event, 2.days.ago)
described_class.track_event(entity1, weekly_event, 2.days.ago)
described_class.track_event(entity1, no_slot, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, weekly_event, 2.weeks.ago)
# Events 4 weeks ago
described_class.track_event(entity3, weekly_event, 4.weeks.ago)
described_class.track_event(entity4, weekly_event, 29.days.ago)
# events in current day should be counted in daily aggregation
described_class.track_event(entity1, daily_event, Date.current)
described_class.track_event(entity2, daily_event, Date.current)
# Events last week
described_class.track_event(entity1, daily_event, 2.days.ago)
described_class.track_event(entity1, daily_event, 2.days.ago)
# Events 2 weeks ago
described_class.track_event(entity1, daily_event, 14.days.ago)
# Events 4 weeks ago
described_class.track_event(entity3, daily_event, 28.days.ago)
described_class.track_event(entity4, daily_event, 29.days.ago)
end
it 'raise error if metrics are not in the same slot' do
expect { described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
end
it 'raise error if metrics are not in the same category' do
expect { described_class.unique_events(event_names: [category_analytics_event, category_productivity_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
end
it "raise error if metrics don't have same aggregation" do
expect { described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
end
context 'when data for the last complete week' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
end
context 'when data for the last 4 complete weeks' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
end
context 'when data for the week 4 weeks ago' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
context 'when using daily aggregation' do
it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
end
context 'when no slot is set' do
it { expect(described_class.unique_events(event_names: no_slot, start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
end
end
end
describe 'unique_events_data' do
let(:known_events) do
[
{ name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
{ name: 'event2_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
{ name: 'event3', category: 'category2', aggregation: "weekly" },
{ name: 'event4', category: 'category2', aggregation: "weekly" }
].map(&:with_indifferent_access)
end
context 'when data for the last complete week' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
before do
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:categories).and_return(%w(category1 category2))
described_class.track_event(entity1, 'event1_slot', 2.days.ago)
described_class.track_event(entity2, 'event2_slot', 2.days.ago)
described_class.track_event(entity3, 'event2_slot', 2.weeks.ago)
# events in different slots
described_class.track_event(entity2, 'event3', 2.days.ago)
described_class.track_event(entity2, 'event4', 2.days.ago)
end
context 'when data for the last 4 complete weeks' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
end
it 'returns the number of unique events for all known events' do
results = {
'category1' => {
'event1_slot' => 1,
'event2_slot' => 1,
'category1_total_unique_counts_weekly' => 2,
'category1_total_unique_counts_monthly' => 3
},
'category2' => {
'event3' => 1,
'event4' => 1
}
}
context 'when data for the week 4 weeks ago' do
it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
context 'when using daily aggregation' do
it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
end
context 'when no slot is set' do
it { expect(described_class.unique_events(event_names: no_slot, start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
expect(subject.unique_events_data).to eq(results)
end
end
end
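Taken together, these examples document the public surface of the counter. As a usage sketch (the visitor identifiers and chosen dates are illustrative; 'g_analytics_contribution' is one of the known weekly events referenced in the specs), the API looks roughly like this:

# Usage sketch of the HLLRedisCounter API covered by the specs above.
counter = Gitlab::UsageDataCounters::HLLRedisCounter

counter.track_event('visitor-uuid-1', 'g_analytics_contribution', 2.days.ago)
counter.track_event('visitor-uuid-2', 'g_analytics_contribution', 2.days.ago)

# Distinct visitors for the event; weekly aggregation only counts complete weeks.
counter.unique_events(
  event_names: 'g_analytics_contribution',
  start_date: 4.weeks.ago,
  end_date: Date.current
)

# Category helpers and the aggregated per-category payload.
counter.categories
counter.events_for_category(:analytics)
counter.unique_events_data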

View file

@@ -1141,6 +1141,24 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
describe 'redis_hll_counters' do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
expect(subject[:redis_hll_counters].keys).to match_array(categories)
categories.each do |category|
keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category) + ["#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly"]
expect(subject[:redis_hll_counters][category].keys).to match_array(keys)
end
end
end
describe '.service_desk_counts' do
subject { described_class.send(:service_desk_counts) }

View file

@@ -3,20 +3,20 @@
require 'spec_helper'
RSpec.describe Pages::LookupPath do
let(:project) do
instance_double(Project,
id: 12345,
private_pages?: true,
pages_https_only?: true,
full_path: 'the/full/path'
)
let_it_be(:project) do
create(:project, :pages_private, pages_https_only: true)
end
subject(:lookup_path) { described_class.new(project) }
before do
stub_pages_setting(access_control: true, external_https: ["1.1.1.1:443"])
stub_artifacts_object_storage
end
describe '#project_id' do
it 'delegates to Project#id' do
expect(lookup_path.project_id).to eq(12345)
expect(lookup_path.project_id).to eq(project.id)
end
end
@@ -47,12 +47,49 @@ RSpec.describe Pages::LookupPath do
end
describe '#source' do
it 'sets the source type to "file"' do
expect(lookup_path.source[:type]).to eq('file')
shared_examples 'uses disk storage' do
it 'sets the source type to "file"' do
expect(lookup_path.source[:type]).to eq('file')
end
it 'sets the source path to the project full path suffixed with "public/' do
expect(lookup_path.source[:path]).to eq(project.full_path + "/public/")
end
end
it 'sets the source path to the project full path suffixed with "public/' do
expect(lookup_path.source[:path]).to eq('the/full/path/public/')
include_examples 'uses disk storage'
context 'when artifact_id from build job is present in pages metadata' do
let(:artifacts_archive) { create(:ci_job_artifact, :zip, :remote_store, project: project) }
before do
project.mark_pages_as_deployed(artifacts_archive: artifacts_archive)
end
it 'sets the source type to "zip"' do
expect(lookup_path.source[:type]).to eq('zip')
end
it 'sets the source path to the artifacts archive URL' do
Timecop.freeze do
expect(lookup_path.source[:path]).to eq(artifacts_archive.file.url(expire_at: 1.day.from_now))
expect(lookup_path.source[:path]).to include("Expires=86400")
end
end
context 'when artifact is not uploaded to object storage' do
let(:artifacts_archive) { create(:ci_job_artifact, :zip) }
include_examples 'uses disk storage'
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(pages_artifacts_archive: false)
end
include_examples 'uses disk storage'
end
end
end
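The examples above describe a fallback chain for Pages::LookupPath#source: serve a ZIP from object storage when an artifacts archive is recorded in the Pages metadata and the pages_artifacts_archive flag is enabled, otherwise fall back to disk. A sketch of that logic, where artifacts_archive_from_pages_metadata and remote_stored? are assumed helper names rather than the actual implementation:

# Hypothetical outline of the #source behaviour implied by the examples above.
def source
  archive = artifacts_archive_from_pages_metadata # assumed helper

  if archive && remote_stored?(archive) && ::Feature.enabled?(:pages_artifacts_archive)
    # Remote archive: serve an expiring object storage URL as a ZIP source.
    { type: 'zip', path: archive.file.url(expire_at: 1.day.from_now) }
  else
    # Disk fallback, also used for local archives or when the flag is off.
    { type: 'file', path: project.full_path + '/public/' }
  end
end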

View file

@@ -0,0 +1,21 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/avoid_uploaded_file_from_params'
RSpec.describe RuboCop::Cop::Gitlab::AvoidUploadedFileFromParams, type: :rubocop do
include CopHelper
subject(:cop) { described_class.new }
context 'UploadedFile.from_params' do
it 'flags its call' do
expect_offense(<<~SOURCE)
UploadedFile.from_params(params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use the `UploadedFile` set by `multipart.rb` instead of calling `UploadedFile.from_params` directly. See https://docs.gitlab.com/ee/development/uploads.html#how-to-add-a-new-upload-route
SOURCE
end
end
end
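The cop under test is added by this commit under rubocop/cop/gitlab/. As an outline only (the node matcher name and class structure below are assumed, not the committed code), a cop producing this offense typically looks like:

# Illustrative outline of a RuboCop cop matching the offense above.
module RuboCop
  module Cop
    module Gitlab
      class AvoidUploadedFileFromParams < RuboCop::Cop::Cop
        MSG = 'Use the `UploadedFile` set by `multipart.rb` instead of calling ' \
              '`UploadedFile.from_params` directly. See ' \
              'https://docs.gitlab.com/ee/development/uploads.html#how-to-add-a-new-upload-route'

        # Matches any `UploadedFile.from_params(...)` call.
        def_node_matcher :uploaded_file_from_params?, <<~PATTERN
          (send (const nil? :UploadedFile) :from_params ...)
        PATTERN

        def on_send(node)
          return unless uploaded_file_from_params?(node)

          add_offense(node)
        end
      end
    end
  end
end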

View file

@@ -48,12 +48,27 @@ RSpec.describe Users::SignupService do
expect(user.reload.setup_for_company).to be(false)
end
it 'returns an error result when setup_for_company is missing' do
result = update_user(user, setup_for_company: '')
context 'when on .com' do
before do
allow(Gitlab).to receive(:com?).and_return(true)
end
expect(user.reload.setup_for_company).not_to be_blank
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq("Setup for company can't be blank")
it 'returns an error result when setup_for_company is missing' do
result = update_user(user, setup_for_company: '')
expect(user.reload.setup_for_company).not_to be_blank
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq("Setup for company can't be blank")
end
end
context 'when not on .com' do
it 'returns success when setup_for_company is blank' do
result = update_user(user, setup_for_company: '')
expect(result).to eq(status: :success)
expect(user.reload.setup_for_company).to be(nil)
end
end
end
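In other words, the presence requirement on setup_for_company now only applies on GitLab.com. Reduced to a sketch (the real Users::SignupService goes through user attribute assignment and validation; the method name and flow below are illustrative):

# Hedged sketch of the conditional behaviour exercised above.
def execute_signup_update(user, params)
  if ::Gitlab.com? && params[:setup_for_company].blank?
    # On .com the field is mandatory, so blank values are rejected up front.
    return { status: :error, message: "Setup for company can't be blank" }
  end

  if user.update(params)
    { status: :success }
  else
    { status: :error, message: user.errors.full_messages.to_sentence }
  end
end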

View file

@@ -14,7 +14,7 @@ RSpec.shared_examples 'packages list' do |check_project_name: false|
end
def package_table_row(index)
page.all("#{packages_table_selector} > [data-qa-selector=\"packages-row\"]")[index].text
page.all("#{packages_table_selector} > [data-qa-selector=\"package_row\"]")[index].text
end
end

View file

@@ -0,0 +1,26 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'registrations/welcome' do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { User.new }
before do
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:in_subscription_flow?).and_return(false)
allow(view).to receive(:in_trial_flow?).and_return(false)
allow(view).to receive(:in_invitation_flow?).and_return(false)
allow(view).to receive(:in_oauth_flow?).and_return(false)
allow(view).to receive(:experiment_enabled?).with(:onboarding_issues).and_return(false)
allow(Gitlab).to receive(:com?).and_return(false)
render
end
subject { rendered }
it { is_expected.not_to have_selector('label[for="user_setup_for_company"]') }
it { is_expected.to have_button('Get started!') }
end