Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-10-14 21:09:20 +00:00
parent 3c6cad91a1
commit db2275b561
115 changed files with 1045 additions and 1958 deletions

View File

@ -60,7 +60,7 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
disable_ddl_transaction!
def up
ApplicationRecord.transaction do
Geo::TrackingBase.transaction do
create_table :cool_widget_registry, id: :bigserial, force: :cascade do |t|
t.bigint :cool_widget_id, null: false
t.datetime_with_timezone :created_at, null: false
@ -269,7 +269,6 @@ That's all of the required database changes.
def pool_repository
nil
end
...
def cool_widget_state
super || build_cool_widget_state
@ -389,14 +388,16 @@ That's all of the required database changes.
```ruby
# frozen_string_literal: true
class Geo::CoolWidgetRegistry < Geo::BaseRegistry
include ::Geo::ReplicableRegistry
include ::Geo::VerifiableRegistry
module Geo
class CoolWidgetRegistry < Geo::BaseRegistry
include ::Geo::ReplicableRegistry
include ::Geo::VerifiableRegistry
MODEL_CLASS = ::CoolWidget
MODEL_FOREIGN_KEY = :cool_widget_id
MODEL_CLASS = ::CoolWidget
MODEL_FOREIGN_KEY = :cool_widget_id
belongs_to :cool_widget, class_name: 'CoolWidget'
belongs_to :cool_widget, class_name: 'CoolWidget'
end
end
```
@ -463,13 +464,13 @@ That's all of the required database changes.
- [ ] Add the following to `spec/factories/cool_widgets.rb`:
```ruby
trait(:verification_succeeded) do
trait :verification_succeeded do
with_file
verification_checksum { 'abc' }
verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
end
trait(:verification_failed) do
trait :verification_failed do
with_file
verification_failure { 'Could not calculate the checksum' }
verification_state { CoolWidget.verification_state_value(:verification_failed) }
@ -507,11 +508,11 @@ That's all of the required database changes.
factory :geo_cool_widget_state, class: 'Geo::CoolWidgetState' do
cool_widget
trait(:checksummed) do
trait :checksummed do
verification_checksum { 'abc' }
end
trait(:checksum_failure) do
trait :checksum_failure do
verification_failure { 'Could not calculate the checksum' }
end
end
@ -561,8 +562,9 @@ The GraphQL API is used by `Admin > Geo > Replication Details` views, and is dir
field :cool_widget_registries, ::Types::Geo::CoolWidgetRegistryType.connection_type,
null: true,
resolver: ::Resolvers::Geo::CoolWidgetRegistriesResolver,
description: 'Find Cool Widget registries on this Geo node',
feature_flag: :geo_cool_widget_replication
description: 'Find Cool Widget registries on this Geo node. '\
'Ignored if `geo_cool_widget_replication` feature flag is disabled.',
alpha: { milestone: '15.5' } # Update the milestone
```
- [ ] Add the new `cool_widget_registries` field name to the `expected_fields` array in `ee/spec/graphql/types/geo/geo_node_type_spec.rb`.
@ -627,13 +629,15 @@ The GraphQL API is used by `Admin > Geo > Replication Details` views, and is dir
module Geo
# rubocop:disable Graphql/AuthorizeTypes because it is included
class CoolWidgetRegistryType < BaseObject
graphql_name 'CoolWidgetRegistry'
include ::Types::Geo::RegistryType
graphql_name 'CoolWidgetRegistry'
description 'Represents the Geo replication and verification state of a cool_widget'
field :cool_widget_id, GraphQL::Types::ID, null: false, description: 'ID of the Cool Widget.'
end
# rubocop:enable Graphql/AuthorizeTypes
end
end
```
@ -717,14 +721,15 @@ As illustrated by the above two examples, batch destroy logic cannot be handled
- [ ] Add a step to `Test replication and verification of Cool Widgets on a non-GDK-deployment. For example, using GitLab Environment Toolkit`.
- [ ] Add a step to `Ping the Geo PM and EM to coordinate testing`. For example, you might add steps to generate Cool Widgets, and then a Geo engineer may take it from there.
- [ ] In `ee/config/feature_flags/development/geo_cool_widget_replication.yml`, set `default_enabled: true`
- [ ] In `ee/app/graphql/types/geo/geo_node_type.rb`, remove the `feature_flag` option for the released type:
- [ ] In `ee/app/graphql/types/geo/geo_node_type.rb`, remove the `alpha` option for the released type:
```ruby
field :cool_widget_registries, ::Types::Geo::CoolWidgetRegistryType.connection_type,
null: true,
resolver: ::Resolvers::Geo::CoolWidgetRegistriesResolver,
description: 'Find Cool Widget registries on this Geo node',
feature_flag: :geo_cool_widget_replication # REMOVE THIS LINE
description: 'Find Cool Widget registries on this Geo node. '\
'Ignored if `geo_cool_widget_replication` feature flag is disabled.',
alpha: { milestone: '15.5' } # Update the milestone
```
- [ ] Run `bundle exec rake gitlab:graphql:compile_docs` after the step above to regenerate the GraphQL docs.

View File

@ -58,11 +58,11 @@ Geo secondary sites have a [Geo tracking database](https://gitlab.com/gitlab-org
```ruby
# frozen_string_literal: true
class CreateCoolWidgetRegistry < Gitlab::Database::Migration[1.0]
class CreateCoolWidgetRegistry < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
def up
ApplicationRecord.transaction do
Geo::TrackingBase.transaction do
create_table :cool_widget_registry, id: :bigserial, force: :cascade do |t|
t.bigint :cool_widget_id, null: false
t.datetime_with_timezone :created_at, null: false
@ -247,7 +247,8 @@ That's all of the required database changes.
# we want to know which records to replicate. This is not easy to automate
# because for example:
#
# * The "selective sync" feature allows admins to choose which namespaces # to replicate, per secondary site. Most Models are scoped to a
# * The "selective sync" feature allows admins to choose which namespaces
# to replicate, per secondary site. Most Models are scoped to a
# namespace, but the nature of the relationship to a namespace varies
# between Models.
# * The "selective sync" feature allows admins to choose which shards to
@ -265,7 +266,6 @@ That's all of the required database changes.
CoolWidgetState
end
end
...
def cool_widget_state
super || build_cool_widget_state
@ -317,7 +317,7 @@ That's all of the required database changes.
end
```
- [ ] Generate the feature flag definition fileы by running the feature flag commands and following the command prompts:
- [ ] Generate the feature flag definition file by running the feature flag commands and following the command prompts:
```shell
bin/feature-flag --ee geo_cool_widget_replication --type development --group 'group::geo'
@ -355,14 +355,16 @@ That's all of the required database changes.
```ruby
# frozen_string_literal: true
class Geo::CoolWidgetRegistry < Geo::BaseRegistry
include ::Geo::ReplicableRegistry
include ::Geo::VerifiableRegistry
module Geo
class CoolWidgetRegistry < Geo::BaseRegistry
include ::Geo::ReplicableRegistry
include ::Geo::VerifiableRegistry
MODEL_CLASS = ::CoolWidget
MODEL_FOREIGN_KEY = :cool_widget_id
MODEL_CLASS = ::CoolWidget
MODEL_FOREIGN_KEY = :cool_widget_id
belongs_to :cool_widget, class_name: 'CoolWidget'
belongs_to :cool_widget, class_name: 'CoolWidget'
end
end
```
@ -429,13 +431,13 @@ That's all of the required database changes.
- [ ] Add the following to `spec/factories/cool_widgets.rb`:
```ruby
trait(:verification_succeeded) do
trait :verification_succeeded do
with_file
verification_checksum { 'abc' }
verification_state { CoolWidget.verification_state_value(:verification_succeeded) }
end
trait(:verification_failed) do
trait :verification_failed do
with_file
verification_failure { 'Could not calculate the checksum' }
verification_state { CoolWidget.verification_state_value(:verification_failed) }
@ -447,6 +449,8 @@ That's all of the required database changes.
- [ ] Following [the example of Merge Request Diffs](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63309) add a `Geo::CoolWidgetState` model in `ee/app/models/ee/geo/cool_widget_state.rb`:
``` ruby
# frozen_string_literal: true
module Geo
class CoolWidgetState < ApplicationRecord
include EachBatch
@ -471,11 +475,11 @@ That's all of the required database changes.
factory :geo_cool_widget_state, class: 'Geo::CoolWidgetState' do
cool_widget
trait(:checksummed) do
trait :checksummed do
verification_checksum { 'abc' }
end
trait(:checksum_failure) do
trait :checksum_failure do
verification_failure { 'Could not calculate the checksum' }
end
end
@ -525,8 +529,9 @@ The GraphQL API is used by `Admin > Geo > Replication Details` views, and is dir
field :cool_widget_registries, ::Types::Geo::CoolWidgetRegistryType.connection_type,
null: true,
resolver: ::Resolvers::Geo::CoolWidgetRegistriesResolver,
description: 'Find Cool Widget registries on this Geo node',
feature_flag: :geo_cool_widget_replication
description: 'Find Cool Widget registries on this Geo node. '\
'Ignored if `geo_cool_widget_replication` feature flag is disabled.',
alpha: { milestone: '15.5' } # Update the milestone
```
- [ ] Add the new `cool_widget_registries` field name to the `expected_fields` array in `ee/spec/graphql/types/geo/geo_node_type_spec.rb`.
@ -591,13 +596,15 @@ The GraphQL API is used by `Admin > Geo > Replication Details` views, and is dir
module Geo
# rubocop:disable Graphql/AuthorizeTypes because it is included
class CoolWidgetRegistryType < BaseObject
graphql_name 'CoolWidgetRegistry'
include ::Types::Geo::RegistryType
graphql_name 'CoolWidgetRegistry'
description 'Represents the Geo replication and verification state of a cool_widget'
field :cool_widget_id, GraphQL::ID_TYPE, null: false, description: 'ID of the Cool Widget'
field :cool_widget_id, GraphQL::Types::ID, null: false, description: 'ID of the Cool Widget.'
end
# rubocop:enable Graphql/AuthorizeTypes
end
end
```
@ -682,14 +689,15 @@ As illustrated by the above two examples, batch destroy logic cannot be handled
- [ ] Add a step to `Test replication and verification of Cool Widgets on a non-GDK-deployment. For example, using GitLab Environment Toolkit`.
- [ ] Add a step to `Ping the Geo PM and EM to coordinate testing`. For example, you might add steps to generate Cool Widgets, and then a Geo engineer may take it from there.
- [ ] In `ee/config/feature_flags/development/geo_cool_widget_replication.yml`, set `default_enabled: true`
- [ ] In `ee/app/graphql/types/geo/geo_node_type.rb`, remove the `feature_flag` option for the released type:
- [ ] In `ee/app/graphql/types/geo/geo_node_type.rb`, remove the `alpha` option for the released type:
```ruby
field :cool_widget_registries, ::Types::Geo::CoolWidgetRegistryType.connection_type,
null: true,
resolver: ::Resolvers::Geo::CoolWidgetRegistriesResolver,
description: 'Find Cool Widget registries on this Geo node',
feature_flag: :geo_cool_widget_replication # REMOVE THIS LINE
description: 'Find Cool Widget registries on this Geo node. '\
'Ignored if `geo_cool_widget_replication` feature flag is disabled.',
alpha: { milestone: '15.5' } # Update the milestone
```
- [ ] Run `bundle exec rake gitlab:graphql:compile_docs` after the step above to regenerate the GraphQL docs.

View File

@ -116,8 +116,7 @@ Style/FrozenStringLiteralComment:
EnforcedStyle: always_true
Style/SpecialGlobalVars:
# https://gitlab.com/gitlab-org/gitlab/-/issues/358427
EnforcedStyle: use_perl_names
EnforcedStyle: use_builtin_english_names
RSpec/FilePath:
Exclude:

View File

@ -0,0 +1,4 @@
---
# Cop supports --auto-correct.
Style/SpecialGlobalVars:
Details: grace period

View File

@ -1 +1 @@
3.0.2
3.0.3

View File

@ -3,7 +3,7 @@
source 'https://rubygems.org'
if ENV['BUNDLER_CHECKSUM_VERIFICATION_OPT_IN'] # this verification is still experimental
$:.unshift(File.expand_path("vendor/gems/bundler-checksum/lib", __dir__))
$LOAD_PATH.unshift(File.expand_path("vendor/gems/bundler-checksum/lib", __dir__))
require 'bundler-checksum'
Bundler::Checksum.patch!
end

View File

@ -1,7 +1,7 @@
import setHighlightClass from 'ee_else_ce/search/highlight_blob_search_result';
import { queryToObject } from '~/lib/utils/url_utility';
import refreshCounts from '~/pages/search/show/refresh_counts';
import { initSidebar } from './sidebar';
import { initSidebar, sidebarInitState } from './sidebar';
import { initSearchSort } from './sort';
import createStore from './store';
import { initTopbar } from './topbar';
@ -9,14 +9,18 @@ import { initBlobRefSwitcher } from './under_topbar';
export const initSearchApp = () => {
const query = queryToObject(window.location.search);
const navigation = sidebarInitState();
const store = createStore({ query });
const store = createStore({ query, navigation });
initTopbar(store);
initSidebar(store);
initSearchSort(store);
setHighlightClass(query.search); // Code Highlighting
refreshCounts(); // Other Scope Tab Counts
initBlobRefSwitcher(); // Code Search Branch Picker
if (!gon.features?.searchPageVerticalNav) {
refreshCounts(); // Other Scope Tab Counts
}
};

View File

@ -17,6 +17,9 @@ export default {
showReset() {
return this.urlQuery.state || this.urlQuery.confidential;
},
showSidebar() {
return this.urlQuery.scope === 'issues' || this.urlQuery.scope === 'merge_requests';
},
},
methods: {
...mapActions(['applyQuery', 'resetQuery']),
@ -29,15 +32,17 @@ export default {
class="search-sidebar gl-display-flex gl-flex-direction-column gl-mr-4 gl-mb-6 gl-mt-5"
@submit.prevent="applyQuery"
>
<status-filter />
<confidentiality-filter />
<div class="gl-display-flex gl-align-items-center gl-mt-3">
<gl-button category="primary" variant="confirm" type="submit" :disabled="!sidebarDirty">
{{ __('Apply') }}
</gl-button>
<gl-link v-if="showReset" class="gl-ml-auto" @click="resetQuery">{{
__('Reset filters')
}}</gl-link>
</div>
<template v-if="showSidebar">
<status-filter />
<confidentiality-filter />
<div class="gl-display-flex gl-align-items-center gl-mt-3">
<gl-button category="primary" variant="confirm" type="submit" :disabled="!sidebarDirty">
{{ __('Apply') }}
</gl-button>
<gl-link v-if="showReset" class="gl-ml-auto" @click="resetQuery">{{
__('Reset filters')
}}</gl-link>
</div>
</template>
</form>
</template>

View File

@ -4,6 +4,15 @@ import GlobalSearchSidebar from './components/app.vue';
Vue.use(Translate);
export const sidebarInitState = () => {
const el = document.getElementById('js-search-sidebar');
if (!el) return {};
const { navigation } = el.dataset;
return JSON.parse(navigation);
};
export const initSidebar = (store) => {
const el = document.getElementById('js-search-sidebar');

View File

@ -1,7 +1,9 @@
<script>
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { s__ } from '~/locale';
import { parseBoolean } from '~/lib/utils/common_utils';
import GroupFilter from './group_filter.vue';
import ProjectFilter from './project_filter.vue';
@ -16,6 +18,7 @@ export default {
GroupFilter,
ProjectFilter,
},
mixins: [glFeatureFlagsMixin()],
props: {
groupInitialData: {
type: Object,
@ -39,7 +42,10 @@ export default {
},
},
showFilters() {
return !this.query.snippets || this.query.snippets === 'false';
return !parseBoolean(this.query.snippets);
},
hasVerticalNav() {
return this.glFeatures.searchPageVerticalNav;
},
},
created() {
@ -52,24 +58,27 @@ export default {
</script>
<template>
<section class="search-page-form gl-lg-display-flex gl-align-items-flex-end">
<div class="gl-flex-grow-1 gl-mb-4 gl-lg-mb-0 gl-lg-mr-2">
<label>{{ $options.i18n.searchLabel }}</label>
<gl-search-box-by-click
id="dashboard_search"
v-model="search"
name="search"
:placeholder="$options.i18n.searchPlaceholder"
@submit="applyQuery"
/>
</div>
<div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
<label class="gl-display-block">{{ __('Group') }}</label>
<group-filter :initial-data="groupInitialData" />
</div>
<div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
<label class="gl-display-block">{{ __('Project') }}</label>
<project-filter :initial-data="projectInitialData" />
<section class="search-page-form gl-lg-display-flex gl-flex-direction-column">
<div class="gl-lg-display-flex gl-flex-direction-row gl-align-items-flex-end">
<div class="gl-flex-grow-1 gl-mb-4 gl-lg-mb-0 gl-lg-mr-2">
<label>{{ $options.i18n.searchLabel }}</label>
<gl-search-box-by-click
id="dashboard_search"
v-model="search"
name="search"
:placeholder="$options.i18n.searchPlaceholder"
@submit="applyQuery"
/>
</div>
<div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
<label class="gl-display-block">{{ __('Group') }}</label>
<group-filter :initial-data="groupInitialData" />
</div>
<div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
<label class="gl-display-block">{{ __('Project') }}</label>
<project-filter :initial-data="projectInitialData" />
</div>
</div>
<hr v-if="hasVerticalNav" class="gl-mt-5 gl-mb-0 gl-border-gray-100" />
</section>
</template>

View File

@ -1,23 +0,0 @@
# frozen_string_literal: true
module Boards
class ApplicationController < ::ApplicationController
respond_to :json
rescue_from ActiveRecord::RecordNotFound, with: :record_not_found
private
def board
@board ||= Board.find(params[:board_id])
end
def board_parent
@board_parent ||= board.resource_parent
end
def record_not_found(exception)
render json: { error: exception.message }, status: :not_found
end
end
end

View File

@ -1,163 +0,0 @@
# frozen_string_literal: true
module Boards
class IssuesController < Boards::ApplicationController
# This is the maximum amount of issues which can be moved by one request to
# bulk_move for now. This is temporary and might be removed in future by
# introducing an alternative (async?) approach.
# (related: https://gitlab.com/groups/gitlab-org/-/epics/382)
MAX_MOVE_ISSUES_COUNT = 50
include BoardsResponses
include ControllerWithCrossProjectAccessCheck
requires_cross_project_access if: -> { board&.group_board? }
before_action :disable_query_limiting, only: [:bulk_move]
before_action :authorize_read_issue, only: [:index]
before_action :authorize_create_issue, only: [:create]
before_action :authorize_update_issue, only: [:update]
skip_before_action :authenticate_user!, only: [:index]
before_action :validate_id_list, only: [:bulk_move]
before_action :can_move_issues?, only: [:bulk_move]
feature_category :team_planning
urgency :low
def index
list_service = Boards::Issues::ListService.new(board_parent, current_user, filter_params)
issues = issues_from(list_service)
::Boards::Issues::ListService.initialize_relative_positions(board, current_user, issues)
render_issues(issues, list_service.metadata)
end
def create
result = Boards::Issues::CreateService.new(board_parent, project, current_user, issue_params).execute
if result.success?
render json: serialize_as_json(result[:issue])
elsif result[:issue]
render json: result[:issue].errors, status: :unprocessable_entity
else
render json: result.errors, status: result.http_status || 422
end
end
def bulk_move
service = Boards::Issues::MoveService.new(board_parent, current_user, move_params(true))
issues = Issue.find(params[:ids])
render json: service.execute_multiple(issues)
end
def update
service = Boards::Issues::MoveService.new(board_parent, current_user, move_params)
if service.execute(issue)
head :ok
else
head :unprocessable_entity
end
end
private
def issues_from(list_service)
issues = list_service.execute
issues.page(params[:page]).per(params[:per] || 20)
.without_count
.preload(associations_to_preload) # rubocop: disable CodeReuse/ActiveRecord
.load
end
def associations_to_preload
[
:milestone,
:assignees,
project: [
:route,
{
namespace: [:route]
}
],
labels: [:priorities],
notes: [:award_emoji, :author]
]
end
def can_move_issues?
head(:forbidden) unless can?(current_user, :admin_issue, board)
end
def serializer_options(issues)
{}
end
def render_issues(issues, metadata)
data = { issues: serialize_as_json(issues, opts: serializer_options(issues)) }
data.merge!(metadata)
render json: data
end
def issue
@issue ||= issues_finder.find(params[:id])
end
def filter_params
params.permit(*Boards::Issues::ListService.valid_params).merge(board_id: params[:board_id], id: params[:list_id])
.reject { |_, value| value.nil? }
end
def issues_finder
if board.group_board?
IssuesFinder.new(current_user, group_id: board_parent.id)
else
IssuesFinder.new(current_user, project_id: board_parent.id)
end
end
def project
@project ||= if board.group_board?
Project.find(issue_params[:project_id])
else
board_parent
end
end
def move_params(multiple = false)
id_param = multiple ? :ids : :id
params.permit(id_param, :board_id, :from_list_id, :to_list_id, :move_before_id, :move_after_id)
end
def issue_params
params.require(:issue)
.permit(:title, :milestone_id, :project_id)
.merge(board_id: params[:board_id], list_id: params[:list_id])
end
def serializer
IssueSerializer.new(current_user: current_user)
end
def serialize_as_json(resource, opts: {})
opts.merge!(include_full_project_path: board.group_board?, serializer: 'board')
serializer.represent(resource, opts)
end
def disable_query_limiting
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/issues/35174')
end
def validate_id_list
head(:bad_request) unless params[:ids].is_a?(Array)
head(:unprocessable_entity) if params[:ids].size > MAX_MOVE_ISSUES_COUNT
end
end
end
Boards::IssuesController.prepend_mod_with('Boards::IssuesController')

View File

@ -1,103 +0,0 @@
# frozen_string_literal: true
module Boards
class ListsController < Boards::ApplicationController
include BoardsResponses
before_action :authorize_admin_list, only: [:create, :destroy, :generate]
before_action :authorize_read_list, only: [:index]
skip_before_action :authenticate_user!, only: [:index]
feature_category :team_planning
urgency :low
def index
lists = Boards::Lists::ListService.new(board.resource_parent, current_user).execute(board)
List.preload_preferences_for_user(lists, current_user)
render json: serialize_as_json(lists)
end
def create
response = Boards::Lists::CreateService.new(board.resource_parent, current_user, create_list_params).execute(board)
if response.success?
render json: serialize_as_json(response.payload[:list])
else
render json: { errors: response.errors }, status: :unprocessable_entity
end
end
def update
list = board.lists.find(params[:id])
service = Boards::Lists::UpdateService.new(board_parent, current_user, update_list_params)
result = service.execute(list)
if result.success?
head :ok
else
head result.http_status
end
end
def destroy
list = board.lists.destroyable.find(params[:id])
service = Boards::Lists::DestroyService.new(board_parent, current_user)
if service.execute(list).success?
head :ok
else
head :unprocessable_entity
end
end
def generate
service = Boards::Lists::GenerateService.new(board_parent, current_user)
if service.execute(board)
lists = board.lists.movable.preload_associated_models
List.preload_preferences_for_user(lists, current_user)
render json: serialize_as_json(lists)
else
head :unprocessable_entity
end
end
private
def list_creation_attrs
%i[label_id]
end
def list_update_attrs
%i[collapsed position]
end
def create_list_params
params.require(:list).permit(list_creation_attrs)
end
def update_list_params
params.require(:list).permit(list_update_attrs)
end
def serialize_as_json(resource)
resource.as_json(serialization_attrs)
end
def serialization_attrs
{
only: [:id, :list_type, :position],
methods: [:title],
label: true,
collapsed: true,
current_user: current_user
}
end
end
end
Boards::ListsController.prepend_mod_with('Boards::ListsController')

View File

@ -5,8 +5,6 @@ module BoardsActions
extend ActiveSupport::Concern
included do
include BoardsResponses
before_action :authorize_read_board!, only: [:index, :show]
before_action :redirect_to_recent_board, only: [:index]
before_action :board, only: [:index, :show]
@ -50,6 +48,24 @@ module BoardsActions
def board_visit_service
Boards::Visits::CreateService
end
def parent
strong_memoize(:parent) do
group? ? group : project
end
end
def board_path(board)
if group?
group_board_path(parent, board)
else
project_board_path(parent, board)
end
end
def group?
instance_variable_defined?(:@group)
end
end
BoardsActions.prepend_mod_with('BoardsActions')

View File

@ -1,65 +0,0 @@
# frozen_string_literal: true
module BoardsResponses
include Gitlab::Utils::StrongMemoize
# Overridden on EE module
def board_params
params.require(:board).permit(:name)
end
def parent
strong_memoize(:parent) do
group? ? group : project
end
end
def boards_path
if group?
group_boards_path(parent)
else
project_boards_path(parent)
end
end
def board_path(board)
if group?
group_board_path(parent, board)
else
project_board_path(parent, board)
end
end
def group?
instance_variable_defined?(:@group)
end
def authorize_read_list
authorize_action_for!(board, :read_issue_board_list)
end
def authorize_read_issue
authorize_action_for!(board, :read_issue)
end
def authorize_update_issue
authorize_action_for!(issue, :admin_issue)
end
def authorize_create_issue
list = List.find(issue_params[:list_id])
action = list.backlog? ? :create_issue : :admin_issue
authorize_action_for!(project, action)
end
def authorize_admin_list
authorize_action_for!(board, :admin_issue_board_list)
end
def authorize_action_for!(resource, ability)
return render_403 unless can?(current_user, ability, resource)
end
end
BoardsResponses.prepend_mod_with('BoardsResponses')

View File

@ -5,7 +5,6 @@ class Groups::BoardsController < Groups::ApplicationController
include RecordUserLastActivity
include Gitlab::Utils::StrongMemoize
before_action :assign_endpoint_vars
before_action do
push_frontend_feature_flag(:board_multi_select, group)
push_frontend_feature_flag(:realtime_labels, group)
@ -32,10 +31,6 @@ class Groups::BoardsController < Groups::ApplicationController
end
end
def assign_endpoint_vars
@boards_endpoint = group_boards_path(group)
end
def authorize_read_board!
access_denied! unless can?(current_user, :read_issue_board, group)
end

View File

@ -113,7 +113,7 @@ class Import::GithubController < Import::BaseController
end
def permitted_import_params
[:repo_id, :new_name, :target_namespace]
[:repo_id, :new_name, :target_namespace, { optional_stages: {} }]
end
def serialized_imported_projects(projects = already_added_projects)

View File

@ -5,7 +5,6 @@ class Projects::BoardsController < Projects::ApplicationController
include IssuableCollections
before_action :check_issues_available!
before_action :assign_endpoint_vars
before_action do
push_frontend_feature_flag(:board_multi_select, project)
push_frontend_feature_flag(:realtime_labels, project&.group)
@ -32,11 +31,6 @@ class Projects::BoardsController < Projects::ApplicationController
end
end
def assign_endpoint_vars
@boards_endpoint = project_boards_path(project)
@bulk_issues_path = bulk_update_project_issues_path(project)
end
def authorize_read_board!
access_denied! unless can?(current_user, :read_issue_board, project)
end

View File

@ -7,13 +7,10 @@ module BoardsHelper
def board_data
{
boards_endpoint: @boards_endpoint,
lists_endpoint: board_lists_path(board),
board_id: board.id,
disabled: board.disabled_for?(current_user).to_s,
root_path: root_path,
full_path: full_path,
bulk_update_path: @bulk_issues_path,
can_update: can_update?.to_s,
can_admin_list: can_admin_list?.to_s,
can_admin_board: can_admin_board?.to_s,
@ -94,14 +91,6 @@ module BoardsHelper
!multiple_boards_available? && current_board_parent.boards.size > 1
end
def current_board_path(board)
@current_board_path ||= if board.group_board?
group_board_path(current_board_parent, board)
else
project_board_path(current_board_parent, board)
end
end
def current_board_parent
@current_board_parent ||= @group || @project
end
@ -121,10 +110,6 @@ module BoardsHelper
def can_admin_board?
can?(current_user, :admin_issue_board, current_board_parent)
end
def can_admin_issue?
can?(current_user, :admin_issue, current_board_parent)
end
end
BoardsHelper.prepend_mod_with('BoardsHelper')

View File

@ -3,6 +3,10 @@
module IssuesHelper
include Issues::IssueTypeHelpers
def can_admin_issue?
can?(current_user, :admin_issue, @group || @project)
end
def issue_css_classes(issue)
classes = ["issue"]
classes << "closed" if issue.closed?

View File

@ -381,11 +381,12 @@ module SearchHelper
end
def search_filter_link_json(scope, label, data, search)
search_params = params.merge(search).merge({ scope: scope }).permit(SEARCH_GENERIC_PARAMS)
active_scope = @scope == scope
scope_name = scope.to_s
search_params = params.merge(search).merge({ scope: scope_name }).permit(SEARCH_GENERIC_PARAMS)
active_scope = @scope == scope_name
result = { label: label, scope: scope, data: data, link: search_path(search_params), active: active_scope }
result[:count] = @search_results.formatted_count(scope) if active_scope && !@timeout
result = { label: label, scope: scope_name, data: data, link: search_path(search_params), active: active_scope }
result[:count] = @search_results.formatted_count(scope_name) if active_scope && !@timeout
result[:count_link] = search_count_path(search_params) unless active_scope
result
@ -408,11 +409,9 @@ module SearchHelper
end
def search_navigation_json
result = {}
search_navigation.each do |scope, nav|
result[scope] = search_filter_link_json(scope.to_s, nav[:label], nav[:data], nav[:search]) if nav[:condition]
end
result.to_json
search_navigation.each_with_object({}) do |(key, value), hash|
hash[key] = search_filter_link_json(key, value[:label], value[:data], value[:search]) if value[:condition]
end.to_json
end
def search_filter_input_options(type, placeholder = _('Search or filter results...'))

View File

@ -1286,6 +1286,8 @@ class Project < ApplicationRecord
valid?(:import_url) || errors.messages[:import_url].nil?
end
# TODO: rename to build_or_assign_import_data as it doesn't save record
# https://gitlab.com/gitlab-org/gitlab/-/issues/377319
def create_or_update_import_data(data: nil, credentials: nil)
return if data.nil? && credentials.nil?

View File

@ -1,39 +0,0 @@
# frozen_string_literal: true
module Boards
module Lists
class GenerateService < Boards::BaseService
def execute(board)
return false unless board.lists.movable.empty?
List.transaction do
label_params.each do |params|
response = create_list(board, params)
raise ActiveRecord::Rollback unless response.success?
end
end
true
end
private
def create_list(board, params)
label = find_or_create_label(params)
Lists::CreateService.new(parent, current_user, label_id: label.id).execute(board)
end
def find_or_create_label(params)
::Labels::FindOrCreateService.new(current_user, parent, params).execute
end
def label_params
[
{ name: 'To Do', color: '#F0AD4E' },
{ name: 'Doing', color: '#5CB85C' }
]
end
end
end
end

View File

@ -9,21 +9,13 @@ module Import
attr_reader :params, :current_user
def execute(access_params, provider)
if blocked_url?
return log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request)
end
unless authorized?
return error(_('This namespace has already been taken! Please choose another one.'), :unprocessable_entity)
end
if oversized?
return error(oversize_error_message, :unprocessable_entity)
end
context_error = validate_context
return context_error if context_error
project = create_project(access_params, provider)
if project.persisted?
store_import_settings(project)
success(project)
elsif project.errors[:import_source_disabled].present?
error(project.errors[:import_source_disabled], :forbidden)
@ -108,6 +100,16 @@ module Import
private
def validate_context
if blocked_url?
log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request)
elsif !authorized?
error(_('This namespace has already been taken. Choose a different one.'), :unprocessable_entity)
elsif oversized?
error(oversize_error_message, :unprocessable_entity)
end
end
def log_error(exception)
Gitlab::GithubImport::Logger.error(
message: 'Import failed due to a GitHub error',
@ -126,6 +128,10 @@ module Import
error(translated_message, http_status)
end
def store_import_settings(project)
Gitlab::GithubImport::Settings.new(project).write(params[:optional_stages])
end
end
end

View File

@ -7,4 +7,7 @@
- paginatable = Feature.enabled?(:remove_legacy_github_client)
= render 'import/githubish_status', provider: 'github', paginatable: paginatable, default_namespace: @namespace
= render 'import/githubish_status',
provider: 'github', paginatable: paginatable,
default_namespace: @namespace,
optional_stages: Gitlab::GithubImport::Settings.stages_array

View File

@ -1,26 +1,19 @@
- search_bar_classes = 'search-sidebar gl-display-flex gl-flex-direction-column gl-mr-4'
= render_if_exists 'shared/promotions/promote_advanced_search'
= render partial: 'search/results_status', locals: { search_service: @search_service } unless @search_objects.to_a.empty?
- if Feature.enabled?(:search_page_vertical_nav, current_user) && %w[issues merge_requests].include?(@scope)
.results.gl-md-display-flex.gl-mt-0
#js-search-sidebar{ class: search_bar_classes, data: { navigation: search_navigation_json } }
.gl-w-full.gl-flex-grow-1.gl-overflow-x-hidden
= render partial: 'search/results_status', locals: { search_service: @search_service } unless @search_objects.to_a.empty?
= render partial: 'search/results_list'
.results.gl-md-display-flex.gl-mt-3
- if %w[issues merge_requests].include?(@scope)
#js-search-sidebar{ class: search_bar_classes }
.gl-w-full.gl-flex-grow-1.gl-overflow-x-hidden
- if @timeout
= render partial: "search/results/timeout"
- elsif @search_objects.to_a.empty?
= render partial: "search/results/empty"
- else
- if @scope == 'commits'
%ul.content-list.commit-list
= render partial: "search/results/commit", collection: @search_objects
- else
.search-results
- if @scope == 'projects'
.term
= render 'shared/projects/list', projects: @search_objects, pipeline_status: false
- else
= render_if_exists partial: "search/results/#{@scope.singularize}", collection: @search_objects
- else
= render partial: 'search/results_status', locals: { search_service: @search_service } unless @search_objects.to_a.empty?
- if @scope != 'projects'
= paginate_collection(@search_objects)
.results.gl-md-display-flex.gl-mt-3
- if %w[issues merge_requests].include?(@scope)
#js-search-sidebar{ class: search_bar_classes, data: { navigation: search_navigation_json } }
.gl-w-full.gl-flex-grow-1.gl-overflow-x-hidden
= render partial: 'search/results_list'

View File

@ -0,0 +1,18 @@
- if @timeout
= render partial: "search/results/timeout"
- elsif @search_objects.to_a.empty?
= render partial: "search/results/empty"
- else
- if @scope == 'commits'
%ul.content-list.commit-list
= render partial: "search/results/commit", collection: @search_objects
- else
.search-results
- if @scope == 'projects'
.term
= render 'shared/projects/list', projects: @search_objects, pipeline_status: false
- else
= render_if_exists partial: "search/results/#{@scope.singularize}", collection: @search_objects
- if @scope != 'projects'
= paginate_collection(@search_objects)

View File

@ -2,24 +2,8 @@
- return unless search_service.show_results_status?
.search-results-status
.row-content-block.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
- unless search_service.without_count?
= search_entries_info(search_service.search_objects, search_service.scope, params[:search])
- unless search_service.show_snippets?
- if search_service.project
- link_to_project = link_to(search_service.project.full_name, search_service.project, class: 'ml-md-1')
- if search_service.scope == 'blobs'
= _("in")
.mx-md-1
#js-blob-ref-switcher{ data: { "project-id" => search_service.project.id, "ref" => repository_ref(search_service.project), "field-name": "repository_ref" } }
= s_('SearchCodeResults|of %{link_to_project}').html_safe % { link_to_project: link_to_project }
- else
= _("in project %{link_to_project}").html_safe % { link_to_project: link_to_project }
- elsif search_service.group
- link_to_group = link_to(search_service.group.name, search_service.group, class: 'ml-md-1')
= _("in group %{link_to_group}").html_safe % { link_to_group: link_to_group }
- if search_service.show_sort_dropdown?
.gl-md-display-flex.gl-flex-direction-column
#js-search-sort{ data: { "search-sort-options" => search_sort_options.to_json } }
- if Feature.enabled?(:search_page_vertical_nav, current_user)
= render partial: 'search/results_status_vert_nav', locals: { search_service: @search_service }
- else
= render partial: 'search/results_status_horiz_nav', locals: { search_service: @search_service }

View File

@ -0,0 +1,22 @@
.search-results-status
.row-content-block.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
- unless search_service.without_count?
= search_entries_info(search_service.search_objects, search_service.scope, params[:search])
- unless search_service.show_snippets?
- if search_service.project
- link_to_project = link_to(search_service.project.full_name, search_service.project, class: 'ml-md-1')
- if search_service.scope == 'blobs'
= _("in")
.mx-md-1
#js-blob-ref-switcher{ data: { "project-id" => search_service.project.id, "ref" => repository_ref(search_service.project), "field-name": "repository_ref" } }
= s_('SearchCodeResults|of %{link_to_project}').html_safe % { link_to_project: link_to_project }
- else
= _("in project %{link_to_project}").html_safe % { link_to_project: link_to_project }
- elsif search_service.group
- link_to_group = link_to(search_service.group.name, search_service.group, class: 'ml-md-1')
= _("in group %{link_to_group}").html_safe % { link_to_group: link_to_group }
- if search_service.show_sort_dropdown?
.gl-md-display-flex.gl-flex-direction-column
#js-search-sort{ data: { "search-sort-options" => search_sort_options.to_json } }

View File

@ -0,0 +1,23 @@
.search-results-status
.gl-display-flex.gl-flex-direction-column
.gl-p-5.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
- unless search_service.without_count?
= search_entries_info(search_service.search_objects, search_service.scope, params[:search])
- unless search_service.show_snippets?
- if search_service.project
- link_to_project = link_to(search_service.project.full_name, search_service.project, class: 'ml-md-1')
- if search_service.scope == 'blobs'
= _("in")
.mx-md-1
#js-blob-ref-switcher{ data: { "project-id" => search_service.project.id, "ref" => repository_ref(search_service.project), "field-name": "repository_ref" } }
= s_('SearchCodeResults|of %{link_to_project}').html_safe % { link_to_project: link_to_project }
- else
= _("in project %{link_to_project}").html_safe % { link_to_project: link_to_project }
- elsif search_service.group
- link_to_group = link_to(search_service.group.name, search_service.group, class: 'ml-md-1')
= _("in group %{link_to_group}").html_safe % { link_to_group: link_to_group }
- if search_service.show_sort_dropdown?
.gl-md-display-flex.gl-flex-direction-column
#js-search-sort{ data: { "search-sort-options" => search_sort_options.to_json } }
%hr.gl-mb-5.gl-mt-0.gl-border-gray-100.gl-w-full

View File

@ -22,5 +22,6 @@
.gl-mt-3
#js-search-topbar{ data: { "group-initial-data": group_attributes.to_json, "project-initial-data": project_attributes.to_json } }
- if @search_term
= render 'search/category'
- if Feature.disabled?(:search_page_vertical_nav, current_user)
= render 'search/category'
= render 'search/results'

View File

@ -63,6 +63,10 @@ module Gitlab
import_stage: self.class.name
)
end
# Wraps +project+ in a GithubImport::Settings accessor; stage workers use
# it to check whether an optional import step is enabled for the project.
def import_settings(project)
  Gitlab::GithubImport::Settings.new(project)
end
end
end
end

View File

@ -53,7 +53,7 @@ module Gitlab
end
def feature_disabled?(project)
Feature.disabled?(:github_importer_attachments_import, project.group, type: :ops)
import_settings(project).disabled?(:attachments_import)
end
end
end

View File

@ -15,9 +15,9 @@ module Gitlab
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
importer = importer_class(project)
return skip_to_next_stage(project) if importer.nil?
return skip_to_next_stage(project) if import_settings(project).disabled?(:single_endpoint_issue_events_import)
importer = ::Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
info(project.id, message: "starting importer", importer: importer.name)
waiter = importer.new(project, client).execute
move_to_next_stage(project, { waiter.key => waiter.jobs_remaining })
@ -25,16 +25,6 @@ module Gitlab
private
# Picks the issue-events importer class based on the ops feature flags.
# Returns nil when neither flag is enabled, which signals the caller to
# skip this stage. The trailing `else nil` was redundant: a Ruby `if`
# expression already evaluates to nil when no branch matches.
def importer_class(project)
  if Feature.enabled?(:github_importer_single_endpoint_issue_events_import, project.group, type: :ops)
    ::Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
  elsif Feature.enabled?(:github_importer_issue_events_import, project.group, type: :ops)
    ::Gitlab::GithubImport::Importer::IssueEventsImporter
  end
end
def skip_to_next_stage(project)
info(project.id, message: "skipping importer", importer: "IssueEventsImporter")
move_to_next_stage(project)

View File

@ -36,7 +36,7 @@ module Gitlab
private
def diff_notes_importer(project)
if project.group.present? && Feature.enabled?(:github_importer_single_endpoint_notes_import, project.group, type: :ops)
if import_settings(project).enabled?(:single_endpoint_notes_import)
Importer::SingleEndpointDiffNotesImporter
else
Importer::DiffNotesImporter

View File

@ -25,7 +25,7 @@ module Gitlab
end
def importers(project)
if project.group.present? && Feature.enabled?(:github_importer_single_endpoint_notes_import, project.group, type: :ops)
if import_settings(project).enabled?(:single_endpoint_notes_import)
[
Importer::SingleEndpointMergeRequestNotesImporter,
Importer::SingleEndpointIssueNotesImporter

View File

@ -1,7 +1,7 @@
---
name: search_page_vertical_nav
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97784
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342621
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/373613
milestone: '15.5'
type: development
group: group::global search

View File

@ -1,8 +0,0 @@
---
name: github_importer_attachments_import
introduced_by_url:
rollout_issue_url:
milestone: '15.4'
type: ops
group: group::import
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: github_importer_issue_events_import
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/89134
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365977
milestone: '15.3'
type: ops
group: group::import
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: github_importer_single_endpoint_issue_events_import
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/89134
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365977
milestone: '15.3'
type: ops
group: group::import
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: github_importer_single_endpoint_notes_import
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67150
rollout_issue_url:
milestone: '14.2'
type: ops
group: group::import
default_enabled: false

View File

@ -136,28 +136,6 @@ InitializerConnections.with_disabled_database_connections do
get 'runner_setup/platforms' => 'runner_setup#platforms'
# Boards resources shared between group and projects
resources :boards, only: [] do
resources :lists, module: :boards, only: [:index, :create, :update, :destroy] do
collection do
post :generate
end
resources :issues, only: [:index, :create, :update]
end
resources :issues, module: :boards, only: [:index, :update] do
collection do
put :bulk_move, format: :json
end
end
Gitlab.ee do
resources :users, module: :boards, only: [:index]
resources :milestones, module: :boards, only: [:index]
end
end
get 'acme-challenge/' => 'acme_challenges#show'
scope :ide, as: :ide, format: false do

View File

@ -10,6 +10,12 @@ Review this page for upgrade instructions for your version. These steps
accompany the [general steps](upgrading_the_geo_sites.md#general-upgrade-steps)
for upgrading Geo sites.
## Upgrading to 15.1
[Geo proxying](../secondary_proxy/index.md) was [enabled by default for different URLs](https://gitlab.com/gitlab-org/gitlab/-/issues/346112) in 15.1. This may be a breaking change. If needed, you may [disable Geo proxying](../secondary_proxy/index.md#disable-geo-proxying).
If you are using SAML with different URLs, there is a [known issue which requires proxying to be disabled](https://gitlab.com/gitlab-org/gitlab/-/issues/377372).
## Upgrading to 14.9
**Do not** upgrade to GitLab 14.9.0. Instead, use 14.9.1 or later.
@ -33,6 +39,10 @@ results in a loop that consistently fails for all objects stored in object stora
For information on how to fix this, see
[Troubleshooting - Failed syncs with GitLab-managed object storage replication](troubleshooting.md#failed-syncs-with-gitlab-managed-object-storage-replication).
## Upgrading to 14.6
[Geo proxying](../secondary_proxy/index.md) was [enabled by default for unified URLs](https://gitlab.com/gitlab-org/gitlab/-/issues/325732) in 14.6. This may be a breaking change. If needed, you may [disable Geo proxying](../secondary_proxy/index.md#disable-geo-proxying).
## Upgrading to 14.4
There is [an issue in GitLab 14.4.0 through 14.4.2](../../../update/index.md#1440) that can affect Geo and other features that rely on cronjobs. We recommend upgrading to GitLab 14.4.3 or later.

View File

@ -45,7 +45,7 @@ you're ready to enable the Mailgun integration:
1. Sign in to GitLab as an [Administrator](../../user/permissions.md) user.
1. On the top bar, select **Main menu >** **{admin}** **Admin**.
1. On the left sidebar, go to **Settings > General** and expand the **Mailgun** section.
1. Select the **Enable Mailgun** check box.
1. Select the **Enable Mailgun** checkbox.
1. Enter the Mailgun HTTP webhook signing key as described in
[the Mailgun documentation](https://documentation.mailgun.com/en/latest/user_manual.html#webhooks-1) and
shown in the [API security](https://app.mailgun.com/app/account/security/api_keys) section for your Mailgun account.

View File

@ -14,13 +14,14 @@ Import your projects from GitHub to GitLab via the API.
POST /import/github
```
| Attribute | Type | Required | Description |
|------------|---------|----------|---------------------|
| `personal_access_token` | string | yes | GitHub personal access token |
| `repo_id` | integer | yes | GitHub repository ID |
| `new_name` | string | no | New repository name |
| `target_namespace` | string | yes | Namespace to import repository into. Supports subgroups like `/namespace/subgroup`. |
| `github_hostname` | string | no | Custom GitHub Enterprise hostname. Do not set for GitHub.com. |
| Attribute | Type | Required | Description |
|-------------------------|---------|----------|-------------------------------------------------------------------------------------|
| `personal_access_token` | string | yes | GitHub personal access token |
| `repo_id` | integer | yes | GitHub repository ID |
| `new_name` | string | no | New repository name |
| `target_namespace` | string | yes | Namespace to import repository into. Supports subgroups like `/namespace/subgroup`. |
| `github_hostname` | string | no | Custom GitHub Enterprise hostname. Do not set for GitHub.com. |
| `optional_stages` | object | no | [Additional items to import](../user/project/import/github.md#select-additional-items-to-import)|
```shell
curl --request POST \
@ -32,10 +33,23 @@ curl --request POST \
"repo_id": "12345",
"target_namespace": "group/subgroup",
"new_name": "NEW-NAME",
"github_hostname": "https://github.example.com"
"github_hostname": "https://github.example.com",
"optional_stages": {
"single_endpoint_issue_events_import": true,
"single_endpoint_notes_import": true,
"attachments_import": true
}
}'
```
The following keys are available for `optional_stages`:
- `single_endpoint_issue_events_import`, for issue and pull request events import.
- `single_endpoint_notes_import`, for an alternative, more thorough import of comments.
- `attachments_import`, for Markdown attachments import.
For more information, see [Select additional items to import](../user/project/import/github.md#select-additional-items-to-import).
Example response:
```json

View File

@ -91,7 +91,7 @@ In GitLab 13.5 and earlier, there is only one checkbox, named
**Enable merge trains and pipelines for merged results**.
WARNING:
If you select the check box but don't configure your CI/CD to use
If you select the checkbox but don't configure your CI/CD to use
merge request pipelines, your merge requests may become stuck in an
unresolved state or your pipelines may be dropped.

View File

@ -75,9 +75,9 @@ NOTE:
Reviewer roulette is an internal tool for use on GitLab.com, and not available for use on customer installations.
The [Danger bot](dangerbot.md) randomly picks a reviewer and a maintainer for
each area of the codebase that your merge request seems to touch. It only makes
**recommendations** and you should override it if you think someone else is a better
fit!
each area of the codebase that your merge request seems to touch. It makes
**recommendations** for developer reviewers and you should override it if you think someone else is a better
fit. User-facing changes are required to have a UX review, too. Default to the recommended UX reviewer suggested.
It picks reviewers and maintainers from the list at the
[engineering projects](https://about.gitlab.com/handbook/engineering/projects/)

View File

@ -376,22 +376,28 @@ Enforce foreign keys including the partitioning key column. For example, in a ra
class PrepareForeignKeyForPartitioning < Gitlab::Database::Migration[2.0]
disable_ddl_transaction!
REFERENCED_TABLE_NAME = :references_table_name
FOREIGN_KEY_COLUMN = :foreign_key_id
FOREIGN_KEY_NAME = :fk_365d1db505_p
TABLE_NAME = :table_name
SOURCE_TABLE_NAME = :source_table_name
TARGET_TABLE_NAME = :target_table_name
COLUMN = :foreign_key_id
TARGET_COLUMN = :id
CONSTRAINT_NAME = :fk_365d1db505_p
PARTITION_COLUMN = :partition_id
def up
execute("ALTER TABLE #{REFERENCED_TABLE_NAME} ADD CONSTRAINT #{FOREIGN_KEY_NAME} " \
"FOREIGN KEY (#{FOREIGN_KEY_COLUMN}, #{PARTITION_COLUMN}) " \
"REFERENCES #{TABLE_NAME}(id, #{PARTITION_COLUMN}) ON DELETE CASCADE NOT VALID")
add_concurrent_foreign_key(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
column: [PARTITION_COLUMN, COLUMN],
target_column: [PARTITION_COLUMN, TARGET_COLUMN],
validate: false,
name: CONSTRAINT_NAME
)
execute("ALTER TABLE #{TABLE_NAME} VALIDATE CONSTRAINT #{FOREIGN_KEY_NAME}")
validate_foreign_key(TARGET_TABLE_NAME, CONSTRAINT_NAME)
end
def down
execute("ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT #{FOREIGN_KEY_NAME}")
drop_constraint(TARGET_TABLE_NAME, CONSTRAINT_NAME)
end
end
```

View File

@ -114,6 +114,9 @@ GitHub are stored in a single table. Therefore, they have globally-unique IDs an
Therefore, both issues and pull requests have a common API for most related things.
NOTE:
This stage is optional and can consume significant extra import time (controlled by `Gitlab::GithubImport::Settings`).
### 9. Stage::ImportNotesWorker
This worker imports regular comments for both issues and pull requests. For
@ -139,6 +142,9 @@ Each job:
1. Downloads the attachment.
1. Replaces the old link with a newly-generated link to GitLab.
NOTE:
It's an optional stage that could consume significant extra import time (controlled by `Gitlab::GithubImport::Settings`).
### 11. Stage::ImportProtectedBranchesWorker
This worker imports protected branch rules.

View File

@ -37,7 +37,7 @@ To override the general user and IP rate limits for requests to deprecated API e
1. On the top bar, select **Main menu > Admin**.
1. On the left sidebar, select **Settings > Network**.
1. Expand **Deprecated API Rate Limits**.
1. Select the check boxes for the types of rate limits you want to enable:
1. Select the checkboxes for the types of rate limits you want to enable:
- **Unauthenticated API request rate limit**
- **Authenticated API request rate limit**
1. _If you enabled unauthenticated API request rate limits:_

View File

@ -33,7 +33,7 @@ To override the general user and IP rate limits for requests to the Repository f
1. On the top bar, select **Main menu > Admin**.
1. On the left sidebar, select **Settings > Network**.
1. Expand **Files API Rate Limits**.
1. Select the check boxes for the types of rate limits you want to enable:
1. Select the checkboxes for the types of rate limits you want to enable:
- **Unauthenticated API request rate limit**
- **Authenticated API request rate limit**
1. _If you enabled unauthenticated API request rate limits:_

View File

@ -187,7 +187,12 @@ table.supported-languages ul {
<td>Go</td>
<td>All versions</td>
<td><a href="https://go.dev/">Go</a></td>
<td><code>go.sum</code></td>
<td>
<ul>
<li><code>go.mod</code></li>
<li><code>go.sum</code></li>
</ul>
</td>
<td>Y</td>
</tr>
<tr>
@ -353,12 +358,24 @@ The following package managers use lockfiles that GitLab analyzers are capable o
| Bundler | Not applicable | [1.17.3](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/ruby-bundler/default/Gemfile.lock#L118), [2.1.4](https://gitlab.com/gitlab-org/security-products/tests/ruby-bundler/-/blob/bundler2-FREEZE/Gemfile.lock#L118) |
| Composer | Not applicable | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/php-composer/default/composer.lock) |
| Conan | 0.4 | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/c-conan/default/conan.lock) |
| Go | Not applicable | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/go-modules/default/go.sum) |
| Go | Not applicable | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/go-modules/default/go.sum) <sup><strong><a href="#notes-regarding-parsing-lockfiles-1">1</a></strong></sup> |
| NuGet | v1 | [4.9](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/csharp-nuget-dotnetcore/default/src/web.api/packages.lock.json#L2) |
| npm | v1, v2 | [6.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/js-npm/default/package-lock.json#L4), [7.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/js-npm/lockfileVersion2/package-lock.json#L4) |
| yarn | v1 | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/master/qa/fixtures/js-yarn/default/yarn.lock#L2) |
| Poetry | v1 | [1.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v3/qa/fixtures/python-poetry/default/poetry.lock) |
<!-- markdownlint-disable MD044 -->
<ol>
<li>
<a id="notes-regarding-parsing-lockfiles-1"></a>
<p>
Dependency Scanning will only parse <code>go.sum</code> if it's unable to generate the build list
used by the Go project.
</p>
</li>
</ol>
<!-- markdownlint-enable MD044 -->
#### Obtaining dependency information by running a package manager to generate a parsable file
To support the following package managers, the GitLab analyzers proceed in two steps:
@ -374,6 +391,7 @@ To support the following package managers, the GitLab analyzers proceed in two s
| setuptools | [50.3.2](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/v2.29.9/Dockerfile#L27) | [57.5.0](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.22.0/spec/image_spec.rb#L224-247) |
| pip | [20.2.4](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/v2.29.9/Dockerfile#L26) | [20.x](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.22.0/spec/image_spec.rb#L77-91) |
| Pipenv | [2018.11.26](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.18.4/requirements.txt#L13) | [2018.11.26](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.22.0/spec/image_spec.rb#L168-191)<sup><b><a href="#exported-dependency-information-notes-3">3</a></b></sup>, [2018.11.26](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-python/-/blob/v2.22.0/spec/image_spec.rb#L143-166) |
| Go | [1.17](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/7dc7a892b564abfcb160189f46b2ae6415e0dffa/build/gemnasium/alpine/Dockerfile#L88-91) | [1.17](https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium/-/blob/7dc7a892b564abfcb160189f46b2ae6415e0dffa/build/gemnasium/alpine/Dockerfile#L88-91)<sup><strong><a href="#exported-dependency-information-notes-4">4</a></strong></sup> |
<!-- markdownlint-disable MD044 -->
<ol>
@ -416,6 +434,13 @@ To support the following package managers, the GitLab analyzers proceed in two s
This test confirms that if a <code>Pipfile.lock</code> file is found, it will be used by <a href="https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium">Gemnasium</a> to scan the exact package versions listed in this file.
</p>
</li>
<li>
<a id="exported-dependency-information-notes-4"></a>
<p>
Because of the implementation of <code>go build</code>, the Go build process requires network access, a pre-loaded modcache via <code>go mod download</code>, or vendored dependencies. For more information,
refer to the Go documentation on <a href="https://pkg.go.dev/cmd/go#hdr-Compile_packages_and_dependencies">compiling packages and dependencies</a>.
</p>
</li>
</ol>
<!-- markdownlint-enable MD044 -->
@ -474,6 +499,12 @@ The following analyzers are executed, each of which have different behavior when
From GitLab 14.8 the `gemnasium` analyzer scans supported JavaScript projects for vendored libraries
(that is, those checked into the project but not managed by the package manager).
#### Go
When scanning a Go project, gemnasium invokes a builder and attempts to generate a [build list](https://go.dev/ref/mod#glos-build-list) using
[Minimal Version Selection](https://go.dev/ref/mod#glos-minimal-version-selection). If a non-fatal error is encountered, the build process signals
that the execution should proceed and falls back to parsing the available `go.sum` file.
#### PHP, Go, C, C++, .NET, C&#35;, Ruby, JavaScript
The analyzer for these languages supports multiple lockfiles.
@ -621,6 +652,10 @@ The following variables are used for configuring specific analyzers (used for a
| `DS_PIP_VERSION` | `gemnasium-python` | | Force the install of a specific pip version (example: `"19.3"`), otherwise the pip installed in the Docker image is used. ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12811) in GitLab 12.7) |
| `DS_PIP_DEPENDENCY_PATH` | `gemnasium-python` | | Path to load Python pip dependencies from. ([Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/12412) in GitLab 12.2) |
| `DS_INCLUDE_DEV_DEPENDENCIES` | `gemnasium` | `"true"` | When set to `"false"`, development dependencies and their vulnerabilities are not reported. Only NPM and Poetry projects are supported. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/227861) in GitLab 15.1. |
| `GOOS` | `gemnasium` | `"linux"` | The operating system for which to compile Go code. |
| `GOARCH` | `gemnasium` | `"amd64"` | The architecture of the processor for which to compile Go code. |
| `GOFLAGS` | `gemnasium` | | The flags passed to the `go build` tool. |
| `GOPRIVATE` | `gemnasium` | | A list of glob patterns and prefixes to be fetched from source. Read the Go private modules [documentation](https://go.dev/ref/mod#private-modules) for more information. |
#### Other variables
@ -1279,3 +1314,40 @@ gemnasium-python-dependency_scanning:
### Error: Project has `<number>` unresolved dependencies
The error message `Project has <number> unresolved dependencies` indicates a dependency resolution problem caused by your `gradle.build` or `gradle.build.kts` file. In the current release, `gemnasium-maven` cannot continue processing when an unresolved dependency is encountered. However, There is an [open issue](https://gitlab.com/gitlab-org/gitlab/-/issues/337083) to allow `gemnasium-maven` to recover from unresolved dependency errors and produce a dependency graph. Until this issue has been resolved, you'll need to consult the [Gradle dependency resolution docs](https://docs.gradle.org/current/userguide/dependency_resolution.html) for details on how to fix your `gradle.build` file.
### Setting build constraints when scanning Go projects
Dependency scanning runs within a `linux/amd64` container. As a result, the build list generated
for a Go project will contain dependencies that are compatible with this environment. If your deployment environment is not
`linux/amd64`, the final list of dependencies might contain additional incompatible
modules. The dependency list might also omit modules that are only compatible with your deployment environment. To prevent
this issue, you can configure the build process to target the operating system and architecture of the deployment
environment by setting the `GOOS` and `GOARCH` [environment variables](https://go.dev/ref/mod#minimal-version-selection)
of your `.gitlab-ci.yml` file.
For example:
```yaml
variables:
GOOS: "darwin"
GOARCH: "arm64"
```
You can also supply build tag constraints by using the `GOFLAGS` variable:
```yaml
variables:
GOFLAGS: "-tags=test_feature"
```
### Dependency Scanning of Go projects returns false positives
The `go.sum` file contains an entry of every module that was considered while generating the project's [build list](https://go.dev/ref/mod#glos-build-list).
Multiple versions of a module are included in the `go.sum` file, but the [MVS](https://go.dev/ref/mod#minimal-version-selection)
algorithm used by `go build` only selects one. As a result, when dependency scanning uses `go.sum`, it might report false positives.
To prevent false positives, gemnasium only uses `go.sum` if it is unable to generate the build list for the Go project. If `go.sum` is selected, a warning occurs:
```shell
[WARN] [Gemnasium] [2022-09-14T20:59:38Z] ▶ Selecting "go.sum" parser for "/test-projects/gitlab-shell/go.sum". False positives may occur. See https://gitlab.com/gitlab-org/gitlab/-/issues/321081.
```

View File

@ -88,6 +88,7 @@ This rule enforces the defined actions and schedules a scan on the provided date
| `type` | `string` | `schedule` | The rule's type. |
| `branches` | `array` of `string` | `*` or the branch's name | The branch the given policy applies to (supports wildcard). |
| `cadence` | `string` | CRON expression (for example, `0 0 * * *`) | A whitespace-separated string containing five fields that represents the scheduled time. |
| `agents` | `object` | | The name of the [GitLab agents](../../clusters/agent/index.md) where [cluster image scanning](../../clusters/agent/vulnerabilities.md) will run. The object key is the name of the Kubernetes cluster configured for your project in GitLab. You can use the optional value of the object to select and scan specific Kubernetes resources. |
GitLab supports the following types of CRON syntax for the `cadence` field:
@ -96,6 +97,31 @@ GitLab supports the following types of CRON syntax for the `cadence` field:
Other elements of the CRON syntax may work in the cadence field; however, GitLab does not officially test or support them. The CRON expression is evaluated in UTC by default. If you have a self-managed GitLab instance and have [changed the server timezone](../../../administration/timezone.md), the CRON expression is evaluated with the new timezone.
### `agent` schema
Use this schema to define `agents` objects in the [`schedule` rule type](#schedule-rule-type).
| Field | Type | Possible values | Description |
|--------------|---------------------|--------------------------|-------------|
| `namespaces` | `array` of `string` | | The namespace that is scanned. If empty, all namespaces will be scanned. |
#### Policy example
```yaml
- name: Enforce Container Scanning in cluster connected through gitlab-agent for production and staging namespaces
enabled: true
rules:
- type: schedule
cadence: '0 10 * * *'
agents:
gitlab-agent:
namespaces:
- 'production'
- 'staging'
actions:
- scan: container_scanning
```
## `scan` action type
This action executes the selected `scan` with additional parameters when conditions for at least one
@ -124,9 +150,8 @@ Note the following:
- A secret detection scan runs in `normal` mode when executed as part of a pipeline, and in
[`historic`](../secret_detection/index.md#full-history-secret-detection)
mode when executed as part of a scheduled scan.
- A container scanning scan configured for the `pipeline` rule type ignores the cluster defined in the `clusters` object.
They use predefined CI/CD variables defined for your project. Cluster selection with the `clusters` object is supported for the `schedule` rule type.
A cluster with a name provided in the `clusters` object must be created and configured for the project.
- A container scanning scan that is configured for the `pipeline` rule type ignores the agent defined in the `agents` object. The `agents` object is only considered for `schedule` rule types.
An agent with a name provided in the `agents` object must be created and configured for the project.
- The SAST scan uses the default template and runs in a [child pipeline](../../../ci/pipelines/downstream_pipelines.md#parent-child-pipelines).
## Example security policies project

View File

@ -118,6 +118,21 @@ If you are not using the GitHub integration, you can still perform an authorizat
To use a newer personal access token in imports after previously performing these steps, sign out of
your GitLab account and sign in again, or revoke the older personal access token in GitHub.
### Select additional items to import
To make imports as fast as possible, the following items aren't imported from GitHub by default:
- Issue and pull request events. For example, _opened_ or _closed_, _renamed_, and _labeled_ or _unlabeled_.
- All comments. During a regular import of large repositories, some comments might be skipped due to a limitation of the GitHub API.
- Markdown attachments from repository comments, release posts, issue descriptions, and pull request descriptions. These can include
images, text, or binary attachments. If not imported, links in Markdown to attachments break after you remove the attachments from GitHub.
You can choose to import these items, but this could significantly increase import time. To import these items, select the appropriate fields in the UI:
- **Import issue and pull request events**.
- **Use alternative comments import method**.
- **Import Markdown attachments**.
### Select which repositories to import
After you have authorized access to your GitHub repositories, you are redirected to the GitHub importer page and
@ -182,9 +197,9 @@ The following items of a project are imported:
- Release notes. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15620) in GitLab 15.4.
- Comments and notes. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/18052) in GitLab 15.5.
NOTE: All attachment importers work under `github_importer_attachments_import` [feature flag](../../../administration/feature_flags.md) disabled by default.
- Release note attachments. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15620) in GitLab 15.4 with `github_importer_attachments_import`
[feature flag](../../../administration/feature_flags.md) disabled by default.
All attachment imports are disabled by default behind the
`github_importer_attachments_import` [feature flag](../../../administration/feature_flags.md). From GitLab 15.5, attachments can be imported
[as an additional item](#select-additional-items-to-import). The feature flag was removed.
- Pull request review comments.
- Regular issue and pull request comments.
- [Git Large File Storage (LFS) Objects](../../../topics/git/lfs/index.md).
@ -194,6 +209,7 @@ The following items of a project are imported:
- Diff Notes suggestions ([GitLab.com and GitLab 14.7 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/340624)).
- Issue events and pull requests events. [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/7673) in GitLab 15.4 with `github_importer_issue_events_import`
[feature flag](../../../administration/feature_flags.md) disabled by default.
From GitLab 15.5, these events can be imported [as an additional item](#select-additional-items-to-import). The feature flag was removed.
References to pull requests and issues are preserved. Each imported repository maintains visibility level unless that
[visibility level is restricted](../../public_access.md#restrict-use-of-public-or-internal-projects), in which case it
@ -214,25 +230,11 @@ You can still create [status checks](../merge_requests/status_checks.md) in GitL
When the GitHub Importer runs on extremely large projects, not all notes and diff notes can be imported due to a limitation of the GitHub API `issues_comments` and `pull_requests_comments` endpoints.
Not all pages can be fetched due to the following error coming from GitHub API: `In order to keep the API fast for everyone, pagination is limited for this resource. Check the rel=last link relation in the Link response header to see how far back you can traverse.`.
An alternative approach for importing notes and diff notes is available behind a feature flag.
An [alternative approach](#select-additional-items-to-import) for importing comments is available.
Instead of using `issues_comments` and `pull_requests_comments`, use individual resources `issue_comments` and `pull_request_comments` instead to pull notes from one object at a time.
This approach carries over any missing comments. However, it increases the number of network requests required to perform the import, so the import takes longer to complete.
To use the alternative way of importing notes, the `github_importer_single_endpoint_notes_import` feature flag must be enabled on the group project is being imported into.
Start a [Rails console](../../../administration/operations/rails_console.md#starting-a-rails-console-session).
```ruby
group = Group.find_by_full_path('my/group/fullpath')
# Enable
Feature.enable(:github_importer_single_endpoint_notes_import, group)
# Disable
Feature.disable(:github_importer_single_endpoint_notes_import, group)
```
## Reduce GitHub API request objects per page
Some GitHub API endpoints may return a 500 or 502 error for project imports from large repositories.

View File

@ -1100,6 +1100,7 @@ Payload example:
"object_kind": "pipeline",
"object_attributes":{
"id": 31,
"iid": 3,
"ref": "master",
"tag": false,
"sha": "bcbb5ec396a2c0f828686f14fac9b80b780504f2",

View File

@ -43,6 +43,7 @@ module API
optional :new_name, type: String, desc: 'New repo name'
requires :target_namespace, type: String, desc: 'Namespace to import repo into'
optional :github_hostname, type: String, desc: 'Custom GitHub enterprise hostname'
optional :optional_stages, type: Hash, desc: 'Optional stages of import to be performed'
end
post 'import/github' do
result = Import::GithubService.new(client, current_user, params).execute(access_params, provider)

View File

@ -63,6 +63,7 @@ module Gitlab
def hook_attrs(pipeline)
{
id: pipeline.id,
iid: pipeline.iid,
ref: pipeline.source_ref,
tag: pipeline.tag,
sha: pipeline.sha,

View File

@ -296,12 +296,11 @@ module Gitlab
with_lock_retries do
execute("LOCK TABLE #{target}, #{source} IN SHARE ROW EXCLUSIVE MODE") if reverse_lock_order
execute <<-EOF.strip_heredoc
ALTER TABLE #{source}
ADD CONSTRAINT #{options[:name]}
FOREIGN KEY (#{options[:column]})
REFERENCES #{target} (#{target_column})
FOREIGN KEY (#{multiple_columns(options[:column])})
REFERENCES #{target} (#{multiple_columns(target_column)})
#{on_delete_statement(options[:on_delete])}
NOT VALID;
EOF
@ -355,7 +354,7 @@ module Gitlab
# - For standard rails foreign keys the prefix is `fk_rails_`
#
def concurrent_foreign_key_name(table, column, prefix: 'fk_')
identifier = "#{table}_#{column}_fk"
identifier = "#{table}_#{multiple_columns(column, separator: '_')}_fk"
hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
"#{prefix}#{hashed_identifier}"
@ -1539,6 +1538,10 @@ into similar problems in the future (e.g. when new tables are created).
private
def multiple_columns(columns, separator: ', ')
Array.wrap(columns).join(separator)
end
def cascade_statement(cascade)
cascade ? 'CASCADE' : ''
end

View File

@ -80,12 +80,16 @@ module Gitlab
end
def timeout
if project.group.present? && ::Feature.enabled?(:github_importer_single_endpoint_notes_import, project.group, type: :ops)
if import_settings.enabled?(:single_endpoint_notes_import)
Gitlab::Cache::Import::Caching::LONGER_TIMEOUT
else
Gitlab::Cache::Import::Caching::TIMEOUT
end
end
def import_settings
::Gitlab::GithubImport::Settings.new(project)
end
end
end
end

View File

@ -0,0 +1,72 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    # Persists and reads a project's user-selected *optional* GitHub import
    # stages (issue/PR events, alternative comments import, Markdown
    # attachments). Selections are stored in the project's import data
    # under the 'optional_stages' key (see #write and #enabled?).
    class Settings
      # Registry of the optional import stages. Keys are the stage
      # identifiers checked via #enabled?; :label and :details are the
      # strings surfaced in the import UI (see .stages_array).
      # The heredocs are collapsed into single-line strings by the
      # split/strip/join chain.
      OPTIONAL_STAGES = {
        single_endpoint_issue_events_import: {
          label: 'Import issue and pull request events',
          details: <<-TEXT.split("\n").map(&:strip).join(' ')
            For example, opened or closed, renamed, and labeled or unlabeled.
            Time required to import these events depends on how many issues or pull requests your project has.
          TEXT
        },
        single_endpoint_notes_import: {
          label: 'Use alternative comments import method',
          details: <<-TEXT.split("\n").map(&:strip).join(' ')
            The default method can skip some comments in large projects because of limitations of the GitHub API.
          TEXT
        },
        attachments_import: {
          label: 'Import Markdown attachments',
          details: <<-TEXT.split("\n").map(&:strip).join(' ')
            Import Markdown attachments from repository comments, release posts, issue descriptions,
            and pull request descriptions. These can include images, text, or binary attachments.
            If not imported, links in Markdown to attachments break after you remove the attachments from GitHub.
          TEXT
        }
      }.freeze

      # Returns an array of UI-ready stage descriptors: one hash per
      # optional stage with a stringified :name and translated
      # :label/:details.
      # NOTE(review): `s_` is a gettext helper; it is called here with a
      # runtime `format` result rather than a literal, so these strings are
      # presumably externalized elsewhere for extraction — confirm before
      # changing this call shape.
      def self.stages_array
        OPTIONAL_STAGES.map do |stage_name, data|
          {
            name: stage_name.to_s,
            label: s_(format("GitHubImport|%{text}", text: data[:label])),
            details: s_(format("GitHubImport|%{text}", text: data[:details]))
          }
        end
      end

      # @param project [Project] the project whose import settings are managed
      def initialize(project)
        @project = project
      end

      # Persists the user's optional-stage choices onto the project's
      # import data and saves immediately.
      # Only keys listed in OPTIONAL_STAGES are stored; unknown keys are
      # ignored and missing known keys default to false
      # (see #fetch_stages_from_params).
      # @param user_settings [#to_h] map of stage name => boolean-ish value
      def write(user_settings)
        user_settings = user_settings.to_h.with_indifferent_access
        optional_stages = fetch_stages_from_params(user_settings)
        import_data = project.create_or_update_import_data(data: { optional_stages: optional_stages })
        import_data.save!
      end

      # True when the given optional stage was enabled for this project.
      # Safely returns false when import data, its data hash, or the stage
      # entry is absent (note the &. chain and the `|| false` fallback).
      def enabled?(stage_name)
        project.import_data&.data&.dig('optional_stages', stage_name.to_s) || false
      end

      # Inverse of #enabled?.
      def disabled?(stage_name)
        !enabled?(stage_name)
      end

      private

      attr_reader :project

      # Builds a { stage_name => Boolean } hash restricted to the known
      # OPTIONAL_STAGES keys. Values are coerced with
      # Gitlab::Utils.to_boolean, defaulting to false for missing or
      # unparsable input.
      def fetch_stages_from_params(user_settings)
        OPTIONAL_STAGES.keys.to_h do |stage_name|
          enabled = Gitlab::Utils.to_boolean(user_settings[stage_name], default: false)
          [stage_name, enabled]
        end
      end
    end
  end
end

View File

@ -4,10 +4,10 @@
# - SingleEndpointDiffNotesImporter
# - SingleEndpointIssueNotesImporter
# - SingleEndpointMergeRequestNotesImporter
# if `github_importer_single_endpoint_notes_import` feature flag is on.
# if enabled by Gitlab::GithubImport::Settings
#
# - SingleEndpointIssueEventsImporter
# if `github_importer_issue_events_import` feature flag is on.
# if enabled by Gitlab::GithubImport::Settings
#
# Fetches associated objects page by page to each item of parent collection.
# Currently `associated` is note or event.

View File

@ -25,9 +25,9 @@ module Gitlab
if matches.one?
matches.first
elsif matches.none?
raise UnknownProcessError, "Failed to identify runtime for process #{Process.pid} (#{$0})"
raise UnknownProcessError, "Failed to identify runtime for process #{Process.pid} (#{$PROGRAM_NAME})"
else
raise AmbiguousProcessError, "Ambiguous runtime #{matches} for process #{Process.pid} (#{$0})"
raise AmbiguousProcessError, "Ambiguous runtime #{matches} for process #{Process.pid} (#{$PROGRAM_NAME})"
end
end

View File

@ -39,7 +39,7 @@ module Prometheus
end
def process_name
$0
$PROGRAM_NAME
end
end
end

View File

@ -41388,6 +41388,9 @@ msgstr ""
msgid "This namespace has already been taken! Please choose another one."
msgstr ""
msgid "This namespace has already been taken. Choose a different one."
msgstr ""
msgid "This only applies to repository indexing operations."
msgstr ""

View File

@ -39,7 +39,7 @@ gem 'chemlab', '~> 0.9'
gem 'chemlab-library-www-gitlab-com', '~> 0.1'
# dependencies for jenkins client
gem 'nokogiri', '~> 1.12'
gem 'nokogiri', '~> 1.13', '>= 1.13.8'
gem 'deprecation_toolkit', '~> 1.5.1', require: false

View File

@ -329,7 +329,7 @@ DEPENDENCIES
gitlab-qa (~> 8)
influxdb-client (~> 1.17)
knapsack (~> 4.0)
nokogiri (~> 1.12)
nokogiri (~> 1.13, >= 1.13.8)
octokit (~> 5.6.1)
parallel (~> 1.19)
parallel_tests (~> 2.29)
@ -352,4 +352,4 @@ DEPENDENCIES
zeitwerk (~> 2.4)
BUNDLED WITH
2.3.15
2.3.23

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
$:.unshift(File.expand_path('lib', __dir__))
$LOAD_PATH.unshift(File.expand_path('lib', __dir__))
Gem::Specification.new do |spec|
spec.name = 'chemlab-library-gitlab'

View File

@ -25,7 +25,7 @@ class CancelPipeline
attr_reader :project, :pipeline_id, :client
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = API::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -60,7 +60,7 @@ class ArtifactFinder
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = API::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -95,7 +95,7 @@ class JobFinder
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = JobFinder::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -90,7 +90,7 @@ class GetFeatureFlagsFromFiles
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = API::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -87,7 +87,7 @@ class FailedTests
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = {
previous_tests_report_path: 'test_results/previous/test_reports.json',
output_directory: 'tmp/previous_failed_tests/',

View File

@ -8,8 +8,11 @@ finder_options = API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: tr
failed_jobs = PipelineFailedJobs.new(finder_options).execute
class SlackReporter
DEFAULT_FAILED_PIPELINE_REPORT_FILE = 'failed_pipeline_report.json'
def initialize(failed_jobs)
@failed_jobs = failed_jobs
@failed_pipeline_report_file = ENV.fetch('FAILED_PIPELINE_REPORT_FILE', DEFAULT_FAILED_PIPELINE_REPORT_FILE)
end
def report
@ -44,7 +47,7 @@ class SlackReporter
fields: [
{
type: "mrkdwn",
text: "*Source*\n#{source}"
text: "*Source*\n#{source} from #{project_link}"
},
{
type: "mrkdwn",
@ -62,12 +65,12 @@ class SlackReporter
]
}
File.write(ENV['FAILED_PIPELINE_REPORT_FILE'], JSON.pretty_generate(payload))
File.write(failed_pipeline_report_file, JSON.pretty_generate(payload))
end
private
attr_reader :failed_jobs
attr_reader :failed_jobs, :failed_pipeline_report_file
def title
"Pipeline #{pipeline_link} for #{branch_link} failed"
@ -93,6 +96,10 @@ class SlackReporter
"`#{ENV['CI_PIPELINE_SOURCE']}`"
end
def project_link
"<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_NAME']}>"
end
def triggered_by_link
"<#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']}|#{ENV['GITLAB_USER_NAME']}>"
end

View File

@ -149,7 +149,7 @@ class QueryLimitingReport
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = QueryLimitingReport::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -128,7 +128,7 @@ class PipelineTestReportBuilder
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = Host::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|

View File

@ -38,7 +38,7 @@ end
options = Struct.new(:eval, :ruby_version, :print_help, keyword_init: true).new
parser = OptionParser.new do |opts|
opts.banner = "Usage: #{$0} [-e code] [FILE...]"
opts.banner = "Usage: #{$PROGRAM_NAME} [-e code] [FILE...]"
opts.on('-e FRAGMENT', '--eval FRAGMENT', 'Process a fragment of Ruby code') do |code|
options.eval = code

View File

@ -97,6 +97,6 @@ class FindJhBranch
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
puts FindJhBranch.new.run
end

View File

@ -191,7 +191,7 @@ class StaticAnalysis
end
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
options = {}
if ARGV.include?('--dry-run')

View File

@ -427,7 +427,7 @@ module Trigger
Job = Class.new(Pipeline)
end
if $0 == __FILE__
if $PROGRAM_NAME == __FILE__
case ARGV[0]
when 'cng'
Trigger::CNG.new.invoke!.wait!

View File

@ -1,613 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Boards::IssuesController do
include ExternalAuthorizationServiceHelpers
let(:project) { create(:project, :private) }
let(:board) { create(:board, project: project) }
let(:user) { create(:user) }
let(:guest) { create(:user) }
let(:planning) { create(:label, project: project, name: 'Planning') }
let(:development) { create(:label, project: project, name: 'Development') }
let!(:list1) { create(:list, board: board, label: planning, position: 0) }
let!(:list2) { create(:list, board: board, label: development, position: 1) }
before do
project.add_maintainer(user)
project.add_guest(guest)
end
describe 'GET index', :request_store do
let(:johndoe) { create(:user, avatar: fixture_file_upload(File.join('spec/fixtures/dk.png'))) }
context 'with invalid board id' do
it 'returns a not found 404 response' do
list_issues user: user, board: non_existing_record_id, list: list2
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when list id is present' do
context 'with valid list id' do
let(:group) { create(:group, :private, projects: [project]) }
let(:group_board) { create(:board, group: group) }
let!(:list3) { create(:list, board: group_board, label: development, position: 2) }
let(:sub_group_1) { create(:group, :private, parent: group) }
before do
group.add_maintainer(user)
end
it 'returns issues that have the list label applied' do
issue = create(:labeled_issue, project: project, labels: [planning])
create(:labeled_issue, project: project, labels: [planning])
create(:labeled_issue, project: project, labels: [development], due_date: Date.tomorrow)
create(:labeled_issue, project: project, labels: [development], assignees: [johndoe])
issue.subscribe(johndoe, project)
expect(Issue).to receive(:move_nulls_to_end)
list_issues user: user, board: board, list: list2
expect(response).to match_response_schema('entities/issue_boards')
expect(json_response['issues'].length).to eq 2
expect(development.issues.map(&:relative_position)).not_to include(nil)
end
it 'returns issues by closed_at in descending order in closed list' do
create(:closed_issue, project: project, title: 'New Issue 1', closed_at: 1.day.ago)
create(:closed_issue, project: project, title: 'New Issue 2', closed_at: 1.week.ago)
list_issues user: user, board: board, list: board.lists.last.id
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['issues'].length).to eq(2)
expect(json_response['issues'][0]['title']).to eq('New Issue 1')
expect(json_response['issues'][1]['title']).to eq('New Issue 2')
end
it 'avoids N+1 database queries' do
create(:labeled_issue, project: project, labels: [development])
control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list2) }.count
# 25 issues is bigger than the page size
# the relative position will ignore the `#make_sure_position_set` queries
create_list(:labeled_issue, 25, project: project, labels: [development], assignees: [johndoe], relative_position: 1)
expect { list_issues(user: user, board: board, list: list2) }.not_to exceed_query_limit(control_count)
end
it 'avoids N+1 database queries when adding a project', :request_store do
create(:labeled_issue, project: project, labels: [development])
control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: group_board, list: list3) }.count
2.times do
p = create(:project, group: group)
create(:labeled_issue, project: p, labels: [development])
end
project_2 = create(:project, group: group)
create(:labeled_issue, project: project_2, labels: [development], assignees: [johndoe])
# because each issue without relative_position must be updated with
# a different value, we have 8 extra queries per issue
expect { list_issues(user: user, board: group_board, list: list3) }.not_to exceed_query_limit(control_count + (2 * 8 - 1))
end
it 'avoids N+1 database queries when adding a subgroup, project, and issue' do
create(:project, group: sub_group_1)
create(:labeled_issue, project: project, labels: [development])
control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: group_board, list: list3) }.count
project_2 = create(:project, group: group)
2.times do
p = create(:project, group: sub_group_1)
create(:labeled_issue, project: p, labels: [development])
end
create(:labeled_issue, project: project_2, labels: [development], assignees: [johndoe])
expect { list_issues(user: user, board: group_board, list: list3) }.not_to exceed_query_limit(control_count + (2 * 8 - 1))
end
it 'does not query issues table more than once' do
recorder = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list1) }
query_count = recorder.occurrences.select { |query,| query.match?(/FROM "?issues"?/) }.each_value.first
expect(query_count).to eq(1)
end
context 'when block_issue_repositioning feature flag is enabled' do
before do
stub_feature_flags(block_issue_repositioning: true)
end
it 'does not reposition issues with null position' do
expect(Issue).not_to receive(:move_nulls_to_end)
list_issues(user: user, board: group_board, list: list3)
end
end
end
context 'with invalid list id' do
it 'returns a not found 404 response' do
list_issues user: user, board: board, list: non_existing_record_id
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when list id is missing' do
it 'returns opened issues without board labels applied' do
bug = create(:label, project: project, name: 'Bug')
create(:issue, project: project)
create(:labeled_issue, project: project, labels: [planning])
create(:labeled_issue, project: project, labels: [development])
create(:labeled_issue, project: project, labels: [bug])
list_issues user: user, board: board
expect(response).to match_response_schema('entities/issue_boards')
expect(json_response['issues'].length).to eq 2
end
end
context 'with unauthorized user' do
let(:unauth_user) { create(:user) }
it 'returns a forbidden 403 response' do
list_issues user: unauth_user, board: board, list: list2
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'with external authorization' do
before do
sign_in(user)
enable_external_authorization_service_check
end
it 'returns a 403 for group boards' do
group = create(:group)
group_board = create(:board, group: group)
list_issues(user: user, board: group_board)
expect(response).to have_gitlab_http_status(:forbidden)
end
it 'is successful for project boards' do
project_board = create(:board, project: project)
list_issues(user: user, board: project_board)
expect(response).to have_gitlab_http_status(:ok)
end
end
describe 'PUT bulk_move' do
let(:todo) { create(:group_label, group: group, name: 'Todo') }
let(:development) { create(:group_label, group: group, name: 'Development') }
let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
let(:guest) { create(:group_member, :guest, user: create(:user), group: group ).user }
let(:project) { create(:project, group: group) }
let(:group) { create(:group) }
let(:board) { create(:board, project: project) }
let(:list1) { create(:list, board: board, label: todo, position: 0) }
let(:list2) { create(:list, board: board, label: development, position: 1) }
let(:issue1) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 10) }
let(:issue2) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 20) }
let(:issue3) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 30) }
let(:issue4) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 100) }
let(:move_params) do
{
board_id: board.id,
ids: [issue1.id, issue2.id, issue3.id],
from_list_id: list1.id,
to_list_id: list2.id,
move_before_id: issue4.id,
move_after_id: nil
}
end
before do
project.add_maintainer(user)
project.add_guest(guest)
end
shared_examples 'move issues endpoint provider' do
before do
sign_in(signed_in_user)
end
it 'responds as expected' do
put :bulk_move, params: move_issues_params
expect(response).to have_gitlab_http_status(expected_status)
if expected_status == 200
expect(json_response).to include(
'count' => move_issues_params[:ids].size,
'success' => true
)
expect(json_response['issues'].pluck('id')).to match_array(move_issues_params[:ids])
end
end
it 'moves issues as expected' do
put :bulk_move, params: move_issues_params
expect(response).to have_gitlab_http_status(expected_status)
list_issues user: requesting_user, board: board, list: list2
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('entities/issue_boards')
responded_issues = json_response['issues']
expect(responded_issues.length).to eq expected_issue_count
ids_in_order = responded_issues.pluck('id')
expect(ids_in_order).to eq(expected_issue_ids_in_order)
end
end
context 'when items are moved to another list' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) { move_params }
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 4 }
let(:expected_issue_ids_in_order) { [issue4.id, issue1.id, issue2.id, issue3.id] }
end
end
context 'when moving just one issue' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:ids] = [issue2.id]
end
end
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 2 }
let(:expected_issue_ids_in_order) { [issue4.id, issue2.id] }
end
end
context 'when user is not allowed to move issue' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { guest }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:ids] = [issue2.id]
end
end
let(:requesting_user) { user }
let(:expected_status) { 403 }
let(:expected_issue_count) { 1 }
let(:expected_issue_ids_in_order) { [issue4.id] }
end
end
context 'when issues should be moved visually above existing issue in list' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:move_after_id] = issue4.id
hash[:move_before_id] = nil
end
end
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 4 }
let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
end
end
context 'when destination list is empty' do
before do
# Remove issue from list
issue4.labels -= [development]
issue4.save!
end
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:move_before_id] = nil
end
end
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 3 }
let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id] }
end
end
context 'when no position arguments are given' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:move_before_id] = nil
end
end
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 4 }
let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
end
end
context 'when move_before_id and move_after_id are given' do
let(:issue5) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 90) }
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:move_before_id] = issue5.id
hash[:move_after_id] = issue4.id
end
end
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 5 }
let(:expected_issue_ids_in_order) { [issue5.id, issue1.id, issue2.id, issue3.id, issue4.id] }
end
end
context 'when request contains too many issues' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:ids] = (0..51).to_a
end
end
let(:requesting_user) { user }
let(:expected_status) { 422 }
let(:expected_issue_count) { 1 }
let(:expected_issue_ids_in_order) { [issue4.id] }
end
end
context 'when request is malformed' do
it_behaves_like 'move issues endpoint provider' do
let(:signed_in_user) { user }
let(:move_issues_params) do
move_params.dup.tap do |hash|
hash[:ids] = 'foobar'
end
end
let(:requesting_user) { user }
let(:expected_status) { 400 }
let(:expected_issue_count) { 1 }
let(:expected_issue_ids_in_order) { [issue4.id] }
end
end
end
def list_issues(user:, board:, list: nil)
sign_in(user)
params = {
board_id: board.to_param,
list_id: list.try(:to_param)
}
unless board.try(:parent).is_a?(Group)
params[:namespace_id] = project.namespace.to_param
params[:project_id] = project
end
get :index, params: params.compact
end
end
describe 'POST create' do
context 'when trying to create issue on an unauthorized project' do
let(:unauthorized_project) { create(:project, :private) }
let(:issue_params) { { project_id: unauthorized_project.id } }
it 'creates the issue on the board\'s project' do
expect do
create_issue user: user, board: board, list: list1, title: 'New issue', additional_issue_params: issue_params
end.to change(Issue, :count).by(1)
created_issue = Issue.last
expect(created_issue.project).to eq(project)
expect(unauthorized_project.reload.issues.count).to eq(0)
end
end
context 'with valid params' do
before do
create_issue user: user, board: board, list: list1, title: 'New issue'
end
it 'returns a successful 200 response' do
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns the created issue' do
expect(response).to match_response_schema('entities/issue_board')
end
it 'sets the default work_item_type' do
expect(Issue.last.work_item_type.base_type).to eq('issue')
end
end
context 'with invalid params' do
context 'when title is nil' do
it 'returns an unprocessable entity 422 response' do
create_issue user: user, board: board, list: list1, title: nil
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
context 'when list does not belongs to project board' do
it 'returns a not found 404 response' do
list = create(:list)
create_issue user: user, board: board, list: list, title: 'New issue'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with invalid board id' do
it 'returns a not found 404 response' do
create_issue user: user, board: non_existing_record_id, list: list1, title: 'New issue'
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with invalid list id' do
it 'returns a not found 404 response' do
create_issue user: user, board: board, list: non_existing_record_id, title: 'New issue'
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when create service returns an unrecoverable error' do
before do
allow_next_instance_of(Issues::CreateService) do |create_service|
allow(create_service).to receive(:execute).and_return(
ServiceResponse.error(message: 'unrecoverable error', http_status: 404)
)
end
end
it 'returns an array with errors an service http_status' do
create_issue user: user, board: board, list: list1, title: 'New issue'
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response).to contain_exactly('unrecoverable error')
end
end
context 'with guest user' do
context 'in open list' do
it 'returns a successful 200 response' do
open_list = board.lists.create!(list_type: :backlog)
create_issue user: guest, board: board, list: open_list, title: 'New issue'
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'in label list' do
it 'returns a forbidden 403 response' do
create_issue user: guest, board: board, list: list1, title: 'New issue'
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
# Signs in `user` and issues the JSON POST that creates an issue titled
# `title` on `list` of `board`. Extra issue attributes may be supplied
# via `additional_issue_params`.
def create_issue(user:, board:, list:, title:, additional_issue_params: {})
  sign_in(user)

  issue_attrs = { title: title, project_id: project.id }.merge(additional_issue_params)
  request_params = {
    board_id: board.to_param,
    list_id: list.to_param,
    issue: issue_attrs
  }

  post :create, params: request_params, format: :json
end
end
# Moving an issue between board lists via PATCH #update.
describe 'PATCH update' do
  # Issue starts in the list backed by the `planning` label.
  let!(:issue) { create(:labeled_issue, project: project, labels: [planning]) }

  context 'with valid params' do
    it 'returns a successful 200 response' do
      move user: user, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
      expect(response).to have_gitlab_http_status(:ok)
    end

    # Moving between label lists replaces the source list's label with
    # the destination list's label on the issue.
    it 'moves issue to the desired list' do
      move user: user, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
      expect(issue.reload.labels).to contain_exactly(development)
    end
  end

  context 'with invalid params' do
    it 'returns a unprocessable entity 422 response for invalid lists' do
      move user: user, board: board, issue: issue, from_list_id: nil, to_list_id: nil
      expect(response).to have_gitlab_http_status(:unprocessable_entity)
    end

    it 'returns a not found 404 response for invalid board id' do
      move user: user, board: non_existing_record_id, issue: issue, from_list_id: list1.id, to_list_id: list2.id
      expect(response).to have_gitlab_http_status(:not_found)
    end

    it 'returns a not found 404 response for invalid issue id' do
      move user: user, board: board, issue: double(id: non_existing_record_id), from_list_id: list1.id, to_list_id: list2.id
      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'with unauthorized user' do
    let(:guest) { create(:user) }

    before do
      project.add_guest(guest)
    end

    it 'returns a forbidden 403 response' do
      move user: guest, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
      expect(response).to have_gitlab_http_status(:forbidden)
    end
  end

  # Signs in `user` and issues the JSON PATCH request that moves `issue`
  # between the two given lists on `board`.
  def move(user:, board:, issue:, from_list_id:, to_list_id:)
    sign_in(user)
    patch :update, params: {
      namespace_id: project.namespace.to_param,
      project_id: project.id,
      board_id: board.to_param,
      id: issue.id,
      from_list_id: from_list_id,
      to_list_id: to_list_id
    },
    format: :json
  end
end
end

View File

@ -1,333 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# Controller specs for board lists: listing, creating, reordering,
# collapsing, deleting, and generating the default lists of a board.
RSpec.describe Boards::ListsController do
  let(:project) { create(:project) }
  let(:board) { create(:board, project: project) }
  let(:user) { create(:user) }
  let(:guest) { create(:user) }

  before do
    project.add_maintainer(user)
    project.add_guest(guest)
  end

  describe 'GET index' do
    before do
      create(:list, board: board)
    end

    it 'returns a successful 200 response' do
      read_board_list user: user, board: board
      expect(response).to have_gitlab_http_status(:ok)
      expect(response.media_type).to eq 'application/json'
    end

    # One label list plus the implicit backlog/closed lists — 3 total.
    # NOTE(review): the count of 3 presumably includes the default lists;
    # confirm against the board model.
    it 'returns a list of board lists' do
      read_board_list user: user, board: board
      expect(response).to match_response_schema('lists')
      expect(json_response.length).to eq 3
    end

    # Another user's collapse preference must not hide lists from `user`.
    context 'when another user has list preferences' do
      before do
        board.lists.first.update_preferences_for(guest, collapsed: true)
      end

      it 'returns the complete list of board lists' do
        read_board_list user: user, board: board
        expect(json_response.length).to eq 3
      end
    end

    context 'with unauthorized user' do
      let(:unauth_user) { create(:user) }

      it 'returns a forbidden 403 response' do
        read_board_list user: unauth_user, board: board
        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    # Signs in `user` and fetches the board's lists as JSON.
    def read_board_list(user:, board:)
      sign_in(user)
      get :index, params: {
        namespace_id: project.namespace.to_param,
        project_id: project,
        board_id: board.to_param
      },
      format: :json
    end
  end

  describe 'POST create' do
    context 'with valid params' do
      let(:label) { create(:label, project: project, name: 'Development') }

      it 'returns a successful 200 response' do
        create_board_list user: user, board: board, label_id: label.id
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'returns the created list' do
        create_board_list user: user, board: board, label_id: label.id
        expect(response).to match_response_schema('list')
      end
    end

    context 'with invalid params' do
      context 'when label is nil' do
        it 'returns an unprocessable entity 422 response' do
          create_board_list user: user, board: board, label_id: nil
          expect(response).to have_gitlab_http_status(:unprocessable_entity)
          expect(json_response['errors']).to eq(['Label not found'])
        end
      end

      # A label from another project is not visible to this board's lookup.
      context 'when label does not belong to project' do
        it 'returns an unprocessable entity 422 response' do
          label = create(:label, name: 'Development')
          create_board_list user: user, board: board, label_id: label.id
          expect(response).to have_gitlab_http_status(:unprocessable_entity)
          expect(json_response['errors']).to eq(['Label not found'])
        end
      end
    end

    context 'with unauthorized user' do
      it 'returns a forbidden 403 response' do
        label = create(:label, project: project, name: 'Development')
        create_board_list user: guest, board: board, label_id: label.id
        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    # Signs in `user` and posts a new label list for `board` as JSON.
    def create_board_list(user:, board:, label_id:)
      sign_in(user)
      post :create, params: {
        namespace_id: project.namespace.to_param,
        project_id: project,
        board_id: board.to_param,
        list: { label_id: label_id }
      },
      format: :json
    end
  end

  # PATCH #update serves two purposes: reordering lists (position) and
  # saving per-user collapse preferences.
  describe 'PATCH update' do
    let!(:planning) { create(:list, board: board, position: 0) }
    let!(:development) { create(:list, board: board, position: 1) }

    context 'with valid position' do
      it 'returns a successful 200 response' do
        move user: user, board: board, list: planning, position: 1
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'moves the list to the desired position' do
        move user: user, board: board, list: planning, position: 1
        expect(planning.reload.position).to eq 1
      end
    end

    # Position 6 is out of range for a board with two movable lists.
    context 'with invalid position' do
      it 'returns an unprocessable entity 422 response' do
        move user: user, board: board, list: planning, position: 6
        expect(response).to have_gitlab_http_status(:unprocessable_entity)
      end
    end

    context 'with invalid list id' do
      it 'returns a not found 404 response' do
        move user: user, board: board, list: non_existing_record_id, position: 1
        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'with unauthorized user' do
      it 'returns a 422 unprocessable entity response' do
        move user: guest, board: board, list: planning, position: 6
        expect(response).to have_gitlab_http_status(:unprocessable_entity)
      end
    end

    # Collapse state is stored per user, not on the list itself.
    context 'with collapsed preference' do
      it 'saves collapsed preference for user' do
        save_setting user: user, board: board, list: planning, setting: { collapsed: true }
        expect(planning.preferences_for(user).collapsed).to eq(true)
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'saves not collapsed preference for user' do
        save_setting user: user, board: board, list: planning, setting: { collapsed: false }
        expect(planning.preferences_for(user).collapsed).to eq(false)
        expect(response).to have_gitlab_http_status(:ok)
      end
    end

    # Non-label lists (e.g. the closed list) can also be collapsed.
    context 'with a list_type other than :label' do
      let!(:closed) { create(:closed_list, board: board, position: 2) }

      it 'saves collapsed preference for user' do
        save_setting user: user, board: board, list: closed, setting: { collapsed: true }
        expect(closed.preferences_for(user).collapsed).to eq(true)
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'saves not collapsed preference for user' do
        save_setting user: user, board: board, list: closed, setting: { collapsed: false }
        expect(closed.preferences_for(user).collapsed).to eq(false)
        expect(response).to have_gitlab_http_status(:ok)
      end
    end

    # Signs in `user` and PATCHes a new `position` for `list` on `board`.
    def move(user:, board:, list:, position:)
      sign_in(user)
      params = { namespace_id: project.namespace.to_param,
                 project_id: project.id,
                 board_id: board.to_param,
                 id: list.to_param,
                 list: { position: position },
                 format: :json }
      patch :update, params: params, as: :json
    end

    # Signs in `user` and PATCHes an arbitrary `setting` hash (e.g.
    # `{ collapsed: true }`) for `list` on `board`.
    def save_setting(user:, board:, list:, setting: {})
      sign_in(user)
      params = { namespace_id: project.namespace.to_param,
                 project_id: project.id,
                 board_id: board.to_param,
                 id: list.to_param,
                 list: setting,
                 format: :json }
      patch :update, params: params, as: :json
    end
  end

  describe 'DELETE destroy' do
    let!(:planning) { create(:list, board: board, position: 0) }

    context 'with valid list id' do
      it 'returns a successful 200 response' do
        remove_board_list user: user, board: board, list: planning
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'removes list from board' do
        expect { remove_board_list user: user, board: board, list: planning }.to change(board.lists, :size).by(-1)
      end
    end

    context 'with invalid list id' do
      it 'returns a not found 404 response' do
        remove_board_list user: user, board: board, list: non_existing_record_id
        expect(response).to have_gitlab_http_status(:not_found)
      end
    end

    context 'with unauthorized user' do
      it 'returns a forbidden 403 response' do
        remove_board_list user: guest, board: board, list: planning
        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    # Service-level failures are mapped to a 422 by the controller.
    context 'with an error service response' do
      it 'returns an unprocessable entity response' do
        allow(Boards::Lists::DestroyService).to receive(:new)
          .and_return(double(execute: ServiceResponse.error(message: 'error')))
        remove_board_list user: user, board: board, list: planning
        expect(response).to have_gitlab_http_status(:unprocessable_entity)
      end
    end

    # Signs in `user` and issues the JSON DELETE for `list` on `board`.
    def remove_board_list(user:, board:, list:)
      sign_in(user)
      delete :destroy, params: {
        namespace_id: project.namespace.to_param,
        project_id: project,
        board_id: board.to_param,
        id: list.to_param
      },
      format: :json
    end
  end

  # POST #generate creates the default lists for an empty board.
  describe 'POST generate' do
    context 'when board lists is empty' do
      it 'returns a successful 200 response' do
        generate_default_lists user: user, board: board
        expect(response).to have_gitlab_http_status(:ok)
      end

      it 'returns the defaults lists' do
        generate_default_lists user: user, board: board
        expect(response).to match_response_schema('lists')
      end
    end

    # Generation is refused once any list exists on the board.
    context 'when board lists is not empty' do
      it 'returns an unprocessable entity 422 response' do
        create(:list, board: board)
        generate_default_lists user: user, board: board
        expect(response).to have_gitlab_http_status(:unprocessable_entity)
      end
    end

    context 'with unauthorized user' do
      it 'returns a forbidden 403 response' do
        generate_default_lists user: guest, board: board
        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    # Signs in `user` and posts the generate action for `board` as JSON.
    def generate_default_lists(user:, board:)
      sign_in(user)
      post :generate, params: {
        namespace_id: project.namespace.to_param,
        project_id: project,
        board_id: board.to_param
      },
      format: :json
    end
  end
end

View File

@ -19,12 +19,6 @@ RSpec.describe Projects::BoardsController do
expect { list_boards }.to change(project.boards, :count).by(1)
end
it 'sets boards_endpoint instance variable to a boards path' do
list_boards
expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
end
it 'renders template' do
list_boards
@ -110,12 +104,6 @@ RSpec.describe Projects::BoardsController do
describe 'GET show' do
let_it_be(:board) { create(:board, project: project) }
it 'sets boards_endpoint instance variable to a boards path' do
read_board board: board
expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
end
context 'when format is HTML' do
it 'renders template' do
expect { read_board board: board }.to change(BoardProjectRecentVisit, :count).by(1)

View File

@ -1,6 +1,6 @@
# frozen_string_literal: true
if $".include?(File.expand_path('spec_helper.rb', __dir__))
if $LOADED_FEATURES.include?(File.expand_path('spec_helper.rb', __dir__))
# There's no need to load anything here if spec_helper is already loaded
# because spec_helper is more extensive than fast_spec_helper
return

View File

@ -9,6 +9,7 @@ RSpec.describe 'Global search' do
let(:project) { create(:project, namespace: user.namespace) }
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
end

View File

@ -8,6 +8,7 @@ RSpec.describe 'User searches for code' do
context 'when signed in' do
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
end
@ -214,6 +215,7 @@ RSpec.describe 'User searches for code' do
let(:project) { create(:project, :public, :repository) }
before do
stub_feature_flags(search_page_vertical_nav: false)
visit(project_path(project))
end

View File

@ -7,6 +7,7 @@ RSpec.describe 'User searches for comments' do
let(:user) { create(:user) }
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_reporter(user)
sign_in(user)

View File

@ -8,6 +8,7 @@ RSpec.describe 'User searches for commits', :js do
let(:user) { create(:user) }
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_reporter(user)
sign_in(user)

View File

@ -18,6 +18,7 @@ RSpec.describe 'User searches for issues', :js do
before do
project.add_maintainer(user)
sign_in(user)
stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
@ -110,6 +111,7 @@ RSpec.describe 'User searches for issues', :js do
before do
stub_feature_flags(block_anonymous_global_searches: false)
stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
@ -127,6 +129,7 @@ RSpec.describe 'User searches for issues', :js do
context 'when block_anonymous_global_searches is enabled' do
before do
stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end

View File

@ -15,6 +15,7 @@ RSpec.describe 'User searches for merge requests', :js do
end
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)

View File

@ -11,6 +11,7 @@ RSpec.describe 'User searches for milestones', :js do
before do
project.add_maintainer(user)
sign_in(user)
stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end

View File

@ -8,6 +8,7 @@ RSpec.describe 'User searches for projects', :js do
context 'when signed out' do
context 'when block_anonymous_global_searches is disabled' do
before do
stub_feature_flags(search_page_vertical_nav: false)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1000)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit_unauthenticated).and_return(1000)
stub_feature_flags(block_anonymous_global_searches: false)

View File

@ -8,6 +8,7 @@ RSpec.describe 'User searches for users' do
let(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') }
before do
stub_feature_flags(search_page_vertical_nav: false)
sign_in(user1)
end

View File

@ -8,6 +8,7 @@ RSpec.describe 'User searches for wiki pages', :js do
let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'directory/title', content: 'Some Wiki content') }
before do
stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
@ -18,6 +19,10 @@ RSpec.describe 'User searches for wiki pages', :js do
include_examples 'search timeouts', 'wiki_blobs'
shared_examples 'search wiki blobs' do
before do
stub_feature_flags(search_page_vertical_nav: false)
end
it 'finds a page' do
find('[data-testid="project-filter"]').click

View File

@ -17,6 +17,7 @@ RSpec.describe 'User uses header search field', :js do
end
before do
stub_feature_flags(search_page_vertical_nav: false)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).and_return(0)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1000)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit_unauthenticated).and_return(1000)

View File

@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe 'Search Snippets' do
before do
stub_feature_flags(search_page_vertical_nav: false)
end
it 'user searches for snippets by title' do
public_snippet = create(:personal_snippet, :public, title: 'Beginning and Middle')
private_snippet = create(:personal_snippet, :private, title: 'Middle and End')

View File

@ -42,20 +42,39 @@ describe('GlobalSearchSidebar', () => {
const findResetLinkButton = () => wrapper.findComponent(GlLink);
describe('template', () => {
beforeEach(() => {
createComponent();
describe('scope=projects', () => {
beforeEach(() => {
createComponent({ urlQuery: { ...MOCK_QUERY, scope: 'projects' } });
});
it("doesn't render StatusFilter", () => {
expect(findStatusFilter().exists()).toBe(false);
});
it("doesn't render ConfidentialityFilter", () => {
expect(findConfidentialityFilter().exists()).toBe(false);
});
it("doesn't render ApplyButton", () => {
expect(findApplyButton().exists()).toBe(false);
});
});
it('renders StatusFilter always', () => {
expect(findStatusFilter().exists()).toBe(true);
});
describe('scope=issues', () => {
beforeEach(() => {
createComponent({ urlQuery: MOCK_QUERY });
});
it('renders StatusFilter', () => {
expect(findStatusFilter().exists()).toBe(true);
});
it('renders ConfidentialityFilter always', () => {
expect(findConfidentialityFilter().exists()).toBe(true);
});
it('renders ConfidentialityFilter', () => {
expect(findConfidentialityFilter().exists()).toBe(true);
});
it('renders ApplyButton always', () => {
expect(findApplyButton().exists()).toBe(true);
it('renders ApplyButton', () => {
expect(findApplyButton().exists()).toBe(true);
});
});
});
@ -115,7 +134,7 @@ describe('GlobalSearchSidebar', () => {
describe('actions', () => {
beforeEach(() => {
createComponent();
createComponent({});
});
it('clicking ApplyButton calls applyQuery', () => {

View File

@ -105,10 +105,6 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:can?).with(user, :admin_issue_board, project).and_return(false)
end
it 'returns a board_lists_path as lists_endpoint' do
expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(project_board))
end
it 'returns board type as parent' do
expect(helper.board_data[:parent]).to eq('project')
end

View File

@ -776,7 +776,7 @@ RSpec.describe SearchHelper do
end
context 'project data' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:project_metadata) { { project_path: project.path, issues_path: "/issues" } }
let(:scope) { 'issues' }
let(:code_search) { true }
@ -852,7 +852,7 @@ RSpec.describe SearchHelper do
describe '.search_navigation' do
using RSpec::Parameterized::TableSyntax
let(:user) { build(:user) }
let(:project) { build(:project) }
let_it_be(:project) { build(:project) }
before do
allow(self).to receive(:current_user).and_return(user)
@ -1068,6 +1068,7 @@ RSpec.describe SearchHelper do
with_them do
it 'converts correctly' do
allow(self).to receive(:search_navigation).with(no_args).and_return(data)
expect(search_navigation_json).to instance_exec(&matcher)
end
end

View File

@ -30,6 +30,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(attributes[:sha]).to eq(pipeline.sha)
expect(attributes[:tag]).to eq(pipeline.tag)
expect(attributes[:id]).to eq(pipeline.id)
expect(attributes[:iid]).to eq(pipeline.iid)
expect(attributes[:source]).to eq(pipeline.source)
expect(attributes[:status]).to eq(pipeline.status)
expect(attributes[:detailed_status]).to eq('passed')

View File

@ -757,6 +757,58 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, reverse_lock_order: true)
end
end
context 'when creating foreign key for a group of columns' do
it 'references the custom target columns when provided', :aggregate_failures do
expect(model).to receive(:with_lock_retries).and_yield
expect(model).to receive(:execute).with(
"ALTER TABLE projects\n" \
"ADD CONSTRAINT fk_multiple_columns\n" \
"FOREIGN KEY \(partition_number, user_id\)\n" \
"REFERENCES users \(partition_number, id\)\n" \
"ON DELETE CASCADE\n" \
"NOT VALID;\n"
)
model.add_concurrent_foreign_key(
:projects,
:users,
column: [:partition_number, :user_id],
target_column: [:partition_number, :id],
validate: false,
name: :fk_multiple_columns
)
end
context 'when foreign key is already defined' do
before do
expect(model).to receive(:foreign_key_exists?).with(
:projects,
:users,
{
column: [:partition_number, :user_id],
name: :fk_multiple_columns,
on_delete: :cascade,
primary_key: [:partition_number, :id]
}
).and_return(true)
end
it 'does not create foreign key', :aggregate_failures do
expect(model).not_to receive(:with_lock_retries).and_yield
expect(model).not_to receive(:execute).with(/FOREIGN KEY/)
model.add_concurrent_foreign_key(
:projects,
:users,
column: [:partition_number, :user_id],
target_column: [:partition_number, :id],
validate: false,
name: :fk_multiple_columns
)
end
end
end
end
end
@ -813,6 +865,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(name).to be_an_instance_of(String)
expect(name.length).to eq(13)
end
context 'when using multiple columns' do
it 'returns the name of the foreign key', :aggregate_failures do
result = model.concurrent_foreign_key_name(:table_name, [:partition_number, :id])
expect(result).to be_an_instance_of(String)
expect(result.length).to eq(13)
end
end
end
describe '#foreign_key_exists?' do
@ -887,6 +948,62 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'compares by target table if no column given' do
expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
end
context 'with foreign key using multiple columns' do
before do
key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
:projects, :users,
{
column: [:partition_number, :id],
name: :fk_projects_users_partition_number_id,
on_delete: :cascade,
primary_key: [:partition_number, :id]
}
)
allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
end
it 'finds existing foreign keys by columns' do
expect(model.foreign_key_exists?(:projects, :users, column: [:partition_number, :id])).to be_truthy
end
it 'finds existing foreign keys by name' do
expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id)).to be_truthy
end
it 'finds existing foreign_keys by name and column' do
expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id])).to be_truthy
end
it 'finds existing foreign_keys by name, column and on_delete' do
expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy
end
it 'finds existing foreign keys by target table only' do
expect(model.foreign_key_exists?(:projects, :users)).to be_truthy
end
it 'compares by column name if given' do
expect(model.foreign_key_exists?(:projects, :users, column: :id)).to be_falsey
end
it 'compares by target column name if given' do
expect(model.foreign_key_exists?(:projects, :users, primary_key: :user_id)).to be_falsey
expect(model.foreign_key_exists?(:projects, :users, primary_key: [:partition_number, :id])).to be_truthy
end
it 'compares by foreign key name if given' do
expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name)).to be_falsey
end
it 'compares by foreign key name and column if given' do
expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey
end
it 'compares by foreign key name, column and on_delete if given' do
expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey
end
end
end
describe '#disable_statement_timeout' do

Some files were not shown because too many files have changed in this diff Show More