Add latest changes from gitlab-org/gitlab@master
Parent: 833eadad8c
Commit: d64e3a8b28
112 changed files with 2151 additions and 305 deletions
@@ -56,7 +56,7 @@ Style/FrozenStringLiteralComment:
    - 'qa/**/*'
    - 'rubocop/**/*'
    - 'scripts/**/*'
    - 'spec/**/*'
    - 'spec/lib/**/*'

RSpec/FilePath:
  Exclude:
Gemfile (6 changed lines)
@@ -387,7 +387,6 @@ group :development, :test do

  gem 'benchmark-ips', '~> 2.3.0', require: false

  gem 'license_finder', '~> 5.4', require: false
  gem 'knapsack', '~> 1.17'

  gem 'stackprof', '~> 0.2.10', require: false
@@ -397,6 +396,11 @@ group :development, :test do
  gem 'timecop', '~> 0.8.0'
end

# Gems required in omnibus-gitlab pipeline
group :development, :test, :omnibus do
  gem 'license_finder', '~> 5.4', require: false
end

group :test do
  gem 'shoulda-matchers', '~> 4.0.1', require: false
  gem 'email_spec', '~> 2.2.0'
@@ -48,7 +48,7 @@ document.addEventListener('DOMContentLoaded', () => {
  leaveByUrl('project');

  if (document.getElementById('js-tree-list')) {
    import('~/repository')
    import('ee_else_ce/repository')
      .then(m => m.default())
      .catch(e => {
        throw e;
@@ -42,7 +42,7 @@ document.addEventListener('DOMContentLoaded', () => {
  GpgBadges.fetch();

  if (document.getElementById('js-tree-list')) {
    import('~/repository')
    import('ee_else_ce/repository')
      .then(m => m.default())
      .catch(e => {
        throw e;
@@ -9,8 +9,10 @@ import { parseBoolean } from '../lib/utils/common_utils';

export default function setupVueRepositoryList() {
  const el = document.getElementById('js-tree-list');
  const { projectPath, projectShortPath, ref, fullName } = el.dataset;
  const { dataset } = el;
  const { projectPath, projectShortPath, ref, fullName } = dataset;
  const router = createRouter(projectPath, ref);
  const hideOnRootEls = document.querySelectorAll('.js-hide-on-root');

  apolloProvider.clients.defaultClient.cache.writeData({
    data: {
@@ -35,6 +37,7 @@ export default function setupVueRepositoryList() {
    document
      .querySelectorAll('.js-hide-on-navigation')
      .forEach(elem => elem.classList.toggle('hidden', !isRoot));
    hideOnRootEls.forEach(elem => elem.classList.toggle('hidden', isRoot));
  });

  const breadcrumbEl = document.getElementById('js-repo-breadcrumb');
@@ -88,7 +91,8 @@ export default function setupVueRepositoryList() {
    },
  });

  return new Vue({
  // eslint-disable-next-line no-new
  new Vue({
    el,
    router,
    apolloProvider,
@@ -96,4 +100,6 @@ export default function setupVueRepositoryList() {
      return h(App);
    },
  });

  return { router, data: dataset };
}
@@ -5,7 +5,7 @@ module PreviewMarkdown

  # rubocop:disable Gitlab/ModuleWithInstanceVariables
  def preview_markdown
    result = PreviewMarkdownService.new(@project, current_user, params).execute
    result = PreviewMarkdownService.new(@project, current_user, markdown_service_params).execute

    markdown_params =
      case controller_name
@@ -26,6 +26,8 @@ module PreviewMarkdown
  end
  # rubocop:enable Gitlab/ModuleWithInstanceVariables

  private

  def projects_filter_params
    {
      issuable_state_filter_enabled: true,
@@ -33,10 +35,12 @@ module PreviewMarkdown
    }
  end

  private

  # Override this method to customise the markdown for your controller
  def preview_markdown_params
    {}
  end

  def markdown_service_params
    params
  end
end
@@ -6,6 +6,7 @@ class GroupsController < Groups::ApplicationController
  include ParamsBackwardCompatibility
  include PreviewMarkdown
  include RecordUserLastActivity
  extend ::Gitlab::Utils::Override

  respond_to :html

@@ -233,6 +234,11 @@ class GroupsController < Groups::ApplicationController
      @group.self_and_descendants.public_or_visible_to_user(current_user)
    end
  end

  override :markdown_service_params
  def markdown_service_params
    params.merge(group: group)
  end
end

GroupsController.prepend_if_ee('EE::GroupsController')
@@ -66,7 +66,7 @@ module GitlabRoutingHelper
  end

  def preview_markdown_path(parent, *args)
    return group_preview_markdown_path(parent) if parent.is_a?(Group)
    return group_preview_markdown_path(parent, *args) if parent.is_a?(Group)

    if @snippet.is_a?(PersonalSnippet)
      preview_markdown_snippets_path
@@ -32,7 +32,7 @@ module ServicesHelper
  end

  def service_save_button(service)
    button_tag(class: 'btn btn-success', type: 'submit', disabled: service.deprecated?) do
    button_tag(class: 'btn btn-success', type: 'submit', disabled: service.deprecated?, data: { qa_selector: 'save_changes_button' }) do
      icon('spinner spin', class: 'hidden js-btn-spinner') +
        content_tag(:span, 'Save changes', class: 'js-btn-label')
    end
@@ -186,6 +186,15 @@ module TreeHelper

    attrs
  end

  def vue_file_list_data(project, ref)
    {
      project_path: project.full_path,
      project_short_path: project.path,
      ref: ref,
      full_name: project.name_with_namespace
    }
  end
end

TreeHelper.prepend_if_ee('::EE::TreeHelper')
@@ -6,6 +6,7 @@ module Clusters
    include Gitlab::Utils::StrongMemoize
    include FromUnion
    include ReactiveCaching
    include AfterCommitQueue

    self.table_name = 'clusters'

@@ -126,7 +127,55 @@ module Clusters
      hierarchy_groups.flat_map(&:clusters) + Instance.new.clusters
    end

    state_machine :cleanup_status, initial: :cleanup_not_started do
      state :cleanup_not_started, value: 1
      state :cleanup_uninstalling_applications, value: 2
      state :cleanup_removing_project_namespaces, value: 3
      state :cleanup_removing_service_account, value: 4
      state :cleanup_errored, value: 5

      event :start_cleanup do |cluster|
        transition [:cleanup_not_started, :cleanup_errored] => :cleanup_uninstalling_applications
      end

      event :continue_cleanup do
        transition(
          cleanup_uninstalling_applications: :cleanup_removing_project_namespaces,
          cleanup_removing_project_namespaces: :cleanup_removing_service_account)
      end

      event :make_cleanup_errored do
        transition any => :cleanup_errored
      end

      before_transition any => [:cleanup_errored] do |cluster, transition|
        status_reason = transition.args.first
        cluster.cleanup_status_reason = status_reason if status_reason
      end

      after_transition [:cleanup_not_started, :cleanup_errored] => :cleanup_uninstalling_applications do |cluster|
        cluster.run_after_commit do
          Clusters::Cleanup::AppWorker.perform_async(cluster.id)
        end
      end

      after_transition cleanup_uninstalling_applications: :cleanup_removing_project_namespaces do |cluster|
        cluster.run_after_commit do
          Clusters::Cleanup::ProjectNamespaceWorker.perform_async(cluster.id)
        end
      end

      after_transition cleanup_removing_project_namespaces: :cleanup_removing_service_account do |cluster|
        cluster.run_after_commit do
          Clusters::Cleanup::ServiceAccountWorker.perform_async(cluster.id)
        end
      end
    end
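    # Example (sketch, not part of the diff above): calling `cluster.start_cleanup` moves the
    # record to :cleanup_uninstalling_applications and, after commit, enqueues
    # Clusters::Cleanup::AppWorker; the cleanup workers then call `continue_cleanup` to advance
    # through the remaining states until the service account is removed.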

    def status_name
      return cleanup_status_name if cleanup_errored?
      return :cleanup_ongoing unless cleanup_not_started?

      provider&.status_name || connection_status.presence || :created
    end
|
@@ -209,14 +209,20 @@ class MergeRequest < ApplicationRecord
  scope :by_target_branch, ->(branch_name) { where(target_branch: branch_name) }
  scope :preload_source_project, -> { preload(:source_project) }

  scope :with_open_merge_when_pipeline_succeeds, -> do
    with_state(:opened).where(merge_when_pipeline_succeeds: true)
  scope :with_auto_merge_enabled, -> do
    with_state(:opened).where(auto_merge_enabled: true)
  end

  after_save :keep_around_commit

  alias_attribute :project, :target_project
  alias_attribute :project_id, :target_project_id

  # Currently, the `merge_when_pipeline_succeeds` column is used as a flag
  # to check if _any_ auto merge strategy is activated on the merge request.
  # Today, we have multiple strategies and MWPS is one of them.
  # We'd eventually rename the column to avoid confusion, but in the meantime
  # please use the `auto_merge_enabled` alias instead of `merge_when_pipeline_succeeds`.
  alias_attribute :auto_merge_enabled, :merge_when_pipeline_succeeds
  alias_method :issuing_parent, :target_project
|
@@ -1,7 +1,5 @@
# frozen_string_literal: true

class TodoPresenter < Gitlab::View::Presenter::Delegated
  include GlobalID::Identification

  presents :todo
end
|
@ -89,6 +89,14 @@ class DiffFileBaseEntity < Grape::Entity
|
|||
|
||||
expose :viewer, using: DiffViewerEntity
|
||||
|
||||
expose :old_size do |diff_file|
|
||||
diff_file.old_blob&.raw_size
|
||||
end
|
||||
|
||||
expose :new_size do |diff_file|
|
||||
diff_file.new_blob&.raw_size
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def memoized_submodule_links(diff_file, options)
|
||||
|
|
|
@@ -152,7 +152,8 @@ module MergeRequests
    def abort_ff_merge_requests_with_when_pipeline_succeeds
      return unless @project.ff_merge_must_be_possible?

      requests_with_auto_merge_enabled_to(@push.branch_name).each do |merge_request|
      merge_requests_with_auto_merge_enabled_to(@push.branch_name).each do |merge_request|
        next unless merge_request.auto_merge_strategy == AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
        next unless merge_request.should_be_rebased?

        abort_auto_merge_with_todo(merge_request, 'target branch was updated')
@@ -167,11 +168,11 @@ module MergeRequests
      todo_service.merge_request_became_unmergeable(merge_request)
    end

    def requests_with_auto_merge_enabled_to(target_branch)
    def merge_requests_with_auto_merge_enabled_to(target_branch)
      @project
        .merge_requests
        .by_target_branch(target_branch)
        .with_open_merge_when_pipeline_succeeds
        .with_auto_merge_enabled
    end

    def mark_pending_todos_done
|
@@ -16,8 +16,12 @@ class PreviewMarkdownService < BaseService

  private

  def quick_action_types
    %w(Issue MergeRequest Commit)
  end

  def explain_quick_actions(text)
    return text, [] unless %w(Issue MergeRequest Commit).include?(target_type)
    return text, [] unless quick_action_types.include?(target_type)

    quick_actions_service = QuickActions::InterpretService.new(project, current_user)
    quick_actions_service.explain(text, find_commands_target)
@@ -51,7 +55,7 @@ class PreviewMarkdownService < BaseService

  def find_commands_target
    QuickActions::TargetService
      .new(project, current_user)
      .new(project, current_user, group: params[:group])
      .execute(target_type, target_id)
  end

@@ -63,3 +67,5 @@ class PreviewMarkdownService < BaseService
    params[:target_id]
  end
end

PreviewMarkdownService.prepend_if_ee('EE::PreviewMarkdownService')
|
@@ -32,3 +32,5 @@ module QuickActions
    end
  end
end

QuickActions::TargetService.prepend_if_ee('EE::QuickActions::TargetService')
|
@ -4,7 +4,7 @@
|
|||
%fieldset
|
||||
.form-group
|
||||
.form-check
|
||||
= f.check_box :allow_local_requests_from_web_hooks_and_services, class: 'form-check-input'
|
||||
= f.check_box :allow_local_requests_from_web_hooks_and_services, class: 'form-check-input', data: { qa_selector: 'allow_requests_from_services_checkbox' }
|
||||
= f.label :allow_local_requests_from_web_hooks_and_services, class: 'form-check-label' do
|
||||
= _('Allow requests to the local network from web hooks and services')
|
||||
.form-check
|
||||
|
@ -27,4 +27,4 @@
|
|||
%span.form-text.text-muted
|
||||
= _('Resolves IP addresses once and uses them to submit requests')
|
||||
|
||||
= f.submit 'Save changes', class: "btn btn-success"
|
||||
= f.submit 'Save changes', class: "btn btn-success", data: { qa_selector: 'save_changes_button' }
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
.settings-content
|
||||
= render 'ip_limits'
|
||||
|
||||
%section.settings.as-outbound.no-animate#js-outbound-settings{ class: ('expanded' if expanded_by_default?) }
|
||||
%section.settings.as-outbound.no-animate#js-outbound-settings{ class: ('expanded' if expanded_by_default?), data: { qa_selector: 'outbound_requests_section' } }
|
||||
.settings-header
|
||||
%h4
|
||||
= _('Outbound requests')
|
||||
|
|
|
@ -163,7 +163,7 @@
|
|||
|
||||
- if project_nav_tab? :pipelines
|
||||
= nav_link(controller: [:pipelines, :builds, :jobs, :pipeline_schedules, :artifacts]) do
|
||||
= link_to project_pipelines_path(@project), class: 'shortcuts-pipelines qa-link-pipelines rspec-link-pipelines' do
|
||||
= link_to project_pipelines_path(@project), class: 'shortcuts-pipelines qa-link-pipelines rspec-link-pipelines', data: { qa_selector: 'ci_cd_link' } do
|
||||
.nav-icon-container
|
||||
= sprite_icon('rocket')
|
||||
%span.nav-item-name#js-onboarding-pipelines-link
|
||||
|
@ -347,7 +347,7 @@
|
|||
= _('Members')
|
||||
- if can_edit
|
||||
= nav_link(controller: [:integrations, :services, :hooks, :hook_logs]) do
|
||||
= link_to project_settings_integrations_path(@project), title: _('Integrations') do
|
||||
= link_to project_settings_integrations_path(@project), title: _('Integrations'), data: { qa_selector: 'integrations_settings_link' } do
|
||||
%span
|
||||
= _('Integrations')
|
||||
= nav_link(controller: :repository) do
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
= render 'stat_anchor_list', anchors: @project.statistics_buttons(show_auto_devops_callout: show_auto_devops_callout)
|
||||
|
||||
- if vue_file_list_enabled?
|
||||
#js-tree-list{ data: { project_path: @project.full_path, project_short_path: @project.path, ref: ref, full_name: @project.name_with_namespace } }
|
||||
#js-tree-list{ data: vue_file_list_data(project, ref) }
|
||||
- if can_edit_tree?
|
||||
= render 'projects/blob/upload', title: _('Upload New File'), placeholder: _('Upload New File'), button_title: _('Upload file'), form_path: project_create_blob_path(@project, @id), method: :post
|
||||
= render 'projects/blob/new_dir'
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
%td{ "aria-label" => (service.activated? ? s_("ProjectService|%{service_title}: status on") : s_("ProjectService|%{service_title}: status off")) % { service_title: service.title } }
|
||||
= boolean_to_icon service.activated?
|
||||
%td
|
||||
= link_to edit_project_service_path(@project, service.to_param) do
|
||||
= link_to edit_project_service_path(@project, service.to_param), { data: { qa_selector: "#{service.title.downcase.gsub(/[\s\(\)]/,'_')}_link" } } do
|
||||
%strong= service.title
|
||||
%td.d-none.d-sm-block
|
||||
= service.description
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
= form.label name, title, class: "col-form-label col-sm-2"
|
||||
.col-sm-10
|
||||
- if type == 'text'
|
||||
= form.text_field name, class: "form-control", placeholder: placeholder, required: required, disabled: disabled
|
||||
= form.text_field name, class: "form-control", placeholder: placeholder, required: required, disabled: disabled, data: { qa_selector: "#{name.downcase.gsub('\s', '')}_field" }
|
||||
- elsif type == 'textarea'
|
||||
= form.text_area name, rows: 5, class: "form-control", placeholder: placeholder, required: required, disabled: disabled
|
||||
- elsif type == 'checkbox'
|
||||
|
@ -24,6 +24,6 @@
|
|||
- elsif type == 'select'
|
||||
= form.select name, options_for_select(choices, value ? value : default_choice), {}, { class: "form-control", disabled: disabled}
|
||||
- elsif type == 'password'
|
||||
= form.password_field name, autocomplete: "new-password", placeholder: placeholder, class: "form-control", required: value.blank? && required, disabled: disabled
|
||||
= form.password_field name, autocomplete: "new-password", placeholder: placeholder, class: "form-control", required: value.blank? && required, disabled: disabled, data: { qa_selector: "#{name.downcase.gsub('\s', '')}_field" }
|
||||
- if help
|
||||
%span.form-text.text-muted= help
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
.form-group.row
|
||||
= form.label :active, "Active", class: "col-form-label col-sm-2"
|
||||
.col-sm-10
|
||||
= form.check_box :active, disabled: disable_fields_service?(@service)
|
||||
= form.check_box :active, disabled: disable_fields_service?(@service), data: { qa_selector: 'active_checkbox' }
|
||||
|
||||
- if @service.configurable_events.present?
|
||||
.form-group.row
|
||||
|
|
|
@ -45,6 +45,9 @@
|
|||
- gcp_cluster:cluster_project_configure
|
||||
- gcp_cluster:clusters_applications_wait_for_uninstall_app
|
||||
- gcp_cluster:clusters_applications_uninstall
|
||||
- gcp_cluster:clusters_cleanup_app
|
||||
- gcp_cluster:clusters_cleanup_project_namespace
|
||||
- gcp_cluster:clusters_cleanup_service_account
|
||||
|
||||
- github_import_advance_stage
|
||||
- github_importer:github_import_import_diff_note
|
||||
|
|
app/workers/clusters/cleanup/app_worker.rb (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Clusters
|
||||
module Cleanup
|
||||
class AppWorker
|
||||
include ApplicationWorker
|
||||
include ClusterQueue
|
||||
include ClusterApplications
|
||||
|
||||
# TODO: Merge with https://gitlab.com/gitlab-org/gitlab/merge_requests/16954
|
||||
# We're splitting the above MR in smaller chunks to facilitate reviews
|
||||
def perform
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
app/workers/clusters/cleanup/project_namespace_worker.rb (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Clusters
|
||||
module Cleanup
|
||||
class ProjectNamespaceWorker
|
||||
include ApplicationWorker
|
||||
include ClusterQueue
|
||||
include ClusterApplications
|
||||
|
||||
# TODO: Merge with https://gitlab.com/gitlab-org/gitlab/merge_requests/16954
|
||||
# We're splitting the above MR in smaller chunks to facilitate reviews
|
||||
def perform
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
app/workers/clusters/cleanup/service_account_worker.rb (new file, 16 lines)
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Clusters
|
||||
module Cleanup
|
||||
class ServiceAccountWorker
|
||||
include ApplicationWorker
|
||||
include ClusterQueue
|
||||
include ClusterApplications
|
||||
|
||||
# TODO: Merge with https://gitlab.com/gitlab-org/gitlab/merge_requests/16954
|
||||
# We're splitting the above MR in smaller chunks to facilitate reviews
|
||||
def perform
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
changelogs/unreleased/34850-fix-graphql-todo-ids.yml (new file, 5 lines)
|
@@ -0,0 +1,5 @@
---
title: Fix Todo IDs in GraphQL API
merge_request: 19068
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Fix previewing quick actions for epics
merge_request: 19042
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Abort only MWPS when FF only merge is impossible
merge_request: 18591
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Add cleanup status to clusters
merge_request: 18144
author:
type: added
@ -100,6 +100,7 @@
|
|||
- [create_evidence, 2]
|
||||
|
||||
# EE-specific queues
|
||||
- [analytics, 1]
|
||||
- [ldap_group_sync, 2]
|
||||
- [create_github_webhook, 2]
|
||||
- [geo, 1]
|
||||
|
|
db/migrate/20190918104731_add_cleanup_status_to_cluster.rb (new file, 21 lines)
|
@ -0,0 +1,21 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddCleanupStatusToCluster < ActiveRecord::Migration[5.2]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
# Set this constant to true if this migration requires downtime.
|
||||
DOWNTIME = false
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_column_with_default(:clusters, :cleanup_status,
|
||||
:smallint,
|
||||
default: 1,
|
||||
allow_null: false)
|
||||
end
|
||||
|
||||
def down
|
||||
remove_column(:clusters, :cleanup_status)
|
||||
end
|
||||
end
|
|
@ -0,0 +1,12 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddCleanupStatusReasonToCluster < ActiveRecord::Migration[5.2]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
# Set this constant to true if this migration requires downtime.
|
||||
DOWNTIME = false
|
||||
|
||||
def change
|
||||
add_column :clusters, :cleanup_status_reason, :text
|
||||
end
|
||||
end
|
|
@ -0,0 +1,19 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddMergeRequestsIndexOnTargetProjectAndBranch < ActiveRecord::Migration[5.2]
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
# Set this constant to true if this migration requires downtime.
|
||||
DOWNTIME = false
|
||||
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_concurrent_index :merge_requests, [:target_project_id, :target_branch],
|
||||
where: "state_id = 1 AND merge_when_pipeline_succeeds = true"
|
||||
end
|
||||
|
||||
def down
|
||||
remove_concurrent_index :merge_requests, [:target_project_id, :target_branch]
|
||||
end
|
||||
end
|
|
@ -1041,6 +1041,8 @@ ActiveRecord::Schema.define(version: 2019_10_26_041447) do
|
|||
t.boolean "managed", default: true, null: false
|
||||
t.boolean "namespace_per_environment", default: true, null: false
|
||||
t.integer "management_project_id"
|
||||
t.integer "cleanup_status", limit: 2, default: 1, null: false
|
||||
t.text "cleanup_status_reason"
|
||||
t.index ["enabled"], name: "index_clusters_on_enabled"
|
||||
t.index ["management_project_id"], name: "index_clusters_on_management_project_id", where: "(management_project_id IS NOT NULL)"
|
||||
t.index ["user_id"], name: "index_clusters_on_user_id"
|
||||
|
@ -2340,6 +2342,7 @@ ActiveRecord::Schema.define(version: 2019_10_26_041447) do
|
|||
t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true
|
||||
t.index ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)"
|
||||
t.index ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id"
|
||||
t.index ["target_project_id", "target_branch"], name: "index_merge_requests_on_target_project_id_and_target_branch", where: "((state_id = 1) AND (merge_when_pipeline_succeeds = true))"
|
||||
t.index ["title"], name: "index_merge_requests_on_title"
|
||||
t.index ["title"], name: "index_merge_requests_on_title_trigram", opclass: :gin_trgm_ops, using: :gin
|
||||
t.index ["updated_by_id"], name: "index_merge_requests_on_updated_by_id", where: "(updated_by_id IS NOT NULL)"
|
||||
|
|
|
@@ -146,6 +146,10 @@ query($project_path: ID!) {
}
```

To ensure that we get consistent ordering, we will append an ordering on the primary
key, in descending order. This is usually `id`, so basically we will add `order(id: :desc)`
to the end of the relation. A primary key _must_ be available on the underlying table.
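For illustration, a minimal sketch of what this looks like in practice, modelled on `Keyset::Connection#ordered_nodes` added later in this commit (the `Issue` relation is only an example):

```ruby
# Sketch: append a primary-key tiebreaker unless the relation already ends with one.
nodes = Issue.order(created_at: :asc)
list  = Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(nodes)

nodes = nodes.order(nodes.arel_table[nodes.primary_key].desc) if list&.last&.attribute_name != nodes.primary_key
```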

### Exposing permissions for a type

To expose permissions the current user has on a resource, you can call
doc/development/testing_guide/end_to_end/feature_flags.md (new file, 25 lines)
|
@@ -0,0 +1,25 @@
# Testing with feature flags

To run a specific test with a feature flag enabled you can use the `QA::Runtime::Feature` class to enable and disable feature flags ([via the API](../../../api/features.md)).

```ruby
context "with feature flag enabled" do
  before do
    Runtime::Feature.enable('feature_flag_name')
  end

  it "feature flag test" do
    # Execute a test with a feature flag enabled
  end

  after do
    Runtime::Feature.disable('feature_flag_name')
  end
end
```

## Running a scenario with a feature flag enabled

It's also possible to run an entire scenario with a feature flag enabled, without having to edit existing tests or write new ones.

Please see the [QA readme](https://gitlab.com/gitlab-org/gitlab/tree/master/qa#running-tests-with-a-feature-flag-enabled) for details.
@@ -130,6 +130,7 @@ Continued reading:
- [Quick Start Guide](quick_start_guide.md)
- [Style Guide](style_guide.md)
- [Best Practices](best_practices.md)
- [Testing with feature flags](feature_flags.md)

## Where can I ask for help?
|
|
@ -6,7 +6,7 @@ module Gitlab
|
|||
def self.use(_schema)
|
||||
GraphQL::Relay::BaseConnection.register_connection_implementation(
|
||||
ActiveRecord::Relation,
|
||||
Gitlab::Graphql::Connections::KeysetConnection
|
||||
Gitlab::Graphql::Connections::Keyset::Connection
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
module Conditions
|
||||
class BaseCondition
|
||||
def initialize(arel_table, names, values, operator, before_or_after)
|
||||
@arel_table, @names, @values, @operator, @before_or_after = arel_table, names, values, operator, before_or_after
|
||||
end
|
||||
|
||||
def build
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :arel_table, :names, :values, :operator, :before_or_after
|
||||
|
||||
def table_condition(attribute, value, operator)
|
||||
case operator
|
||||
when '>'
|
||||
arel_table[attribute].gt(value)
|
||||
when '<'
|
||||
arel_table[attribute].lt(value)
|
||||
when '='
|
||||
arel_table[attribute].eq(value)
|
||||
when 'is_null'
|
||||
arel_table[attribute].eq(nil)
|
||||
when 'is_not_null'
|
||||
arel_table[attribute].not_eq(nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,57 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
module Conditions
|
||||
class NotNullCondition < BaseCondition
|
||||
def build
|
||||
conditions = [first_attribute_condition]
|
||||
|
||||
# If there is only one order field, we can assume it
|
||||
# does not contain NULLs, and don't need additional
|
||||
# conditions
|
||||
unless names.count == 1
|
||||
conditions << [second_attribute_condition, final_condition]
|
||||
end
|
||||
|
||||
conditions.join
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# ex: "(relative_position > 23)"
|
||||
def first_attribute_condition
|
||||
<<~SQL
|
||||
(#{table_condition(names.first, values.first, operator.first).to_sql})
|
||||
SQL
|
||||
end
|
||||
|
||||
# ex: " OR (relative_position = 23 AND id > 500)"
|
||||
def second_attribute_condition
|
||||
condition = <<~SQL
|
||||
OR (
|
||||
#{table_condition(names.first, values.first, '=').to_sql}
|
||||
AND
|
||||
#{table_condition(names[1], values[1], operator[1]).to_sql}
|
||||
)
|
||||
SQL
|
||||
|
||||
condition
|
||||
end
|
||||
|
||||
# ex: " OR (relative_position IS NULL)"
|
||||
def final_condition
|
||||
if before_or_after == :after
|
||||
<<~SQL
|
||||
OR (#{table_condition(names.first, nil, 'is_null').to_sql})
|
||||
SQL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,41 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
module Conditions
|
||||
class NullCondition < BaseCondition
|
||||
def build
|
||||
[first_attribute_condition, final_condition].join
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# ex: "(relative_position IS NULL AND id > 500)"
|
||||
def first_attribute_condition
|
||||
condition = <<~SQL
|
||||
(
|
||||
#{table_condition(names.first, nil, 'is_null').to_sql}
|
||||
AND
|
||||
#{table_condition(names[1], values[1], operator[1]).to_sql}
|
||||
)
|
||||
SQL
|
||||
|
||||
condition
|
||||
end
|
||||
|
||||
# ex: " OR (relative_position IS NOT NULL)"
|
||||
def final_condition
|
||||
if before_or_after == :before
|
||||
<<~SQL
|
||||
OR (#{table_condition(names.first, nil, 'is_not_null').to_sql})
|
||||
SQL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
lib/gitlab/graphql/connections/keyset/connection.rb (new file, 148 lines)
|
@ -0,0 +1,148 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# Keyset::Connection provides cursor based pagination, to avoid using OFFSET.
|
||||
# It basically sorts / filters using WHERE sorting_value > cursor.
|
||||
# We do this for performance reasons (https://gitlab.com/gitlab-org/gitlab-foss/issues/45756),
|
||||
# as well as for having stable pagination
|
||||
# https://graphql-ruby.org/pro/cursors.html#whats-the-difference
|
||||
# https://coderwall.com/p/lkcaag/pagination-you-re-probably-doing-it-wrong
|
||||
#
|
||||
# It currently supports sorting on two columns, but the last column must
|
||||
# be the primary key. For example
|
||||
#
|
||||
# Issue.order(created_at: :asc).order(:id)
|
||||
# Issue.order(due_date: :asc).order(:id)
|
||||
#
|
||||
# It will tolerate non-attribute ordering, but only attributes determine the cursor.
|
||||
# For example, this is legitimate:
|
||||
#
|
||||
# Issue.order('issues.due_date IS NULL').order(due_date: :asc).order(:id)
|
||||
#
|
||||
# but anything more complex has a chance of not working.
|
||||
#
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
class Connection < GraphQL::Relay::BaseConnection
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
# TODO https://gitlab.com/gitlab-org/gitlab/issues/35104
|
||||
include Gitlab::Graphql::Connections::Keyset::LegacyKeysetConnection
|
||||
|
||||
def cursor_from_node(node)
|
||||
return legacy_cursor_from_node(node) if use_legacy_pagination?
|
||||
|
||||
encoded_json_from_ordering(node)
|
||||
end
|
||||
|
||||
def sliced_nodes
|
||||
return legacy_sliced_nodes if use_legacy_pagination?
|
||||
|
||||
@sliced_nodes ||=
|
||||
begin
|
||||
OrderInfo.validate_ordering(ordered_nodes, order_list)
|
||||
|
||||
sliced = ordered_nodes
|
||||
sliced = slice_nodes(sliced, before, :before) if before.present?
|
||||
sliced = slice_nodes(sliced, after, :after) if after.present?
|
||||
|
||||
sliced
|
||||
end
|
||||
end
|
||||
|
||||
def paged_nodes
|
||||
# These are the nodes that will be loaded into memory for rendering
|
||||
# So we're ok loading them into memory here as that's bound to happen
|
||||
# anyway. Having them ready means we can modify the result while
|
||||
# rendering the fields.
|
||||
@paged_nodes ||= load_paged_nodes.to_a
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def load_paged_nodes
|
||||
if first && last
|
||||
raise Gitlab::Graphql::Errors::ArgumentError.new("Can only provide either `first` or `last`, not both")
|
||||
end
|
||||
|
||||
if last
|
||||
sliced_nodes.last(limit_value)
|
||||
else
|
||||
sliced_nodes.limit(limit_value) # rubocop: disable CodeReuse/ActiveRecord
|
||||
end
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def slice_nodes(sliced, encoded_cursor, before_or_after)
|
||||
decoded_cursor = ordering_from_encoded_json(encoded_cursor)
|
||||
builder = QueryBuilder.new(arel_table, order_list, decoded_cursor, before_or_after)
|
||||
ordering = builder.conditions
|
||||
|
||||
sliced.where(*ordering).where.not(id: decoded_cursor['id'])
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
def limit_value
|
||||
@limit_value ||= [first, last, max_page_size].compact.min
|
||||
end
|
||||
|
||||
def ordered_nodes
|
||||
strong_memoize(:order_nodes) do
|
||||
unless nodes.primary_key.present?
|
||||
raise ArgumentError.new('Relation must have a primary key')
|
||||
end
|
||||
|
||||
list = OrderInfo.build_order_list(nodes)
|
||||
|
||||
# ensure there is a primary key ordering
|
||||
if list&.last&.attribute_name != nodes.primary_key
|
||||
nodes.order(arel_table[nodes.primary_key].desc) # rubocop: disable CodeReuse/ActiveRecord
|
||||
else
|
||||
nodes
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def order_list
|
||||
strong_memoize(:order_list) do
|
||||
OrderInfo.build_order_list(ordered_nodes)
|
||||
end
|
||||
end
|
||||
|
||||
def arel_table
|
||||
nodes.arel_table
|
||||
end
|
||||
|
||||
# Storing the current order values in the cursor allows us to
|
||||
# make an intelligent decision on handling NULL values.
|
||||
# Otherwise we would either need to fetch the record first,
|
||||
# or fetch it in the SQL, significantly complicating it.
|
||||
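# For example, for a node ordered on (relative_position, id) the decoded cursor looks
# roughly like { 'relative_position' => '1500', 'id' => '500' }; values are stored as
# strings (illustrative sketch based on #encoded_json_from_ordering below).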
def encoded_json_from_ordering(node)
|
||||
ordering = { 'id' => node[:id].to_s }
|
||||
|
||||
order_list.each do |field|
|
||||
field_name = field.attribute_name
|
||||
ordering[field_name] = node[field_name].to_s
|
||||
end
|
||||
|
||||
encode(ordering.to_json)
|
||||
end
|
||||
|
||||
def ordering_from_encoded_json(cursor)
|
||||
JSON.parse(decode(cursor))
|
||||
rescue JSON::ParserError
|
||||
# for the transition period where a client might request using an
|
||||
# old style cursor. Once removed, make it an error:
|
||||
# raise Gitlab::Graphql::Errors::ArgumentError, "Please provide a valid cursor"
|
||||
# TODO can be removed in next release
|
||||
# https://gitlab.com/gitlab-org/gitlab/issues/32933
|
||||
field_name = order_list.first.attribute_name
|
||||
|
||||
{ field_name => decode(cursor) }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,66 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
# TODO https://gitlab.com/gitlab-org/gitlab/issues/35104
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
module LegacyKeysetConnection
|
||||
def legacy_cursor_from_node(node)
|
||||
encode(node[legacy_order_field].to_s)
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def legacy_sliced_nodes
|
||||
@sliced_nodes ||=
|
||||
begin
|
||||
sliced = nodes
|
||||
|
||||
sliced = sliced.where(legacy_before_slice) if before.present?
|
||||
sliced = sliced.where(legacy_after_slice) if after.present?
|
||||
|
||||
sliced
|
||||
end
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
private
|
||||
|
||||
def use_legacy_pagination?
|
||||
strong_memoize(:feature_disabled) do
|
||||
Feature.disabled?(:graphql_keyset_pagination, default_enabled: true)
|
||||
end
|
||||
end
|
||||
|
||||
def legacy_before_slice
|
||||
if legacy_sort_direction == :asc
|
||||
arel_table[legacy_order_field].lt(decode(before))
|
||||
else
|
||||
arel_table[legacy_order_field].gt(decode(before))
|
||||
end
|
||||
end
|
||||
|
||||
def legacy_after_slice
|
||||
if legacy_sort_direction == :asc
|
||||
arel_table[legacy_order_field].gt(decode(after))
|
||||
else
|
||||
arel_table[legacy_order_field].lt(decode(after))
|
||||
end
|
||||
end
|
||||
|
||||
def legacy_order_info
|
||||
@legacy_order_info ||= nodes.order_values.first
|
||||
end
|
||||
|
||||
def legacy_order_field
|
||||
@legacy_order_field ||= legacy_order_info&.expr&.name || nodes.primary_key
|
||||
end
|
||||
|
||||
def legacy_sort_direction
|
||||
@legacy_order_direction ||= legacy_order_info&.direction || :desc
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
lib/gitlab/graphql/connections/keyset/order_info.rb (new file, 66 lines)
|
@ -0,0 +1,66 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
class OrderInfo
|
||||
def initialize(order_value)
|
||||
@order_value = order_value
|
||||
end
|
||||
|
||||
def attribute_name
|
||||
order_value.expr.name
|
||||
end
|
||||
|
||||
def operator_for(before_or_after)
|
||||
case before_or_after
|
||||
when :before
|
||||
sort_direction == :asc ? '<' : '>'
|
||||
when :after
|
||||
sort_direction == :asc ? '>' : '<'
|
||||
end
|
||||
end
|
||||
|
||||
# Only allow specific node types. For example ignore String nodes
|
||||
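# e.g. `Issue.order('issues.due_date IS NULL')` adds a string node to order_values;
# such nodes are skipped here, matching the example in the Connection class comment.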
def self.build_order_list(relation)
|
||||
order_list = relation.order_values.select do |value|
|
||||
value.is_a?(Arel::Nodes::Ascending) || value.is_a?(Arel::Nodes::Descending)
|
||||
end
|
||||
|
||||
order_list.map { |info| OrderInfo.new(info) }
|
||||
end
|
||||
|
||||
def self.validate_ordering(relation, order_list)
|
||||
if order_list.empty?
|
||||
raise ArgumentError.new('A minimum of 1 ordering field is required')
|
||||
end
|
||||
|
||||
if order_list.count > 2
|
||||
raise ArgumentError.new('A maximum of 2 ordering fields are allowed')
|
||||
end
|
||||
|
||||
# make sure the last ordering field is non-nullable
|
||||
attribute_name = order_list.last&.attribute_name
|
||||
|
||||
if relation.columns_hash[attribute_name].null
|
||||
raise ArgumentError.new("Column `#{attribute_name}` must not allow NULL")
|
||||
end
|
||||
|
||||
if order_list.last.attribute_name != relation.primary_key
|
||||
raise ArgumentError.new("Last ordering field must be the primary key, `#{relation.primary_key}`")
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :order_value
|
||||
|
||||
def sort_direction
|
||||
order_value.direction
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
lib/gitlab/graphql/connections/keyset/query_builder.rb (new file, 68 lines)
|
@ -0,0 +1,68 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
module Keyset
|
||||
class QueryBuilder
|
||||
def initialize(arel_table, order_list, decoded_cursor, before_or_after)
|
||||
@arel_table, @order_list, @decoded_cursor, @before_or_after = arel_table, order_list, decoded_cursor, before_or_after
|
||||
|
||||
if order_list.empty?
|
||||
raise ArgumentError.new('No ordering scopes have been supplied')
|
||||
end
|
||||
end
|
||||
|
||||
# Based on whether the main field we're ordering on is NULL in the
|
||||
# cursor, we can more easily target our query condition.
|
||||
# We assume that the last ordering field is unique, meaning
|
||||
# it will not contain NULLs.
|
||||
# We currently only support two ordering fields.
|
||||
#
|
||||
# Example of the conditions for
|
||||
# relation: Issue.order(relative_position: :asc).order(id: :asc)
|
||||
# after cursor: relative_position: 1500, id: 500
|
||||
#
|
||||
# when cursor[relative_position] is not NULL
|
||||
#
|
||||
# ("issues"."relative_position" > 1500)
|
||||
# OR (
|
||||
# "issues"."relative_position" = 1500
|
||||
# AND
|
||||
# "issues"."id" > 500
|
||||
# )
|
||||
# OR ("issues"."relative_position" IS NULL)
|
||||
#
|
||||
# when cursor[relative_position] is NULL
|
||||
#
|
||||
# "issues"."relative_position" IS NULL
|
||||
# AND
|
||||
# "issues"."id" > 500
|
||||
#
|
||||
def conditions
|
||||
attr_names = order_list.map { |field| field.attribute_name }
|
||||
attr_values = attr_names.map { |name| decoded_cursor[name] }
|
||||
|
||||
if attr_names.count == 1 && attr_values.first.nil?
|
||||
raise Gitlab::Graphql::Errors::ArgumentError.new('Before/after cursor invalid: `nil` was provided as only sortable value')
|
||||
end
|
||||
|
||||
if attr_names.count == 1 || attr_values.first.present?
|
||||
Keyset::Conditions::NotNullCondition.new(arel_table, attr_names, attr_values, operators, before_or_after).build
|
||||
else
|
||||
Keyset::Conditions::NullCondition.new(arel_table, attr_names, attr_values, operators, before_or_after).build
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :arel_table, :order_list, :decoded_cursor, :before_or_after
|
||||
|
||||
def operators
|
||||
order_list.map { |field| field.operator_for(before_or_after) }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,85 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module Graphql
|
||||
module Connections
|
||||
class KeysetConnection < GraphQL::Relay::BaseConnection
|
||||
def cursor_from_node(node)
|
||||
encode(node[order_field].to_s)
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def sliced_nodes
|
||||
@sliced_nodes ||=
|
||||
begin
|
||||
sliced = nodes
|
||||
|
||||
sliced = sliced.where(before_slice) if before.present?
|
||||
sliced = sliced.where(after_slice) if after.present?
|
||||
|
||||
sliced
|
||||
end
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
def paged_nodes
|
||||
# These are the nodes that will be loaded into memory for rendering
|
||||
# So we're ok loading them into memory here as that's bound to happen
|
||||
# anyway. Having them ready means we can modify the result while
|
||||
# rendering the fields.
|
||||
@paged_nodes ||= load_paged_nodes.to_a
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def load_paged_nodes
|
||||
if first && last
|
||||
raise Gitlab::Graphql::Errors::ArgumentError.new("Can only provide either `first` or `last`, not both")
|
||||
end
|
||||
|
||||
if last
|
||||
sliced_nodes.last(limit_value)
|
||||
else
|
||||
sliced_nodes.limit(limit_value) # rubocop: disable CodeReuse/ActiveRecord
|
||||
end
|
||||
end
|
||||
|
||||
def before_slice
|
||||
if sort_direction == :asc
|
||||
table[order_field].lt(decode(before))
|
||||
else
|
||||
table[order_field].gt(decode(before))
|
||||
end
|
||||
end
|
||||
|
||||
def after_slice
|
||||
if sort_direction == :asc
|
||||
table[order_field].gt(decode(after))
|
||||
else
|
||||
table[order_field].lt(decode(after))
|
||||
end
|
||||
end
|
||||
|
||||
def limit_value
|
||||
@limit_value ||= [first, last, max_page_size].compact.min
|
||||
end
|
||||
|
||||
def table
|
||||
nodes.arel_table
|
||||
end
|
||||
|
||||
def order_info
|
||||
@order_info ||= nodes.order_values.first
|
||||
end
|
||||
|
||||
def order_field
|
||||
@order_field ||= order_info&.expr&.name || nodes.primary_key
|
||||
end
|
||||
|
||||
def sort_direction
|
||||
@order_direction ||= order_info&.direction || :desc
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa.rb (13 changed lines)
|
@ -331,6 +331,7 @@ module QA
|
|||
|
||||
module Component
|
||||
autoload :IpLimits, 'qa/page/admin/settings/component/ip_limits'
|
||||
autoload :OutboundRequests, 'qa/page/admin/settings/component/outbound_requests'
|
||||
autoload :RepositoryStorage, 'qa/page/admin/settings/component/repository_storage'
|
||||
autoload :AccountAndLimit, 'qa/page/admin/settings/component/account_and_limit'
|
||||
autoload :PerformanceBar, 'qa/page/admin/settings/component/performance_bar'
|
||||
|
@ -406,6 +407,7 @@ module QA
|
|||
|
||||
module DockerRun
|
||||
autoload :Base, 'qa/service/docker_run/base'
|
||||
autoload :Jenkins, 'qa/service/docker_run/jenkins'
|
||||
autoload :LDAP, 'qa/service/docker_run/ldap'
|
||||
autoload :Maven, 'qa/service/docker_run/maven'
|
||||
autoload :NodeJs, 'qa/service/docker_run/node_js'
|
||||
|
@ -438,6 +440,17 @@ module QA
|
|||
end
|
||||
end
|
||||
|
||||
module Jenkins
|
||||
module Page
|
||||
autoload :Base, 'qa/vendor/jenkins/page/base'
|
||||
autoload :Login, 'qa/vendor/jenkins/page/login'
|
||||
autoload :Configure, 'qa/vendor/jenkins/page/configure'
|
||||
autoload :NewCredentials, 'qa/vendor/jenkins/page/new_credentials'
|
||||
autoload :NewJob, 'qa/vendor/jenkins/page/new_job'
|
||||
autoload :ConfigureJob, 'qa/vendor/jenkins/page/configure_job'
|
||||
end
|
||||
end
|
||||
|
||||
module Github
|
||||
module Page
|
||||
autoload :Base, 'qa/vendor/github/page/base'
|
||||
|
|
qa/qa/page/admin/settings/component/outbound_requests.rb (new file, 33 lines)
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Page
|
||||
module Admin
|
||||
module Settings
|
||||
module Component
|
||||
class OutboundRequests < Page::Base
|
||||
view 'app/views/admin/application_settings/_outbound.html.haml' do
|
||||
element :allow_requests_from_services_checkbox
|
||||
element :save_changes_button
|
||||
end
|
||||
|
||||
def allow_requests_to_local_network_from_services
|
||||
check_allow_requests_to_local_network_from_services_checkbox
|
||||
click_save_changes_button
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def check_allow_requests_to_local_network_from_services_checkbox
|
||||
check_element :allow_requests_from_services_checkbox
|
||||
end
|
||||
|
||||
def click_save_changes_button
|
||||
click_element :save_changes_button
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -9,6 +9,7 @@ module QA
|
|||
|
||||
view 'app/views/admin/application_settings/network.html.haml' do
|
||||
element :ip_limits_section
|
||||
element :outbound_requests_section
|
||||
end
|
||||
|
||||
def expand_ip_limits(&block)
|
||||
|
@ -16,6 +17,12 @@ module QA
|
|||
Component::IpLimits.perform(&block)
|
||||
end
|
||||
end
|
||||
|
||||
def expand_outbound_requests(&block)
|
||||
expand_section(:outbound_requests_section) do
|
||||
Component::OutboundRequests.perform(&block)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -20,7 +20,7 @@ module QA
|
|||
element :admin_area_link
|
||||
element :projects_dropdown, required: true
|
||||
element :groups_dropdown, required: true
|
||||
element :more_dropdown, required: true
|
||||
element :more_dropdown
|
||||
element :snippets_link
|
||||
end
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ module QA
|
|||
element :settings_item
|
||||
element :link_members_settings
|
||||
element :general_settings_link
|
||||
element :integrations_settings_link
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -55,6 +56,14 @@ module QA
|
|||
end
|
||||
end
|
||||
|
||||
def go_to_integrations_settings
|
||||
hover_settings do
|
||||
within_submenu do
|
||||
click_element :integrations_settings_link
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def hover_settings
|
||||
|
|
|
@ -9,6 +9,7 @@ module QA
|
|||
include Members
|
||||
|
||||
attr_writer :initialize_with_readme
|
||||
attr_writer :auto_devops_enabled
|
||||
attr_writer :visibility
|
||||
|
||||
attribute :id
|
||||
|
@ -47,6 +48,7 @@ module QA
|
|||
@standalone = false
|
||||
@description = 'My awesome project'
|
||||
@initialize_with_readme = false
|
||||
@auto_devops_enabled = true
|
||||
@visibility = 'public'
|
||||
end
|
||||
|
||||
|
@ -101,7 +103,8 @@ module QA
|
|||
name: name,
|
||||
description: description,
|
||||
visibility: @visibility,
|
||||
initialize_with_readme: @initialize_with_readme
|
||||
initialize_with_readme: @initialize_with_readme,
|
||||
auto_devops_enabled: @auto_devops_enabled
|
||||
}
|
||||
|
||||
unless @standalone
|
||||
|
|
qa/qa/service/docker_run/jenkins.rb (new file, 43 lines)
|
@ -0,0 +1,43 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Service
|
||||
module DockerRun
|
||||
class Jenkins < Base
|
||||
def initialize
|
||||
@image = 'registry.gitlab.com/gitlab-org/gitlab-qa/jenkins-gitlab:version1'
|
||||
@name = 'jenkins-server'
|
||||
@port = '8080'
|
||||
super()
|
||||
end
|
||||
|
||||
def host_address
|
||||
"http://#{host_name}:#{@port}"
|
||||
end
|
||||
|
||||
def host_name
|
||||
return 'localhost' unless QA::Runtime::Env.running_in_ci?
|
||||
|
||||
super
|
||||
end
|
||||
|
||||
def register!
|
||||
command = <<~CMD.tr("\n", ' ')
|
||||
docker run -d --rm
|
||||
--network #{network}
|
||||
--hostname #{host_name}
|
||||
--name #{@name}
|
||||
--env JENKINS_HOME=jenkins_home
|
||||
--publish #{@port}:8080
|
||||
--publish 50000:50000
|
||||
#{@image}
|
||||
CMD
|
||||
|
||||
command.gsub!("--network #{network} ", '') unless QA::Runtime::Env.running_in_ci?
|
||||
|
||||
shell command
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/base.rb (new file, 24 lines, vendored)
|
@ -0,0 +1,24 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class Base
|
||||
include Capybara::DSL
|
||||
include Scenario::Actable
|
||||
|
||||
attr_reader :path
|
||||
|
||||
class << self
|
||||
attr_accessor :host
|
||||
end
|
||||
|
||||
def visit!
|
||||
page.visit URI.join(Base.host, path).to_s
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/configure.rb (new file, 48 lines, vendored)
|
@ -0,0 +1,48 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class Configure < Page::Base
|
||||
def initialize
|
||||
@path = 'configure'
|
||||
end
|
||||
|
||||
def visit_and_setup_gitlab_connection(gitlab_host, token_description)
|
||||
visit!
|
||||
fill_in '_.name', with: 'GitLab'
|
||||
find('.setting-name', text: "Gitlab host URL").find(:xpath, "..").find('input').set gitlab_host
|
||||
|
||||
dropdown_element = find('.setting-name', text: "Credentials").find(:xpath, "..").find('select')
|
||||
|
||||
QA::Support::Retrier.retry_until(exit_on_failure: true) do
|
||||
dropdown_element.select "GitLab API token (#{token_description})"
|
||||
dropdown_element.value != ''
|
||||
end
|
||||
|
||||
yield if block_given?
|
||||
|
||||
click_save
|
||||
end
|
||||
|
||||
def click_test_connection
|
||||
click_on 'Test Connection'
|
||||
end
|
||||
|
||||
def has_success?
|
||||
has_css?('div.ok', text: "Success")
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def click_save
|
||||
click_on 'Save'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/configure_job.rb (new file, 62 lines, vendored)
|
@ -0,0 +1,62 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class ConfigureJob < Page::Base
|
||||
attr_accessor :job_name
|
||||
|
||||
def initialize
|
||||
@path = "/job/#{@job_name}/configure"
|
||||
end
|
||||
|
||||
def configure(scm_url:)
|
||||
set_git_source_code_management_url(scm_url)
|
||||
click_build_when_change_is_pushed_to_gitlab
|
||||
set_publish_status_to_gitlab
|
||||
click_save
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def set_git_source_code_management_url(repository_url)
|
||||
select_git_source_code_management
|
||||
set_repository_url(repository_url)
|
||||
end
|
||||
|
||||
def click_build_when_change_is_pushed_to_gitlab
|
||||
find('label', text: 'Build when a change is pushed to GitLab').find(:xpath, "..").find('input').click
|
||||
end
|
||||
|
||||
def set_publish_status_to_gitlab
|
||||
click_add_post_build_action
|
||||
select_publish_build_status_to_gitlab
|
||||
end
|
||||
|
||||
def click_save
|
||||
click_on 'Save'
|
||||
end
|
||||
|
||||
def select_git_source_code_management
|
||||
find('#radio-block-1').click
|
||||
end
|
||||
|
||||
def set_repository_url(repository_url)
|
||||
find('.setting-name', text: "Repository URL").find(:xpath, "..").find('input').set repository_url
|
||||
end
|
||||
|
||||
def click_add_post_build_action
|
||||
click_on "Add post-build action"
|
||||
end
|
||||
|
||||
def select_publish_build_status_to_gitlab
|
||||
click_link "Publish build status to GitLab"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/login.rb (new file, 31 lines, vendored)
|
@ -0,0 +1,31 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class Login < Page::Base
|
||||
def initialize
|
||||
@path = 'login'
|
||||
end
|
||||
|
||||
def visit!
|
||||
super
|
||||
|
||||
QA::Support::Retrier.retry_until(sleep_interval: 3, reload_page: page, max_attempts: 20, exit_on_failure: true) do
|
||||
page.has_text? 'Welcome to Jenkins!'
|
||||
end
|
||||
end
|
||||
|
||||
def login
|
||||
fill_in 'j_username', with: 'admin'
|
||||
fill_in 'j_password', with: 'password'
|
||||
click_on 'Sign in'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/new_credentials.rb (new file, 50 lines, vendored)
|
@ -0,0 +1,50 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class NewCredentials < Page::Base
|
||||
def initialize
|
||||
@path = 'credentials/store/system/domain/_/newCredentials'
|
||||
end
|
||||
|
||||
def visit_and_set_gitlab_api_token(api_token, description)
|
||||
visit!
|
||||
wait_for_page_to_load
|
||||
select_gitlab_api_token
|
||||
set_api_token(api_token)
|
||||
set_description(description)
|
||||
click_ok
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def select_gitlab_api_token
|
||||
find('.setting-name', text: "Kind").find(:xpath, "..").find('select').select "GitLab API token"
|
||||
end
|
||||
|
||||
def set_api_token(api_token)
|
||||
fill_in '_.apiToken', with: api_token
|
||||
end
|
||||
|
||||
def set_description(description)
|
||||
fill_in '_.description', with: description
|
||||
end
|
||||
|
||||
def click_ok
|
||||
click_on 'OK'
|
||||
end
|
||||
|
||||
def wait_for_page_to_load
|
||||
QA::Support::Waiter.wait(interval: 1.0) do
|
||||
page.has_css?('.setting-name', text: "Description")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/vendor/jenkins/page/new_job.rb (new file, 38 lines, vendored)
|
@ -0,0 +1,38 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'capybara/dsl'
|
||||
|
||||
module QA
|
||||
module Vendor
|
||||
module Jenkins
|
||||
module Page
|
||||
class NewJob < Page::Base
|
||||
def initialize
|
||||
@path = 'newJob'
|
||||
end
|
||||
|
||||
def visit_and_create_new_job_with_name(new_job_name)
|
||||
visit!
|
||||
set_new_job_name(new_job_name)
|
||||
click_free_style_project
|
||||
click_ok
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def set_new_job_name(new_job_name)
|
||||
fill_in 'name', with: new_job_name
|
||||
end
|
||||
|
||||
def click_free_style_project
|
||||
find('.hudson_model_FreeStyleProject').click
|
||||
end
|
||||
|
||||
def click_ok
|
||||
click_on 'OK'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -93,5 +93,25 @@ FactoryBot.define do
    trait :not_managed do
      managed { false }
    end

    trait :cleanup_not_started do
      cleanup_status { 1 }
    end

    trait :cleanup_uninstalling_applications do
      cleanup_status { 2 }
    end

    trait :cleanup_removing_project_namespaces do
      cleanup_status { 3 }
    end

    trait :cleanup_removing_service_account do
      cleanup_status { 4 }
    end

    trait :cleanup_errored do
      cleanup_status { 5 }
    end
  end
end
@@ -259,7 +259,8 @@ describe 'Gitlab::Graphql::Authorization' do
  let(:project_type) do |type|
    type_factory do |type|
      type.graphql_name 'FakeProjectType'
      type.field :test_issues, issue_type.connection_type, null: false, resolve: -> (_, _, _) { Issue.where(project: [visible_project, other_project]) }
      type.field :test_issues, issue_type.connection_type, null: false,
                 resolve: -> (_, _, _) { Issue.where(project: [visible_project, other_project]).order(id: :asc) }
    end
  end
  let(:query_type) do

@@ -36,7 +36,7 @@ describe GitlabSchema do
  it 'paginates active record relations using `Gitlab::Graphql::Connections::KeysetConnection`' do
    connection = GraphQL::Relay::BaseConnection::CONNECTION_IMPLEMENTATIONS[ActiveRecord::Relation.name]

    expect(connection).to eq(Gitlab::Graphql::Connections::KeysetConnection)
    expect(connection).to eq(Gitlab::Graphql::Connections::Keyset::Connection)
  end

  describe '.execute' do

@@ -75,6 +75,12 @@ describe GitlabRoutingHelper do
      expect(preview_markdown_path(group)).to eq("/groups/#{group.path}/preview_markdown")
    end

    it 'returns group preview markdown path for a group parent with args' do
      group = create(:group)

      expect(preview_markdown_path(group, { type_id: 5 })).to eq("/groups/#{group.path}/preview_markdown?type_id=5")
    end

    it 'returns project preview markdown path for a project parent' do
      expect(preview_markdown_path(project)).to eq("/#{project.full_path}/preview_markdown")
    end
@@ -0,0 +1,56 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
  describe '#build' do
    let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [1500, 500], ['>', '>'], before_or_after) }

    context 'when there is only one ordering field' do
      let(:condition) { described_class.new(Issue.arel_table, ['id'], [500], ['>'], :after) }

      it 'generates a single condition sql' do
        expected_sql = <<~SQL
          ("issues"."id" > 500)
        SQL

        expect(condition.build.squish).to eq expected_sql.squish
      end
    end

    context 'when :after' do
      let(:before_or_after) { :after }

      it 'generates :after sql' do
        expected_sql = <<~SQL
          ("issues"."relative_position" > 1500)
          OR (
            "issues"."relative_position" = 1500
            AND
            "issues"."id" > 500
          )
          OR ("issues"."relative_position" IS NULL)
        SQL

        expect(condition.build.squish).to eq expected_sql.squish
      end
    end

    context 'when :before' do
      let(:before_or_after) { :before }

      it 'generates :before sql' do
        expected_sql = <<~SQL
          ("issues"."relative_position" > 1500)
          OR (
            "issues"."relative_position" = 1500
            AND
            "issues"."id" > 500
          )
        SQL

        expect(condition.build.squish).to eq expected_sql.squish
      end
    end
  end
end
@@ -0,0 +1,42 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
  describe '#build' do
    let(:condition) { described_class.new(Issue.arel_table, %w(relative_position id), [nil, 500], [nil, '>'], before_or_after) }

    context 'when :after' do
      let(:before_or_after) { :after }

      it 'generates sql' do
        expected_sql = <<~SQL
          (
            "issues"."relative_position" IS NULL
            AND
            "issues"."id" > 500
          )
        SQL

        expect(condition.build.squish).to eq expected_sql.squish
      end
    end

    context 'when :before' do
      let(:before_or_after) { :before }

      it 'generates :before sql' do
        expected_sql = <<~SQL
          (
            "issues"."relative_position" IS NULL
            AND
            "issues"."id" > 500
          )
          OR ("issues"."relative_position" IS NOT NULL)
        SQL

        expect(condition.build.squish).to eq expected_sql.squish
      end
    end
  end
end
303 spec/lib/gitlab/graphql/connections/keyset/connection_spec.rb Normal file
@@ -0,0 +1,303 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::Connection do
  let(:nodes) { Project.all.order(id: :asc) }
  let(:arguments) { {} }
  subject(:connection) do
    described_class.new(nodes, arguments, max_page_size: 3)
  end

  def encoded_cursor(node)
    described_class.new(nodes, {}).cursor_from_node(node)
  end

  def decoded_cursor(cursor)
    JSON.parse(Base64Bp.urlsafe_decode64(cursor))
  end

  describe '#cursor_from_nodes' do
    let(:project) { create(:project) }
    let(:cursor) { connection.cursor_from_node(project) }

    it 'returns an encoded ID' do
      expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
    end

    context 'when an order is specified' do
      let(:nodes) { Project.order(:updated_at) }

      it 'returns the encoded value of the order' do
        expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
      end

      it 'includes the :id even when not specified in the order' do
        expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
      end
    end

    context 'when multiple orders are specified' do
      let(:nodes) { Project.order(:updated_at).order(:created_at) }

      it 'returns the encoded value of the order' do
        expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
      end
    end

    context 'when multiple orders with SQL are specified' do
      let(:nodes) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }

      it 'returns the encoded value of the order' do
        expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
      end
    end
  end

  describe '#sliced_nodes' do
    let(:projects) { create_list(:project, 4) }

    context 'when before is passed' do
      let(:arguments) { { before: encoded_cursor(projects[1]) } }

      it 'only returns the project before the selected one' do
        expect(subject.sliced_nodes).to contain_exactly(projects.first)
      end

      context 'when the sort order is descending' do
        let(:nodes) { Project.all.order(id: :desc) }

        it 'returns the correct nodes' do
          expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
        end
      end
    end

    context 'when after is passed' do
      let(:arguments) { { after: encoded_cursor(projects[1]) } }

      it 'only returns the project before the selected one' do
        expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
      end

      context 'when the sort order is descending' do
        let(:nodes) { Project.all.order(id: :desc) }

        it 'returns the correct nodes' do
          expect(subject.sliced_nodes).to contain_exactly(projects.first)
        end
      end
    end

    context 'when both before and after are passed' do
      let(:arguments) do
        {
          after: encoded_cursor(projects[1]),
          before: encoded_cursor(projects[3])
        }
      end

      it 'returns the expected set' do
        expect(subject.sliced_nodes).to contain_exactly(projects[2])
      end
    end

    context 'when multiple orders are defined' do
      let!(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
      let!(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
      let!(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
      let!(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
      let!(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4

      context 'when ascending' do
        let(:nodes) do
          Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc)
        end

        context 'when no cursor is passed' do
          let(:arguments) { {} }

          it 'returns projects in ascending order' do
            expect(subject.sliced_nodes).to eq([project5, project1, project3, project2, project4])
          end
        end

        context 'when before cursor value is NULL' do
          let(:arguments) { { before: encoded_cursor(project4) } }

          it 'returns all projects before the cursor' do
            expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
          end
        end

        context 'when before cursor value is not NULL' do
          let(:arguments) { { before: encoded_cursor(project3) } }

          it 'returns all projects before the cursor' do
            expect(subject.sliced_nodes).to eq([project5, project1])
          end
        end

        context 'when after cursor value is NULL' do
          let(:arguments) { { after: encoded_cursor(project2) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project4])
          end
        end

        context 'when after cursor value is not NULL' do
          let(:arguments) { { after: encoded_cursor(project1) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project3, project2, project4])
          end
        end

        context 'when before and after cursor' do
          let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project5) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project1, project3, project2])
          end
        end
      end

      context 'when descending' do
        let(:nodes) do
          Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc)
        end

        context 'when no cursor is passed' do
          let(:arguments) { {} }

          it 'only returns projects in descending order' do
            expect(subject.sliced_nodes).to eq([project3, project1, project5, project2, project4])
          end
        end

        context 'when before cursor value is NULL' do
          let(:arguments) { { before: encoded_cursor(project4) } }

          it 'returns all projects before the cursor' do
            expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
          end
        end

        context 'when before cursor value is not NULL' do
          let(:arguments) { { before: encoded_cursor(project5) } }

          it 'returns all projects before the cursor' do
            expect(subject.sliced_nodes).to eq([project3, project1])
          end
        end

        context 'when after cursor value is NULL' do
          let(:arguments) { { after: encoded_cursor(project2) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project4])
          end
        end

        context 'when after cursor value is not NULL' do
          let(:arguments) { { after: encoded_cursor(project1) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project5, project2, project4])
          end
        end

        context 'when before and after cursor' do
          let(:arguments) { { before: encoded_cursor(project4), after: encoded_cursor(project3) } }

          it 'returns all projects after the cursor' do
            expect(subject.sliced_nodes).to eq([project1, project5, project2])
          end
        end
      end
    end

    # TODO Enable this as part of below issue
    # https://gitlab.com/gitlab-org/gitlab/issues/32933
    # context 'when an invalid cursor is provided' do
    #   let(:arguments) { { before: 'invalidcursor' } }
    #
    #   it 'raises an error' do
    #     expect { expect(subject.sliced_nodes) }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
    #   end
    # end

    # TODO Remove this as part of below issue
    # https://gitlab.com/gitlab-org/gitlab/issues/32933
    context 'when an old style cursor is provided' do
      let(:arguments) { { before: Base64Bp.urlsafe_encode64(projects[1].id.to_s, padding: false) } }

      it 'only returns the project before the selected one' do
        expect(subject.sliced_nodes).to contain_exactly(projects.first)
      end
    end
  end

  describe '#paged_nodes' do
    let!(:projects) { create_list(:project, 5) }

    it 'returns the collection limited to max page size' do
      expect(subject.paged_nodes.size).to eq(3)
    end

    it 'is a loaded memoized array' do
      expect(subject.paged_nodes).to be_an(Array)
      expect(subject.paged_nodes.object_id).to eq(subject.paged_nodes.object_id)
    end

    context 'when `first` is passed' do
      let(:arguments) { { first: 2 } }

      it 'returns only the first elements' do
        expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
      end
    end

    context 'when `last` is passed' do
      let(:arguments) { { last: 2 } }

      it 'returns only the last elements' do
        expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
      end
    end

    context 'when both are passed' do
      let(:arguments) { { first: 2, last: 2 } }

      it 'raises an error' do
        expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
      end
    end

    context 'when primary key is not in original order' do
      let(:nodes) { Project.order(last_repository_check_at: :desc) }

      it 'is added to end' do
        sliced = subject.sliced_nodes
        last_order_name = sliced.order_values.last.expr.name

        expect(last_order_name).to eq sliced.primary_key
      end
    end

    context 'when there is no primary key' do
      let(:nodes) { NoPrimaryKey.all }

      it 'raises an error' do
        expect(NoPrimaryKey.primary_key).to be_nil
        expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
      end
    end
  end

  class NoPrimaryKey < ActiveRecord::Base
    self.table_name = 'no_primary_key'
    self.primary_key = nil
  end
end
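The helpers above pin down the cursor format the new connection emits: a URL-safe Base64 encoding of a JSON hash containing every ordered attribute plus the primary key, whereas the legacy cursor (next spec) is the Base64 of a single value. The round trip looks roughly like the sketch below; the helper names are illustrative and Ruby's stdlib Base64 (2.3+) stands in for Base64Bp, which is an assumption rather than part of this diff.

# Illustrative only; mirrors the encode/decode shape exercised by the specs above.
require 'json'
require 'base64'

def encode_keyset_cursor(attributes)
  Base64.urlsafe_encode64(attributes.to_json, padding: false)
end

def decode_keyset_cursor(cursor)
  JSON.parse(Base64.urlsafe_decode64(cursor))
end

cursor = encode_keyset_cursor('id' => '42', 'updated_at' => '2019-10-01 12:00:00 UTC')
decode_keyset_cursor(cursor) # => { "id" => "42", "updated_at" => "2019-10-01 12:00:00 UTC" }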
@@ -0,0 +1,127 @@
# frozen_string_literal: true

# TODO https://gitlab.com/gitlab-org/gitlab/issues/35104
require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::LegacyKeysetConnection do
  describe 'old keyset_connection' do
    let(:described_class) { Gitlab::Graphql::Connections::Keyset::Connection }
    let(:nodes) { Project.all.order(id: :asc) }
    let(:arguments) { {} }
    subject(:connection) do
      described_class.new(nodes, arguments, max_page_size: 3)
    end

    before do
      stub_feature_flags(graphql_keyset_pagination: false)
    end

    def encoded_property(value)
      Base64Bp.urlsafe_encode64(value.to_s, padding: false)
    end

    describe '#cursor_from_nodes' do
      let(:project) { create(:project) }

      it 'returns an encoded ID' do
        expect(connection.cursor_from_node(project))
          .to eq(encoded_property(project.id))
      end

      context 'when an order was specified' do
        let(:nodes) { Project.order(:updated_at) }

        it 'returns the encoded value of the order' do
          expect(connection.cursor_from_node(project))
            .to eq(encoded_property(project.updated_at))
        end
      end
    end

    describe '#sliced_nodes' do
      let(:projects) { create_list(:project, 4) }

      context 'when before is passed' do
        let(:arguments) { { before: encoded_property(projects[1].id) } }

        it 'only returns the project before the selected one' do
          expect(subject.sliced_nodes).to contain_exactly(projects.first)
        end

        context 'when the sort order is descending' do
          let(:nodes) { Project.all.order(id: :desc) }

          it 'returns the correct nodes' do
            expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
          end
        end
      end

      context 'when after is passed' do
        let(:arguments) { { after: encoded_property(projects[1].id) } }

        it 'only returns the project before the selected one' do
          expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
        end

        context 'when the sort order is descending' do
          let(:nodes) { Project.all.order(id: :desc) }

          it 'returns the correct nodes' do
            expect(subject.sliced_nodes).to contain_exactly(projects.first)
          end
        end
      end

      context 'when both before and after are passed' do
        let(:arguments) do
          {
            after: encoded_property(projects[1].id),
            before: encoded_property(projects[3].id)
          }
        end

        it 'returns the expected set' do
          expect(subject.sliced_nodes).to contain_exactly(projects[2])
        end
      end
    end

    describe '#paged_nodes' do
      let!(:projects) { create_list(:project, 5) }

      it 'returns the collection limited to max page size' do
        expect(subject.paged_nodes.size).to eq(3)
      end

      it 'is a loaded memoized array' do
        expect(subject.paged_nodes).to be_an(Array)
        expect(subject.paged_nodes.object_id).to eq(subject.paged_nodes.object_id)
      end

      context 'when `first` is passed' do
        let(:arguments) { { first: 2 } }

        it 'returns only the first elements' do
          expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
        end
      end

      context 'when `last` is passed' do
        let(:arguments) { { last: 2 } }

        it 'returns only the last elements' do
          expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
        end
      end

      context 'when both are passed' do
        let(:arguments) { { first: 2, last: 2 } }

        it 'raises an error' do
          expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
        end
      end
    end
  end
end
@@ -0,0 +1,61 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::OrderInfo do
  describe '#build_order_list' do
    let(:order_list) { described_class.build_order_list(relation) }

    context 'when multiple orders with SQL is specified' do
      let(:relation) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }

      it 'ignores the SQL order' do
        expect(order_list.count).to eq 2
        expect(order_list.first.attribute_name).to eq 'updated_at'
        expect(order_list.first.operator_for(:after)).to eq '>'
        expect(order_list.last.attribute_name).to eq 'id'
        expect(order_list.last.operator_for(:after)).to eq '>'
      end
    end
  end

  describe '#validate_ordering' do
    let(:order_list) { described_class.build_order_list(relation) }

    context 'when number of ordering fields is 0' do
      let(:relation) { Project.all }

      it 'raises an error' do
        expect { described_class.validate_ordering(relation, order_list) }
          .to raise_error(ArgumentError, 'A minimum of 1 ordering field is required')
      end
    end

    context 'when number of ordering fields is over 2' do
      let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc).order(:id) }

      it 'raises an error' do
        expect { described_class.validate_ordering(relation, order_list) }
          .to raise_error(ArgumentError, 'A maximum of 2 ordering fields are allowed')
      end
    end

    context 'when the second (or first) column is nullable' do
      let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc) }

      it 'raises an error' do
        expect { described_class.validate_ordering(relation, order_list) }
          .to raise_error(ArgumentError, "Column `updated_at` must not allow NULL")
      end
    end

    context 'for last ordering field' do
      let(:relation) { Project.order(namespace_id: :desc) }

      it 'raises error if primary key is not last field' do
        expect { described_class.validate_ordering(relation, order_list) }
          .to raise_error(ArgumentError, "Last ordering field must be the primary key, `#{relation.primary_key}`")
      end
    end
  end
end
108 spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb Normal file
@@ -0,0 +1,108 @@
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
  context 'when number of ordering fields is 0' do
    it 'raises an error' do
      expect { described_class.new(Issue.arel_table, [], {}, :after) }
        .to raise_error(ArgumentError, 'No ordering scopes have been supplied')
    end
  end

  describe '#conditions' do
    let(:relation) { Issue.order(relative_position: :desc).order(:id) }
    let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
    let(:builder) { described_class.new(arel_table, order_list, decoded_cursor, before_or_after) }
    let(:before_or_after) { :after }

    context 'when only a single ordering' do
      let(:relation) { Issue.order(id: :desc) }

      context 'when the value is nil' do
        let(:decoded_cursor) { { 'id' => nil } }

        it 'raises an error' do
          expect { builder.conditions }
            .to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'Before/after cursor invalid: `nil` was provided as only sortable value')
        end
      end

      context 'when value is not nil' do
        let(:decoded_cursor) { { 'id' => 100 } }
        let(:conditions) { builder.conditions }

        context 'when :after' do
          it 'generates the correct condition' do
            expect(conditions.strip).to eq '("issues"."id" < 100)'
          end
        end

        context 'when :before' do
          let(:before_or_after) { :before }

          it 'generates the correct condition' do
            expect(conditions.strip).to eq '("issues"."id" > 100)'
          end
        end
      end
    end

    context 'when two orderings' do
      let(:decoded_cursor) { { 'relative_position' => 1500, 'id' => 100 } }

      context 'when no values are nil' do
        context 'when :after' do
          it 'generates the correct condition' do
            conditions = builder.conditions

            expect(conditions).to include '"issues"."relative_position" < 1500'
            expect(conditions).to include '"issues"."id" > 100'
            expect(conditions).to include 'OR ("issues"."relative_position" IS NULL)'
          end
        end

        context 'when :before' do
          let(:before_or_after) { :before }

          it 'generates the correct condition' do
            conditions = builder.conditions

            expect(conditions).to include '("issues"."relative_position" > 1500)'
            expect(conditions).to include '"issues"."id" < 100'
            expect(conditions).to include '"issues"."relative_position" = 1500'
          end
        end
      end

      context 'when first value is nil' do
        let(:decoded_cursor) { { 'relative_position' => nil, 'id' => 100 } }

        context 'when :after' do
          it 'generates the correct condition' do
            conditions = builder.conditions

            expect(conditions).to include '"issues"."relative_position" IS NULL'
            expect(conditions).to include '"issues"."id" > 100'
          end
        end

        context 'when :before' do
          let(:before_or_after) { :before }

          it 'generates the correct condition' do
            conditions = builder.conditions

            expect(conditions).to include '"issues"."relative_position" IS NULL'
            expect(conditions).to include '"issues"."id" < 100'
            expect(conditions).to include 'OR ("issues"."relative_position" IS NOT NULL)'
          end
        end
      end
    end
  end

  def arel_table
    Issue.arel_table
  end
end
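Putting the condition classes together, the WHERE clause the builder produces for an :after cursor of { 'relative_position' => 1500, 'id' => 100 } over Issue.order(relative_position: :desc).order(:id) has roughly the shape sketched below. This is hand-assembled from the expectations above, not output copied from the implementation.

# Hand-assembled illustration of the combined :after clause; not generated output.
expected_after_clause = <<~SQL
  ("issues"."relative_position" < 1500)
  OR (
    "issues"."relative_position" = 1500
    AND
    "issues"."id" > 100
  )
  OR ("issues"."relative_position" IS NULL)
SQL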
@@ -1,117 +0,0 @@
require 'spec_helper'

describe Gitlab::Graphql::Connections::KeysetConnection do
  let(:nodes) { Project.all.order(id: :asc) }
  let(:arguments) { {} }
  subject(:connection) do
    described_class.new(nodes, arguments, max_page_size: 3)
  end

  def encoded_property(value)
    Base64Bp.urlsafe_encode64(value.to_s, padding: false)
  end

  describe '#cursor_from_nodes' do
    let(:project) { create(:project) }

    it 'returns an encoded ID' do
      expect(connection.cursor_from_node(project))
        .to eq(encoded_property(project.id))
    end

    context 'when an order was specified' do
      let(:nodes) { Project.order(:updated_at) }

      it 'returns the encoded value of the order' do
        expect(connection.cursor_from_node(project))
          .to eq(encoded_property(project.updated_at))
      end
    end
  end

  describe '#sliced_nodes' do
    let(:projects) { create_list(:project, 4) }

    context 'when before is passed' do
      let(:arguments) { { before: encoded_property(projects[1].id) } }

      it 'only returns the project before the selected one' do
        expect(subject.sliced_nodes).to contain_exactly(projects.first)
      end

      context 'when the sort order is descending' do
        let(:nodes) { Project.all.order(id: :desc) }

        it 'returns the correct nodes' do
          expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
        end
      end
    end

    context 'when after is passed' do
      let(:arguments) { { after: encoded_property(projects[1].id) } }

      it 'only returns the project before the selected one' do
        expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
      end

      context 'when the sort order is descending' do
        let(:nodes) { Project.all.order(id: :desc) }

        it 'returns the correct nodes' do
          expect(subject.sliced_nodes).to contain_exactly(projects.first)
        end
      end
    end

    context 'when both before and after are passed' do
      let(:arguments) do
        {
          after: encoded_property(projects[1].id),
          before: encoded_property(projects[3].id)
        }
      end

      it 'returns the expected set' do
        expect(subject.sliced_nodes).to contain_exactly(projects[2])
      end
    end
  end

  describe '#paged_nodes' do
    let!(:projects) { create_list(:project, 5) }

    it 'returns the collection limited to max page size' do
      expect(subject.paged_nodes.size).to eq(3)
    end

    it 'is a loaded memoized array' do
      expect(subject.paged_nodes).to be_an(Array)
      expect(subject.paged_nodes.object_id).to eq(subject.paged_nodes.object_id)
    end

    context 'when `first` is passed' do
      let(:arguments) { { first: 2 } }

      it 'returns only the first elements' do
        expect(subject.paged_nodes).to contain_exactly(projects.first, projects.second)
      end
    end

    context 'when `last` is passed' do
      let(:arguments) { { last: 2 } }

      it 'returns only the last elements' do
        expect(subject.paged_nodes).to contain_exactly(projects[3], projects[4])
      end
    end

    context 'when both are passed' do
      let(:arguments) { { first: 2, last: 2 } }

      it 'raises an error' do
        expect { subject.paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
      end
    end
  end
end
@@ -686,12 +686,36 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do

    context 'the cluster has a provider' do
      let(:cluster) { create(:cluster, :provided_by_gcp) }
      let(:provider_status) { :errored }

      before do
        cluster.provider.make_errored!
      end

      it { is_expected.to eq :errored }
      it { is_expected.to eq provider_status }

      context 'when cluster cleanup is ongoing' do
        using RSpec::Parameterized::TableSyntax

        where(:status_name, :cleanup_status) do
          provider_status  | :cleanup_not_started
          :cleanup_ongoing | :cleanup_uninstalling_applications
          :cleanup_ongoing | :cleanup_removing_project_namespaces
          :cleanup_ongoing | :cleanup_removing_service_account
          :cleanup_errored | :cleanup_errored
        end

        with_them do
          it 'returns cleanup_ongoing when uninstalling applications' do
            cluster.cleanup_status = described_class
              .state_machines[:cleanup_status]
              .states[cleanup_status]
              .value

            is_expected.to eq status_name
          end
        end
      end
    end

    context 'there is a cached connection status' do
|
|||
end
|
||||
end
|
||||
|
||||
describe 'cleanup_status state_machine' do
|
||||
shared_examples 'cleanup_status transition' do
|
||||
let(:cluster) { create(:cluster, from_state) }
|
||||
|
||||
it 'transitions cleanup_status correctly' do
|
||||
expect { subject }.to change { cluster.cleanup_status_name }
|
||||
.from(from_state).to(to_state)
|
||||
end
|
||||
|
||||
it 'schedules a Clusters::Cleanup::*Worker' do
|
||||
expect(expected_worker_class).to receive(:perform_async).with(cluster.id)
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
describe '#start_cleanup!' do
|
||||
let(:expected_worker_class) { Clusters::Cleanup::AppWorker }
|
||||
let(:to_state) { :cleanup_uninstalling_applications }
|
||||
|
||||
subject { cluster.start_cleanup! }
|
||||
|
||||
context 'when cleanup_status is cleanup_not_started' do
|
||||
let(:from_state) { :cleanup_not_started }
|
||||
|
||||
it_behaves_like 'cleanup_status transition'
|
||||
end
|
||||
|
||||
context 'when cleanup_status is errored' do
|
||||
let(:from_state) { :cleanup_errored }
|
||||
|
||||
it_behaves_like 'cleanup_status transition'
|
||||
end
|
||||
end
|
||||
|
||||
describe '#make_cleanup_errored!' do
|
||||
NON_ERRORED_STATES = Clusters::Cluster.state_machines[:cleanup_status].states.keys - [:cleanup_errored]
|
||||
|
||||
NON_ERRORED_STATES.each do |state|
|
||||
it "transitions cleanup_status from #{state} to cleanup_errored" do
|
||||
cluster = create(:cluster, state)
|
||||
|
||||
expect { cluster.make_cleanup_errored! }.to change { cluster.cleanup_status_name }
|
||||
.from(state).to(:cleanup_errored)
|
||||
end
|
||||
|
||||
it "sets error message" do
|
||||
cluster = create(:cluster, state)
|
||||
|
||||
expect { cluster.make_cleanup_errored!("Error Message") }.to change { cluster.cleanup_status_reason }
|
||||
.from(nil).to("Error Message")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#continue_cleanup!' do
|
||||
context 'when cleanup_status is cleanup_uninstalling_applications' do
|
||||
let(:expected_worker_class) { Clusters::Cleanup::ProjectNamespaceWorker }
|
||||
let(:from_state) { :cleanup_uninstalling_applications }
|
||||
let(:to_state) { :cleanup_removing_project_namespaces }
|
||||
|
||||
subject { cluster.continue_cleanup! }
|
||||
|
||||
it_behaves_like 'cleanup_status transition'
|
||||
end
|
||||
|
||||
context 'when cleanup_status is cleanup_removing_project_namespaces' do
|
||||
let(:expected_worker_class) { Clusters::Cleanup::ServiceAccountWorker }
|
||||
let(:from_state) { :cleanup_removing_project_namespaces }
|
||||
let(:to_state) { :cleanup_removing_service_account }
|
||||
|
||||
subject { cluster.continue_cleanup! }
|
||||
|
||||
it_behaves_like 'cleanup_status transition'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#connection_status' do
|
||||
let(:cluster) { create(:cluster) }
|
||||
let(:status) { :connected }
|
||||
|
|
|
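These specs pin down a five-state cleanup lifecycle whose integer values come from the factory traits earlier in this diff. As a rough sketch only, inferred from the transitions and worker classes exercised above rather than copied from the Clusters::Cluster model, the corresponding state_machines definition could look like this:

# Inferred sketch, not the literal Clusters::Cluster code from this commit.
state_machine :cleanup_status, initial: :cleanup_not_started do
  state :cleanup_not_started, value: 1
  state :cleanup_uninstalling_applications, value: 2
  state :cleanup_removing_project_namespaces, value: 3
  state :cleanup_removing_service_account, value: 4
  state :cleanup_errored, value: 5

  event :start_cleanup do
    transition [:cleanup_not_started, :cleanup_errored] => :cleanup_uninstalling_applications
  end

  event :continue_cleanup do
    transition cleanup_uninstalling_applications: :cleanup_removing_project_namespaces
    transition cleanup_removing_project_namespaces: :cleanup_removing_service_account
  end

  event :make_cleanup_errored do
    transition any => :cleanup_errored
  end

  # Worker classes taken from the specs above; the scheduling hook itself is assumed.
  after_transition any => :cleanup_uninstalling_applications do |cluster|
    Clusters::Cleanup::AppWorker.perform_async(cluster.id)
  end
end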
@@ -3368,7 +3368,7 @@ describe MergeRequest do
    end
  end

  describe '.with_open_merge_when_pipeline_succeeds' do
  describe '.with_auto_merge_enabled' do
    let!(:project) { create(:project) }
    let!(:fork) { fork_project(project) }
    let!(:merge_request1) do

@@ -3380,15 +3380,6 @@ describe MergeRequest do
             source_branch: 'feature-1')
    end

    let!(:merge_request2) do
      create(:merge_request,
             :merge_when_pipeline_succeeds,
             target_project: project,
             target_branch: 'master',
             source_project: fork,
             source_branch: 'fork-feature-1')
    end

    let!(:merge_request4) do
      create(:merge_request,
             target_project: project,

@@ -3397,9 +3388,9 @@ describe MergeRequest do
             source_branch: 'fork-feature-2')
    end

    let(:query) { described_class.with_open_merge_when_pipeline_succeeds }
    let(:query) { described_class.with_auto_merge_enabled }

    it { expect(query).to contain_exactly(merge_request1, merge_request2) }
    it { expect(query).to contain_exactly(merge_request1) }
  end

  it_behaves_like 'versioned description'
38 spec/requests/api/graphql/current_user/todos_query_spec.rb Normal file
@@ -0,0 +1,38 @@
# frozen_string_literal: true

require 'spec_helper'

describe 'Query current user todos' do
  include GraphqlHelpers

  let_it_be(:current_user) { create(:user) }
  let_it_be(:commit_todo) { create(:on_commit_todo, user: current_user, project: create(:project, :repository)) }
  let_it_be(:issue_todo) { create(:todo, user: current_user, target: create(:issue)) }
  let_it_be(:merge_request_todo) { create(:todo, user: current_user, target: create(:merge_request)) }

  let(:fields) do
    <<~QUERY
      nodes {
        id
      }
    QUERY
  end

  let(:query) do
    graphql_query_for('currentUser', {}, query_graphql_field('todos', {}, fields))
  end

  subject { graphql_data.dig('currentUser', 'todos', 'nodes') }

  before do
    post_graphql(query, current_user: current_user)
  end

  it 'contains the expected ids' do
    is_expected.to include(
      a_hash_including('id' => commit_todo.to_global_id.to_s),
      a_hash_including('id' => issue_todo.to_global_id.to_s),
      a_hash_including('id' => merge_request_todo.to_global_id.to_s)
    )
  end
end
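For reference, the document that graphql_query_for and query_graphql_field assemble here boils down to a selection over currentUser.todos.nodes. A sketch of that query as a plain heredoc, reconstructed from the helper calls above rather than captured from a test run:

# Reconstructed illustration of the assembled GraphQL document.
query = <<~GRAPHQL
  {
    currentUser {
      todos {
        nodes {
          id
        }
      }
    }
  }
GRAPHQL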
@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'

@@ -1,3 +1,5 @@
# frozen_string_literal: true

require 'spec_helper'

require 'rubocop'
Some files were not shown because too many files have changed in this diff