From 79a7da2537e94c34aba92ad4b9a5dc1da431b2c4 Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Tue, 14 Dec 2021 12:13:33 +0000 Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master --- .gitlab/ci/rails.gitlab-ci.yml | 10 ++ .rubocop_todo.yml | 16 --- GITALY_SERVER_VERSION | 2 +- .../import_groups/components/import_table.vue | 3 +- .../components/import_target_cell.vue | 128 ++++++++++-------- .../import_groups/graphql/client_factory.js | 16 ++- ...ort_source_group_progress.fragment.graphql | 1 + .../mutations/import_groups.mutation.graphql | 1 + .../graphql/services/local_storage_cache.js | 9 +- .../import_groups/graphql/typedefs.graphql | 1 + .../package_registry/constants.js | 6 + .../package_registry/index.js | 30 ++++ .../package_registry/pages/index.vue | 5 + .../package_registry/pages/list.js | 24 ---- .../list/app.vue => pages/list.vue} | 26 ++-- .../package_registry/router.js | 21 +++ .../pages/groups/packages/index/index.js | 4 +- .../projects/packages/packages/index/index.js | 4 +- .../components/markdown/suggestions.vue | 2 +- .../stylesheets/framework/markdown_area.scss | 4 + .../import/bulk_imports_controller.rb | 8 +- app/controllers/projects/blob_controller.rb | 2 +- app/graphql/resolvers/ci/jobs_resolver.rb | 2 +- .../resolvers/project_pipelines_resolver.rb | 2 +- app/helpers/diff_helper.rb | 2 +- app/helpers/packages_helper.rb | 11 -- app/models/ci/pipeline.rb | 1 + .../encryption_helper.rb | 2 +- app/models/hooks/project_hook.rb | 11 +- app/models/hooks/web_hook.rb | 32 ++++- app/models/namespaces/traversal/linear.rb | 2 +- app/models/serverless/domain.rb | 2 +- app/models/wiki_page.rb | 2 +- app/services/bulk_imports/create_service.rb | 2 +- app/services/cohorts_service.rb | 2 +- app/uploaders/lfs_object_uploader.rb | 2 +- app/views/groups/packages/index.html.haml | 6 +- .../packages/packages/index.html.haml | 6 +- config/initializers/wikicloth_patch.rb | 2 +- .../reference_architectures/10k_users.md | 18 ++- .../reference_architectures/25k_users.md | 18 ++- .../reference_architectures/2k_users.md | 9 +- .../reference_architectures/3k_users.md | 18 ++- .../reference_architectures/50k_users.md | 18 ++- .../reference_architectures/5k_users.md | 18 ++- doc/api/pipelines.md | 8 ++ .../documentation/styleguide/index.md | 3 +- doc/integration/oauth2_generic.md | 73 +++++----- lib/api/entities/ci/pipeline_basic.rb | 2 +- lib/banzai/filter/math_filter.rb | 2 +- lib/banzai/filter/repository_link_filter.rb | 2 +- lib/extracts_ref.rb | 2 +- lib/flowdock/git/builder.rb | 2 +- lib/gitlab/chat/output.rb | 4 +- lib/gitlab/ci/reports/security/report.rb | 2 +- lib/gitlab/diff/custom_diff.rb | 2 +- lib/gitlab/diff/inline_diff.rb | 4 +- lib/gitlab/gfm/reference_rewriter.rb | 2 +- lib/gitlab/gpg.rb | 2 +- lib/gitlab/search/query.rb | 2 +- lib/gitlab/sherlock/line_profiler.rb | 2 +- lib/gitlab/string_range_marker.rb | 2 +- lib/gitlab/utils/nokogiri.rb | 2 +- lib/tasks/gitlab/seed/group_seed.rake | 2 +- qa/qa/resource/merge_request.rb | 35 +++++ rubocop/cop/migration/schedule_async.rb | 2 +- rubocop/cop/project_path_helper.rb | 2 +- .../import/bulk_imports_controller_spec.rb | 32 ++--- .../serverless/functions_controller_spec.rb | 4 +- .../user_views_open_merge_request_spec.rb | 2 +- .../labels/issues_sorted_by_priority_spec.rb | 2 +- .../packages/nuget/package_finder_spec.rb | 2 +- .../api/schemas/pipeline_schedule.json | 1 + .../components/import_target_cell_spec.js | 11 +- .../graphql/client_factory_spec.js | 59 +++++++- .../import_groups/graphql/fixtures.js | 3 +- 
.../__snapshots__/list_spec.js.snap} | 4 +- .../list/app_spec.js => pages/list_spec.js} | 23 ++-- spec/helpers/packages_helper_spec.rb | 30 ---- spec/lib/gitlab/git/tree_spec.rb | 10 +- .../conflict_files_stitcher_spec.rb | 4 +- .../gitaly_client/diff_stitcher_spec.rb | 2 +- .../gpg/invalid_gpg_signature_updater_spec.rb | 2 +- .../keyset/connection_generic_keyset_spec.rb | 4 +- .../pagination/keyset/connection_spec.rb | 4 +- spec/lib/gitlab/import_export/all_models.yml | 1 + .../project/tree_restorer_spec.rb | 6 +- .../gitlab/multi_collection_paginator_spec.rb | 2 +- .../gitlab/pagination/keyset/order_spec.rb | 2 +- spec/lib/gitlab/rack_attack_spec.rb | 2 +- spec/lib/gitlab/redis/sessions_spec.rb | 13 +- spec/models/ci/build_spec.rb | 2 +- spec/models/ci/pipeline_spec.rb | 1 + spec/models/event_spec.rb | 2 +- spec/models/hooks/web_hook_spec.rb | 87 ++++++++++++ spec/requests/api/ci/pipelines_spec.rb | 3 +- .../system_notes/commit_service_spec.rb | 2 +- spec/support/helpers/gpg_helpers.rb | 8 +- spec/support/helpers/memory_usage_helper.rb | 2 +- .../redis_new_instance_shared_examples.rb | 8 +- 100 files changed, 662 insertions(+), 348 deletions(-) create mode 100644 app/assets/javascripts/packages_and_registries/package_registry/index.js create mode 100644 app/assets/javascripts/packages_and_registries/package_registry/pages/index.vue delete mode 100644 app/assets/javascripts/packages_and_registries/package_registry/pages/list.js rename app/assets/javascripts/packages_and_registries/package_registry/{components/list/app.vue => pages/list.vue} (88%) create mode 100644 app/assets/javascripts/packages_and_registries/package_registry/router.js rename spec/frontend/packages_and_registries/package_registry/{components/list/__snapshots__/app_spec.js.snap => pages/__snapshots__/list_spec.js.snap} (93%) rename spec/frontend/packages_and_registries/package_registry/{components/list/app_spec.js => pages/list_spec.js} (91%) diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml index 28c24c0dd26..8ceca10e1ac 100644 --- a/.gitlab/ci/rails.gitlab-ci.yml +++ b/.gitlab/ci/rails.gitlab-ci.yml @@ -488,6 +488,11 @@ rspec:coverage: - rspec unit pg12 minimal - rspec integration pg12 minimal - rspec system pg12 minimal + # FOSS/EE decomposed jobs + - rspec migration pg12 decomposed + - rspec unit pg12 decomposed + - rspec integration pg12 decomposed + - rspec system pg12 decomposed # EE jobs - rspec-ee migration pg12 - rspec-ee unit pg12 @@ -498,6 +503,11 @@ rspec:coverage: - rspec-ee unit pg12 minimal - rspec-ee integration pg12 minimal - rspec-ee system pg12 minimal + # EE decomposed jobs + - rspec-ee migration pg12 decomposed + - rspec-ee unit pg12 decomposed + - rspec-ee integration pg12 decomposed + - rspec-ee system pg12 decomposed # Geo jobs - rspec-ee unit pg12 geo - rspec-ee integration pg12 geo diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index 17841377974..94d8d40c10f 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -289,17 +289,6 @@ Performance/DeleteSuffix: - 'ee/app/models/geo/upload_registry.rb' - 'ee/app/workers/geo/file_download_dispatch_worker/attachment_job_finder.rb' -# Offense count: 13 -# Cop supports --auto-correct. 
-Performance/Detect: - Exclude: - - 'ee/spec/controllers/projects/dependencies_controller_spec.rb' - - 'ee/spec/requests/api/dependencies_spec.rb' - - 'qa/qa/runtime/feature.rb' - - 'spec/lib/gitlab/git/tree_spec.rb' - - 'spec/lib/gitlab/import_export/project/tree_restorer_spec.rb' - - 'spec/models/event_spec.rb' - # Offense count: 121 Performance/MethodObjectAsBlock: Enabled: false @@ -827,11 +816,6 @@ Style/RescueModifier: Style/SingleArgumentDig: Enabled: false -# Offense count: 45 -# Cop supports --auto-correct. -Style/SlicingWithRange: - Enabled: false - # Offense count: 63 # Configuration parameters: AllowModifier. Style/SoleNestedConditional: diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index 57be36455ed..7271ddc86e0 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -125f0fc0e49db4dd46f2e905f34178ce880dd79e +0e0aeb5ca4488903f41acd392911bd89d1ad3d6d diff --git a/app/assets/javascripts/import_entities/import_groups/components/import_table.vue b/app/assets/javascripts/import_entities/import_groups/components/import_table.vue index 298771a4d12..f2837a7bb44 100644 --- a/app/assets/javascripts/import_entities/import_groups/components/import_table.vue +++ b/app/assets/javascripts/import_entities/import_groups/components/import_table.vue @@ -314,9 +314,8 @@ export default { variables: { importRequests }, }); } catch (error) { - const message = error?.networkError?.response?.data?.error ?? i18n.ERROR_IMPORT; createFlash({ - message, + message: i18n.ERROR_IMPORT, captureError: true, error, }); diff --git a/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue b/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue index ca9ae9447d0..344a6e45370 100644 --- a/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue +++ b/app/assets/javascripts/import_entities/import_groups/components/import_target_cell.vue @@ -32,72 +32,84 @@ export default { fullPath() { return this.group.importTarget.targetNamespace.fullPath || s__('BulkImport|No parent'); }, - invalidNameValidationMessage() { - return getInvalidNameValidationMessage(this.group.importTarget); + validationMessage() { + return ( + this.group.progress?.message || getInvalidNameValidationMessage(this.group.importTarget) + ); + }, + validNameState() { + // bootstrap-vue requires null for "indifferent" state, if we return true + // this will highlight field in green like "passed validation" + return this.group.flags.isInvalid && this.group.flags.isAvailableForImport ? false : null; }, }, }; diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js index bce6e7bcb1f..31352bbc8bb 100644 --- a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js +++ b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js @@ -142,9 +142,7 @@ export function createResolvers({ endpoints }) { }; }); - const { - data: { id: jobId }, - } = await axios.post(endpoints.createBulkImport, { + const { data: originalResponse } = await axios.post(endpoints.createBulkImport, { bulk_import: importOperations.map((op) => ({ source_type: 'group_entity', source_full_path: op.group.fullPath, @@ -153,15 +151,21 @@ export function createResolvers({ endpoints }) { })), }); - return importOperations.map((op) => { + const responses = Array.isArray(originalResponse) + ? 
originalResponse + : [{ success: true, id: originalResponse.id }]; + + return importOperations.map((op, idx) => { + const response = responses[idx]; const lastImportTarget = { targetNamespace: op.targetNamespace, newName: op.newName, }; const progress = { - id: jobId, - status: STATUSES.CREATED, + id: response.id || `local-${Date.now()}-${idx}`, + status: response.success ? STATUSES.CREATED : STATUSES.FAILED, + message: response.message || null, }; localStorageCache.set(op.group.webUrl, { progress, lastImportTarget }); diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_progress.fragment.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_progress.fragment.graphql index 2d60bf82d65..33c564f36a8 100644 --- a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_progress.fragment.graphql +++ b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_progress.fragment.graphql @@ -1,4 +1,5 @@ fragment BulkImportSourceGroupProgress on ClientBulkImportProgress { id status + message } diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql index 75215471d0f..39289887b75 100644 --- a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql +++ b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql @@ -9,6 +9,7 @@ mutation importGroups($importRequests: [ImportGroupInput!]!) { progress { id status + message } } } diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js b/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js index 09bc7b33692..1aad22f0f3f 100644 --- a/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js +++ b/app/assets/javascripts/import_entities/import_groups/graphql/services/local_storage_cache.js @@ -22,7 +22,14 @@ export class LocalStorageCache { loadCacheFromStorage() { try { - return JSON.parse(this.storage.getItem(KEY)) ?? {}; + const storage = JSON.parse(this.storage.getItem(KEY)) ?? {}; + Object.values(storage).forEach((entry) => { + if (entry.progress && !('message' in entry.progress)) { + // eslint-disable-next-line no-param-reassign + entry.progress.message = ''; + } + }); + return storage; } catch { return {}; } diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql index b8dd79a5000..1639ce16758 100644 --- a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql +++ b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql @@ -16,6 +16,7 @@ type ClientBulkImportSourceGroupConnection { type ClientBulkImportProgress { id: ID! status: String! 
+ message: String } type ClientBulkImportValidationError { diff --git a/app/assets/javascripts/packages_and_registries/package_registry/constants.js b/app/assets/javascripts/packages_and_registries/package_registry/constants.js index d75c195be4c..ab6541e4264 100644 --- a/app/assets/javascripts/packages_and_registries/package_registry/constants.js +++ b/app/assets/javascripts/packages_and_registries/package_registry/constants.js @@ -1,4 +1,5 @@ import { s__, __ } from '~/locale'; +import { helpPagePath } from '~/helpers/help_page_helper'; export { DELETE_PACKAGE_TRACKING_ACTION, @@ -136,3 +137,8 @@ export const PACKAGE_TYPES = [ s__('PackageRegistry|Debian'), s__('PackageRegistry|Helm'), ]; + +// links + +export const EMPTY_LIST_HELP_URL = helpPagePath('user/packages/package_registry/index'); +export const PACKAGE_HELP_URL = helpPagePath('user/packages/index'); diff --git a/app/assets/javascripts/packages_and_registries/package_registry/index.js b/app/assets/javascripts/packages_and_registries/package_registry/index.js new file mode 100644 index 00000000000..7ec931ff9a0 --- /dev/null +++ b/app/assets/javascripts/packages_and_registries/package_registry/index.js @@ -0,0 +1,30 @@ +import Vue from 'vue'; +import Translate from '~/vue_shared/translate'; +import { apolloProvider } from '~/packages_and_registries/package_registry/graphql/index'; +import PackageRegistry from '~/packages_and_registries/package_registry/pages/index.vue'; +import createRouter from './router'; + +Vue.use(Translate); + +export default () => { + const el = document.getElementById('js-vue-packages-list'); + const { endpoint, resourceId, fullPath, pageType, emptyListIllustration } = el.dataset; + const router = createRouter(endpoint); + + const isGroupPage = pageType === 'groups'; + + return new Vue({ + el, + router, + apolloProvider, + provide: { + resourceId, + fullPath, + emptyListIllustration, + isGroupPage, + }, + render(createElement) { + return createElement(PackageRegistry); + }, + }); +}; diff --git a/app/assets/javascripts/packages_and_registries/package_registry/pages/index.vue b/app/assets/javascripts/packages_and_registries/package_registry/pages/index.vue new file mode 100644 index 00000000000..a14d0c32cbe --- /dev/null +++ b/app/assets/javascripts/packages_and_registries/package_registry/pages/index.vue @@ -0,0 +1,5 @@ + diff --git a/app/assets/javascripts/packages_and_registries/package_registry/pages/list.js b/app/assets/javascripts/packages_and_registries/package_registry/pages/list.js deleted file mode 100644 index d797a0a5327..00000000000 --- a/app/assets/javascripts/packages_and_registries/package_registry/pages/list.js +++ /dev/null @@ -1,24 +0,0 @@ -import Vue from 'vue'; -import Translate from '~/vue_shared/translate'; -import { apolloProvider } from '~/packages_and_registries/package_registry/graphql/index'; -import PackagesListApp from '../components/list/app.vue'; - -Vue.use(Translate); - -export default () => { - const el = document.getElementById('js-vue-packages-list'); - - const isGroupPage = el.dataset.pageType === 'groups'; - - return new Vue({ - el, - apolloProvider, - provide: { - ...el.dataset, - isGroupPage, - }, - render(createElement) { - return createElement(PackagesListApp); - }, - }); -}; diff --git a/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue b/app/assets/javascripts/packages_and_registries/package_registry/pages/list.vue similarity index 88% rename from 
app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue rename to app/assets/javascripts/packages_and_registries/package_registry/pages/list.vue index d65711d061d..38df701157a 100644 --- a/app/assets/javascripts/packages_and_registries/package_registry/components/list/app.vue +++ b/app/assets/javascripts/packages_and_registries/package_registry/pages/list.vue @@ -9,13 +9,15 @@ import { GROUP_RESOURCE_TYPE, GRAPHQL_PAGE_SIZE, DELETE_PACKAGE_SUCCESS_MESSAGE, + EMPTY_LIST_HELP_URL, + PACKAGE_HELP_URL, } from '~/packages_and_registries/package_registry/constants'; import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql'; import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue'; -import PackageTitle from './package_title.vue'; -import PackageSearch from './package_search.vue'; -import PackageList from './packages_list.vue'; +import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue'; +import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue'; +import PackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; export default { components: { @@ -27,13 +29,7 @@ export default { PackageSearch, DeletePackage, }, - inject: [ - 'packageHelpUrl', - 'emptyListIllustration', - 'emptyListHelpUrl', - 'isGroupPage', - 'fullPath', - ], + inject: ['emptyListIllustration', 'isGroupPage', 'fullPath'], data() { return { packages: {}, @@ -156,12 +152,16 @@ export default { 'PackageRegistry|Learn how to %{noPackagesLinkStart}publish and share your packages%{noPackagesLinkEnd} with GitLab.', ), }, + links: { + EMPTY_LIST_HELP_URL, + PACKAGE_HELP_URL, + }, }; diff --git a/app/assets/javascripts/packages_and_registries/package_registry/router.js b/app/assets/javascripts/packages_and_registries/package_registry/router.js new file mode 100644 index 00000000000..ea5b740e879 --- /dev/null +++ b/app/assets/javascripts/packages_and_registries/package_registry/router.js @@ -0,0 +1,21 @@ +import Vue from 'vue'; +import VueRouter from 'vue-router'; +import List from '~/packages_and_registries/package_registry/pages/list.vue'; + +Vue.use(VueRouter); + +export default function createRouter(base) { + const router = new VueRouter({ + base, + mode: 'history', + routes: [ + { + name: 'list', + path: '/', + component: List, + }, + ], + }); + + return router; +} diff --git a/app/assets/javascripts/pages/groups/packages/index/index.js b/app/assets/javascripts/pages/groups/packages/index/index.js index f9eecff4ac4..174973a9fad 100644 --- a/app/assets/javascripts/pages/groups/packages/index/index.js +++ b/app/assets/javascripts/pages/groups/packages/index/index.js @@ -1,3 +1,3 @@ -import packageList from '~/packages_and_registries/package_registry/pages/list'; +import packageApp from '~/packages_and_registries/package_registry/index'; -packageList(); +packageApp(); diff --git a/app/assets/javascripts/pages/projects/packages/packages/index/index.js b/app/assets/javascripts/pages/projects/packages/packages/index/index.js index f9eecff4ac4..174973a9fad 100644 --- a/app/assets/javascripts/pages/projects/packages/packages/index/index.js +++ b/app/assets/javascripts/pages/projects/packages/packages/index/index.js @@ -1,3 +1,3 @@ -import packageList from '~/packages_and_registries/package_registry/pages/list'; +import packageApp from 
'~/packages_and_registries/package_registry/index'; -packageList(); +packageApp(); diff --git a/app/assets/javascripts/vue_shared/components/markdown/suggestions.vue b/app/assets/javascripts/vue_shared/components/markdown/suggestions.vue index e36cfb3b275..2f6776f835e 100644 --- a/app/assets/javascripts/vue_shared/components/markdown/suggestions.vue +++ b/app/assets/javascripts/vue_shared/components/markdown/suggestions.vue @@ -165,6 +165,6 @@ export default { diff --git a/app/assets/stylesheets/framework/markdown_area.scss b/app/assets/stylesheets/framework/markdown_area.scss index 9b04b9a2612..c6e52c13e83 100644 --- a/app/assets/stylesheets/framework/markdown_area.scss +++ b/app/assets/stylesheets/framework/markdown_area.scss @@ -139,6 +139,10 @@ font-family: $monospace-font !important; } +.suggestions.md > .markdown-code-block { + @include gl-static; +} + .md-suggestion-header { height: $suggestion-header-height; display: flex; diff --git a/app/controllers/import/bulk_imports_controller.rb b/app/controllers/import/bulk_imports_controller.rb index bec26cb547d..f26c06b7e37 100644 --- a/app/controllers/import/bulk_imports_controller.rb +++ b/app/controllers/import/bulk_imports_controller.rb @@ -40,13 +40,9 @@ class Import::BulkImportsController < ApplicationController end def create - response = ::BulkImports::CreateService.new(current_user, create_params, credentials).execute + responses = create_params.map { |entry| ::BulkImports::CreateService.new(current_user, entry, credentials).execute } - if response.success? - render json: response.payload.to_json(only: [:id]) - else - render json: { error: response.message }, status: response.http_status - end + render json: responses.map { |response| { success: response.success?, id: response.payload[:id], message: response.message } } end def realtime_changes diff --git a/app/controllers/projects/blob_controller.rb b/app/controllers/projects/blob_controller.rb index 2b42dcb66ad..b30ef7506aa 100644 --- a/app/controllers/projects/blob_controller.rb +++ b/app/controllers/projects/blob_controller.rb @@ -99,7 +99,7 @@ class Projects::BlobController < Projects::ApplicationController @content = params[:content] @blob.load_all_data! diffy = Diffy::Diff.new(@blob.data, @content, diff: '-U 3', include_diff_info: true) - diff_lines = diffy.diff.scan(/.*\n/)[2..-1] + diff_lines = diffy.diff.scan(/.*\n/)[2..] 
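A recurring change in this patch swaps open-ended slices such as `[2..-1]` for Ruby's endless ranges (`[2..]`), matching the removal of the `Style/SlicingWithRange` exclusion from `.rubocop_todo.yml`. For reference, a minimal sketch of the equivalence (sample values are made up, not taken from the patch):

```ruby
# Ruby 2.6+ endless ranges: `[n..]` slices from index n to the end,
# exactly like the older `[n..-1]` form that RuboCop's
# Style/SlicingWithRange cop replaces.
token = "abcdef123456"

old_style = token[4..-1] # => "ef123456"
new_style = token[4..]   # => "ef123456"

raise 'slices differ' unless old_style == new_style

# The same applies to arrays, e.g. dropping the two diff header lines
# before parsing, as in the blob controller change above:
lines = "--- a\n+++ b\nline1\nline2\n".scan(/.*\n/)
lines[2..] # => ["line1\n", "line2\n"]
```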
diff_lines = Gitlab::Diff::Parser.new.parse(diff_lines).to_a @diff_lines = Gitlab::Diff::Highlight.new(diff_lines, repository: @repository).highlight diff --git a/app/graphql/resolvers/ci/jobs_resolver.rb b/app/graphql/resolvers/ci/jobs_resolver.rb index 5ae9e721cc8..df138a15538 100644 --- a/app/graphql/resolvers/ci/jobs_resolver.rb +++ b/app/graphql/resolvers/ci/jobs_resolver.rb @@ -29,7 +29,7 @@ module Resolvers job_types: security_report_types ).execute else - pipeline.statuses + pipeline.statuses_order_id_desc end end end diff --git a/app/graphql/resolvers/project_pipelines_resolver.rb b/app/graphql/resolvers/project_pipelines_resolver.rb index 23423b9274a..47a8b028d4d 100644 --- a/app/graphql/resolvers/project_pipelines_resolver.rb +++ b/app/graphql/resolvers/project_pipelines_resolver.rb @@ -18,7 +18,7 @@ module Resolvers def preloads { - jobs: { statuses: [:needs] }, + jobs: { statuses_order_id_desc: [:needs] }, upstream: [:triggered_by_pipeline], downstream: [:triggered_pipelines] } diff --git a/app/helpers/diff_helper.rb b/app/helpers/diff_helper.rb index ca5fe38576e..2b5f726dad1 100644 --- a/app/helpers/diff_helper.rb +++ b/app/helpers/diff_helper.rb @@ -283,7 +283,7 @@ module DiffHelper return path unless path.size > max && max > 3 - "...#{path[-(max - 3)..-1]}" + "...#{path[-(max - 3)..]}" end def code_navigation_path(diffs) diff --git a/app/helpers/packages_helper.rb b/app/helpers/packages_helper.rb index c69d9eb1326..66f80e7eeb8 100644 --- a/app/helpers/packages_helper.rb +++ b/app/helpers/packages_helper.rb @@ -38,17 +38,6 @@ module PackagesHelper "#{Gitlab.config.gitlab.host}/#{group_id}" end - def packages_list_data(type, resource) - { - resource_id: resource.id, - full_path: resource.full_path, - page_type: type, - empty_list_help_url: help_page_path('user/packages/package_registry/index'), - empty_list_illustration: image_path('illustrations/no-packages.svg'), - package_help_url: help_page_path('user/packages/index') - } - end - def track_package_event(event_name, scope, **args) ::Packages::CreateEventService.new(nil, current_user, event_name: event_name, scope: scope).execute category = args.delete(:category) || self.class.name diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb index 2a746784a01..d0db3e134ed 100644 --- a/app/models/ci/pipeline.rb +++ b/app/models/ci/pipeline.rb @@ -63,6 +63,7 @@ module Ci has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline has_many :latest_statuses, -> { latest }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline + has_many :statuses_order_id_desc, -> { order_id_desc }, class_name: 'CommitStatus', foreign_key: :commit_id has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline has_many :bridges, class_name: 'Ci::Bridge', foreign_key: :commit_id, inverse_of: :pipeline has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline diff --git a/app/models/concerns/token_authenticatable_strategies/encryption_helper.rb b/app/models/concerns/token_authenticatable_strategies/encryption_helper.rb index 3be82ed72d3..447521ad8c1 100644 --- a/app/models/concerns/token_authenticatable_strategies/encryption_helper.rb +++ b/app/models/concerns/token_authenticatable_strategies/encryption_helper.rb @@ -11,7 +11,7 @@ module TokenAuthenticatableStrategies # The pattern 
of the token is "#{DYNAMIC_NONCE_IDENTIFIER}#{token}#{iv_of_12_characters}" if token.start_with?(DYNAMIC_NONCE_IDENTIFIER) && token.size > NONCE_SIZE + DYNAMIC_NONCE_IDENTIFIER.size token_to_decrypt = token[1...-NONCE_SIZE] - iv = token[-NONCE_SIZE..-1] + iv = token[-NONCE_SIZE..] Gitlab::CryptoHelper.aes256_gcm_decrypt(token_to_decrypt, nonce: iv) else diff --git a/app/models/hooks/project_hook.rb b/app/models/hooks/project_hook.rb index d1584a62bfb..16b95d2a2b9 100644 --- a/app/models/hooks/project_hook.rb +++ b/app/models/hooks/project_hook.rb @@ -31,10 +31,6 @@ class ProjectHook < WebHook _('Webhooks') end - def web_hooks_disable_failed? - Feature.enabled?(:web_hooks_disable_failed, project) - end - override :rate_limit def rate_limit project.actual_limits.limit_for(:web_hook_calls) @@ -44,6 +40,13 @@ class ProjectHook < WebHook def application_context super.merge(project: project) end + + private + + override :web_hooks_disable_failed? + def web_hooks_disable_failed? + Feature.enabled?(:web_hooks_disable_failed, project) + end end ProjectHook.prepend_mod_with('ProjectHook') diff --git a/app/models/hooks/web_hook.rb b/app/models/hooks/web_hook.rb index cb5c1ac48cd..e8a55abfc8f 100644 --- a/app/models/hooks/web_hook.rb +++ b/app/models/hooks/web_hook.rb @@ -34,9 +34,19 @@ class WebHook < ApplicationRecord end def executable? - return true unless web_hooks_disable_failed? + !temporarily_disabled? && !permanently_disabled? + end - recent_failures <= FAILURE_THRESHOLD && (disabled_until.nil? || disabled_until < Time.current) + def temporarily_disabled? + return false unless web_hooks_disable_failed? + + disabled_until.present? && disabled_until >= Time.current + end + + def permanently_disabled? + return false unless web_hooks_disable_failed? + + recent_failures > FAILURE_THRESHOLD end # rubocop: disable CodeReuse/ServiceClass @@ -69,6 +79,8 @@ class WebHook < ApplicationRecord end def disable! + return if permanently_disabled? + update_attribute(:recent_failures, FAILURE_THRESHOLD + 1) end @@ -80,7 +92,7 @@ class WebHook < ApplicationRecord end def backoff! - return if backoff_count >= MAX_FAILURES && disabled_until && disabled_until > Time.current + return if permanently_disabled? || (backoff_count >= MAX_FAILURES && temporarily_disabled?) assign_attributes(disabled_until: next_backoff.from_now, backoff_count: backoff_count.succ.clamp(0, MAX_FAILURES)) save(validate: false) @@ -93,7 +105,19 @@ class WebHook < ApplicationRecord save(validate: false) end - # Overridden in ProjectHook and GroupHook, other webhooks are not rate-limited. + # @return [Boolean] Whether or not the WebHook is currently throttled. + def rate_limited? + return false unless rate_limit + + Gitlab::ApplicationRateLimiter.peek( + :web_hook_calls, + scope: [self], + threshold: rate_limit + ) + end + + # Threshold for the rate-limit. + # Overridden in ProjectHook and GroupHook, other WebHooks are not rate-limited. 
def rate_limit nil end diff --git a/app/models/namespaces/traversal/linear.rb b/app/models/namespaces/traversal/linear.rb index 36b45dde6b4..5a5f2a5d063 100644 --- a/app/models/namespaces/traversal/linear.rb +++ b/app/models/namespaces/traversal/linear.rb @@ -204,7 +204,7 @@ module Namespaces end if bottom - skope = skope.where(id: bottom.traversal_ids[0..-1]) + skope = skope.where(id: bottom.traversal_ids) end # The original `with_depth` attribute in ObjectHierarchy increments as you diff --git a/app/models/serverless/domain.rb b/app/models/serverless/domain.rb index 2fef3b66b08..164f93afa9a 100644 --- a/app/models/serverless/domain.rb +++ b/app/models/serverless/domain.rb @@ -37,7 +37,7 @@ module Serverless 'a1', serverless_domain_cluster.uuid[2..-3], 'f2', - serverless_domain_cluster.uuid[-2..-1] + serverless_domain_cluster.uuid[-2..] ].join end end diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb index 25438581f2f..3dbbbcdfe23 100644 --- a/app/models/wiki_page.rb +++ b/app/models/wiki_page.rb @@ -338,7 +338,7 @@ class WikiPage current_dirname = File.dirname(title) if persisted? - return title[1..-1] if current_dirname == '/' + return title[1..] if current_dirname == '/' return File.join([directory.presence, title].compact) if current_dirname == '.' end diff --git a/app/services/bulk_imports/create_service.rb b/app/services/bulk_imports/create_service.rb index c1becbb5609..cbf2b34b33c 100644 --- a/app/services/bulk_imports/create_service.rb +++ b/app/services/bulk_imports/create_service.rb @@ -59,7 +59,7 @@ module BulkImports ) bulk_import.create_configuration!(credentials.slice(:url, :access_token)) - params.each do |entity| + Array.wrap(params).each do |entity| BulkImports::Entity.create!( bulk_import: bulk_import, source_type: entity[:source_type], diff --git a/app/services/cohorts_service.rb b/app/services/cohorts_service.rb index 7bc3b267a12..1b1598b301c 100644 --- a/app/services/cohorts_service.rb +++ b/app/services/cohorts_service.rb @@ -38,7 +38,7 @@ class CohortsService { registration_month: registration_month, - activity_months: activity_months[1..-1], + activity_months: activity_months[1..], total: activity_months.first[:total], inactive: inactive } diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb index 0a966f3d44f..027857500f4 100644 --- a/app/uploaders/lfs_object_uploader.rb +++ b/app/uploaders/lfs_object_uploader.rb @@ -9,7 +9,7 @@ class LfsObjectUploader < GitlabUploader alias_method :upload, :model def filename - model.oid[4..-1] + model.oid[4..] 
end def store_dir diff --git a/app/views/groups/packages/index.html.haml b/app/views/groups/packages/index.html.haml index 7910217c939..d56a806f082 100644 --- a/app/views/groups/packages/index.html.haml +++ b/app/views/groups/packages/index.html.haml @@ -3,4 +3,8 @@ .row .col-12 - #js-vue-packages-list{ data: packages_list_data('groups', @group) } + #js-vue-packages-list{ data: { resource_id: @group.id, + full_path: @group.full_path, + endpoint: group_packages_path(@group), + page_type: 'groups', + empty_list_illustration: image_path('illustrations/no-packages.svg'), } } diff --git a/app/views/projects/packages/packages/index.html.haml b/app/views/projects/packages/packages/index.html.haml index 0d5350ab62b..c67b06218e2 100644 --- a/app/views/projects/packages/packages/index.html.haml +++ b/app/views/projects/packages/packages/index.html.haml @@ -3,4 +3,8 @@ .row .col-12 - #js-vue-packages-list{ data: packages_list_data('projects', @project) } + #js-vue-packages-list{ data: { resource_id: @project.id, + full_path: @project.full_path, + endpoint: project_packages_path(@project), + page_type: 'projects', + empty_list_illustration: image_path('illustrations/no-packages.svg'), } } diff --git a/config/initializers/wikicloth_patch.rb b/config/initializers/wikicloth_patch.rb index c033d9ad7ca..13180180c32 100644 --- a/config/initializers/wikicloth_patch.rb +++ b/config/initializers/wikicloth_patch.rb @@ -108,7 +108,7 @@ module WikiCloth "#{I18n.t('template loop detected', :tree => debug_tree)}" else key = params[0].to_s.strip - key_options = params[1..-1].collect { |p| p.is_a?(Hash) ? { :name => p[:name].strip, :value => p[:value].strip } : p.strip } + key_options = params[1..].collect { |p| p.is_a?(Hash) ? { :name => p[:name].strip, :value => p[:value].strip } : p.strip } key_options ||= [] key_digest = Digest::MD5.hexdigest(key_options.to_a.sort {|x,y| (x.is_a?(Hash) ? x[:name] : x) <=> (y.is_a?(Hash) ? y[:name] : y) }.inspect) diff --git a/doc/administration/reference_architectures/10k_users.md b/doc/administration/reference_architectures/10k_users.md index 120874131f1..e266ecb7716 100644 --- a/doc/administration/reference_architectures/10k_users.md +++ b/doc/administration/reference_architectures/10k_users.md @@ -37,7 +37,7 @@ full list of reference architectures, see -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. 
Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -486,8 +486,9 @@ cluster to be used with GitLab. ### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed Relational -Database Service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1249,6 +1250,15 @@ There are many third-party solutions for PostgreSQL HA. The solution selected mu - A static IP for all connections that doesn't change on failover. - [`LISTEN`](https://www.postgresql.org/docs/12/sql-listen.html) SQL functionality must be supported. +NOTE: +With a third-party setup, it's possible to colocate Praefect's database on the same server as +the main [GitLab](#provide-your-own-postgresql-instance) database as a convenience unless +you are using Geo, where separate database instances are required for handling replication correctly. +In this setup, the specs of the main database setup shouldn't need to be changed as the impact should be +minimal. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). + Examples of the above could include [Google's Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) or [Amazon RDS](https://aws.amazon.com/rds/). Once the database is set up, follow the [post configuration](#praefect-postgresql-post-configuration). @@ -2234,7 +2244,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. 
Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/administration/reference_architectures/25k_users.md b/doc/administration/reference_architectures/25k_users.md index 7983aa1b43f..a3a96e23094 100644 --- a/doc/administration/reference_architectures/25k_users.md +++ b/doc/administration/reference_architectures/25k_users.md @@ -37,7 +37,7 @@ full list of reference architectures, see -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -489,8 +489,9 @@ cluster to be used with GitLab. ### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed Relational -Database Service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1255,7 +1256,14 @@ There are many third-party solutions for PostgreSQL HA. The solution selected mu - A static IP for all connections that doesn't change on failover. - [`LISTEN`](https://www.postgresql.org/docs/12/sql-listen.html) SQL functionality must be supported. -Examples of the above could include [Google's Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) or [Amazon RDS](https://aws.amazon.com/rds/). 
+NOTE: +With a third-party setup, it's possible to colocate Praefect's database on the same server as +the main [GitLab](#provide-your-own-postgresql-instance) database as a convenience unless +you are using Geo, where separate database instances are required for handling replication correctly. +In this setup, the specs of the main database setup shouldn't need to be changed as the impact should be +minimal. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Once the database is set up, follow the [post configuration](#praefect-postgresql-post-configuration). @@ -2234,7 +2242,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/administration/reference_architectures/2k_users.md b/doc/administration/reference_architectures/2k_users.md index cb2cd9137d3..b67a6d50f71 100644 --- a/doc/administration/reference_architectures/2k_users.md +++ b/doc/administration/reference_architectures/2k_users.md @@ -30,7 +30,7 @@ For a full list of reference architectures, see | NFS server (optional, not recommended) | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` | `F4s v2` | -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. 
[Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run as reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run as reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -233,8 +233,9 @@ to be used with GitLab. ### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed relational -database service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1028,7 +1029,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/administration/reference_architectures/3k_users.md b/doc/administration/reference_architectures/3k_users.md index 84ea6f57c57..6e081a74959 100644 --- a/doc/administration/reference_architectures/3k_users.md +++ b/doc/administration/reference_architectures/3k_users.md @@ -46,7 +46,7 @@ For a full list of reference architectures, see -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. 
Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -777,8 +777,9 @@ cluster to be used with GitLab. ### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed Relational -Database Service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1201,7 +1202,14 @@ There are many third-party solutions for PostgreSQL HA. The solution selected mu - A static IP for all connections that doesn't change on failover. - [`LISTEN`](https://www.postgresql.org/docs/12/sql-listen.html) SQL functionality must be supported. -Examples of the above could include [Google's Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) or [Amazon RDS](https://aws.amazon.com/rds/). +NOTE: +With a third-party setup, it's possible to colocate Praefect's database on the same server as +the main [GitLab](#provide-your-own-postgresql-instance) database as a convenience unless +you are using Geo, where separate database instances are required for handling replication correctly. +In this setup, the specs of the main database setup shouldn't need to be changed as the impact should be +minimal. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Once the database is set up, follow the [post configuration](#praefect-postgresql-post-configuration). 
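As a concrete illustration of the "provide your own PostgreSQL instance" guidance repeated in these reference-architecture pages, a minimal Omnibus `gitlab.rb` sketch for pointing GitLab at an external managed database might look like the following. The host, credentials, and database name are placeholders, and this is only a sketch — the official external-database documentation lists the full set of required settings.

```ruby
# /etc/gitlab/gitlab.rb — minimal sketch for an external (managed) PostgreSQL.
# Values below are placeholders; adjust for your environment.

# Disable the bundled PostgreSQL since the database is provided externally.
postgresql['enable'] = false

# Connection details for the managed PostgreSQL instance
# (for example, Google Cloud SQL or Amazon RDS).
gitlab_rails['db_adapter']  = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host']     = '10.0.0.10'            # private IP or DNS name of the instance
gitlab_rails['db_port']     = 5432
gitlab_rails['db_database'] = 'gitlabhq_production'
gitlab_rails['db_username'] = 'gitlab'
gitlab_rails['db_password'] = 'PASSWORD'
```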
@@ -2198,7 +2206,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/administration/reference_architectures/50k_users.md b/doc/administration/reference_architectures/50k_users.md index 2182377b621..efd5271e8d2 100644 --- a/doc/administration/reference_architectures/50k_users.md +++ b/doc/administration/reference_architectures/50k_users.md @@ -37,7 +37,7 @@ full list of reference architectures, see -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -495,8 +495,9 @@ cluster to be used with GitLab. 
### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed Relational -Database Service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1262,6 +1263,15 @@ There are many third-party solutions for PostgreSQL HA. The solution selected mu - A static IP for all connections that doesn't change on failover. - [`LISTEN`](https://www.postgresql.org/docs/12/sql-listen.html) SQL functionality must be supported. +NOTE: +With a third-party setup, it's possible to colocate Praefect's database on the same server as +the main [GitLab](#provide-your-own-postgresql-instance) database as a convenience unless +you are using Geo, where separate database instances are required for handling replication correctly. +In this setup, the specs of the main database setup shouldn't need to be changed as the impact should be +minimal. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). + Examples of the above could include [Google's Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) or [Amazon RDS](https://aws.amazon.com/rds/). Once the database is set up, follow the [post configuration](#praefect-postgresql-post-configuration). @@ -2248,7 +2258,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and Amazon RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. 
Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/administration/reference_architectures/5k_users.md b/doc/administration/reference_architectures/5k_users.md index a2adc86515f..43070cccc94 100644 --- a/doc/administration/reference_architectures/5k_users.md +++ b/doc/administration/reference_architectures/5k_users.md @@ -43,7 +43,7 @@ costly-to-operate environment by using the -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and AWS RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. @@ -769,8 +769,9 @@ cluster to be used with GitLab. ### Provide your own PostgreSQL instance If you're hosting GitLab on a cloud provider, you can optionally use a -managed service for PostgreSQL. For example, AWS offers a managed Relational -Database Service (RDS) that runs PostgreSQL. +managed service for PostgreSQL. + +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). If you use a cloud-managed service, or provide your own PostgreSQL: @@ -1194,7 +1195,14 @@ There are many third-party solutions for PostgreSQL HA. The solution selected mu - A static IP for all connections that doesn't change on failover. - [`LISTEN`](https://www.postgresql.org/docs/12/sql-listen.html) SQL functionality must be supported. -Examples of the above could include [Google's Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) or [Amazon RDS](https://aws.amazon.com/rds/). +NOTE: +With a third-party setup, it's possible to colocate Praefect's database on the same server as +the main [GitLab](#provide-your-own-postgresql-instance) database as a convenience unless +you are using Geo, where separate database instances are required for handling replication correctly. +In this setup, the specs of the main database setup shouldn't need to be changed as the impact should be +minimal. 
+ +A reputable provider or solution should be used for this. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Once the database is set up, follow the [post configuration](#praefect-postgresql-post-configuration). @@ -2169,7 +2177,7 @@ services where applicable): -1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. Google Cloud SQL and AWS RDS are known to work, however Azure Database for PostgreSQL is [not recommended](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61) due to performance issues. Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. +1. Can be optionally run on reputable third-party external PaaS PostgreSQL solutions. [Google Cloud SQL](https://cloud.google.com/sql/docs/postgres/high-availability#normal) and [Amazon RDS](https://aws.amazon.com/rds/) are known to work, however Azure Database for PostgreSQL is **not recommended** due to [performance issues](https://gitlab.com/gitlab-org/quality/reference-architectures/-/issues/61). Consul is primarily used for PostgreSQL high availability so can be ignored when using a PostgreSQL PaaS setup. However it is also used optionally by Prometheus for Omnibus auto host discovery. 2. Can be optionally run on reputable third-party external PaaS Redis solutions. Google Memorystore and AWS Elasticache are known to work. 3. Can be optionally run on reputable third-party load balancing services (LB PaaS). AWS ELB is known to work. 4. Should be run on reputable third-party object storage (storage PaaS) for cloud implementations. Google Cloud Storage and AWS S3 are known to work. diff --git a/doc/api/pipelines.md b/doc/api/pipelines.md index c412979ca99..d850113f9b6 100644 --- a/doc/api/pipelines.md +++ b/doc/api/pipelines.md @@ -15,6 +15,8 @@ Read more on [pagination](index.md#pagination). ## List project pipelines +> `iid` in response [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342223) in GitLab 14.6. + List pipelines in a project. Child pipelines are not included in the results, but you can [get child pipeline](pipelines.md#get-a-single-pipeline) individually. @@ -74,6 +76,8 @@ Example of response ## Get a single pipeline +> `iid` in response [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342223) in GitLab 14.6. + Get one pipeline from a project. You can also get a single [child pipeline](../ci/pipelines/parent_child_pipelines.md). @@ -267,6 +271,8 @@ Sample response: ## Create a new pipeline +> `iid` in response [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342223) in GitLab 14.6. + ```plaintext POST /projects/:id/pipeline ``` @@ -316,6 +322,8 @@ Example of response ## Retry jobs in a pipeline +> `iid` in response [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/342223) in GitLab 14.6. 
+ ```plaintext POST /projects/:id/pipelines/:pipeline_id/retry ``` diff --git a/doc/development/documentation/styleguide/index.md b/doc/development/documentation/styleguide/index.md index d56f604bf5a..7e9bd5be9df 100644 --- a/doc/development/documentation/styleguide/index.md +++ b/doc/development/documentation/styleguide/index.md @@ -58,8 +58,7 @@ it was originally composed for, if it is helpful to any of our audiences, we can include it. - If you use an image that has a separate source file (for example, a vector or - diagram format), link the image to the source file so that it may be reused or - updated by anyone. + diagram format), link the image to the source file so that anyone can update or reuse it. - Do not copy and paste content from other sources unless it is a limited quotation with the source cited. Typically it is better to either rephrase relevant information in your own words or link out to the other source. diff --git a/doc/integration/oauth2_generic.md b/doc/integration/oauth2_generic.md index 931feaa5bb3..3d44da8b4c8 100644 --- a/doc/integration/oauth2_generic.md +++ b/doc/integration/oauth2_generic.md @@ -4,31 +4,39 @@ group: Integrations info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments --- -# Sign into GitLab with (almost) any OAuth2 provider **(FREE SELF)** +# Generic OAuth2 provider **(FREE SELF)** -The `omniauth-oauth2-generic` gem allows Single Sign-On between GitLab and your own OAuth2 provider -(or any OAuth2 provider compatible with this gem) +The `omniauth-oauth2-generic` gem allows single sign-on (SSO) between GitLab +and your OAuth2 provider (or any OAuth2 provider compatible with this gem). -This strategy is designed to allow configuration of the simple OmniAuth SSO process outlined below: +This strategy allows for the configuration of this OmniAuth SSO process: -1. Strategy directs client to your authorization URL (**configurable**), with specified ID and key -1. OAuth provider handles authentication of request, user, and (optionally) authorization to access user's profile -1. OAuth provider directs client back to GitLab where Strategy handles retrieval of access token -1. Strategy requests user information from a **configurable** "user profile" URL (using the access token) -1. Strategy parses user information from the response, using a **configurable** format -1. GitLab finds or creates the returned user and logs them in +1. Strategy directs the client to your authorization URL (**configurable**), with + the specified ID and key. +1. The OAuth2 provider handles authentication of the request, user, and (optionally) + authorization to access user's profile. +1. The OAuth2 provider directs the client back to GitLab where Strategy handles + the retrieval of the access token. +1. Strategy requests user information from a **configurable** "user profile" + URL (using the access token). +1. Strategy parses user information from the response, using a **configurable** + format. +1. GitLab finds or creates the returned user and signs them in. 
-## Limitations of this Strategy +## Limitations of this strategy -- It can only be used for Single Sign on, and doesn't provide any other access granted by any OAuth provider - (importing projects or users, etc) -- It only supports the Authorization Grant flow (most common for client-server applications, like GitLab) -- It is not able to fetch user information from more than one URL -- It has not been tested with user information formats other than JSON +- It can only be used for single sign-on, and doesn't provide any other access + granted by any OAuth2 provider (like importing projects or users). +- It supports only the Authorization Grant flow (most common for client-server + applications, like GitLab). +- It can't fetch user information from more than one URL. +- It hasn't been tested with user information formats, other than JSON. -## Configuration Instructions +## Configure the OAuth2 provider -1. Register your application in the OAuth2 provider you wish to authenticate with. +To configure the provider: + +1. Register your application in the OAuth2 provider you want to authenticate with. The redirect URI you provide when registering the application should be: @@ -36,13 +44,13 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc http://your-gitlab.host.com/users/auth/oauth2_generic/callback ``` -1. You should now be able to get a Client ID and Client Secret. - Where this shows up differs for each provider. - This may also be called Application ID and Secret + You should now be able to get a Client ID and Client Secret. Where this + appears differs for each provider. This may also be called Application ID + and Secret. -1. On your GitLab server, open the configuration file. +1. On your GitLab server, open the appropriate configuration file. - For Omnibus package: + For Omnibus GitLab: ```shell sudo editor /etc/gitlab/gitlab.rb @@ -55,9 +63,10 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc sudo -u git -H editor config/gitlab.yml ``` -1. See [Configure initial settings](omniauth.md#configure-initial-settings) for initial settings +1. See [Configure initial settings](omniauth.md#configure-initial-settings) for + initial settings. -1. Add the provider-specific configuration for your provider, for example: +1. Add the provider-specific configuration for your provider. For example: ```ruby gitlab_rails['omniauth_providers'] = [ @@ -92,11 +101,13 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc For more information about these settings, see [the gem's README](https://gitlab.com/satorix/omniauth-oauth2-generic#gitlab-config-example). -1. Save the configuration file +1. Save the configuration file. -1. Restart GitLab for the changes to take effect +1. [Restart](../administration/restart_gitlab.md#installations-from-source) + GitLab for the changes to take effect. -On the sign in page there should now be a new button below the regular sign in form. -Click the button to begin your provider's authentication process. This directs -the browser to your OAuth2 Provider's authentication page. If everything goes well -the user is returned to your GitLab instance and is signed in. +On the sign-in page there should now be a new button below the regular sign-in +form. Select the button to begin your provider's authentication process. This +directs the browser to your OAuth2 provider's authentication page. If +everything goes well, you are returned to your GitLab instance and are +signed in. 
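The `gitlab_rails['omniauth_providers']` example referenced above is mostly elided by the hunk. For reference only, here is a minimal sketch of a complete `oauth2_generic` entry for `/etc/gitlab/gitlab.rb`, loosely following the option structure documented in the `omniauth-oauth2-generic` gem's README; the label, endpoint paths, `id_path`, and attribute mapping below are placeholder assumptions, not values taken from this patch:

```ruby
gitlab_rails['omniauth_providers'] = [
  {
    name: 'oauth2_generic',
    label: 'My Provider',                     # label shown on the GitLab sign-in button
    app_id: '<your_app_client_id>',
    app_secret: '<your_app_client_secret>',
    args: {
      client_options: {
        site: 'https://provider.example.com', # base URL of the OAuth2 provider (placeholder)
        authorize_url: '/oauth2/authorize',   # assumed authorization endpoint
        token_url: '/oauth2/token',           # assumed token endpoint
        user_info_url: '/oauth2/userinfo'     # assumed "user profile" endpoint
      },
      user_response_structure: {
        root_path: [],                        # where the user attributes live in the JSON response
        id_path: ['sub'],                     # assumed unique-ID field
        attributes: { email: 'email', name: 'name' } # assumed attribute mapping
      }
    }
  }
]
```

The `user_response_structure` keys are what let the strategy parse the configurable "user profile" response described in the steps above.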
diff --git a/lib/api/entities/ci/pipeline_basic.rb b/lib/api/entities/ci/pipeline_basic.rb index 4d56176bdb3..a2a5a98920a 100644 --- a/lib/api/entities/ci/pipeline_basic.rb +++ b/lib/api/entities/ci/pipeline_basic.rb @@ -4,7 +4,7 @@ module API module Entities module Ci class PipelineBasic < Grape::Entity - expose :id, :project_id, :sha, :ref, :status, :source + expose :id, :iid, :project_id, :sha, :ref, :status, :source expose :created_at, :updated_at expose :web_url do |pipeline, _options| diff --git a/lib/banzai/filter/math_filter.rb b/lib/banzai/filter/math_filter.rb index 53dafe45fb3..6859d67c9d8 100644 --- a/lib/banzai/filter/math_filter.rb +++ b/lib/banzai/filter/math_filter.rb @@ -39,7 +39,7 @@ module Banzai code[:class] = INLINE_CLASSES code[STYLE_ATTRIBUTE] = 'inline' - closing.content = closing.content[1..-1] + closing.content = closing.content[1..] opening.content = opening.content[0..-2] end end diff --git a/lib/banzai/filter/repository_link_filter.rb b/lib/banzai/filter/repository_link_filter.rb index 04bbcabd93f..408e6dc685d 100644 --- a/lib/banzai/filter/repository_link_filter.rb +++ b/lib/banzai/filter/repository_link_filter.rb @@ -174,7 +174,7 @@ module Banzai def build_relative_path(path, request_path) return request_path if path.empty? return path unless request_path - return path[1..-1] if path.start_with?('/') + return path[1..] if path.start_with?('/') parts = request_path.split('/') diff --git a/lib/extracts_ref.rb b/lib/extracts_ref.rb index d130a9d6f82..daba0452318 100644 --- a/lib/extracts_ref.rb +++ b/lib/extracts_ref.rb @@ -113,7 +113,7 @@ module ExtractsRef best_match = valid_refs.max_by(&:length) # Partition the string into the ref and the path, ignoring the empty first value - id.partition(best_match)[1..-1] + id.partition(best_match)[1..] end def use_first_path_segment?(ref) diff --git a/lib/flowdock/git/builder.rb b/lib/flowdock/git/builder.rb index 6f4428d1f42..88d9814950a 100644 --- a/lib/flowdock/git/builder.rb +++ b/lib/flowdock/git/builder.rb @@ -51,7 +51,7 @@ module Flowdock end def body - content = @commit[:message][first_line.size..-1] + content = @commit[:message][first_line.size..] content.strip! if content "
<pre>#{content}</pre>
" unless content.empty? end diff --git a/lib/gitlab/chat/output.rb b/lib/gitlab/chat/output.rb index 4a55b81a9eb..b4ce05ca06d 100644 --- a/lib/gitlab/chat/output.rb +++ b/lib/gitlab/chat/output.rb @@ -48,10 +48,10 @@ module Gitlab # follows it will produce a nil. For example: # # "\n".split("\n") # => [] - # "\n".split("\n")[1..-1] # => nil + # "\n".split("\n")[1..] # => nil # # To work around this we only "join" if we're given an Array. - if (converted = output.split("\n")[1..-1]) + if (converted = output.split("\n")[1..]) converted.join("\n") else '' diff --git a/lib/gitlab/ci/reports/security/report.rb b/lib/gitlab/ci/reports/security/report.rb index 417319cb5be..3e4a44a2e70 100644 --- a/lib/gitlab/ci/reports/security/report.rb +++ b/lib/gitlab/ci/reports/security/report.rb @@ -51,7 +51,7 @@ module Gitlab def replace_with!(other) instance_variables.each do |ivar| - instance_variable_set(ivar, other.public_send(ivar.to_s[1..-1])) # rubocop:disable GitlabSecurity/PublicSend + instance_variable_set(ivar, other.public_send(ivar.to_s[1..])) # rubocop:disable GitlabSecurity/PublicSend end end diff --git a/lib/gitlab/diff/custom_diff.rb b/lib/gitlab/diff/custom_diff.rb index e1d3cea4306..3928ece9281 100644 --- a/lib/gitlab/diff/custom_diff.rb +++ b/lib/gitlab/diff/custom_diff.rb @@ -36,7 +36,7 @@ module Gitlab end def strip_diff_frontmatter(diff_content) - diff_content.scan(/.*\n/)[2..-1]&.join('') if diff_content.present? + diff_content.scan(/.*\n/)[2..]&.join('') if diff_content.present? end def blobs_with_transformed_diffs diff --git a/lib/gitlab/diff/inline_diff.rb b/lib/gitlab/diff/inline_diff.rb index f70618195d0..802da50cfc6 100644 --- a/lib/gitlab/diff/inline_diff.rb +++ b/lib/gitlab/diff/inline_diff.rb @@ -6,8 +6,8 @@ module Gitlab attr_accessor :old_line, :new_line, :offset def initialize(old_line, new_line, offset: 0) - @old_line = old_line[offset..-1] - @new_line = new_line[offset..-1] + @old_line = old_line[offset..] + @new_line = new_line[offset..] @offset = offset end diff --git a/lib/gitlab/gfm/reference_rewriter.rb b/lib/gitlab/gfm/reference_rewriter.rb index 4d82acd9d87..5d0a638f97a 100644 --- a/lib/gitlab/gfm/reference_rewriter.rb +++ b/lib/gitlab/gfm/reference_rewriter.rb @@ -57,7 +57,7 @@ module Gitlab def unfold_reference(reference, match, target_parent) before = @text[0...match.begin(0)] - after = @text[match.end(0)..-1] + after = @text[match.end(0)..] referable = find_referable(reference) return reference unless referable diff --git a/lib/gitlab/gpg.rb b/lib/gitlab/gpg.rb index 3d9b06855ff..f55afd90ac0 100644 --- a/lib/gitlab/gpg.rb +++ b/lib/gitlab/gpg.rb @@ -48,7 +48,7 @@ module Gitlab raw_keys.each_with_object({}) do |raw_key, grouped_subkeys| primary_subkey_id = raw_key.primary_subkey.keyid - grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s| + grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..].map do |s| { keyid: s.keyid, fingerprint: s.fingerprint } end end diff --git a/lib/gitlab/search/query.rb b/lib/gitlab/search/query.rb index c0420126ada..97ee7c7817d 100644 --- a/lib/gitlab/search/query.rb +++ b/lib/gitlab/search/query.rb @@ -44,7 +44,7 @@ module Gitlab next unless match - input = match.split(':')[1..-1].join + input = match.split(':')[1..].join next if input.empty? 
filter[:negated] = match.start_with?("-") diff --git a/lib/gitlab/sherlock/line_profiler.rb b/lib/gitlab/sherlock/line_profiler.rb index 52d88f074b7..aa25eb5a571 100644 --- a/lib/gitlab/sherlock/line_profiler.rb +++ b/lib/gitlab/sherlock/line_profiler.rb @@ -70,7 +70,7 @@ module Gitlab next if total_duration <= MINIMUM_DURATION - stats[1..-1].each_with_index do |data, index| + stats[1..].each_with_index do |data, index| next unless source_lines[index] duration = microsec_to_millisec(data[0]) diff --git a/lib/gitlab/string_range_marker.rb b/lib/gitlab/string_range_marker.rb index 5ddc88edf50..292a9d07e6a 100644 --- a/lib/gitlab/string_range_marker.rb +++ b/lib/gitlab/string_range_marker.rb @@ -99,7 +99,7 @@ module Gitlab start = prev = positions[0] range = MarkerRange.new(start, prev, mode: mode) - positions[1..-1].each do |pos| + positions[1..].each do |pos| if pos == prev + 1 range = MarkerRange.new(start, pos, mode: mode) prev = pos diff --git a/lib/gitlab/utils/nokogiri.rb b/lib/gitlab/utils/nokogiri.rb index 4b37bb7e5ea..5113553c584 100644 --- a/lib/gitlab/utils/nokogiri.rb +++ b/lib/gitlab/utils/nokogiri.rb @@ -16,7 +16,7 @@ module Gitlab # we remove the leading `//` and add `descendant-or-self::` # in order to ensure we're searching from this node and all # descendants. - xpath.map { |t| "descendant-or-self::#{t[2..-1]}" }.join('|') + xpath.map { |t| "descendant-or-self::#{t[2..]}" }.join('|') end end end diff --git a/lib/tasks/gitlab/seed/group_seed.rake b/lib/tasks/gitlab/seed/group_seed.rake index bc705c94422..a9a350fb6c3 100644 --- a/lib/tasks/gitlab/seed/group_seed.rake +++ b/lib/tasks/gitlab/seed/group_seed.rake @@ -184,7 +184,7 @@ class GroupSeeder group = Group.find(group_id) @resource_count.times do |i| - _, project_path = PROJECT_URL.split('/')[-2..-1] + _, project_path = PROJECT_URL.split('/')[-2..] project_path.gsub!('.git', '') diff --git a/qa/qa/resource/merge_request.rb b/qa/qa/resource/merge_request.rb index 1fea6feb910..ba63e0823f0 100644 --- a/qa/qa/resource/merge_request.rb +++ b/qa/qa/resource/merge_request.rb @@ -168,6 +168,41 @@ module QA ) end + # Object comparison + # + # @param [QA::Resource::MergeRequest] other + # @return [Boolean] + def ==(other) + other.is_a?(MergeRequest) && comparable_mr == other.comparable_mr + end + + # Override inspect for a better rspec failure diff output + # + # @return [String] + def inspect + JSON.pretty_generate(comparable_mr) + end + + protected + + # Return subset of fields for comparing merge requests + # + # @return [Hash] + def comparable_mr + reload! if api_response.nil? + + api_resource.except( + :id, + :web_url, + :project_id, + :source_project_id, + :target_project_id, + # these can differ depending on user fetching mr + :subscribed, + :first_contribution + ).merge({ references: api_resource[:references].except(:full) }) + end + private def transform_api_resource(api_resource) diff --git a/rubocop/cop/migration/schedule_async.rb b/rubocop/cop/migration/schedule_async.rb index f296628c3d6..74bd2baffa9 100644 --- a/rubocop/cop/migration/schedule_async.rb +++ b/rubocop/cop/migration/schedule_async.rb @@ -46,7 +46,7 @@ module RuboCop end def arguments(node) - node.children[2..-1] + node.children[2..] end end end diff --git a/rubocop/cop/project_path_helper.rb b/rubocop/cop/project_path_helper.rb index ec3f847faf9..0d12f2d2b12 100644 --- a/rubocop/cop/project_path_helper.rb +++ b/rubocop/cop/project_path_helper.rb @@ -46,7 +46,7 @@ module RuboCop end def arguments(node) - node.children[2..-1] + node.children[2..] 
end end end diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb index 3adba32c74a..a7089005abf 100644 --- a/spec/controllers/import/bulk_imports_controller_spec.rb +++ b/spec/controllers/import/bulk_imports_controller_spec.rb @@ -215,9 +215,13 @@ RSpec.describe Import::BulkImportsController do let(:pat) { "fake-pat" } let(:bulk_import_params) do [{ "source_type" => "group_entity", - "source_full_path" => "full_path", - "destination_name" => "destination_name", - "destination_namespace" => "root" }] + "source_full_path" => "full_path", + "destination_name" => "destination_name", + "destination_namespace" => "root" }, + { "source_type" => "group_entity2", + "source_full_path" => "full_path2", + "destination_name" => "destination_name2", + "destination_namespace" => "root" }] end before do @@ -225,29 +229,23 @@ RSpec.describe Import::BulkImportsController do session[:bulk_import_gitlab_url] = instance_url end - it 'executes BulkImpors::CreatetService' do + it 'executes BulkImpors::CreateService' do + error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity) + expect_next_instance_of( - ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service| + ::BulkImports::CreateService, user, bulk_import_params[0], { url: instance_url, access_token: pat }) do |service| allow(service).to receive(:execute).and_return(ServiceResponse.success(payload: bulk_import)) end - - post :create, params: { bulk_import: bulk_import_params } - - expect(response).to have_gitlab_http_status(:ok) - expect(response.body).to eq({ id: bulk_import.id }.to_json) - end - - it 'returns error when validation fails' do - error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity) expect_next_instance_of( - ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service| + ::BulkImports::CreateService, user, bulk_import_params[1], { url: instance_url, access_token: pat }) do |service| allow(service).to receive(:execute).and_return(error_response) end post :create, params: { bulk_import: bulk_import_params } - expect(response).to have_gitlab_http_status(:unprocessable_entity) - expect(response.body).to eq({ error: 'Record invalid' }.to_json) + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq([{ "success" => true, "id" => bulk_import.id, "message" => nil }, + { "success" => false, "id" => nil, "message" => "Record invalid" }]) end end end diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb index 75135839a06..860bbc1c5cc 100644 --- a/spec/controllers/projects/serverless/functions_controller_spec.rb +++ b/spec/controllers/projects/serverless/functions_controller_spec.rb @@ -128,7 +128,7 @@ RSpec.describe Projects::Serverless::FunctionsController do expect(json_response["functions"]).to all( include( - 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" + 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..]}#{"%x" % 
environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" ) ) end @@ -166,7 +166,7 @@ RSpec.describe Projects::Serverless::FunctionsController do expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include( - 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" + 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" ) end diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb index 073706cf9d8..b5a973a53c0 100644 --- a/spec/features/merge_request/user_views_open_merge_request_spec.rb +++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb @@ -20,7 +20,7 @@ RSpec.describe 'User views an open merge request' do # Work around a weird Capybara behavior where calling `parent` on a node # returns the whole document, not the node's actual parent element - expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..-1]) + expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..]) expect(page).to have_content(merge_request.title) end diff --git a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb index 4a25e28a14e..91a30004fc3 100644 --- a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb +++ b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb @@ -80,7 +80,7 @@ RSpec.describe 'Issue prioritization' do expect(issue_titles[0..1]).to contain_exactly('issue_5', 'issue_8') expect(issue_titles[2..4]).to contain_exactly('issue_1', 'issue_3', 'issue_7') - expect(issue_titles[5..-1]).to eq(%w(issue_2 issue_4 issue_6)) + expect(issue_titles[5..]).to eq(%w(issue_2 issue_4 issue_6)) end end end diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb index 4ad02ce7da8..045dba295ac 100644 --- a/spec/finders/packages/nuget/package_finder_spec.rb +++ b/spec/finders/packages/nuget/package_finder_spec.rb @@ -71,7 +71,7 @@ RSpec.describe Packages::Nuget::PackageFinder do end context 'with prefix wildcard' do - let(:package_name) { "%#{package1.name[3..-1]}" } + let(:package_name) { "%#{package1.name[3..]}" } it { is_expected.to match_array([package1, package2]) } end diff --git a/spec/fixtures/api/schemas/pipeline_schedule.json b/spec/fixtures/api/schemas/pipeline_schedule.json index cdb4aea76da..ef5942b7eb3 100644 --- a/spec/fixtures/api/schemas/pipeline_schedule.json +++ b/spec/fixtures/api/schemas/pipeline_schedule.json @@ -14,6 +14,7 @@ "type": ["object", "null"], "properties": { "id": { "type": "integer" }, + "iid": { "type": "integer" }, "project_id": { "type": "integer" }, "sha": { "type": "string" }, "ref": { "type": "string" }, diff --git a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js index 3c2367e22f5..d3f86672f33 100644 --- a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js +++ 
b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js @@ -123,13 +123,22 @@ describe('import target cell', () => { }); describe('when entity is available for import', () => { + const FAKE_PROGRESS_MESSAGE = 'progress message'; beforeEach(() => { - group = generateFakeTableEntry({ id: 1, flags: { isAvailableForImport: true } }); + group = generateFakeTableEntry({ + id: 1, + flags: { isAvailableForImport: true }, + progress: { message: FAKE_PROGRESS_MESSAGE }, + }); createComponent({ group }); }); it('renders namespace dropdown as enabled', () => { expect(findNamespaceDropdown().attributes('disabled')).toBe(undefined); }); + + it('renders progress message as error if it exists', () => { + expect(wrapper.find('[role=alert]').text()).toBe(FAKE_PROGRESS_MESSAGE); + }); }); }); diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js index f3447494578..c6ddce17fe4 100644 --- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js +++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js @@ -163,12 +163,14 @@ describe('Bulk import resolvers', () => { }); describe('mutations', () => { - beforeEach(() => { - axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 }); - }); + beforeEach(() => {}); describe('importGroup', () => { - it('sets import status to CREATED when request completes', async () => { + it('sets import status to CREATED for successful groups when request completes', async () => { + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: true, id: 1 }]); + await client.mutate({ mutation: importGroupsMutation, variables: { @@ -185,9 +187,57 @@ describe('Bulk import resolvers', () => { await axios.waitForAll(); expect(results[0].progress.status).toBe(STATUSES.CREATED); }); + + it('sets import status to CREATED for successful groups when request completes with legacy response', async () => { + axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 }); + + await client.mutate({ + mutation: importGroupsMutation, + variables: { + importRequests: [ + { + sourceGroupId: statusEndpointFixture.importable_data[0].id, + newName: 'test', + targetNamespace: 'root', + }, + ], + }, + }); + + await axios.waitForAll(); + expect(results[0].progress.status).toBe(STATUSES.CREATED); + }); + + it('sets import status to FAILED and sets progress message for failed groups when request completes', async () => { + const FAKE_ERROR_MESSAGE = 'foo'; + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: false, id: 1, message: FAKE_ERROR_MESSAGE }]); + + await client.mutate({ + mutation: importGroupsMutation, + variables: { + importRequests: [ + { + sourceGroupId: statusEndpointFixture.importable_data[0].id, + newName: 'test', + targetNamespace: 'root', + }, + ], + }, + }); + + await axios.waitForAll(); + expect(results[0].progress.status).toBe(STATUSES.FAILED); + expect(results[0].progress.message).toBe(FAKE_ERROR_MESSAGE); + }); }); it('updateImportStatus updates status', async () => { + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: true, id: 1 }]); + const NEW_STATUS = 'dummy'; await client.mutate({ mutation: importGroupsMutation, @@ -216,6 +266,7 @@ describe('Bulk import resolvers', () => { expect(statusInResponse).toStrictEqual({ 
__typename: clientTypenames.BulkImportProgress, id, + message: null, status: NEW_STATUS, }); }); diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js index 5f6f9987a8f..279084f5ebf 100644 --- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js +++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js @@ -1,7 +1,7 @@ import { STATUSES } from '~/import_entities/constants'; import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory'; -export const generateFakeEntry = ({ id, status, ...rest }) => ({ +export const generateFakeEntry = ({ id, status, message, ...rest }) => ({ __typename: clientTypenames.BulkImportSourceGroup, webUrl: `https://fake.host/${id}`, fullPath: `fake_group_${id}`, @@ -18,6 +18,7 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({ : { id, status, + message: message || '', }, ...rest, }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap similarity index 93% rename from spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap rename to spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap index f72698b4aac..dbe3c70c3cb 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap +++ b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap @@ -4,7 +4,7 @@ exports[`PackagesListApp renders 1`] = `
@@ -49,7 +49,7 @@ exports[`PackagesListApp renders 1`] = ` diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js similarity index 91% rename from spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js rename to spec/frontend/packages_and_registries/package_registry/pages/list_spec.js index ad848f367e0..2ac2a6455ef 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js @@ -6,7 +6,7 @@ import { nextTick } from 'vue'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import PackageListApp from '~/packages_and_registries/package_registry/components/list/app.vue'; +import ListPage from '~/packages_and_registries/package_registry/pages/list.vue'; import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue'; import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue'; import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; @@ -16,11 +16,13 @@ import { PROJECT_RESOURCE_TYPE, GROUP_RESOURCE_TYPE, GRAPHQL_PAGE_SIZE, + EMPTY_LIST_HELP_URL, + PACKAGE_HELP_URL, } from '~/packages_and_registries/package_registry/constants'; import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql'; -import { packagesListQuery, packageData, pagination } from '../../mock_data'; +import { packagesListQuery, packageData, pagination } from '../mock_data'; jest.mock('~/lib/utils/common_utils'); jest.mock('~/flash'); @@ -32,9 +34,7 @@ describe('PackagesListApp', () => { let apolloProvider; const defaultProvide = { - packageHelpUrl: 'packageHelpUrl', emptyListIllustration: 'emptyListIllustration', - emptyListHelpUrl: 'emptyListHelpUrl', isGroupPage: true, fullPath: 'gitlab-org', }; @@ -66,7 +66,7 @@ describe('PackagesListApp', () => { const requestHandlers = [[getPackagesQuery, resolver]]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMountExtended(PackageListApp, { + wrapper = shallowMountExtended(ListPage, { localVue, apolloProvider, provide, @@ -113,7 +113,10 @@ describe('PackagesListApp', () => { await waitForFirstRequest(); expect(findPackageTitle().exists()).toBe(true); - expect(findPackageTitle().props('count')).toBe(2); + expect(findPackageTitle().props()).toMatchObject({ + count: 2, + helpUrl: PACKAGE_HELP_URL, + }); }); describe('search component', () => { @@ -213,12 +216,12 @@ describe('PackagesListApp', () => { it('generate the correct empty list link', () => { const link = findListComponent().findComponent(GlLink); - expect(link.attributes('href')).toBe(defaultProvide.emptyListHelpUrl); + expect(link.attributes('href')).toBe(EMPTY_LIST_HELP_URL); expect(link.text()).toBe('publish and share your packages'); }); it('includes the right content on the default tab', () => { - expect(findEmptyState().text()).toContain(PackageListApp.i18n.emptyPageTitle); + expect(findEmptyState().text()).toContain(ListPage.i18n.emptyPageTitle); }); }); @@ -234,8 +237,8 @@ describe('PackagesListApp', () => { }); it('should show specific empty message', () => { - 
expect(findEmptyState().text()).toContain(PackageListApp.i18n.noResultsTitle); - expect(findEmptyState().text()).toContain(PackageListApp.i18n.widenFilters); + expect(findEmptyState().text()).toContain(ListPage.i18n.noResultsTitle); + expect(findEmptyState().text()).toContain(ListPage.i18n.widenFilters); }); }); diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb index 2af572850da..06c6cccd488 100644 --- a/spec/helpers/packages_helper_spec.rb +++ b/spec/helpers/packages_helper_spec.rb @@ -260,34 +260,4 @@ RSpec.describe PackagesHelper do end end end - - describe '#packages_list_data' do - let_it_be(:resource) { project } - let_it_be(:type) { 'project' } - - let(:expected_result) do - { - resource_id: resource.id, - full_path: resource.full_path, - page_type: type - } - end - - subject(:result) { helper.packages_list_data(type, resource) } - - context 'at a project level' do - it 'populates presenter data' do - expect(result).to match(hash_including(expected_result)) - end - end - - context 'at a group level' do - let_it_be(:resource) { create(:group) } - let_it_be(:type) { 'group' } - - it 'populates presenter data' do - expect(result).to match(hash_including(expected_result)) - end - end - end end diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb index 005f8ecaa3a..97ba177da71 100644 --- a/spec/lib/gitlab/git/tree_spec.rb +++ b/spec/lib/gitlab/git/tree_spec.rb @@ -43,7 +43,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#dir?' do - let(:dir) { entries.select(&:dir?).first } + let(:dir) { entries.find(&:dir?) } it { expect(dir).to be_kind_of Gitlab::Git::Tree } it { expect(dir.id).to eq('3c122d2b7830eca25235131070602575cf8b41a1') } @@ -134,7 +134,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#file?' do - let(:file) { entries.select(&:file?).first } + let(:file) { entries.find(&:file?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.id).to eq('dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82') } @@ -143,21 +143,21 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#readme?' do - let(:file) { entries.select(&:readme?).first } + let(:file) { entries.find(&:readme?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.name).to eq('README.md') } end describe '#contributing?' do - let(:file) { entries.select(&:contributing?).first } + let(:file) { entries.find(&:contributing?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.name).to eq('CONTRIBUTING.md') } end describe '#submodule?' do - let(:submodule) { entries.select(&:submodule?).first } + let(:submodule) { entries.find(&:submodule?) 
} it { expect(submodule).to be_kind_of Gitlab::Git::Tree } it { expect(submodule.id).to eq('79bceae69cb5750d6567b223597999bfa91cb3b9') } diff --git a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb index 0eecdfcb630..d0787d8b673 100644 --- a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb +++ b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb @@ -43,10 +43,10 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do messages = [ double(files: [double(header: header_1), double(header: nil, content: content_1[0..5])]), - double(files: [double(header: nil, content: content_1[6..-1])]), + double(files: [double(header: nil, content: content_1[6..])]), double(files: [double(header: header_2)]), double(files: [double(header: nil, content: content_2[0..5]), double(header: nil, content: content_2[6..10])]), - double(files: [double(header: nil, content: content_2[11..-1])]) + double(files: [double(header: nil, content: content_2[11..])]) ] conflict_files = described_class.new(messages, target_repository.gitaly_repository).to_a diff --git a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb index 113c47b4f2c..54c84ddc56f 100644 --- a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb +++ b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb @@ -41,7 +41,7 @@ RSpec.describe Gitlab::GitalyClient::DiffStitcher do msg_2.raw_patch_data = diff_2.patch[0..100] msg_2.end_of_patch = false - msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..-1], end_of_patch: true) + msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..], end_of_patch: true) msg_4 = OpenStruct.new(diff_3.to_h.except(:patch)) msg_4.raw_patch_data = diff_3.patch diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb index 771f6e1ec46..5d444775e53 100644 --- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb +++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb @@ -192,7 +192,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do project: project, commit_sha: commit_sha, gpg_key: nil, - gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1], + gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..], verification_status: 'unknown_key' end diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb index d9afac5f522..0741088c915 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb @@ -98,7 +98,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) } it 'returns the correct nodes' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end end end @@ -107,7 +107,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { { after: encoded_cursor(projects[1]) } } it 'only returns the project before the selected one' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end context 'when the sort order is descending' do 
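A large share of the mechanical changes in this patch are the same two refactors: `[n..-1]` ranges become endless ranges (`[n..]`), and `select { ... }.first` becomes `find`. A short illustration (not taken from the patch) of why both rewrites preserve behavior:

```ruby
# Endless ranges (Ruby 2.6+) are equivalent to ending the range at -1:
"gitlab"[3..-1]        # => "lab"
"gitlab"[3..]          # => "lab"
[1, 2, 3, 4, 5][2..-1] # => [3, 4, 5]
[1, 2, 3, 4, 5][2..]   # => [3, 4, 5]

# Enumerable#find returns the first matching element and stops scanning,
# while select { ... }.first materializes every match before taking one:
entries = [1, 2, 3, 4]
entries.select(&:even?).first # => 2 (builds [2, 4] first)
entries.find(&:even?)         # => 2 (stops at the first even element)
```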
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb index 8ef5f1147c5..efdbe199761 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb @@ -120,7 +120,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:nodes) { Project.all.order(id: :desc) } it 'returns the correct nodes' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end end end @@ -129,7 +129,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { { after: encoded_cursor(projects[1]) } } it 'only returns the project before the selected one' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end context 'when the sort order is descending' do diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index cabc7cff8a1..e2ed821f622 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -224,6 +224,7 @@ ci_pipelines: - ci_ref - stages - statuses +- statuses_order_id_desc - latest_statuses_ordered_by_stage - builds - bridges diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 6bb6be07749..1d8b137c196 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' def match_mr1_note(content_regex) - MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{content_regex}/)}.first + MergeRequest.find_by(title: 'MR1').notes.find { |n| n.note.match(/#{content_regex}/) } end RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do @@ -75,7 +75,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do context 'for an Issue' do it 'does not import note_html' do note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi' - issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first + issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.find { |n| n.note.match(/#{note_content}/) } expect(issue_note.note_html).to match(/#{note_content}/) end @@ -552,7 +552,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do it 'issue system note metadata restored successfully' do note_content = 'created merge request !1 to address this issue' - note = project.issues.first.notes.select { |n| n.note.match(/#{note_content}/)}.first + note = project.issues.first.notes.find { |n| n.note.match(/#{note_content}/)} expect(note.noteable_type).to eq('Issue') expect(note.system).to eq(true) diff --git a/spec/lib/gitlab/multi_collection_paginator_spec.rb b/spec/lib/gitlab/multi_collection_paginator_spec.rb index c7c8f4f969f..080b3382684 100644 --- a/spec/lib/gitlab/multi_collection_paginator_spec.rb +++ b/spec/lib/gitlab/multi_collection_paginator_spec.rb @@ -40,7 +40,7 @@ RSpec.describe Gitlab::MultiCollectionPaginator do end it 'fils the last page with elements from the second collection' do - expected_collection = all_groups[-2..-1] + expected_collection = all_groups[-2..] 
expect(paginator.paginate(3)).to eq(expected_collection) end diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb index 3c14d91fdfd..1bed8e542a2 100644 --- a/spec/lib/gitlab/pagination/keyset/order_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb @@ -127,7 +127,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do end it do - expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject + expect(subject).to eq(expected.reverse[1..]) # removing one item because we used it to calculate cursor data for the "last" page in subject end end end diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb index f0d6e3a527a..39ea02bad8b 100644 --- a/spec/lib/gitlab/rack_attack_spec.rb +++ b/spec/lib/gitlab/rack_attack_spec.rb @@ -78,7 +78,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do it 'configures tracks and throttles with a selected set of dry-runs' do dry_run_throttles = throttles.each_key.first(2) - regular_throttles = throttles.keys[2..-1] + regular_throttles = throttles.keys[2..] stub_env('GITLAB_THROTTLE_DRY_RUN', dry_run_throttles.join(',')) described_class.configure(fake_rack_attack) diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb index ecd77564360..a6a0373a4bb 100644 --- a/spec/lib/gitlab/redis/sessions_spec.rb +++ b/spec/lib/gitlab/redis/sessions_spec.rb @@ -44,20 +44,23 @@ RSpec.describe Gitlab::Redis::Sessions do describe '#store' do subject { described_class.store(namespace: described_class::SESSION_NAMESPACE) } - context 'when redis.sessions configuration is provided' do + context 'when redis.sessions configuration is NOT provided' do it 'instantiates ::Redis instance' do expect(described_class).to receive(:config_fallback?).and_return(true) expect(subject).to be_instance_of(::Redis::Store) end end - context 'when redis.sessions configuration is not provided' do + context 'when redis.sessions configuration is provided' do + before do + allow(described_class).to receive(:config_fallback?).and_return(false) + end + it 'instantiates an instance of MultiStore' do - expect(described_class).to receive(:config_fallback?).and_return(false) expect(subject).to be_instance_of(::Gitlab::Redis::MultiStore) end - end - it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sessions, :use_primary_store_as_default_for_sessions + it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sessions, :use_primary_store_as_default_for_sessions + end end end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index ad2d646edc9..ea626a1da6e 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -2506,7 +2506,7 @@ RSpec.describe Ci::Build do it { is_expected.to start_with(project.web_url[0..6]) } it { is_expected.to include(build.token) } it { is_expected.to include('gitlab-ci-token') } - it { is_expected.to include(project.web_url[7..-1]) } + it { is_expected.to include(project.web_url[7..]) } end context 'when token is empty' do diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 678086078da..07e68236d45 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -28,6 +28,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do it { is_expected.to have_many(:trigger_requests) } it { is_expected.to 
have_many(:variables) } it { is_expected.to have_many(:builds) } + it { is_expected.to have_many(:statuses_order_id_desc) } it { is_expected.to have_many(:bridges) } it { is_expected.to have_many(:job_artifacts).through(:builds) } it { is_expected.to have_many(:auto_canceled_pipelines) } diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb index ee27eaf1d0b..97854086162 100644 --- a/spec/models/event_spec.rb +++ b/spec/models/event_spec.rb @@ -706,7 +706,7 @@ RSpec.describe Event do describe '.for_wiki_meta' do it 'finds events for a given wiki page metadata object' do - event = events.select(&:wiki_page?).first + event = events.find(&:wiki_page?) expect(described_class.for_wiki_meta(event.target)).to contain_exactly(event) end diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb index 59f4533a6c1..c292e78b32d 100644 --- a/spec/models/hooks/web_hook_spec.rb +++ b/spec/models/hooks/web_hook_spec.rb @@ -330,6 +330,20 @@ RSpec.describe WebHook do expect { hook.backoff! }.to change(hook, :backoff_count).by(1) end + context 'when the hook is permanently disabled' do + before do + allow(hook).to receive(:permanently_disabled?).and_return(true) + end + + it 'does not set disabled_until' do + expect { hook.backoff! }.not_to change(hook, :disabled_until) + end + + it 'does not increment the backoff count' do + expect { hook.backoff! }.not_to change(hook, :backoff_count) + end + end + context 'when we have backed off MAX_FAILURES times' do before do stub_const("#{described_class}::MAX_FAILURES", 5) @@ -392,4 +406,77 @@ RSpec.describe WebHook do end end end + + describe '#temporarily_disabled?' do + it 'is false when not temporarily disabled' do + expect(hook).not_to be_temporarily_disabled + end + + context 'when hook has been told to back off' do + before do + hook.backoff! + end + + it 'is true' do + expect(hook).to be_temporarily_disabled + end + + it 'is false when `web_hooks_disable_failed` flag is disabled' do + stub_feature_flags(web_hooks_disable_failed: false) + + expect(hook).not_to be_temporarily_disabled + end + end + end + + describe '#permanently_disabled?' do + it 'is false when not disabled' do + expect(hook).not_to be_permanently_disabled + end + + context 'when hook has been disabled' do + before do + hook.disable! + end + + it 'is true' do + expect(hook).to be_permanently_disabled + end + + it 'is false when `web_hooks_disable_failed` flag is disabled' do + stub_feature_flags(web_hooks_disable_failed: false) + + expect(hook).not_to be_permanently_disabled + end + end + end + + describe '#rate_limited?' 
do + context 'when there are rate limits' do + before do + allow(hook).to receive(:rate_limit).and_return(3) + end + + it 'is false when hook has not been rate limited' do + expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(false) + expect(hook).not_to be_rate_limited + end + + it 'is true when hook has been rate limited' do + expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(true) + expect(hook).to be_rate_limited + end + end + + context 'when there are no rate limits' do + before do + allow(hook).to receive(:rate_limit).and_return(nil) + end + + it 'does not call Gitlab::ApplicationRateLimiter, and is false' do + expect(Gitlab::ApplicationRateLimiter).not_to receive(:peek) + expect(hook).not_to be_rate_limited + end + end + end end diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb index ab977f8326d..13838cffd76 100644 --- a/spec/requests/api/ci/pipelines_spec.rb +++ b/spec/requests/api/ci/pipelines_spec.rb @@ -33,6 +33,7 @@ RSpec.describe API::Ci::Pipelines do expect(json_response).to be_an Array expect(json_response.first['sha']).to match(/\A\h{40}\z/) expect(json_response.first['id']).to eq pipeline.id + expect(json_response.first['iid']).to eq pipeline.iid expect(json_response.first['web_url']).to be_present end @@ -40,7 +41,7 @@ RSpec.describe API::Ci::Pipelines do it 'includes pipeline source' do get api("/projects/#{project.id}/pipelines", user) - expect(json_response.first.keys).to contain_exactly(*%w[id project_id sha ref status web_url created_at updated_at source]) + expect(json_response.first.keys).to contain_exactly(*%w[id iid project_id sha ref status web_url created_at updated_at source]) end end diff --git a/spec/services/system_notes/commit_service_spec.rb b/spec/services/system_notes/commit_service_spec.rb index bd6b3ec953a..0399603980d 100644 --- a/spec/services/system_notes/commit_service_spec.rb +++ b/spec/services/system_notes/commit_service_spec.rb @@ -57,7 +57,7 @@ RSpec.describe SystemNotes::CommitService do end context 'with multiple existing commits' do - let(:old_commits) { noteable.commits[3..-1] } + let(:old_commits) { noteable.commits[3..] } context 'with oldrev' do let(:oldrev) { noteable.commits[2].id } diff --git a/spec/support/helpers/gpg_helpers.rb b/spec/support/helpers/gpg_helpers.rb index 81e669aab57..7e78fd86de3 100644 --- a/spec/support/helpers/gpg_helpers.rb +++ b/spec/support/helpers/gpg_helpers.rb @@ -138,7 +138,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint @@ -281,7 +281,7 @@ module GpgHelpers end def primary_keyid2 - fingerprint2[-16..-1] + fingerprint2[-16..] end def fingerprint2 @@ -374,7 +374,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint @@ -776,7 +776,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint diff --git a/spec/support/helpers/memory_usage_helper.rb b/spec/support/helpers/memory_usage_helper.rb index aa7b3bae83a..02d1935921f 100644 --- a/spec/support/helpers/memory_usage_helper.rb +++ b/spec/support/helpers/memory_usage_helper.rb @@ -23,7 +23,7 @@ module MemoryUsageHelper output, status = Gitlab::Popen.popen(%w(free -m)) abort "`free -m` return code is #{status}: #{output}" unless status == 0 - result = output.split("\n")[1].split(" ")[1..-1] + result = output.split("\n")[1].split(" ")[1..] 
attrs = %i(m_total m_used m_free m_shared m_buffers_cache m_available).freeze attrs.zip(result).to_h diff --git a/spec/support/redis/redis_new_instance_shared_examples.rb b/spec/support/redis/redis_new_instance_shared_examples.rb index e9b1e3e4da1..ede517bfaff 100644 --- a/spec/support/redis/redis_new_instance_shared_examples.rb +++ b/spec/support/redis/redis_new_instance_shared_examples.rb @@ -8,10 +8,16 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl let(:fallback_config_file) { nil } before do + fallback_class.remove_instance_variable(:@_raw_config) rescue nil + allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file) end - include_examples "redis_shared_examples" + after do + fallback_class.remove_instance_variable(:@_raw_config) rescue nil + end + + it_behaves_like "redis_shared_examples" describe '.config_file_name' do subject { described_class.config_file_name }
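For context on the `remove_instance_variable(:@_raw_config)` hooks added above: the Redis wrapper classes appear to memoize their raw configuration at the class level, so a stubbed `config_file_name` is ignored once that memo is populated. A minimal sketch with a hypothetical class (not GitLab's actual implementation) showing the pattern the shared example is guarding against:

```ruby
# Hypothetical class-level memoization, similar in spirit to the Redis wrappers.
class ConfigHolder
  def self.config_file_name
    'config/example.yml'
  end

  def self.raw_config
    # Memoized once per process: later stubs of config_file_name are ignored
    # unless the memo is cleared first.
    @_raw_config ||= "loaded from #{config_file_name}"
  end
end

ConfigHolder.raw_config # => "loaded from config/example.yml"

# A spec that stubs config_file_name must drop the memo first (and clean up
# afterwards), with `rescue nil` covering the case where it was never set:
ConfigHolder.remove_instance_variable(:@_raw_config) rescue nil
```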