diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index 19f6e6f1346..812cf8425a9 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -52,6 +52,9 @@ .if-dot-com-gitlab-org-merge-request: &if-dot-com-gitlab-org-merge-request if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_MERGE_REQUEST_IID' +.if-dot-com-gitlab-org-and-security-merge-request: &if-dot-com-gitlab-org-and-security-merge-request + if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/security$)/ && $CI_MERGE_REQUEST_IID' + .if-dot-com-gitlab-org-and-security-tag: &if-dot-com-gitlab-org-and-security-tag if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/security$)/ && $CI_COMMIT_TAG' @@ -372,13 +375,13 @@ .qa:rules:package-and-qa: rules: - - <<: *if-dot-com-gitlab-org-merge-request + - <<: *if-dot-com-gitlab-org-and-security-merge-request changes: *ci-patterns allow_failure: true - - <<: *if-dot-com-gitlab-org-merge-request + - <<: *if-dot-com-gitlab-org-and-security-merge-request changes: *qa-patterns allow_failure: true - - <<: *if-dot-com-gitlab-org-merge-request + - <<: *if-dot-com-gitlab-org-and-security-merge-request changes: *code-patterns when: manual allow_failure: true @@ -507,7 +510,7 @@ rules: - <<: *if-not-ee when: never - - <<: *if-dot-com-gitlab-org-merge-request + - <<: *if-dot-com-gitlab-org-and-security-merge-request changes: *code-qa-patterns - <<: *if-dot-com-gitlab-org-schedule diff --git a/app/assets/javascripts/clusters_list/components/clusters.vue b/app/assets/javascripts/clusters_list/components/clusters.vue index 58b16f35347..a3104038c17 100644 --- a/app/assets/javascripts/clusters_list/components/clusters.vue +++ b/app/assets/javascripts/clusters_list/components/clusters.vue @@ -28,7 +28,7 @@ export default { tooltip, }, computed: { - ...mapState(['clusters', 'clustersPerPage', 'loading', 'page', 'totalCulsters']), + ...mapState(['clusters', 'clustersPerPage', 'loading', 'page', 'providers', 'totalCulsters']), currentPage: { get() { return this.page; @@ -102,6 +102,9 @@ export default { // Sentry will notify us if we are missing types. throw new Error(`UnknownK8sCpuQuantity:${quantity}`); }, + selectedProvider(provider) { + return this.providers[provider] || this.providers.default; + }, statusTitle(status) { const iconTitle = STATUSES[status] || STATUSES.default; return sprintf(__('Status: %{title}'), { title: iconTitle.title }, false); @@ -182,8 +185,21 @@ export default {
diff --git a/app/assets/javascripts/clusters_list/index.js b/app/assets/javascripts/clusters_list/index.js index 67d0a33030b..51ad8769250 100644 --- a/app/assets/javascripts/clusters_list/index.js +++ b/app/assets/javascripts/clusters_list/index.js @@ -9,12 +9,10 @@ export default () => { return; } - const { endpoint } = entryPoint.dataset; - // eslint-disable-next-line no-new new Vue({ el: '#js-clusters-list-app', - store: createStore({ endpoint }), + store: createStore(entryPoint.dataset), render(createElement) { return createElement(Clusters); }, diff --git a/app/assets/javascripts/clusters_list/store/state.js b/app/assets/javascripts/clusters_list/store/state.js index d590ea09e66..0023b43ed92 100644 --- a/app/assets/javascripts/clusters_list/store/state.js +++ b/app/assets/javascripts/clusters_list/store/state.js @@ -5,5 +5,10 @@ export default (initialState = {}) => ({ clusters: [], clustersPerPage: 0, page: 1, + providers: { + aws: { path: initialState.imgTagsAwsPath, text: initialState.imgTagsAwsText }, + default: { path: initialState.imgTagsDefaultPath, text: initialState.imgTagsDefaultText }, + gcp: { path: initialState.imgTagsGcpPath, text: initialState.imgTagsGcpText }, + }, totalCulsters: 0, }); diff --git a/app/assets/javascripts/ide/components/repo_editor.vue b/app/assets/javascripts/ide/components/repo_editor.vue index 07d14fd0338..f4f34d206dd 100644 --- a/app/assets/javascripts/ide/components/repo_editor.vue +++ b/app/assets/javascripts/ide/components/repo_editor.vue @@ -15,6 +15,8 @@ import FileTemplatesBar from './file_templates/bar.vue'; import { __ } from '~/locale'; import { extractMarkdownImagesFromEntries } from '../stores/utils'; import { getPathParent, readFileAsDataURL } from '../utils'; +import { getRulesWithTraversal } from '../lib/editorconfig/parser'; +import mapRulesToMonaco from '../lib/editorconfig/rules_mapper'; export default { components: { @@ -32,6 +34,7 @@ export default { return { content: '', images: {}, + rules: {}, }; }, computed: { @@ -195,7 +198,7 @@ export default { this.editor.clearEditor(); - this.fetchFileData() + Promise.all([this.fetchFileData(), this.fetchEditorconfigRules()]) .then(() => { this.createEditorInstance(); }) @@ -254,6 +257,8 @@ export default { this.editor.attachModel(this.model); } + this.model.updateOptions(this.rules); + this.model.onChange(model => { const { file } = model; if (!file.active) return; @@ -280,12 +285,29 @@ export default { this.setFileLanguage({ fileLanguage: this.model.language, }); + + this.$emit('editorSetup'); }, refreshEditorDimensions() { if (this.showEditor) { this.editor.updateDimensions(); } }, + fetchEditorconfigRules() { + return getRulesWithTraversal(this.file.path, path => { + const entry = this.entries[path]; + if (!entry) return Promise.resolve(null); + + const content = entry.content || entry.raw; + if (content) return Promise.resolve(content); + + return this.getFileData({ path: entry.path, makeFileActive: false }).then(() => + this.getRawFileData({ path: entry.path }), + ); + }).then(rules => { + this.rules = mapRulesToMonaco(rules); + }); + }, onPaste(event) { const editor = this.editor.instance; const reImage = /^image\/(png|jpg|jpeg|gif)$/; diff --git a/app/assets/javascripts/ide/lib/editorconfig/parser.js b/app/assets/javascripts/ide/lib/editorconfig/parser.js new file mode 100644 index 00000000000..a30a8cb868d --- /dev/null +++ b/app/assets/javascripts/ide/lib/editorconfig/parser.js @@ -0,0 +1,55 @@ +import { parseString } from 'editorconfig/src/lib/ini'; +import minimatch from 
'minimatch'; +import { getPathParents } from '../../utils'; + +const dirname = path => path.replace(/\.editorconfig$/, ''); + +function isRootConfig(config) { + return config.some(([pattern, rules]) => !pattern && rules?.root === 'true'); +} + +function getRulesForSection(path, [pattern, rules]) { + if (!pattern) { + return {}; + } + if (minimatch(path, pattern, { matchBase: true })) { + return rules; + } + + return {}; +} + +function getRulesWithConfigs(filePath, configFiles = [], rules = {}) { + if (!configFiles.length) return rules; + + const [{ content, path: configPath }, ...nextConfigs] = configFiles; + const configDir = dirname(configPath); + + if (!filePath.startsWith(configDir)) return rules; + + const parsed = parseString(content); + const isRoot = isRootConfig(parsed); + const relativeFilePath = filePath.slice(configDir.length); + + const sectionRules = parsed.reduce( + (acc, section) => Object.assign(acc, getRulesForSection(relativeFilePath, section)), + {}, + ); + + // prefer existing rules by overwriting to section rules + const result = Object.assign(sectionRules, rules); + + return isRoot ? result : getRulesWithConfigs(filePath, nextConfigs, result); +} + +// eslint-disable-next-line import/prefer-default-export +export function getRulesWithTraversal(filePath, getFileContent) { + const editorconfigPaths = [ + ...getPathParents(filePath).map(x => `${x}/.editorconfig`), + '.editorconfig', + ]; + + return Promise.all( + editorconfigPaths.map(path => getFileContent(path).then(content => ({ path, content }))), + ).then(results => getRulesWithConfigs(filePath, results.filter(x => x.content))); +} diff --git a/app/assets/javascripts/ide/lib/editorconfig/rules_mapper.js b/app/assets/javascripts/ide/lib/editorconfig/rules_mapper.js new file mode 100644 index 00000000000..f9d5579511a --- /dev/null +++ b/app/assets/javascripts/ide/lib/editorconfig/rules_mapper.js @@ -0,0 +1,33 @@ +import { isBoolean, isNumber } from 'lodash'; + +const map = (key, validValues) => value => + value in validValues ? { [key]: validValues[value] } : {}; + +const bool = key => value => (isBoolean(value) ? { [key]: value } : {}); + +const int = (key, isValid) => value => + isNumber(value) && isValid(value) ? { [key]: Math.trunc(value) } : {}; + +const rulesMapper = { + indent_style: map('insertSpaces', { tab: false, space: true }), + indent_size: int('tabSize', n => n > 0), + tab_width: int('tabSize', n => n > 0), + trim_trailing_whitespace: bool('trimTrailingWhitespace'), + end_of_line: map('endOfLine', { crlf: 1, lf: 0 }), + insert_final_newline: bool('insertFinalNewline'), +}; + +const parseValue = x => { + let value = typeof x === 'string' ? 
x.toLowerCase() : x; + if (/^[0-9.-]+$/.test(value)) value = Number(value); + if (value === 'true') value = true; + if (value === 'false') value = false; + + return value; +}; + +export default function mapRulesToMonaco(rules) { + return Object.entries(rules).reduce((obj, [key, value]) => { + return Object.assign(obj, rulesMapper[key]?.(parseValue(value)) || {}); + }, {}); +} diff --git a/app/assets/javascripts/ide/stores/index.js b/app/assets/javascripts/ide/stores/index.js index b5ae85c95ae..18c466cc93d 100644 --- a/app/assets/javascripts/ide/stores/index.js +++ b/app/assets/javascripts/ide/stores/index.js @@ -15,22 +15,23 @@ import routerModule from './modules/router'; Vue.use(Vuex); -export const createStore = () => - new Vuex.Store({ - state: state(), - actions, - mutations, - getters, - modules: { - commit: commitModule, - pipelines, - mergeRequests, - branches, - fileTemplates: fileTemplates(), - rightPane: paneModule(), - clientside: clientsideModule(), - router: routerModule, - }, - }); +export const createStoreOptions = () => ({ + state: state(), + actions, + mutations, + getters, + modules: { + commit: commitModule, + pipelines, + mergeRequests, + branches, + fileTemplates: fileTemplates(), + rightPane: paneModule(), + clientside: clientsideModule(), + router: routerModule, + }, +}); + +export const createStore = () => new Vuex.Store(createStoreOptions()); export default createStore(); diff --git a/app/assets/javascripts/vue_shared/components/url_sync.vue b/app/assets/javascripts/vue_shared/components/url_sync.vue new file mode 100644 index 00000000000..389d42f0829 --- /dev/null +++ b/app/assets/javascripts/vue_shared/components/url_sync.vue @@ -0,0 +1,25 @@ + diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb index f772a5dc5b5..a75ee3f5439 100644 --- a/app/controllers/groups_controller.rb +++ b/app/controllers/groups_controller.rb @@ -57,6 +57,8 @@ class GroupsController < Groups::ApplicationController @group = Groups::CreateService.new(current_user, group_params).execute if @group.persisted? + track_experiment_event(:onboarding_issues, 'created_namespace') + notice = if @group.chat_team.present? "Group '#{@group.name}' and its Mattermost team were successfully created." else diff --git a/app/controllers/registrations_controller.rb b/app/controllers/registrations_controller.rb index fa691af135b..fa3aa571642 100644 --- a/app/controllers/registrations_controller.rb +++ b/app/controllers/registrations_controller.rb @@ -69,6 +69,7 @@ class RegistrationsController < Devise::RegistrationsController if result[:status] == :success track_experiment_event(:signup_flow, 'end') # We want this event to be tracked when the user is _in_ the experimental group + track_experiment_event(:onboarding_issues, 'signed_up') if ::Gitlab.com? && !helpers.in_subscription_flow? && !helpers.in_invitation_flow? return redirect_to new_users_sign_up_group_path if experiment_enabled?(:onboarding_issues) && !helpers.in_subscription_flow? && !helpers.in_invitation_flow? set_flash_message! :notice, :signed_up diff --git a/app/graphql/types/snippet_type.rb b/app/graphql/types/snippet_type.rb index 70a29333b3f..73ca3425ded 100644 --- a/app/graphql/types/snippet_type.rb +++ b/app/graphql/types/snippet_type.rb @@ -27,9 +27,12 @@ module Types authorize: :read_project, resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, snippet.project_id).find } + # Author can be nil in some scenarios. 
For example, +      # when the admin setting restricted visibility +      # level is set to public field :author, Types::UserType, description: 'The owner of the snippet', -          null: false, +          null: true, resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, snippet.author_id).find } field :file_name, GraphQL::STRING_TYPE, diff --git a/app/helpers/clusters_helper.rb b/app/helpers/clusters_helper.rb index 39aaf242231..1204f882707 100644 --- a/app/helpers/clusters_helper.rb +++ b/app/helpers/clusters_helper.rb @@ -17,15 +17,23 @@ module ClustersHelper end end +  def js_clusters_list_data(path = nil) +    { +      endpoint: path, +      img_tags: { +        aws: { path: image_path('illustrations/logos/amazon_eks.svg'), text: s_('ClusterIntegration|Amazon EKS') }, +        default: { path: image_path('illustrations/logos/kubernetes.svg'), text: _('Kubernetes Cluster') }, +        gcp: { path: image_path('illustrations/logos/google_gke.svg'), text: s_('ClusterIntegration|Google GKE') } +      } +    } +  end + +  # This method is deprecated and will be removed when associated HAML files are moved to JavaScript def provider_icon(provider = nil) -    case provider -    when 'aws' -      image_tag 'illustrations/logos/amazon_eks.svg', alt: s_('ClusterIntegration|Amazon EKS'), class: 'gl-h-full' -    when 'gcp' -      image_tag 'illustrations/logos/google_gke.svg', alt: s_('ClusterIntegration|Google GKE'), class: 'gl-h-full' -    else -      image_tag 'illustrations/logos/kubernetes.svg', alt: _('Kubernetes Cluster'), class: 'gl-h-full' -    end +    img_data = js_clusters_list_data.dig(:img_tags, provider&.to_sym) || +      js_clusters_list_data.dig(:img_tags, :default) + +    image_tag img_data[:path], alt: img_data[:text], class: 'gl-h-full' end def render_gcp_signup_offer diff --git a/app/helpers/environments_helper.rb b/app/helpers/environments_helper.rb index 57f3c99b74d..41a255434af 100644 --- a/app/helpers/environments_helper.rb +++ b/app/helpers/environments_helper.rb @@ -60,7 +60,8 @@ module EnvironmentsHelper 'custom-metrics-path' => project_prometheus_metrics_path(project), 'validate-query-path' => validate_query_project_prometheus_metrics_path(project), 'custom-metrics-available' => "#{custom_metrics_available?(project)}", -      'prometheus-alerts-available' => "#{can?(current_user, :read_prometheus_alerts, project)}" +      'prometheus-alerts-available' => "#{can?(current_user, :read_prometheus_alerts, project)}", +      'dashboard-timezone' => project.metrics_setting_dashboard_timezone.to_s.upcase } end diff --git a/app/models/project.rb b/app/models/project.rb index a70f016a9f3..d05886e2b04 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -2410,6 +2410,10 @@ class Project < ApplicationRecord touch(:last_activity_at, :last_repository_updated_at) end +  def metrics_setting +    super || build_metrics_setting +  end + private def find_service(services, name) diff --git a/app/serializers/cluster_entity.rb b/app/serializers/cluster_entity.rb index 4f53ea30544..8a1d41dbd96 100644 --- a/app/serializers/cluster_entity.rb +++ b/app/serializers/cluster_entity.rb @@ -8,6 +8,7 @@ class ClusterEntity < Grape::Entity expose :environment_scope expose :name expose :nodes +  expose :provider_type expose :status_name, as: :status expose :status_reason expose :applications, using: ClusterApplicationEntity diff --git a/app/serializers/cluster_serializer.rb b/app/serializers/cluster_serializer.rb index f59b6a35a29..27156d3178f 100644 --- a/app/serializers/cluster_serializer.rb +++ b/app/serializers/cluster_serializer.rb @@ -13,6 +13,7 @@ class ClusterSerializer < 
BaseSerializer :name, :nodes, :path, + :provider_type, :status ] }) diff --git a/app/views/clusters/clusters/index.html.haml b/app/views/clusters/clusters/index.html.haml index 86194842664..a654a8741a4 100644 --- a/app/views/clusters/clusters/index.html.haml +++ b/app/views/clusters/clusters/index.html.haml @@ -19,7 +19,7 @@ = link_to _('More information'), help_page_path('user/group/clusters/index', anchor: 'cluster-precedence') - if Feature.enabled?(:clusters_list_redesign) - #js-clusters-list-app{ data: { endpoint: clusterable.index_path(format: :json) } } + #js-clusters-list-app{ data: js_clusters_list_data(clusterable.index_path(format: :json)) } - else .clusters-table.js-clusters-list .gl-responsive-table-row.table-row-header{ role: "row" } diff --git a/changelogs/unreleased/212882-add-instance-variable.yml b/changelogs/unreleased/212882-add-instance-variable.yml new file mode 100644 index 00000000000..19131ff7ab9 --- /dev/null +++ b/changelogs/unreleased/212882-add-instance-variable.yml @@ -0,0 +1,5 @@ +--- +title: Add ability to filter self monitoring resource usage charts by instance name +merge_request: 34084 +author: +type: changed diff --git a/changelogs/unreleased/23352-editorconfig.yml b/changelogs/unreleased/23352-editorconfig.yml new file mode 100644 index 00000000000..a80fb3556c8 --- /dev/null +++ b/changelogs/unreleased/23352-editorconfig.yml @@ -0,0 +1,5 @@ +--- +title: Support reading .editorconfig files inside of the Web IDE +merge_request: 32378 +author: +type: added diff --git a/changelogs/unreleased/emilyring-cluster-list-refactor-provider-icon.yml b/changelogs/unreleased/emilyring-cluster-list-refactor-provider-icon.yml new file mode 100644 index 00000000000..b16051b7303 --- /dev/null +++ b/changelogs/unreleased/emilyring-cluster-list-refactor-provider-icon.yml @@ -0,0 +1,5 @@ +--- +title: Added provider type icon to cluster list +merge_request: 33196 +author: +type: changed diff --git a/changelogs/unreleased/fj-change-snippet-author-nullable-graphql-type.yml b/changelogs/unreleased/fj-change-snippet-author-nullable-graphql-type.yml new file mode 100644 index 00000000000..925f7ff9a25 --- /dev/null +++ b/changelogs/unreleased/fj-change-snippet-author-nullable-graphql-type.yml @@ -0,0 +1,5 @@ +--- +title: Set author as nullable in snippet GraphQL Type +merge_request: 34135 +author: +type: fixed diff --git a/config/application.rb b/config/application.rb index b4fc89051f3..d0c211bf608 100644 --- a/config/application.rb +++ b/config/application.rb @@ -17,11 +17,16 @@ module Gitlab class Application < Rails::Application require_dependency Rails.root.join('lib/gitlab') require_dependency Rails.root.join('lib/gitlab/utils') + require_dependency Rails.root.join('lib/gitlab/redis/wrapper') + require_dependency Rails.root.join('lib/gitlab/redis/cache') + require_dependency Rails.root.join('lib/gitlab/redis/queues') + require_dependency Rails.root.join('lib/gitlab/redis/shared_state') require_dependency Rails.root.join('lib/gitlab/current_settings') require_dependency Rails.root.join('lib/gitlab/middleware/read_only') require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check') require_dependency Rails.root.join('lib/gitlab/middleware/same_site_cookies') require_dependency Rails.root.join('lib/gitlab/middleware/handle_ip_spoof_attack_error') + require_dependency Rails.root.join('lib/gitlab/runtime') # Settings in config/environments/* take precedence over those specified here. 
# Application configuration should go into files in config/initializers @@ -257,6 +262,17 @@ module Gitlab end end + # Use caching across all environments + # Full list of options: + # https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html#method-c-new + caching_config_hash = {} + caching_config_hash[:redis] = Gitlab::Redis::Cache.pool + caching_config_hash[:compress] = Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1')) + caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE + caching_config_hash[:expires_in] = 2.weeks # Cache should not grow forever + + config.cache_store = :redis_cache_store, caching_config_hash + config.active_job.queue_adapter = :sidekiq # This is needed for gitlab-shell diff --git a/config/environments/development.rb b/config/environments/development.rb index 9d4fc6ba5e9..25d57467060 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -49,6 +49,8 @@ Rails.application.configure do # Do not log asset requests config.assets.quiet = true + config.allow_concurrency = Gitlab::Runtime.multi_threaded? + # BetterErrors live shell (REPL) on every stack frame BetterErrors::Middleware.allow_ip!("127.0.0.1/0") diff --git a/config/environments/production.rb b/config/environments/production.rb index 393a274606e..c03421040a3 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -77,4 +77,6 @@ Rails.application.configure do config.action_mailer.raise_delivery_errors = true config.eager_load = true + + config.allow_concurrency = Gitlab::Runtime.multi_threaded? end diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example index 9c0fa783250..2dc41f46b03 100644 --- a/config/gitlab.yml.example +++ b/config/gitlab.yml.example @@ -1075,9 +1075,6 @@ production: &base ## ActionCable settings action_cable: - # Enables handling of ActionCable requests on the Puma web workers - # When this is disabled, a standalone ActionCable server must be started - in_app: true # Number of threads used to process ActionCable connection callbacks and channel actions # worker_pool_size: 4 diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb index df552e33fd0..6525c1598b5 100644 --- a/config/initializers/1_settings.rb +++ b/config/initializers/1_settings.rb @@ -729,7 +729,6 @@ Settings.webpack.dev_server['port'] ||= 3808 # ActionCable settings # Settings['action_cable'] ||= Settingslogic.new({}) -Settings.action_cable['in_app'] ||= false Settings.action_cable['worker_pool_size'] ||= 4 # diff --git a/config/initializers/7_redis.rb b/config/initializers/7_redis.rb index e493d7c1949..af4967521b8 100644 --- a/config/initializers/7_redis.rb +++ b/config/initializers/7_redis.rb @@ -1,14 +1,3 @@ -# Use caching across all environments -# Full list of options: -# https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html#method-c-new -caching_config_hash = {} -caching_config_hash[:redis] = Gitlab::Redis::Cache.pool -caching_config_hash[:compress] = Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1')) -caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE -caching_config_hash[:expires_in] = 2.weeks # Cache should not grow forever - -Gitlab::Application.config.cache_store = :redis_cache_store, caching_config_hash - # Make sure we initialize a Redis connection pool before multi-threaded # execution starts by # 1. 
Sidekiq diff --git a/config/initializers/action_cable.rb b/config/initializers/action_cable.rb index 074d393473f..c549dd45ad9 100644 --- a/config/initializers/action_cable.rb +++ b/config/initializers/action_cable.rb @@ -3,9 +3,9 @@ require 'action_cable/subscription_adapter/redis' Rails.application.configure do - # Mount the ActionCable engine when in-app mode is enabled - config.action_cable.mount_path = Gitlab.config.action_cable.in_app ? '/-/cable' : nil - + # We only mount the ActionCable engine in tests where we run it in-app + # For other environments, we run it on a standalone Puma server + config.action_cable.mount_path = Rails.env.test? ? '/-/cable' : nil config.action_cable.url = Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/-/cable') config.action_cable.worker_pool_size = Gitlab.config.action_cable.worker_pool_size end diff --git a/config/prometheus/self_monitoring_default.yml b/config/prometheus/self_monitoring_default.yml index 53b47274ecd..50e6f4585e4 100644 --- a/config/prometheus/self_monitoring_default.yml +++ b/config/prometheus/self_monitoring_default.yml @@ -1,5 +1,14 @@ dashboard: 'Default dashboard' priority: 1 + +templating: + variables: + instance: + type: 'text' + label: 'Instance label regex' + options: + default_value: '.+' + panel_groups: - group: 'Resource usage' @@ -9,7 +18,7 @@ panel_groups: y_label: "% memory used" metrics: - id: node_memory_usage_percentage - query_range: '(1 - (node_memory_MemAvailable_bytes or node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes + node_memory_Slab_bytes) / node_memory_MemTotal_bytes) * 100' + query_range: '(1 - (node_memory_MemAvailable_bytes{instance=~"{{instance}}"} or (node_memory_MemFree_bytes{instance=~"{{instance}}"} + node_memory_Buffers_bytes{instance=~"{{instance}}"} + node_memory_Cached_bytes{instance=~"{{instance}}"} + node_memory_Slab_bytes{instance=~"{{instance}}"})) / node_memory_MemTotal_bytes{instance=~"{{instance}}"}) * 100' unit: "%" label: instance @@ -18,7 +27,7 @@ panel_groups: y_label: "% CPU used" metrics: - id: node_cpu_usage_percentage - query_range: '(avg without (mode,cpu) (1 - irate(node_cpu_seconds_total{mode="idle"}[5m]))) * 100' + query_range: '(avg without (mode,cpu) (1 - irate(node_cpu_seconds_total{mode="idle",instance=~"{{instance}}"}[5m]))) * 100' unit: "%" label: instance diff --git a/doc/.vale/gitlab/spelling-exceptions.txt b/doc/.vale/gitlab/spelling-exceptions.txt index 1759eaa5f0b..2933e39bba9 100644 --- a/doc/.vale/gitlab/spelling-exceptions.txt +++ b/doc/.vale/gitlab/spelling-exceptions.txt @@ -42,6 +42,7 @@ backtrace backtraced backtraces backtracing +badging Bamboo Bitbucket blockquote @@ -121,6 +122,8 @@ Ecto Elasticsearch enablement enqueued +enum +enums ETag Excon expirable @@ -139,6 +142,7 @@ Forgerock Fugit Gantt Gemnasium +Gemojione gettext Git Gitaly @@ -148,6 +152,7 @@ GitLab gitlabsos Gitleaks Gitter +globals Gmail Google Gosec @@ -297,6 +302,7 @@ preloading preloads prepend prepended +prepending prepends Pritaly profiler diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql index a4f4e366428..b01af672049 100644 --- a/doc/api/graphql/reference/gitlab_schema.graphql +++ b/doc/api/graphql/reference/gitlab_schema.graphql @@ -11240,7 +11240,7 @@ type Snippet implements Noteable { """ The owner of the snippet """ - author: User! 
+ author: User """ Snippet blob diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json index c599ead2576..c5d172c035d 100644 --- a/doc/api/graphql/reference/gitlab_schema.json +++ b/doc/api/graphql/reference/gitlab_schema.json @@ -33183,13 +33183,9 @@ ], "type": { - "kind": "NON_NULL", - "name": null, - "ofType": { - "kind": "OBJECT", - "name": "User", - "ofType": null - } + "kind": "OBJECT", + "name": "User", + "ofType": null }, "isDeprecated": false, "deprecationReason": null diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index c2ec2eee136..a3f84642e13 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -1641,7 +1641,7 @@ Represents a snippet entry | Name | Type | Description | | --- | ---- | ---------- | -| `author` | User! | The owner of the snippet | +| `author` | User | The owner of the snippet | | `blob` | SnippetBlob! | Snippet blob | | `blobs` | SnippetBlob! => Array | Snippet blobs | | `createdAt` | Time! | Timestamp this snippet was created | diff --git a/doc/ci/docker/using_docker_images.md b/doc/ci/docker/using_docker_images.md index bbea0ccf8a0..2448bb536ab 100644 --- a/doc/ci/docker/using_docker_images.md +++ b/doc/ci/docker/using_docker_images.md @@ -594,7 +594,7 @@ There are two ways to determine the value of `DOCKER_AUTH_CONFIG`: ``` - **Second way -** In some setups, it's possible that Docker client - will use the available system keystore to store the result of `docker + will use the available system key store to store the result of `docker login`. In that case, it's impossible to read `~/.docker/config.json`, so you will need to prepare the required base64-encoded version of `${username}:${password}` and create the Docker configuration JSON manually. @@ -712,7 +712,7 @@ To configure credentials store, follow these steps: ``` - Or, if you are running self-managed Runners, add the above JSON to - `${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this config file + `${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this configuration file and will use the needed helper for this specific repository. NOTE: **Note:** `credsStore` is used to access ALL the registries. @@ -761,7 +761,7 @@ To configure access for `aws_account_id.dkr.ecr.region.amazonaws.com`, follow th - Or, if you are running self-managed Runners, add the above JSON to `${GITLAB_RUNNER_HOME}/.docker/config.json`. - GitLab Runner will read this config file and will use the needed helper for this + GitLab Runner will read this configuration file and will use the needed helper for this specific repository. 1. You can now use any private image from `aws_account_id.dkr.ecr.region.amazonaws.com` defined in diff --git a/doc/ci/examples/artifactory_and_gitlab/index.md b/doc/ci/examples/artifactory_and_gitlab/index.md index 5bda1c786fb..c1b3ddec1b9 100644 --- a/doc/ci/examples/artifactory_and_gitlab/index.md +++ b/doc/ci/examples/artifactory_and_gitlab/index.md @@ -109,7 +109,7 @@ parameter in `.gitlab-ci.yml` to use the custom location instead of the default Now it's time we set up [GitLab CI/CD](https://about.gitlab.com/stages-devops-lifecycle/continuous-integration/) to automatically build, test and deploy the dependency! 
-GitLab CI/CD uses a file in the root of the repo, named `.gitlab-ci.yml`, to read the definitions for jobs +GitLab CI/CD uses a file in the root of the repository, named `.gitlab-ci.yml`, to read the definitions for jobs that will be executed by the configured GitLab Runners. You can read more about this file in the [GitLab Documentation](../../yaml/README.md). First of all, remember to set up variables for your deployment. Navigate to your project's **Settings > CI/CD > Environment variables** page @@ -119,7 +119,7 @@ and add the following ones (replace them with your current values, of course): - **MAVEN_REPO_USER**: `gitlab` (your Artifactory username) - **MAVEN_REPO_PASS**: `AKCp2WXr3G61Xjz1PLmYa3arm3yfBozPxSta4taP3SeNu2HPXYa7FhNYosnndFNNgoEds8BCS` (your Artifactory Encrypted Password) -Now it's time to define jobs in `.gitlab-ci.yml` and push it to the repo: +Now it's time to define jobs in `.gitlab-ci.yml` and push it to the repository: ```yaml image: maven:latest @@ -154,7 +154,7 @@ deploy: GitLab Runner will use the latest [Maven Docker image](https://hub.docker.com/_/maven/), which already contains all the tools and the dependencies you need to manage the project, in order to run the jobs. -Environment variables are set to instruct Maven to use the `homedir` of the repo instead of the user's home when searching for configuration and dependencies. +Environment variables are set to instruct Maven to use the `homedir` of the repository instead of the user's home when searching for configuration and dependencies. Caching the `.m2/repository folder` (where all the Maven files are stored), and the `target` folder (where our application will be created), is useful for speeding up the process by running all Maven phases in a sequential order, therefore, executing `mvn test` will automatically run `mvn compile` if necessary. @@ -164,7 +164,7 @@ Both `build` and `test` jobs leverage the `mvn` command to compile the applicati Deploy to Artifactory is done as defined by the variables we have just set up. The deployment occurs only if we're pushing or merging to `master` branch, so that the development versions are tested but not published. -Done! Now you have all the changes in the GitLab repo, and a pipeline has already been started for this commit. In the **Pipelines** tab you can see what's happening. +Done! Now you have all the changes in the GitLab repository, and a pipeline has already been started for this commit. In the **Pipelines** tab you can see what's happening. If the deployment has been successful, the deploy job log will output: ```plaintext @@ -177,7 +177,7 @@ If the deployment has been successful, the deploy job log will output: >**Note**: the `mvn` command downloads a lot of files from the internet, so you'll see a lot of extra activity in the log the first time you run it. -Yay! You did it! Checking in Artifactory will confirm that you have a new artifact available in the `libs-release-local` repo. +Yay! You did it! Checking in Artifactory will confirm that you have a new artifact available in the `libs-release-local` repository. ## Create the main Maven application @@ -228,7 +228,7 @@ Here is how you can get the content of the file directly from Artifactory: 1. Click on **Generate Maven Settings** 1. Click on **Generate Settings** 1. Copy to clipboard the configuration file -1. Save the file as `.m2/settings.xml` in your repo +1. 
Save the file as `.m2/settings.xml` in your repository Now you are ready to use the Artifactory repository to resolve dependencies and use `simple-maven-dep` in your main application! @@ -239,7 +239,7 @@ You need a last step to have everything in place: configure the `.gitlab-ci.yml` You want to leverage [GitLab CI/CD](https://about.gitlab.com/stages-devops-lifecycle/continuous-integration/) to automatically build, test and run your awesome application, and see if you can get the greeting as expected! -All you need to do is to add the following `.gitlab-ci.yml` to the repo: +All you need to do is to add the following `.gitlab-ci.yml` to the repository: ```yaml image: maven:latest diff --git a/doc/ci/examples/deployment/README.md b/doc/ci/examples/deployment/README.md index a090e930dd8..ec02fb6dd43 100644 --- a/doc/ci/examples/deployment/README.md +++ b/doc/ci/examples/deployment/README.md @@ -56,7 +56,7 @@ To use different provider take a look at long list of [Supported Providers](http ## Using Dpl with Docker In most cases, you will have configured [GitLab Runner](https://docs.gitlab.com/runner/) to use your server's shell commands. -This means that all commands are run in the context of local user (e.g. gitlab_runner or gitlab_ci_multi_runner). +This means that all commands are run in the context of local user (e.g. `gitlab_runner` or `gitlab_ci_multi_runner`). It also means that most probably in your Docker container you don't have the Ruby runtime installed. You will have to install it: diff --git a/doc/ci/examples/deployment/composer-npm-deploy.md b/doc/ci/examples/deployment/composer-npm-deploy.md index c8effec3264..cea6f26181f 100644 --- a/doc/ci/examples/deployment/composer-npm-deploy.md +++ b/doc/ci/examples/deployment/composer-npm-deploy.md @@ -47,7 +47,7 @@ All these operations will put all files into a `build` folder, which is ready to ## How to transfer files to a live server -You have multiple options: rsync, scp, sftp, and so on. For now, we will use scp. +You have multiple options: rsync, SCP, SFTP, and so on. For now, we will use SCP. To make this work, you need to add a GitLab CI/CD Variable (accessible on `gitlab.example/your-project-name/variables`). That variable will be called `STAGING_PRIVATE_KEY` and it's the **private** SSH key of your server. @@ -123,7 +123,7 @@ Therefore, for a production environment we use additional steps to ensure that a Since this was a WordPress project, I gave real life code snippets. Some further ideas you can pursue: - Having a slightly different script for `master` branch will allow you to deploy to a production server from that branch and to a stage server from any other branches. -- Instead of pushing it live, you can push it to WordPress official repo (with creating a SVN commit, etc.). +- Instead of pushing it live, you can push it to WordPress official repository (with creating a SVN commit, etc.). - You could generate i18n text domains on the fly. --- diff --git a/doc/ci/examples/php.md b/doc/ci/examples/php.md index 6611ac48ce2..e7768868c15 100644 --- a/doc/ci/examples/php.md +++ b/doc/ci/examples/php.md @@ -65,7 +65,7 @@ docker-php-ext-install pdo_mysql ``` You might wonder what `docker-php-ext-install` is. In short, it is a script -provided by the official php Docker image that you can use to easily install +provided by the official PHP Docker image that you can use to easily install extensions. For more information read the documentation at . @@ -174,7 +174,7 @@ Finally, push to GitLab and let the tests begin! 
### Test against different PHP versions in Shell builds The [phpenv](https://github.com/phpenv/phpenv) project allows you to easily manage different versions of PHP -each with its own config. This is especially useful when testing PHP projects +each with its own configuration. This is especially useful when testing PHP projects with the Shell executor. You will have to install it on your build machine under the `gitlab-runner` diff --git a/doc/ci/variables/predefined_variables.md b/doc/ci/variables/predefined_variables.md index 13970683710..2127373a91c 100644 --- a/doc/ci/variables/predefined_variables.md +++ b/doc/ci/variables/predefined_variables.md @@ -44,7 +44,7 @@ You can add a command to your `.gitlab-ci.yml` file to | `CI_COMMIT_TITLE` | 10.8 | all | The title of the commit - the full first line of the message | | `CI_CONCURRENT_ID` | all | 11.10 | Unique ID of build execution within a single executor. | | `CI_CONCURRENT_PROJECT_ID` | all | 11.10 | Unique ID of build execution within a single executor and project. | -| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI config file. Defaults to `.gitlab-ci.yml` | +| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI configuration file. Defaults to `.gitlab-ci.yml` | | `CI_DEBUG_TRACE` | all | 1.7 | Whether [debug logging (tracing)](README.md#debug-logging) is enabled | | `CI_DEFAULT_BRANCH` | 12.4 | all | The name of the default branch for the project. | | `CI_DEPLOY_PASSWORD` | 10.8 | all | Authentication password of the [GitLab Deploy Token](../../user/project/deploy_tokens/index.md#gitlab-deploy-token), only present if the Project has one related. | @@ -97,7 +97,7 @@ You can add a command to your `.gitlab-ci.yml` file to | `CI_PROJECT_DIR` | all | all | The full path where the repository is cloned and where the job is run. If the GitLab Runner `builds_dir` parameter is set, this variable is set relative to the value of `builds_dir`. For more information, see [Advanced configuration](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section) for GitLab Runner. | | `CI_PROJECT_ID` | all | all | The unique ID of the current project that GitLab CI/CD uses internally | | `CI_PROJECT_NAME` | 8.10 | 0.5 | The name of the directory for the project that is currently being built. For example, if the project URL is `gitlab.example.com/group-name/project-1`, the `CI_PROJECT_NAME` would be `project-1`. | -| `CI_PROJECT_NAMESPACE` | 8.10 | 0.5 | The project namespace (username or groupname) that is currently being built | +| `CI_PROJECT_NAMESPACE` | 8.10 | 0.5 | The project namespace (username or group name) that is currently being built | | `CI_PROJECT_PATH` | 8.10 | 0.5 | The namespace with project name | | `CI_PROJECT_PATH_SLUG` | 9.3 | all | `$CI_PROJECT_PATH` lowercased and with everything except `0-9` and `a-z` replaced with `-`. Use in URLs and domain names. | | `CI_PROJECT_REPOSITORY_LANGUAGES` | 12.3 | all | Comma-separated, lowercased list of the languages used in the repository (e.g. `ruby,javascript,html,css`) | diff --git a/doc/development/api_styleguide.md b/doc/development/api_styleguide.md index ba926f9f728..6a044004926 100644 --- a/doc/development/api_styleguide.md +++ b/doc/development/api_styleguide.md @@ -120,7 +120,7 @@ For instance: In order to validate some parameters in the API request, we validate them before sending them further (say Gitaly). 
The following are the -[custom validators](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/api/validations/validators), +[custom validators](https://GitLab.com/gitlab-org/gitlab/-/tree/master/lib/api/validations/validators), which we have added so far and how to use them. We also wrote a guide on how you can add a new custom validator. diff --git a/doc/development/creating_enums.md b/doc/development/creating_enums.md index e2ebad538d9..3833f771bb5 100644 --- a/doc/development/creating_enums.md +++ b/doc/development/creating_enums.md @@ -83,7 +83,7 @@ module EE end ``` -This looks working as a workaround, however, this approach has some donwside that: +This looks working as a workaround, however, this approach has some downsides that: - Features could move from EE to FOSS or vice versa. Therefore, the offset might be mixed between FOSS and EE in the future. e.g. When you move `activity_limit_exceeded` to FOSS, you'll see `{ unknown_failure: 0, config_error: 1, activity_limit_exceeded: 1_000 }`. diff --git a/doc/development/distributed_tracing.md b/doc/development/distributed_tracing.md index ae84e38e324..7fc33380aba 100644 --- a/doc/development/distributed_tracing.md +++ b/doc/development/distributed_tracing.md @@ -27,7 +27,7 @@ process boundaries, the correlation ID is injected into the outgoing request. Th the propagation of the correlation ID to each downstream subsystem. Correlation IDs are normally generated in the Rails application in response to -certain webrequests. Some user facing systems don't generate correlation IDs in +certain web requests. Some user facing systems don't generate correlation IDs in response to user requests (for example, Git pushes over SSH). ### Developer guidelines for working with correlation IDs diff --git a/doc/development/documentation/site_architecture/global_nav.md b/doc/development/documentation/site_architecture/global_nav.md index 12190e2cb9e..71020e6054e 100644 --- a/doc/development/documentation/site_architecture/global_nav.md +++ b/doc/development/documentation/site_architecture/global_nav.md @@ -356,7 +356,7 @@ files. ``` This also allows the nav to be displayed on other -highest-level dirs (`/omnibus/`, `/runner/`, etc), +highest-level directories (`/omnibus/`, `/runner/`, etc), linking them back to `/ee/`. The same logic is applied to all sections (`sec[:section_url]`), diff --git a/doc/development/documentation/site_architecture/index.md b/doc/development/documentation/site_architecture/index.md index a24f12bd068..942b202a3ec 100644 --- a/doc/development/documentation/site_architecture/index.md +++ b/doc/development/documentation/site_architecture/index.md @@ -107,13 +107,13 @@ The pipeline in the `gitlab-docs` project: Once a week on Mondays, a scheduled pipeline runs and rebuilds the Docker images used in various pipeline jobs, like `docs-lint`. The Docker image configuration files are -located at . +located in the [Dockerfiles directory](https://gitlab.com/gitlab-org/gitlab-docs/-/tree/master/dockerfiles). If you need to rebuild the Docker images immediately (must have maintainer level permissions): CAUTION: **Caution** If you change the dockerfile configuration and rebuild the images, you can break the master -pipeline in the main `gitlab` repo as well as in `gitlab-docs`. Create an image with +pipeline in the main `gitlab` repository as well as in `gitlab-docs`. Create an image with a different name first and test it to ensure you do not break the pipelines. 1. 
In [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs), go to **{rocket}** **CI / CD > Pipelines**. @@ -207,22 +207,22 @@ If you don't specify `editor:`, the simple one is used by default. ## Algolia search engine -The docs site uses [Algolia docsearch](https://community.algolia.com/docsearch/) +The docs site uses [Algolia DocSearch](https://community.algolia.com/docsearch/) for its search function. This is how it works: -1. GitLab is a member of the [docsearch program](https://community.algolia.com/docsearch/#join-docsearch-program), +1. GitLab is a member of the [DocSearch program](https://community.algolia.com/docsearch/#join-docsearch-program), which is the free tier of [Algolia](https://www.algolia.com/). 1. Algolia hosts a [DocSearch configuration](https://github.com/algolia/docsearch-configs/blob/master/configs/gitlab.json) for the GitLab docs site, and we've worked together to refine it. -1. That [config](https://community.algolia.com/docsearch/config-file.html) is +1. That [configuration](https://community.algolia.com/docsearch/config-file.html) is parsed by their [crawler](https://community.algolia.com/docsearch/crawler-overview.html) every 24h and [stores](https://community.algolia.com/docsearch/inside-the-engine.html) - the [docsearch index](https://community.algolia.com/docsearch/how-do-we-build-an-index.html) + the [DocSearch index](https://community.algolia.com/docsearch/how-do-we-build-an-index.html) on [Algolia's servers](https://community.algolia.com/docsearch/faq.html#where-is-my-data-hosted%3F). -1. On the docs side, we use a [docsearch layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/docsearch.html) which +1. On the docs side, we use a [DocSearch layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/docsearch.html) which is present on pretty much every page except , which uses its [own layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/instantsearch.html). In those layouts, - there's a JavaScript snippet which initiates docsearch by using an API key + there's a JavaScript snippet which initiates DocSearch by using an API key and an index name (`gitlab`) that are needed for Algolia to show the results. NOTE: **For GitLab employees:** diff --git a/doc/development/documentation/structure.md b/doc/development/documentation/structure.md index d19383bee27..eadcedfaac0 100644 --- a/doc/development/documentation/structure.md +++ b/doc/development/documentation/structure.md @@ -34,7 +34,7 @@ For additional details on each, see the [template for new docs](#template-for-ne below. Note that you can include additional subsections, as appropriate, such as 'How it Works', 'Architecture', -and other logical divisions such as pre- and post-deployment steps. +and other logical divisions such as pre-deployment and post-deployment steps. ## Template for new docs diff --git a/doc/development/elasticsearch.md b/doc/development/elasticsearch.md index d86fdf496e1..9f54386f1af 100644 --- a/doc/development/elasticsearch.md +++ b/doc/development/elasticsearch.md @@ -24,19 +24,19 @@ See the [Elasticsearch GDK setup instructions](https://gitlab.com/gitlab-org/git - `gitlab:elastic:test:index_size`: Tells you how much space the current index is using, as well as how many documents are in the index. - `gitlab:elastic:test:index_size_change`: Outputs index size, reindexes, and outputs index size again. Useful when testing improvements to indexing size. 
-Additionally, if you need large repos or multiple forks for testing, please consider [following these instructions](rake_tasks.md#extra-project-seed-options) +Additionally, if you need large repositories or multiple forks for testing, please consider [following these instructions](rake_tasks.md#extra-project-seed-options) ## How does it work? -The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a Rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [/ee/app/models/concerns/elastic/application_versioned_search.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb). +The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a Rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [`/ee/app/models/concerns/elastic/application_versioned_search.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb). After initial indexing is complete, create, update, and delete operations for all models except projects (see [#207494](https://gitlab.com/gitlab-org/gitlab/-/issues/207494)) are tracked in a Redis [`ZSET`](https://redis.io/topics/data-types#sorted-sets). A regular `sidekiq-cron` `ElasticIndexBulkCronWorker` processes this queue, updating many Elasticsearch documents at a time with the [Bulk Request API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). -Search queries are generated by the concerns found in [ee/app/models/concerns/elastic](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them! +Search queries are generated by the concerns found in [`ee/app/models/concerns/elastic`](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them! ## Existing Analyzers/Tokenizers/Filters -These are all defined in [ee/lib/elastic/latest/config.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/elastic/latest/config.rb) +These are all defined in [`ee/lib/elastic/latest/config.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/elastic/latest/config.rb) ### Analyzers @@ -71,7 +71,7 @@ Not directly used for indexing, but rather used to transform a search input. Use #### `sha_tokenizer` -This is a custom tokenizer that uses the [`edgeNGram` tokenizer](https://www.elastic.co/guide/en/elasticsearch/reference/5.5/analysis-edgengram-tokenizer.html) to allow SHAs to be searcheable by any sub-set of it (minimum of 5 chars). +This is a custom tokenizer that uses the [`edgeNGram` tokenizer](https://www.elastic.co/guide/en/elasticsearch/reference/5.5/analysis-edgengram-tokenizer.html) to allow SHAs to be searchable by any sub-set of it (minimum of 5 chars). 
Example: @@ -149,7 +149,7 @@ These proxy objects would talk to Elasticsearch server directly (see top half of ![Elasticsearch Architecture](img/elasticsearch_architecture.svg) -In the planned new design, each model would have a pair of corresponding subclassed proxy objects, in which model-specific logic is located. For example, `Snippet` would have `SnippetClassProxy` and `SnippetInstanceProxy` (being subclass of `Elasticsearch::Model::Proxy::ClassMethodsProxy` and `Elasticsearch::Model::Proxy::InstanceMethodsProxy`, respectively). +In the planned new design, each model would have a pair of corresponding sub-classed proxy objects, in which model-specific logic is located. For example, `Snippet` would have `SnippetClassProxy` and `SnippetInstanceProxy` (being subclass of `Elasticsearch::Model::Proxy::ClassMethodsProxy` and `Elasticsearch::Model::Proxy::InstanceMethodsProxy`, respectively). `__elasticsearch__` would represent another layer of proxy object, keeping track of multiple actual proxy objects. It would forward method calls to the appropriate index. For example: diff --git a/doc/development/fe_guide/axios.md b/doc/development/fe_guide/axios.md index f8d301dac5e..38a8c8f1086 100644 --- a/doc/development/fe_guide/axios.md +++ b/doc/development/fe_guide/axios.md @@ -1,15 +1,15 @@ # Axios -We use [axios](https://github.com/axios/axios) to communicate with the server in Vue applications and most new code. +We use [Axios](https://github.com/axios/axios) to communicate with the server in Vue applications and most new code. -In order to guarantee all defaults are set you *should not use `axios` directly*, you should import `axios` from `axios_utils`. +In order to guarantee all defaults are set you *should not use Axios directly*, you should import Axios from `axios_utils`. ## CSRF token -All our request require a CSRF token. -To guarantee this token is set, we are importing [axios](https://github.com/axios/axios), setting the token, and exporting `axios` . +All our requests require a CSRF token. +To guarantee this token is set, we are importing [Axios](https://github.com/axios/axios), setting the token, and exporting `axios` . -This exported module should be used instead of directly using `axios` to ensure the token is set. +This exported module should be used instead of directly using Axios to ensure the token is set. ## Usage @@ -30,7 +30,7 @@ This exported module should be used instead of directly using `axios` to ensure }); ``` -## Mock axios response in tests +## Mock Axios response in tests To help us mock the responses we are using [axios-mock-adapter](https://github.com/ctimmerm/axios-mock-adapter). @@ -41,7 +41,7 @@ Advantages over [`spyOn()`](https://jasmine.github.io/api/edge/global.html#spyOn - simple API to test error cases - provides `replyOnce()` to allow for different responses -We have also decided against using [axios interceptors](https://github.com/axios/axios#interceptors) because they are not suitable for mocking. +We have also decided against using [Axios interceptors](https://github.com/axios/axios#interceptors) because they are not suitable for mocking. 
### Example @@ -67,7 +67,7 @@ We have also decided against using [axios interceptors](https://github.com/axios }); ``` -### Mock poll requests in tests with axios +### Mock poll requests in tests with Axios Because polling function requires a header object, we need to always include an object as the third argument: diff --git a/doc/development/fe_guide/droplab/plugins/ajax.md b/doc/development/fe_guide/droplab/plugins/ajax.md index abc208e7568..f22d95064dd 100644 --- a/doc/development/fe_guide/droplab/plugins/ajax.md +++ b/doc/development/fe_guide/droplab/plugins/ajax.md @@ -6,7 +6,7 @@ Add the `Ajax` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call. -`Ajax` requires 2 config values, the `endpoint` and `method`. +`Ajax` requires 2 configuration values, the `endpoint` and `method`. - `endpoint` should be a URL to the request endpoint. - `method` should be `setData` or `addData`. diff --git a/doc/development/fe_guide/droplab/plugins/filter.md b/doc/development/fe_guide/droplab/plugins/filter.md index 876149e4872..e8194e45a41 100644 --- a/doc/development/fe_guide/droplab/plugins/filter.md +++ b/doc/development/fe_guide/droplab/plugins/filter.md @@ -7,7 +7,7 @@ to the dropdown using a simple fuzzy string search of an input value. Add the `Filter` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call. -- `Filter` requires a config value for `template`. +- `Filter` requires a configuration value for `template`. - `template` should be the key of the objects within your data array that you want to compare to the user input string, for filtering. diff --git a/doc/development/fe_guide/droplab/plugins/input_setter.md b/doc/development/fe_guide/droplab/plugins/input_setter.md index 9b2e1e8faab..b873b7a14ee 100644 --- a/doc/development/fe_guide/droplab/plugins/input_setter.md +++ b/doc/development/fe_guide/droplab/plugins/input_setter.md @@ -6,12 +6,12 @@ Add the `InputSetter` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call. -- `InputSetter` requires a config value for `input` and `valueAttribute`. +- `InputSetter` requires a configuration value for `input` and `valueAttribute`. - `input` should be the DOM element that you want to manipulate. - `valueAttribute` should be a string that is the name of an attribute on your list items that is used to get the value to update the `input` element with. -You can also set the `InputSetter` config to an array of objects, which will allow you to update multiple elements. +You can also set the `InputSetter` configuration to an array of objects, which will allow you to update multiple elements. ```html diff --git a/doc/development/fe_guide/emojis.md b/doc/development/fe_guide/emojis.md index 6d324d4c4a0..3cd14c0dfd3 100644 --- a/doc/development/fe_guide/emojis.md +++ b/doc/development/fe_guide/emojis.md @@ -1,6 +1,6 @@ # Emojis -GitLab supports native unicode emojis and fallsback to image-based emojis selectively +GitLab supports native Unicode emojis and falls back to image-based emojis selectively when your platform does not support it. ## How to update Emojis @@ -21,7 +21,7 @@ when your platform does not support it. 1. Ensure you see new individual images copied into `app/assets/images/emoji/` 1. Ensure you can see the new emojis and their aliases in the GFM Autocomplete 1. Ensure you can see the new emojis and their aliases in the award emoji menu - 1. 
You might need to add new emoji unicode support checks and rules for platforms + 1. You might need to add new emoji Unicode support checks and rules for platforms that do not support a certain emoji and we need to fallback to an image. See `app/assets/javascripts/emoji/support/is_emoji_unicode_supported.js` and `app/assets/javascripts/emoji/support/unicode_support_map.js` diff --git a/doc/development/fe_guide/graphql.md b/doc/development/fe_guide/graphql.md index 4be7623db8d..191ebd2ff58 100644 --- a/doc/development/fe_guide/graphql.md +++ b/doc/development/fe_guide/graphql.md @@ -89,7 +89,7 @@ Default client accepts two parameters: `resolvers` and `config`. ## GraphQL Queries To save query compilation at runtime, webpack can directly import `.graphql` -files. This allows webpack to preprocess the query at compile time instead +files. This allows webpack to pre-process the query at compile time instead of the client doing compilation of queries. To distinguish queries from mutations and fragments, the following naming convention is recommended: diff --git a/doc/development/fe_guide/icons.md b/doc/development/fe_guide/icons.md index 4fb738f5466..131324e6479 100644 --- a/doc/development/fe_guide/icons.md +++ b/doc/development/fe_guide/icons.md @@ -24,7 +24,7 @@ sprite_icon(icon_name, size: nil, css_class: '') - **icon_name** Use the icon_name that you can find in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)). - **size (optional)** Use one of the following sizes : 16, 24, 32, 48, 72 (this will be translated into a `s16` class) -- **css_class (optional)** If you want to add additional css classes +- **css_class (optional)** If you want to add additional CSS classes **Example** @@ -67,8 +67,8 @@ export default { - **name** Name of the Icon in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)). - **size (optional)** Number value for the size which is then mapped to a specific CSS class - (Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` css classes) -- **css-classes (optional)** Additional CSS Classes to add to the svg tag. + (Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` CSS classes) +- **css-classes (optional)** Additional CSS Classes to add to the SVG tag. ### Usage in HTML/JS @@ -91,7 +91,7 @@ Please use the class `svg-content` around it to ensure nice rendering. ### Usage in Vue -To use an SVG illustrations in a template provide the path as a property and display it through a standard img tag. +To use an SVG illustrations in a template provide the path as a property and display it through a standard `img` tag. 
Component: diff --git a/doc/development/fe_guide/vue.md b/doc/development/fe_guide/vue.md index 972c2ded9c9..0d77e4d129b 100644 --- a/doc/development/fe_guide/vue.md +++ b/doc/development/fe_guide/vue.md @@ -6,9 +6,9 @@ To get started with Vue, read through [their documentation](https://vuejs.org/v2 What is described in the following sections can be found in these examples: -- web ide: -- security products: -- registry: +- [Web IDE](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/ide/stores) +- [Security products](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/assets/javascripts/vue_shared/security_reports) +- [Registry](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/registry/stores) ## Vue architecture @@ -16,7 +16,7 @@ All new features built with Vue.js must follow a [Flux architecture](https://fac The main goal we are trying to achieve is to have only one data flow and only one data entry. In order to achieve this goal we use [vuex](#vuex). -You can also read about this architecture in vue docs about [state management](https://vuejs.org/v2/guide/state-management.html#Simple-State-Management-from-Scratch) +You can also read about this architecture in Vue docs about [state management](https://vuejs.org/v2/guide/state-management.html#Simple-State-Management-from-Scratch) and about [one way data flow](https://vuejs.org/v2/guide/components.html#One-Way-Data-Flow). ### Components and Store @@ -59,7 +59,7 @@ To do that, provide the data through `data` attributes in the HTML element and q _Note:_ You should only do this while initializing the application, because the mounted element will be replaced with Vue-generated DOM. The advantage of providing data from the DOM to the Vue instance through `props` in the `render` function -instead of querying the DOM inside the main vue component is that makes tests easier by avoiding the need to +instead of querying the DOM inside the main Vue component is that it makes tests easier by avoiding the need to create a fixture or an HTML element in the unit test. See the following example: ```javascript diff --git a/doc/topics/git/feature_branch_development.md b/doc/topics/git/feature_branch_development.md new file mode 100644 index 00000000000..ab3adf54dd7 --- /dev/null +++ b/doc/topics/git/feature_branch_development.md @@ -0,0 +1,86 @@ +--- +type: how-tos +--- + +# Develop on a feature branch + +GitLab values encourage the use of [Minimal Viable Change (MVC)](https://about.gitlab.com/handbook/values/#minimal-viable-change-mvc). +However, viable changes are not always small. In such cases, it can help to set up a dedicated feature branch. +People can contribute MRs to that feature branch, without affecting the functionality of the default (usually `master`) branch. + +Once work on the feature branch is complete, it can be merged into the default branch. + +GitLab frequently implements this process whenever there is an MVC that requires multiple MRs. + +## Use case: GitLab's release posts + +This section describes the use case with GitLab [release posts](https://about.gitlab.com/handbook/marketing/blog/release-posts/). +Dozens of GitLab team members contribute to each monthly release post. +In such cases, it may be more efficient to submit an MR on the release post feature branch instead of master. + +In this case, the feature branch would be `release-X-Y`.
Assuming the `release-X-Y` branch already exists, you can set up an MR against that branch, with the following steps: + +1. Create a new branch (`test-branch`) against the feature branch (`release-X-Y`): + + ```shell + git checkout -b test-branch release-X-Y + ``` + + You should now be on a branch named `test-branch`. + +1. Make desired changes on the `test-branch`. +1. Add your changes: + + ```shell + git add . + ``` + +1. Commit your changes: + + ```shell + git commit -m "Some good reason" + ``` + +1. Push your changes to the repository: + + ```shell + git push --set-upstream origin test-branch + ``` + +1. Navigate to the URL for your repository. In this case, the repository is `www-gitlab-com`, available at `https://gitlab.com/gitlab-com/www-gitlab-com`. + + If needed, sign in to GitLab. You should then see an option to **Create merge request**: + + ![Create merge request](img/create_merge_request_v13_1.png) + +1. After you click **Create merge request**, you'll see an option to **Change branches**. Select that option. + +1. In the **New Merge Request** screen, you can now select the **Source** and **Target** branches. +In the screenshot shown, +we have selected `test-branch` as the source, and `release-13-0` as the target. + + ![Modify branches](img/modify_branches_v13_1.png) + +1. Once you've selected the Source and Target branches, click **Compare branches and continue**. + You should see an entry similar to: + + ```plaintext + New Merge Request + + From test-branch into release-13-0 + ``` + + An entry like this confirms that your MR will **not** merge into master. + +1. Make any additional changes in the **New Merge Request** screen, and click **Submit merge request**. +1. In the new merge request, look for **Request to merge**. You'll see an entry similar to: + + ```plaintext + Request to merge test-branch into release-13-0 + ``` + + That confirms you've set up the MR to merge into the specified branch, not master. + +1. Proceed with the change as you would with any other MR. +1. When your MR is approved, and an appropriate user merges that MR, you can rest assured that your work is incorporated directly into the feature branch. +When the feature branch is ready, it can then be merged into master.
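Because many people contribute to a long-lived feature branch, your `test-branch` can fall behind `release-X-Y` before it is merged. The following is a minimal sketch of one common way to bring it up to date, assuming `origin` points at the GitLab repository:

```shell
# Fetch the latest state of the feature branch.
git fetch origin release-X-Y

# Replay your commits on top of it (a merge works too, if you prefer).
git rebase origin/release-X-Y

# A rebase rewrites history, so the branch has to be force-pushed.
git push --force-with-lease origin test-branch
```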
diff --git a/doc/topics/git/img/create_merge_request_v13_1.png b/doc/topics/git/img/create_merge_request_v13_1.png new file mode 100644 index 00000000000..a725149f6a2 Binary files /dev/null and b/doc/topics/git/img/create_merge_request_v13_1.png differ diff --git a/doc/topics/git/img/modify_branches_v13_1.png b/doc/topics/git/img/modify_branches_v13_1.png new file mode 100644 index 00000000000..dc517dd249f Binary files /dev/null and b/doc/topics/git/img/modify_branches_v13_1.png differ diff --git a/doc/topics/git/index.md b/doc/topics/git/index.md index 9e6875312f3..03b9365fc57 100644 --- a/doc/topics/git/index.md +++ b/doc/topics/git/index.md @@ -68,6 +68,7 @@ If you have problems with Git, the following may help: ## Branching strategies - [Feature branch workflow](../../gitlab-basics/feature_branch_workflow.md) +- [Develop on a feature branch](feature_branch_development.md) - [GitLab Flow](../gitlab_flow.md) - [Git Branching - Branches in a Nutshell](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell) - [Git Branching - Branching Workflows](https://git-scm.com/book/en/v2/Git-Branching-Branching-Workflows) diff --git a/doc/user/packages/composer_repository/index.md b/doc/user/packages/composer_repository/index.md index 8426724e342..26517cfa22b 100644 --- a/doc/user/packages/composer_repository/index.md +++ b/doc/user/packages/composer_repository/index.md @@ -59,7 +59,7 @@ After this basic package structure is created, we need to tag it in Git and push ```shell git init -add composer.json +git add composer.json git commit -m 'Composer package test' git tag v1.0.0 git add origin git@gitlab.com:/.git diff --git a/doc/user/permissions.md b/doc/user/permissions.md index 49cf0fea789..3f09d3f53b8 100644 --- a/doc/user/permissions.md +++ b/doc/user/permissions.md @@ -23,9 +23,6 @@ GitLab [administrators](../administration/index.md) receive all permissions. To add or import a user, you can follow the [project members documentation](project/members/index.md). -For information on eligible approvers for Merge Requests, see -[Eligible approvers](project/merge_requests/merge_request_approvals.md#eligible-approvers). - ## Principles behind permissions See our [product handbook on permissions](https://about.gitlab.com/handbook/product/#permissions-in-gitlab) @@ -99,6 +96,7 @@ The following table depicts the various user permission levels in a project. | Assign merge requests | | | ✓ | ✓ | ✓ | | Label merge requests | | | ✓ | ✓ | ✓ | | Lock merge request threads | | | ✓ | ✓ | ✓ | +| Approve merge requests (*9*) | | | ✓ | ✓ | ✓ | | Manage/Accept merge requests | | | ✓ | ✓ | ✓ | | Create new environments | | | ✓ | ✓ | ✓ | | Stop environments | | | ✓ | ✓ | ✓ | @@ -177,6 +175,8 @@ The following table depicts the various user permission levels in a project. 1. Guest users can access GitLab [**Releases**](project/releases/index.md) for downloading assets but are not allowed to download the source code nor see repository information like tags and commits. 1. Actions are limited only to records owned (referenced) by user. 1. When [Share Group Lock](./group/index.md#share-with-group-lock) is enabled the project can't be shared with other groups. It does not affect group with group sharing. +1. For information on eligible approvers for merge requests, see + [Eligible approvers](project/merge_requests/merge_request_approvals.md#eligible-approvers). 
## Project features permissions diff --git a/doc/user/project/code_owners.md b/doc/user/project/code_owners.md index 9b0512830c7..6b81aea4b87 100644 --- a/doc/user/project/code_owners.md +++ b/doc/user/project/code_owners.md @@ -73,19 +73,27 @@ be used for merge request approvals: - As [merge request eligible approvers](merge_requests/merge_request_approvals.md#code-owners-as-eligible-approvers). - As required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium). **(PREMIUM)** +NOTE: **Note**: +Developer or higher [permissions](../permissions.md) are required in order to +approve a merge request. + Once set, Code Owners are displayed in merge requests widgets: ![MR widget - Code Owners](img/code_owners_mr_widget_v12_4.png) -NOTE: **Note**: - While the`CODEOWNERS` file can be used in addition to Merge Request [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules) it can also be used as the sole driver of a Merge Request approval (without using [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules)) by simply creating the file in one of the three locations specified above, configuring the Code Owners to be required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium) and then using [the syntax of Code Owners files](code_owners.md#the-syntax-of-code-owners-files) to specify the actual owners and granular permissions. +While the `CODEOWNERS` file can be used in addition to Merge Request [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules) +it can also be used as the sole driver of merge request approvals +(without using [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules)). +To do so, create the file in one of the three locations specified above and +set the code owners as required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium). +Use [the syntax of Code Owners files](code_owners.md#the-syntax-of-code-owners-files) +to specify the actual owners and granular permissions. -NOTE: **Note**: - Using Code Owners in conjunction with [Protected Branches Approvals](protected_branches.md#protected-branches-approval-by-code-owners-premium) - will prevent any user who is not specified in the `CODEOWNERS` file from pushing changes +Using Code Owners in conjunction with [Protected Branches Approvals](protected_branches.md#protected-branches-approval-by-code-owners-premium) +will prevent any user who is not specified in the `CODEOWNERS` file from pushing changes for the specified files/paths, even if their role is included in the **Allowed to push** column. This allows for a more inclusive push strategy, as administrators don't have to restrict developers - from pushing directly to the protected branch, but can restrict pushing to certain +from pushing directly to the protected branch, but can restrict pushing to certain files where a review by Code Owners is required. 
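For illustration, a minimal `CODEOWNERS` file supporting this setup might look like the following sketch (the paths and usernames are placeholders; the full syntax is covered in the section referenced below). When several patterns match a path, the last matching entry is the one that applies:

```plaintext
# A default reviewer for anything not matched by a later entry.
* @dev-lead

# Frontend code requires a review from a frontend maintainer.
*.js @frontend-maintainer

# Documentation requires a review from a technical writer.
/doc/ @technical-writer
```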
## The syntax of Code Owners files diff --git a/doc/user/project/integrations/custom_issue_tracker.md b/doc/user/project/integrations/custom_issue_tracker.md index 848e89c18cb..4eaf3a0d4b4 100644 --- a/doc/user/project/integrations/custom_issue_tracker.md +++ b/doc/user/project/integrations/custom_issue_tracker.md @@ -1,25 +1,34 @@ -# Custom Issue Tracker Service +# Custom Issue Tracker service -To enable the Custom Issue Tracker integration in a project, navigate to the -[Integrations page](overview.md#accessing-integrations), click -the **Customer Issue Tracker** service, and fill in the required details on the page as described -in the table below. You will be able to edit the title and description later as well. +To enable the Custom Issue Tracker integration in a project: -| Field | Description | -| ----- | ----------- | -| `title` | A title for the issue tracker (to differentiate between instances, for example). | -| `description` | A name for the issue tracker (to differentiate between instances, for example) | -| `project_url` | The URL to the project in the custom issue tracker. | -| `issues_url` | The URL to the issue in the issue tracker project that is linked to this GitLab project. Note that the `issues_url` requires `:id` in the URL. This ID is used by GitLab as a placeholder to replace the issue number. For example, `https://customissuetracker.com/project-name/:id`. | -| `new_issue_url` | Currently unused. Will be changed in a future release. | +1. Go to **{settings}** **Settings > Integrations**. +1. Click **Custom Issue Tracker** +1. Fill in the tracker's details, such as title, description, and URLs. + You will be able to edit these fields later as well. -Once you have configured and enabled Custom Issue Tracker Service you'll see a link on the GitLab project pages that takes you to that custom issue tracker. + These are some of the required fields: + + | Field | Description | + | --------------- | ----------- | + | **Title** | A title for the issue tracker (for example, to differentiate between instances). | + | **Description** | A name for the issue tracker (for example, to differentiate between instances). | + | **Project URL** | The URL to the project in the custom issue tracker. | + | **Issues URL** | The URL to the issue in the issue tracker project that is linked to this GitLab project. Note that the `issues_url` requires `:id` in the URL. This ID is used by GitLab as a placeholder to replace the issue number. For example, `https://customissuetracker.com/project-name/:id`. | + | **New issue URL** | Currently unused. Will be changed in a future release. | + +1. Click **Test settings and save changes**. + +After you configure and enable the Custom Issue Tracker service, you'll see a link on the GitLab +project pages that takes you to that custom issue tracker. ## Referencing issues -- Issues are referenced with `ANYTHING-`, where `ANYTHING` can be any string in CAPS and `` -is a number used in the target project of the custom integration (for example, `PROJECT-143`). -- `ANYTHING` is a placeholder to differentiate against GitLab issues, which are referenced with `#`. You can use a project name or project key to replace it for example. -- When building the hyperlink, the `ANYTHING` part is ignored, and links always point to the address +Issues are referenced with `-` (for example, `PROJECT-143`), where `` can be any string in CAPS, and `` +is a number used in the target project of the custom integration. 
+ +`` is a placeholder to differentiate against GitLab issues, which are referenced with `#`. You can use a project name or project key to replace it for example. + +When building the hyperlink, the `` part is ignored, and links always point to the address specified in `issues_url`, so in the example above, `PROJECT-143` would refer to `https://customissuetracker.com/project-name/143`. diff --git a/doc/user/project/merge_requests/merge_request_approvals.md b/doc/user/project/merge_requests/merge_request_approvals.md index bc4c7963272..e61889a400c 100644 --- a/doc/user/project/merge_requests/merge_request_approvals.md +++ b/doc/user/project/merge_requests/merge_request_approvals.md @@ -34,10 +34,12 @@ minimum number of required approvers can still be set in the [project settings f ### Eligible approvers -The following can approve merge requests: +The following users can approve merge requests: -- Users being added as approvers at project or merge request level. -- [Code owners](#code-owners-as-eligible-approvers) to the files changed by the merge request. +- Users who have been added as approvers at the project or merge request levels with + developer or higher [permissions](../../permissions.md). +- [Code owners](#code-owners-as-eligible-approvers) of the files changed by the merge request + that have developer or higher [permissions](../../permissions.md). An individual user can be added as an approver for a project if they are a member of: @@ -68,7 +70,7 @@ were not explicitly listed in the approval rules. If you add [Code Owners](../code_owners.md) to your repository, the owners to the corresponding files will become eligible approvers, together with members with Developer -or higher permissions. +or higher [permissions](../../permissions.md). To enable this merge request approval rule: diff --git a/lib/gitlab/runtime.rb b/lib/gitlab/runtime.rb index 147924048f1..abf6ee07d53 100644 --- a/lib/gitlab/runtime.rb +++ b/lib/gitlab/runtime.rb @@ -37,7 +37,7 @@ module Gitlab end def puma? - !!defined?(::Puma) + !!defined?(::Puma) && !defined?(ACTION_CABLE_SERVER) end # For unicorn, we need to check for actual server instances to avoid false positives. @@ -70,11 +70,11 @@ module Gitlab end def web_server? - puma? || unicorn? + puma? || unicorn? || action_cable? end def action_cable? - web_server? && (!!defined?(ACTION_CABLE_SERVER) || Gitlab.config.action_cable.in_app) + !!defined?(ACTION_CABLE_SERVER) end def multi_threaded? @@ -82,21 +82,19 @@ module Gitlab end def max_threads - threads = 1 # main thread + main_thread = 1 - if puma? - threads += Puma.cli_config.options[:max_threads] + if action_cable? + Gitlab::Application.config.action_cable.worker_pool_size + elsif puma? + Puma.cli_config.options[:max_threads] elsif sidekiq? # An extra thread for the poller in Sidekiq Cron: # https://github.com/ondrejbartas/sidekiq-cron#under-the-hood - threads += Sidekiq.options[:concurrency] + 1 - end - - if action_cable? 
- threads += Gitlab.config.action_cable.worker_pool_size - end - - threads + Sidekiq.options[:concurrency] + 1 + else + 0 + end + main_thread end end end diff --git a/package.json b/package.json index db0d973a24b..fc137493429 100644 --- a/package.json +++ b/package.json @@ -79,6 +79,7 @@ "diff": "^3.4.0", "document-register-element": "1.14.3", "dropzone": "^4.2.0", + "editorconfig": "^0.15.3", "emoji-regex": "^7.0.3", "emoji-unicode-version": "^0.2.1", "exports-loader": "^0.7.0", @@ -103,6 +104,7 @@ "marked": "^0.3.12", "mermaid": "^8.5.1", "mersenne-twister": "1.1.0", + "minimatch": "^3.0.4", "mitt": "^1.2.0", "monaco-editor": "^0.18.1", "monaco-editor-webpack-plugin": "^1.7.0", diff --git a/scripts/build_assets_image b/scripts/build_assets_image index 9eb1ccd5515..12beddfa184 100755 --- a/scripts/build_assets_image +++ b/scripts/build_assets_image @@ -1,7 +1,7 @@ # Exit early if we don't want to build the image if [[ "${BUILD_ASSETS_IMAGE}" != "true" ]] then - exit 0 + exit 0 fi # Generate the image name based on the project this is being run in diff --git a/scripts/trigger-build b/scripts/trigger-build index c7b45480bf3..b8bea95a069 100755 --- a/scripts/trigger-build +++ b/scripts/trigger-build @@ -16,6 +16,10 @@ module Trigger %w[gitlab gitlab-ee].include?(ENV['CI_PROJECT_NAME']) end + def self.security? + %r{\Agitlab-org/security(\z|/)}.match?(ENV['CI_PROJECT_NAMESPACE']) + end + def self.non_empty_variable_value(variable) variable_value = ENV[variable] @@ -26,6 +30,9 @@ module Trigger class Base def invoke!(post_comment: false, downstream_job_name: nil) + # gitlab-bot's token "GitLab multi-project pipeline polling" + Gitlab.private_token = access_token + pipeline_variables = variables puts "Triggering downstream pipeline on #{downstream_project_path}" @@ -40,7 +47,7 @@ module Trigger puts "Triggered downstream pipeline: #{pipeline.web_url}\n" puts "Waiting for downstream pipeline status" - Trigger::CommitComment.post!(pipeline, access_token) if post_comment + Trigger::CommitComment.post!(pipeline) if post_comment downstream_job = if downstream_job_name Gitlab.pipeline_jobs(downstream_project_path, pipeline.id).auto_paginate.find do |potential_job| @@ -49,9 +56,9 @@ module Trigger end if downstream_job - Trigger::Job.new(downstream_project_path, downstream_job.id, access_token) + Trigger::Job.new(downstream_project_path, downstream_job.id) else - Trigger::Pipeline.new(downstream_project_path, pipeline.id, access_token) + Trigger::Pipeline.new(downstream_project_path, pipeline.id) end end @@ -140,6 +147,7 @@ module Trigger { 'GITLAB_VERSION' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'], 'ALTERNATIVE_SOURCES' => 'true', + 'SECURITY_SOURCES' => Trigger.security? ? 'true' : 'false', 'ee' => Trigger.ee? ? 
'true' : 'false', 'QA_BRANCH' => ENV['QA_BRANCH'] || 'master' } @@ -197,9 +205,7 @@ module Trigger end class CommitComment - def self.post!(downstream_pipeline, access_token) - Gitlab.private_token = access_token - + def self.post!(downstream_pipeline) Gitlab.create_commit_comment( ENV['CI_PROJECT_PATH'], Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'], @@ -214,7 +220,7 @@ module Trigger INTERVAL = 60 # seconds MAX_DURATION = 3600 * 3 # 3 hours - attr_reader :project, :id, :api_token + attr_reader :project, :id def self.unscoped_class_name name.split('::').last @@ -224,14 +230,10 @@ module Trigger unscoped_class_name.downcase end - def initialize(project, id, api_token) + def initialize(project, id) @project = project @id = id - @api_token = api_token @start = Time.now.to_i - - # gitlab-bot's token "GitLab multi-project pipeline polling" - Gitlab.private_token = api_token end def wait! diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index 6b84f3b7460..600e1ac223e 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -301,6 +301,66 @@ RSpec.describe GroupsController do end end end + + describe 'tracking group creation for onboarding issues experiment' do + before do + sign_in(user) + end + + subject(:create_namespace) { post :create, params: { group: { name: 'new_group', path: 'new_group' } } } + + context 'experiment disabled' do + before do + stub_experiment(onboarding_issues: false) + end + + it 'does not track anything' do + expect(Gitlab::Tracking).not_to receive(:event) + + create_namespace + end + end + + context 'experiment enabled' do + before do + stub_experiment(onboarding_issues: true) + end + + context 'and the user is part of the control group' do + before do + stub_experiment_for_user(onboarding_issues: false) + end + + it 'tracks the event with the "created_namespace" action with the "control_group" property' do + expect(Gitlab::Tracking).to receive(:event).with( + 'Growth::Conversion::Experiment::OnboardingIssues', + 'created_namespace', + label: anything, + property: 'control_group' + ) + + create_namespace + end + end + + context 'and the user is part of the experimental group' do + before do + stub_experiment_for_user(onboarding_issues: true) + end + + it 'tracks the event with the "created_namespace" action with the "experimental_group" property' do + expect(Gitlab::Tracking).to receive(:event).with( + 'Growth::Conversion::Experiment::OnboardingIssues', + 'created_namespace', + label: anything, + property: 'experimental_group' + ) + + create_namespace + end + end + end + end end describe 'GET #index' do diff --git a/spec/frontend/__mocks__/monaco-editor/index.js b/spec/frontend/__mocks__/monaco-editor/index.js index 18cc3a7c377..7c53cfb5174 100644 --- a/spec/frontend/__mocks__/monaco-editor/index.js +++ b/spec/frontend/__mocks__/monaco-editor/index.js @@ -9,5 +9,8 @@ import 'monaco-editor/esm/vs/language/json/monaco.contribution'; import 'monaco-editor/esm/vs/language/html/monaco.contribution'; import 'monaco-editor/esm/vs/basic-languages/monaco.contribution'; +// This language starts trying to spin up web workers which obviously breaks in Jest environment +jest.mock('monaco-editor/esm/vs/language/typescript/tsMode'); + export * from 'monaco-editor/esm/vs/editor/editor.api'; export default global.monaco; diff --git a/spec/frontend/clusters_list/components/clusters_spec.js 
b/spec/frontend/clusters_list/components/clusters_spec.js index 3a5c4c4c008..07faee7e50b 100644 --- a/spec/frontend/clusters_list/components/clusters_spec.js +++ b/spec/frontend/clusters_list/components/clusters_spec.js @@ -14,6 +14,13 @@ describe('Clusters', () => { const endpoint = 'some/endpoint'; + const entryData = { + endpoint, + imgTagsAwsText: 'AWS Icon', + imgTagsDefaultText: 'Default Icon', + imgTagsGcpText: 'GCP Icon', + }; + const findLoader = () => wrapper.find(GlLoadingIcon); const findPaginatedButtons = () => wrapper.find(GlPagination); const findTable = () => wrapper.find(GlTable); @@ -24,7 +31,7 @@ describe('Clusters', () => { }; const mountWrapper = () => { - store = ClusterStore({ endpoint }); + store = ClusterStore(entryData); wrapper = mount(Clusters, { store }); return axios.waitForAll(); }; @@ -87,6 +94,23 @@ describe('Clusters', () => { }); }); + describe('cluster icon', () => { + it.each` + providerText | lineNumber + ${'GCP Icon'} | ${0} + ${'AWS Icon'} | ${1} + ${'Default Icon'} | ${2} + ${'Default Icon'} | ${3} + ${'Default Icon'} | ${4} + ${'Default Icon'} | ${5} + `('renders provider image and alt text for each cluster', ({ providerText, lineNumber }) => { + const images = findTable().findAll('.js-status img'); + const image = images.at(lineNumber); + + expect(image.attributes('alt')).toBe(providerText); + }); + }); + describe('cluster status', () => { it.each` statusName | lineNumber | result diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js index eb8582796f7..48af3b91c94 100644 --- a/spec/frontend/clusters_list/mock_data.js +++ b/spec/frontend/clusters_list/mock_data.js @@ -3,6 +3,7 @@ export const clusterList = [ name: 'My Cluster 1', environment_scope: '*', cluster_type: 'group_type', + provider_type: 'gcp', status: 'creating', nodes: null, }, @@ -10,6 +11,7 @@ export const clusterList = [ name: 'My Cluster 2', environment_scope: 'development', cluster_type: 'project_type', + provider_type: 'aws', status: 'unreachable', nodes: [ { @@ -22,6 +24,7 @@ export const clusterList = [ name: 'My Cluster 3', environment_scope: 'development', cluster_type: 'project_type', + provider_type: 'none', status: 'authentication_failure', nodes: [ { diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js index 614f62009ad..4967434dfd7 100644 --- a/spec/frontend/ide/components/repo_editor_spec.js +++ b/spec/frontend/ide/components/repo_editor_spec.js @@ -1,37 +1,70 @@ +import Vuex from 'vuex'; import Vue from 'vue'; import MockAdapter from 'axios-mock-adapter'; import '~/behaviors/markdown/render_gfm'; import { Range } from 'monaco-editor'; import axios from '~/lib/utils/axios_utils'; -import { createStore } from '~/ide/stores'; -import repoEditor from '~/ide/components/repo_editor.vue'; +import { createStoreOptions } from '~/ide/stores'; +import RepoEditor from '~/ide/components/repo_editor.vue'; import Editor from '~/ide/lib/editor'; import { leftSidebarViews, FILE_VIEW_MODE_EDITOR, FILE_VIEW_MODE_PREVIEW } from '~/ide/constants'; import { createComponentWithStore } from '../../helpers/vue_mount_component_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { file } from '../helpers'; +import { exampleConfigs, exampleFiles } from '../lib/editorconfig/mock_data'; describe('RepoEditor', () => { let vm; let store; + let mockActions; + + const waitForEditorSetup = () => + new Promise(resolve => { + vm.$once('editorSetup', resolve); + }); + + const 
createComponent = () => { + if (vm) { + throw new Error('vm already exists'); + } + vm = createComponentWithStore(Vue.extend(RepoEditor), store, { + file: store.state.openFiles[0], + }); + vm.$mount(); + }; + + const createOpenFile = path => { + const origFile = store.state.openFiles[0]; + const newFile = { ...origFile, path, key: path }; + + store.state.entries[path] = newFile; + + store.state.openFiles = [newFile]; + }; beforeEach(() => { + mockActions = { + getFileData: jest.fn().mockResolvedValue(), + getRawFileData: jest.fn().mockResolvedValue(), + }; + const f = { ...file(), viewMode: FILE_VIEW_MODE_EDITOR, }; - const RepoEditor = Vue.extend(repoEditor); - store = createStore(); - vm = createComponentWithStore(RepoEditor, store, { - file: f, - }); + const storeOptions = createStoreOptions(); + storeOptions.actions = { + ...storeOptions.actions, + ...mockActions, + }; + store = new Vuex.Store(storeOptions); f.active = true; f.tempFile = true; - vm.$store.state.openFiles.push(f); - vm.$store.state.projects = { + store.state.openFiles.push(f); + store.state.projects = { 'gitlab-org/gitlab': { branches: { master: { @@ -43,536 +76,588 @@ describe('RepoEditor', () => { }, }, }; - vm.$store.state.currentProjectId = 'gitlab-org/gitlab'; - vm.$store.state.currentBranchId = 'master'; + store.state.currentProjectId = 'gitlab-org/gitlab'; + store.state.currentBranchId = 'master'; - Vue.set(vm.$store.state.entries, f.path, f); - - jest.spyOn(vm, 'getFileData').mockResolvedValue(); - jest.spyOn(vm, 'getRawFileData').mockResolvedValue(); - - vm.$mount(); - - return vm.$nextTick(); + Vue.set(store.state.entries, f.path, f); }); afterEach(() => { vm.$destroy(); + vm = null; Editor.editorInstance.dispose(); }); const findEditor = () => vm.$el.querySelector('.multi-file-editor-holder'); - it('sets renderWhitespace to `all`', () => { - vm.$store.state.renderWhitespaceInCode = true; - - expect(vm.editorOptions.renderWhitespace).toEqual('all'); - }); - - it('sets renderWhitespace to `none`', () => { - vm.$store.state.renderWhitespaceInCode = false; - - expect(vm.editorOptions.renderWhitespace).toEqual('none'); - }); - - it('renders an ide container', () => { - expect(vm.shouldHideEditor).toBeFalsy(); - expect(vm.showEditor).toBe(true); - expect(findEditor()).not.toHaveCss({ display: 'none' }); - }); - - it('renders only an edit tab', done => { - Vue.nextTick(() => { - const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); - - expect(tabs.length).toBe(1); - expect(tabs[0].textContent.trim()).toBe('Edit'); - - done(); - }); - }); - - describe('when file is markdown', () => { - let mock; - + describe('default', () => { beforeEach(() => { - mock = new MockAdapter(axios); - mock.onPost(/(.*)\/preview_markdown/).reply(200, { - body: '

<p>testing 123</p>
', - }); + createComponent(); - Vue.set(vm, 'file', { - ...vm.file, - projectId: 'namespace/project', - path: 'sample.md', - content: 'testing 123', - }); - - vm.$store.state.entries[vm.file.path] = vm.file; - - return vm.$nextTick(); + return waitForEditorSetup(); }); - afterEach(() => { - mock.restore(); + it('sets renderWhitespace to `all`', () => { + vm.$store.state.renderWhitespaceInCode = true; + + expect(vm.editorOptions.renderWhitespace).toEqual('all'); }); - it('renders an Edit and a Preview Tab', done => { + it('sets renderWhitespace to `none`', () => { + vm.$store.state.renderWhitespaceInCode = false; + + expect(vm.editorOptions.renderWhitespace).toEqual('none'); + }); + + it('renders an ide container', () => { + expect(vm.shouldHideEditor).toBeFalsy(); + expect(vm.showEditor).toBe(true); + expect(findEditor()).not.toHaveCss({ display: 'none' }); + }); + + it('renders only an edit tab', done => { Vue.nextTick(() => { const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); - expect(tabs.length).toBe(2); + expect(tabs.length).toBe(1); expect(tabs[0].textContent.trim()).toBe('Edit'); - expect(tabs[1].textContent.trim()).toBe('Preview Markdown'); done(); }); }); - it('renders markdown for tempFile', done => { - vm.file.tempFile = true; + describe('when file is markdown', () => { + let mock; - vm.$nextTick() - .then(() => { - vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')[1].click(); - }) - .then(waitForPromises) - .then(() => { - expect(vm.$el.querySelector('.preview-container').innerHTML).toContain( - '

<p>testing 123</p>
', - ); - }) - .then(done) - .catch(done.fail); - }); + beforeEach(() => { + mock = new MockAdapter(axios); - describe('when not in edit mode', () => { - beforeEach(async () => { - await vm.$nextTick(); + mock.onPost(/(.*)\/preview_markdown/).reply(200, { + body: '

<p>testing 123</p>
', + }); - vm.$store.state.currentActivityView = leftSidebarViews.review.name; + Vue.set(vm, 'file', { + ...vm.file, + projectId: 'namespace/project', + path: 'sample.md', + content: 'testing 123', + }); + + vm.$store.state.entries[vm.file.path] = vm.file; return vm.$nextTick(); }); - it('shows no tabs', () => { - expect(vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')).toHaveLength(0); + afterEach(() => { + mock.restore(); }); - }); - }); - describe('when open file is binary and not raw', () => { - beforeEach(done => { - vm.file.binary = true; + it('renders an Edit and a Preview Tab', done => { + Vue.nextTick(() => { + const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); - vm.$nextTick(done); - }); + expect(tabs.length).toBe(2); + expect(tabs[0].textContent.trim()).toBe('Edit'); + expect(tabs[1].textContent.trim()).toBe('Preview Markdown'); - it('does not render the IDE', () => { - expect(vm.shouldHideEditor).toBeTruthy(); - }); - }); - - describe('createEditorInstance', () => { - it('calls createInstance when viewer is editor', done => { - jest.spyOn(vm.editor, 'createInstance').mockImplementation(); - - vm.createEditorInstance(); - - vm.$nextTick(() => { - expect(vm.editor.createInstance).toHaveBeenCalled(); - - done(); - }); - }); - - it('calls createDiffInstance when viewer is diff', done => { - vm.$store.state.viewer = 'diff'; - - jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); - - vm.createEditorInstance(); - - vm.$nextTick(() => { - expect(vm.editor.createDiffInstance).toHaveBeenCalled(); - - done(); - }); - }); - - it('calls createDiffInstance when viewer is a merge request diff', done => { - vm.$store.state.viewer = 'mrdiff'; - - jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); - - vm.createEditorInstance(); - - vm.$nextTick(() => { - expect(vm.editor.createDiffInstance).toHaveBeenCalled(); - - done(); - }); - }); - }); - - describe('setupEditor', () => { - it('creates new model', () => { - jest.spyOn(vm.editor, 'createModel'); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, null); - expect(vm.model).not.toBeNull(); - }); - - it('attaches model to editor', () => { - jest.spyOn(vm.editor, 'attachModel'); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.attachModel).toHaveBeenCalledWith(vm.model); - }); - - it('attaches model to merge request editor', () => { - vm.$store.state.viewer = 'mrdiff'; - vm.file.mrChange = true; - jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.attachMergeRequestModel).toHaveBeenCalledWith(vm.model); - }); - - it('does not attach model to merge request editor when not a MR change', () => { - vm.$store.state.viewer = 'mrdiff'; - vm.file.mrChange = false; - jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.attachMergeRequestModel).not.toHaveBeenCalledWith(vm.model); - }); - - it('adds callback methods', () => { - jest.spyOn(vm.editor, 'onPositionChange'); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.onPositionChange).toHaveBeenCalled(); - expect(vm.model.events.size).toBe(2); - }); - - it('updates state with the value of the model', () => { - vm.model.setValue('testing 1234'); - - vm.setupEditor(); 
- - expect(vm.file.content).toBe('testing 1234'); - }); - - it('sets head model as staged file', () => { - jest.spyOn(vm.editor, 'createModel'); - - Editor.editorInstance.modelManager.dispose(); - - vm.$store.state.stagedFiles.push({ ...vm.file, key: 'staged' }); - vm.file.staged = true; - vm.file.key = `unstaged-${vm.file.key}`; - - vm.setupEditor(); - - expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]); - }); - }); - - describe('editor updateDimensions', () => { - beforeEach(() => { - jest.spyOn(vm.editor, 'updateDimensions'); - jest.spyOn(vm.editor, 'updateDiffView').mockImplementation(); - }); - - it('calls updateDimensions when panelResizing is false', done => { - vm.$store.state.panelResizing = true; - - vm.$nextTick() - .then(() => { - vm.$store.state.panelResizing = false; - }) - .then(vm.$nextTick) - .then(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); - expect(vm.editor.updateDiffView).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - - it('does not call updateDimensions when panelResizing is true', done => { - vm.$store.state.panelResizing = true; - - vm.$nextTick(() => { - expect(vm.editor.updateDimensions).not.toHaveBeenCalled(); - expect(vm.editor.updateDiffView).not.toHaveBeenCalled(); - - done(); - }); - }); - - it('calls updateDimensions when rightPane is opened', done => { - vm.$store.state.rightPane.isOpen = true; - - vm.$nextTick(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); - expect(vm.editor.updateDiffView).toHaveBeenCalled(); - - done(); - }); - }); - }); - - describe('show tabs', () => { - it('shows tabs in edit mode', () => { - expect(vm.$el.querySelector('.nav-links')).not.toBe(null); - }); - - it('hides tabs in review mode', done => { - vm.$store.state.currentActivityView = leftSidebarViews.review.name; - - vm.$nextTick(() => { - expect(vm.$el.querySelector('.nav-links')).toBe(null); - - done(); - }); - }); - - it('hides tabs in commit mode', done => { - vm.$store.state.currentActivityView = leftSidebarViews.commit.name; - - vm.$nextTick(() => { - expect(vm.$el.querySelector('.nav-links')).toBe(null); - - done(); - }); - }); - }); - - describe('when files view mode is preview', () => { - beforeEach(done => { - jest.spyOn(vm.editor, 'updateDimensions').mockImplementation(); - vm.file.viewMode = FILE_VIEW_MODE_PREVIEW; - vm.$nextTick(done); - }); - - it('should hide editor', () => { - expect(vm.showEditor).toBe(false); - expect(findEditor()).toHaveCss({ display: 'none' }); - }); - - describe('when file view mode changes to editor', () => { - it('should update dimensions', () => { - vm.file.viewMode = FILE_VIEW_MODE_EDITOR; - - return vm.$nextTick().then(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); + done(); }); }); - }); - }); - describe('initEditor', () => { - beforeEach(() => { - vm.file.tempFile = false; - jest.spyOn(vm.editor, 'createInstance').mockImplementation(); - jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); - }); + it('renders markdown for tempFile', done => { + vm.file.tempFile = true; - it('does not fetch file information for temp entries', done => { - vm.file.tempFile = true; + vm.$nextTick() + .then(() => { + vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')[1].click(); + }) + .then(waitForPromises) + .then(() => { + expect(vm.$el.querySelector('.preview-container').innerHTML).toContain( + '

<p>testing 123</p>
', + ); + }) + .then(done) + .catch(done.fail); + }); - vm.initEditor(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); + describe('when not in edit mode', () => { + beforeEach(async () => { + await vm.$nextTick(); - it('is being initialised for files without content even if shouldHideEditor is `true`', done => { - vm.file.content = ''; - vm.file.raw = ''; - - vm.initEditor(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).toHaveBeenCalled(); - expect(vm.getRawFileData).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - - it('does not initialize editor for files already with content', done => { - vm.file.content = 'foo'; - - vm.initEditor(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).not.toHaveBeenCalled(); - expect(vm.getRawFileData).not.toHaveBeenCalled(); - expect(vm.editor.createInstance).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - }); - - describe('updates on file changes', () => { - beforeEach(() => { - jest.spyOn(vm, 'initEditor').mockImplementation(); - }); - - it('calls removePendingTab when old file is pending', done => { - jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); - jest.spyOn(vm, 'removePendingTab').mockImplementation(); - - vm.file.pending = true; - - vm.$nextTick() - .then(() => { - vm.file = file('testing'); - vm.file.content = 'foo'; // need to prevent full cycle of initEditor + vm.$store.state.currentActivityView = leftSidebarViews.review.name; return vm.$nextTick(); - }) - .then(() => { - expect(vm.removePendingTab).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + }); + + it('shows no tabs', () => { + expect(vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')).toHaveLength(0); + }); + }); }); - it('does not call initEditor if the file did not change', done => { - Vue.set(vm, 'file', vm.file); + describe('when open file is binary and not raw', () => { + beforeEach(done => { + vm.file.binary = true; - vm.$nextTick() - .then(() => { - expect(vm.initEditor).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - - it('calls initEditor when file key is changed', done => { - expect(vm.initEditor).not.toHaveBeenCalled(); - - Vue.set(vm, 'file', { - ...vm.file, - key: 'new', + vm.$nextTick(done); }); - vm.$nextTick() - .then(() => { - expect(vm.initEditor).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + it('does not render the IDE', () => { + expect(vm.shouldHideEditor).toBeTruthy(); + }); + }); + + describe('createEditorInstance', () => { + it('calls createInstance when viewer is editor', done => { + jest.spyOn(vm.editor, 'createInstance').mockImplementation(); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createInstance).toHaveBeenCalled(); + + done(); + }); + }); + + it('calls createDiffInstance when viewer is diff', done => { + vm.$store.state.viewer = 'diff'; + + jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + + done(); + }); + }); + + it('calls createDiffInstance when viewer is a merge request diff', done => { + vm.$store.state.viewer = 'mrdiff'; + + jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); + + vm.createEditorInstance(); + + vm.$nextTick(() => { + expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + + done(); + }); + }); + }); + + describe('setupEditor', () => { + 
it('creates new model', () => { + jest.spyOn(vm.editor, 'createModel'); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, null); + expect(vm.model).not.toBeNull(); + }); + + it('attaches model to editor', () => { + jest.spyOn(vm.editor, 'attachModel'); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.attachModel).toHaveBeenCalledWith(vm.model); + }); + + it('attaches model to merge request editor', () => { + vm.$store.state.viewer = 'mrdiff'; + vm.file.mrChange = true; + jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.attachMergeRequestModel).toHaveBeenCalledWith(vm.model); + }); + + it('does not attach model to merge request editor when not a MR change', () => { + vm.$store.state.viewer = 'mrdiff'; + vm.file.mrChange = false; + jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.attachMergeRequestModel).not.toHaveBeenCalledWith(vm.model); + }); + + it('adds callback methods', () => { + jest.spyOn(vm.editor, 'onPositionChange'); + + Editor.editorInstance.modelManager.dispose(); + + vm.setupEditor(); + + expect(vm.editor.onPositionChange).toHaveBeenCalled(); + expect(vm.model.events.size).toBe(2); + }); + + it('updates state with the value of the model', () => { + vm.model.setValue('testing 1234\n'); + + vm.setupEditor(); + + expect(vm.file.content).toBe('testing 1234\n'); + }); + + it('sets head model as staged file', () => { + jest.spyOn(vm.editor, 'createModel'); + + Editor.editorInstance.modelManager.dispose(); + + vm.$store.state.stagedFiles.push({ ...vm.file, key: 'staged' }); + vm.file.staged = true; + vm.file.key = `unstaged-${vm.file.key}`; + + vm.setupEditor(); + + expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]); + }); + }); + + describe('editor updateDimensions', () => { + beforeEach(() => { + jest.spyOn(vm.editor, 'updateDimensions'); + jest.spyOn(vm.editor, 'updateDiffView').mockImplementation(); + }); + + it('calls updateDimensions when panelResizing is false', done => { + vm.$store.state.panelResizing = true; + + vm.$nextTick() + .then(() => { + vm.$store.state.panelResizing = false; + }) + .then(vm.$nextTick) + .then(() => { + expect(vm.editor.updateDimensions).toHaveBeenCalled(); + expect(vm.editor.updateDiffView).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('does not call updateDimensions when panelResizing is true', done => { + vm.$store.state.panelResizing = true; + + vm.$nextTick(() => { + expect(vm.editor.updateDimensions).not.toHaveBeenCalled(); + expect(vm.editor.updateDiffView).not.toHaveBeenCalled(); + + done(); + }); + }); + + it('calls updateDimensions when rightPane is opened', done => { + vm.$store.state.rightPane.isOpen = true; + + vm.$nextTick(() => { + expect(vm.editor.updateDimensions).toHaveBeenCalled(); + expect(vm.editor.updateDiffView).toHaveBeenCalled(); + + done(); + }); + }); + }); + + describe('show tabs', () => { + it('shows tabs in edit mode', () => { + expect(vm.$el.querySelector('.nav-links')).not.toBe(null); + }); + + it('hides tabs in review mode', done => { + vm.$store.state.currentActivityView = leftSidebarViews.review.name; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.nav-links')).toBe(null); + + done(); + 
}); + }); + + it('hides tabs in commit mode', done => { + vm.$store.state.currentActivityView = leftSidebarViews.commit.name; + + vm.$nextTick(() => { + expect(vm.$el.querySelector('.nav-links')).toBe(null); + + done(); + }); + }); + }); + + describe('when files view mode is preview', () => { + beforeEach(done => { + jest.spyOn(vm.editor, 'updateDimensions').mockImplementation(); + vm.file.viewMode = FILE_VIEW_MODE_PREVIEW; + vm.$nextTick(done); + }); + + it('should hide editor', () => { + expect(vm.showEditor).toBe(false); + expect(findEditor()).toHaveCss({ display: 'none' }); + }); + + describe('when file view mode changes to editor', () => { + it('should update dimensions', () => { + vm.file.viewMode = FILE_VIEW_MODE_EDITOR; + + return vm.$nextTick().then(() => { + expect(vm.editor.updateDimensions).toHaveBeenCalled(); + }); + }); + }); + }); + + describe('initEditor', () => { + beforeEach(() => { + vm.file.tempFile = false; + jest.spyOn(vm.editor, 'createInstance').mockImplementation(); + jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); + }); + + it('does not fetch file information for temp entries', done => { + vm.file.tempFile = true; + + vm.initEditor(); + vm.$nextTick() + .then(() => { + expect(mockActions.getFileData).not.toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('is being initialised for files without content even if shouldHideEditor is `true`', done => { + vm.file.content = ''; + vm.file.raw = ''; + + vm.initEditor(); + vm.$nextTick() + .then(() => { + expect(mockActions.getFileData).toHaveBeenCalled(); + expect(mockActions.getRawFileData).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('does not initialize editor for files already with content', done => { + vm.file.content = 'foo'; + + vm.initEditor(); + vm.$nextTick() + .then(() => { + expect(mockActions.getFileData).not.toHaveBeenCalled(); + expect(mockActions.getRawFileData).not.toHaveBeenCalled(); + expect(vm.editor.createInstance).not.toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('updates on file changes', () => { + beforeEach(() => { + jest.spyOn(vm, 'initEditor').mockImplementation(); + }); + + it('calls removePendingTab when old file is pending', done => { + jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); + jest.spyOn(vm, 'removePendingTab').mockImplementation(); + + vm.file.pending = true; + + vm.$nextTick() + .then(() => { + vm.file = file('testing'); + vm.file.content = 'foo'; // need to prevent full cycle of initEditor + + return vm.$nextTick(); + }) + .then(() => { + expect(vm.removePendingTab).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('does not call initEditor if the file did not change', done => { + Vue.set(vm, 'file', vm.file); + + vm.$nextTick() + .then(() => { + expect(vm.initEditor).not.toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + it('calls initEditor when file key is changed', done => { + expect(vm.initEditor).not.toHaveBeenCalled(); + + Vue.set(vm, 'file', { + ...vm.file, + key: 'new', + }); + + vm.$nextTick() + .then(() => { + expect(vm.initEditor).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('onPaste', () => { + const setFileName = name => { + Vue.set(vm, 'file', { + ...vm.file, + content: 'hello world\n', + name, + path: `foo/${name}`, + key: 'new', + }); + + vm.$store.state.entries[vm.file.path] = vm.file; + }; + + const pasteImage = () => { + window.dispatchEvent( + 
Object.assign(new Event('paste'), { + clipboardData: { + files: [new File(['foo'], 'foo.png', { type: 'image/png' })], + }, + }), + ); + }; + + const watchState = watched => + new Promise(resolve => { + const unwatch = vm.$store.watch(watched, () => { + unwatch(); + resolve(); + }); + }); + + beforeEach(() => { + setFileName('bar.md'); + + vm.$store.state.trees['gitlab-org/gitlab'] = { tree: [] }; + vm.$store.state.currentProjectId = 'gitlab-org'; + vm.$store.state.currentBranchId = 'gitlab'; + + // create a new model each time, otherwise tests conflict with each other + // because of same model being used in multiple tests + Editor.editorInstance.modelManager.dispose(); + vm.setupEditor(); + + return waitForPromises().then(() => { + // set cursor to line 2, column 1 + vm.editor.instance.setSelection(new Range(2, 1, 2, 1)); + vm.editor.instance.focus(); + }); + }); + + it('adds an image entry to the same folder for a pasted image in a markdown file', () => { + pasteImage(); + + return waitForPromises().then(() => { + expect(vm.$store.state.entries['foo/foo.png']).toMatchObject({ + path: 'foo/foo.png', + type: 'blob', + content: 'Zm9v', + binary: true, + rawPath: 'data:image/png;base64,Zm9v', + }); + }); + }); + + it("adds a markdown image tag to the file's contents", () => { + pasteImage(); + + // Pasting an image does a lot of things like using the FileReader API, + // so, waitForPromises isn't very reliable (and causes a flaky spec) + // Read more about state.watch: https://vuex.vuejs.org/api/#watch + return watchState(s => s.entries['foo/bar.md'].content).then(() => { + expect(vm.file.content).toBe('hello world\n![foo.png](./foo.png)'); + }); + }); + + it("does not add file to state or set markdown image syntax if the file isn't markdown", () => { + setFileName('myfile.txt'); + pasteImage(); + + return waitForPromises().then(() => { + expect(vm.$store.state.entries['foo/foo.png']).toBeUndefined(); + expect(vm.file.content).toBe('hello world\n'); + }); + }); }); }); - describe('onPaste', () => { - const setFileName = name => { - Vue.set(vm, 'file', { - ...vm.file, - content: 'hello world\n', - name, - path: `foo/${name}`, - key: 'new', - }); - - vm.$store.state.entries[vm.file.path] = vm.file; - }; - - const pasteImage = () => { - window.dispatchEvent( - Object.assign(new Event('paste'), { - clipboardData: { - files: [new File(['foo'], 'foo.png', { type: 'image/png' })], - }, - }), - ); - }; - - const watchState = watched => - new Promise(resolve => { - const unwatch = vm.$store.watch(watched, () => { - unwatch(); - resolve(); - }); - }); - + describe('fetchEditorconfigRules', () => { beforeEach(() => { - setFileName('bar.md'); - - vm.$store.state.trees['gitlab-org/gitlab'] = { tree: [] }; - vm.$store.state.currentProjectId = 'gitlab-org'; - vm.$store.state.currentBranchId = 'gitlab'; - - // create a new model each time, otherwise tests conflict with each other - // because of same model being used in multiple tests - Editor.editorInstance.modelManager.dispose(); - vm.setupEditor(); - - return waitForPromises().then(() => { - // set cursor to line 2, column 1 - vm.editor.instance.setSelection(new Range(2, 1, 2, 1)); - vm.editor.instance.focus(); + exampleConfigs.forEach(({ path, content }) => { + store.state.entries[path] = { ...file(), path, content }; }); }); - it('adds an image entry to the same folder for a pasted image in a markdown file', () => { - pasteImage(); + it.each(exampleFiles)( + 'does not fetch content from remote for .editorconfig files present locally (case %#)', + ({ 
+      ({ path, monacoRules }) => {
+        createOpenFile(path);
+        createComponent();
 
-      return waitForPromises().then(() => {
-        expect(vm.$store.state.entries['foo/foo.png']).toMatchObject({
-          path: 'foo/foo.png',
-          type: 'blob',
-          content: 'Zm9v',
-          binary: true,
-          rawPath: 'data:image/png;base64,Zm9v',
+        return waitForEditorSetup().then(() => {
+          expect(vm.rules).toEqual(monacoRules);
+          expect(vm.model.options).toMatchObject(monacoRules);
+          expect(mockActions.getFileData).not.toHaveBeenCalled();
+          expect(mockActions.getRawFileData).not.toHaveBeenCalled();
         });
+      },
+    );
+
+    it('fetches content from remote for .editorconfig files not available locally', () => {
+      exampleConfigs.forEach(({ path }) => {
+        delete store.state.entries[path].content;
+        delete store.state.entries[path].raw;
       });
-    });
 
-    it("adds a markdown image tag to the file's contents", () => {
-      pasteImage();
+      // Include a "test" directory which does not exist in store. This one should be skipped.
+      createOpenFile('foo/bar/baz/test/my_spec.js');
+      createComponent();
 
-      // Pasting an image does a lot of things like using the FileReader API,
-      // so, waitForPromises isn't very reliable (and causes a flaky spec)
-      // Read more about state.watch: https://vuex.vuejs.org/api/#watch
-      return watchState(s => s.entries['foo/bar.md'].content).then(() => {
-        expect(vm.file.content).toBe('hello world\n![foo.png](./foo.png)');
-      });
-    });
-
-    it("does not add file to state or set markdown image syntax if the file isn't markdown", () => {
-      setFileName('myfile.txt');
-      pasteImage();
-
-      return waitForPromises().then(() => {
-        expect(vm.$store.state.entries['foo/foo.png']).toBeUndefined();
-        expect(vm.file.content).toBe('hello world\n');
+      return waitForEditorSetup().then(() => {
+        expect(mockActions.getFileData.mock.calls.map(([, args]) => args)).toEqual([
+          { makeFileActive: false, path: 'foo/bar/baz/.editorconfig' },
+          { makeFileActive: false, path: 'foo/bar/.editorconfig' },
+          { makeFileActive: false, path: 'foo/.editorconfig' },
+          { makeFileActive: false, path: '.editorconfig' },
+        ]);
+        expect(mockActions.getRawFileData.mock.calls.map(([, args]) => args)).toEqual([
+          { path: 'foo/bar/baz/.editorconfig' },
+          { path: 'foo/bar/.editorconfig' },
+          { path: 'foo/.editorconfig' },
+          { path: '.editorconfig' },
+        ]);
       });
     });
   });
diff --git a/spec/frontend/ide/lib/editorconfig/mock_data.js b/spec/frontend/ide/lib/editorconfig/mock_data.js
new file mode 100644
index 00000000000..b21f4a5b735
--- /dev/null
+++ b/spec/frontend/ide/lib/editorconfig/mock_data.js
@@ -0,0 +1,146 @@
+export const exampleConfigs = [
+  {
+    path: 'foo/bar/baz/.editorconfig',
+    content: `
+[*]
+tab_width = 6
+indent_style = tab
+`,
+  },
+  {
+    path: 'foo/bar/.editorconfig',
+    content: `
+root = false
+
+[*]
+indent_size = 5
+indent_style = space
+trim_trailing_whitespace = true
+
+[*_spec.{js,py}]
+end_of_line = crlf
+    `,
+  },
+  {
+    path: 'foo/.editorconfig',
+    content: `
+[*]
+tab_width = 4
+indent_style = tab
+    `,
+  },
+  {
+    path: '.editorconfig',
+    content: `
+root = true
+
+[*]
+indent_size = 3
+indent_style = space
+end_of_line = lf
+insert_final_newline = true
+
+[*.js]
+indent_size = 2
+indent_style = space
+trim_trailing_whitespace = true
+
+[*.txt]
+end_of_line = crlf
+    `,
+  },
+  {
+    path: 'foo/bar/root/.editorconfig',
+    content: `
+root = true
+
+[*]
+tab_width = 1
+indent_style = tab
+    `,
+  },
+];
+
+export const exampleFiles = [
+  {
+    path: 'foo/bar/root/README.md',
+    rules: {
+      indent_style: 'tab', // foo/bar/root/.editorconfig
+      tab_width: '1', // foo/bar/root/.editorconfig
+    },
+    monacoRules: {
+      insertSpaces: false,
+      tabSize: 1,
+    },
+  },
+  {
+    path: 'foo/bar/baz/my_spec.js',
+    rules: {
+      end_of_line: 'crlf', // foo/bar/.editorconfig (for _spec.js files)
+      indent_size: '5', // foo/bar/.editorconfig
+      indent_style: 'tab', // foo/bar/baz/.editorconfig
+      insert_final_newline: 'true', // .editorconfig
+      tab_width: '6', // foo/bar/baz/.editorconfig
+      trim_trailing_whitespace: 'true', // .editorconfig (for .js files)
+    },
+    monacoRules: {
+      endOfLine: 1,
+      insertFinalNewline: true,
+      insertSpaces: false,
+      tabSize: 6,
+      trimTrailingWhitespace: true,
+    },
+  },
+  {
+    path: 'foo/my_file.js',
+    rules: {
+      end_of_line: 'lf', // .editorconfig
+      indent_size: '2', // .editorconfig (for .js files)
+      indent_style: 'tab', // foo/.editorconfig
+      insert_final_newline: 'true', // .editorconfig
+      tab_width: '4', // foo/.editorconfig
+      trim_trailing_whitespace: 'true', // .editorconfig (for .js files)
+    },
+    monacoRules: {
+      endOfLine: 0,
+      insertFinalNewline: true,
+      insertSpaces: false,
+      tabSize: 4,
+      trimTrailingWhitespace: true,
+    },
+  },
+  {
+    path: 'foo/my_file.md',
+    rules: {
+      end_of_line: 'lf', // .editorconfig
+      indent_size: '3', // .editorconfig
+      indent_style: 'tab', // foo/.editorconfig
+      insert_final_newline: 'true', // .editorconfig
+      tab_width: '4', // foo/.editorconfig
+    },
+    monacoRules: {
+      endOfLine: 0,
+      insertFinalNewline: true,
+      insertSpaces: false,
+      tabSize: 4,
+    },
+  },
+  {
+    path: 'foo/bar/my_file.txt',
+    rules: {
+      end_of_line: 'crlf', // .editorconfig (for .txt files)
+      indent_size: '5', // foo/bar/.editorconfig
+      indent_style: 'space', // foo/bar/.editorconfig
+      insert_final_newline: 'true', // .editorconfig
+      tab_width: '4', // foo/.editorconfig
+      trim_trailing_whitespace: 'true', // foo/bar/.editorconfig
+    },
+    monacoRules: {
+      endOfLine: 1,
+      insertFinalNewline: true,
+      insertSpaces: true,
+      tabSize: 4,
+      trimTrailingWhitespace: true,
+    },
+  },
+];
diff --git a/spec/frontend/ide/lib/editorconfig/parser_spec.js b/spec/frontend/ide/lib/editorconfig/parser_spec.js
new file mode 100644
index 00000000000..f99410236e1
--- /dev/null
+++ b/spec/frontend/ide/lib/editorconfig/parser_spec.js
@@ -0,0 +1,18 @@
+import { getRulesWithTraversal } from '~/ide/lib/editorconfig/parser';
+import { exampleConfigs, exampleFiles } from './mock_data';
+
+describe('~/ide/lib/editorconfig/parser', () => {
+  const getExampleConfigContent = path =>
+    Promise.resolve(exampleConfigs.find(x => x.path === path)?.content);
+
+  describe('getRulesWithTraversal', () => {
+    it.each(exampleFiles)(
+      'traverses through all editorconfig files in parent directories (until root=true is hit) and finds rules for this file (case %#)',
+      ({ path, rules }) => {
+        return getRulesWithTraversal(path, getExampleConfigContent).then(result => {
+          expect(result).toEqual(rules);
+        });
+      },
+    );
+  });
+});
diff --git a/spec/frontend/ide/lib/editorconfig/rules_mapper_spec.js b/spec/frontend/ide/lib/editorconfig/rules_mapper_spec.js
new file mode 100644
index 00000000000..536b1409435
--- /dev/null
+++ b/spec/frontend/ide/lib/editorconfig/rules_mapper_spec.js
@@ -0,0 +1,43 @@
+import mapRulesToMonaco from '~/ide/lib/editorconfig/rules_mapper';
+
+describe('mapRulesToMonaco', () => {
+  const multipleEntries = {
+    input: { indent_style: 'tab', indent_size: '4', insert_final_newline: 'true' },
+    output: { insertSpaces: false, tabSize: 4, insertFinalNewline: true },
+  };
+
+  // tab width takes precedence
+  const tabWidthAndIndent = {
+    input: { indent_style: 'tab', indent_size: '4', tab_width: '3' },
+    output: { insertSpaces: false, tabSize: 3 },
+  };
+
+  it.each`
+    rule                                     | monacoOption
+    ${{ indent_style: 'tab' }}               | ${{ insertSpaces: false }}
+    ${{ indent_style: 'space' }}             | ${{ insertSpaces: true }}
+    ${{ indent_style: 'unset' }}             | ${{}}
+    ${{ indent_size: '4' }}                  | ${{ tabSize: 4 }}
+    ${{ indent_size: '4.4' }}                | ${{ tabSize: 4 }}
+    ${{ indent_size: '0' }}                  | ${{}}
+    ${{ indent_size: '-10' }}                | ${{}}
+    ${{ indent_size: 'NaN' }}                | ${{}}
+    ${{ tab_width: '4' }}                    | ${{ tabSize: 4 }}
+    ${{ tab_width: '5.4' }}                  | ${{ tabSize: 5 }}
+    ${{ tab_width: '-10' }}                  | ${{}}
+    ${{ trim_trailing_whitespace: 'true' }}  | ${{ trimTrailingWhitespace: true }}
+    ${{ trim_trailing_whitespace: 'false' }} | ${{ trimTrailingWhitespace: false }}
+    ${{ trim_trailing_whitespace: 'unset' }} | ${{}}
+    ${{ end_of_line: 'lf' }}                 | ${{ endOfLine: 0 }}
+    ${{ end_of_line: 'crlf' }}               | ${{ endOfLine: 1 }}
+    ${{ end_of_line: 'cr' }}                 | ${{}}
+    ${{ end_of_line: 'unset' }}              | ${{}}
+    ${{ insert_final_newline: 'true' }}      | ${{ insertFinalNewline: true }}
+    ${{ insert_final_newline: 'false' }}     | ${{ insertFinalNewline: false }}
+    ${{ insert_final_newline: 'unset' }}     | ${{}}
+    ${multipleEntries.input}                 | ${multipleEntries.output}
+    ${tabWidthAndIndent.input}               | ${tabWidthAndIndent.output}
+  `('correctly maps editorconfig rule to monaco option: $rule', ({ rule, monacoOption }) => {
+    expect(mapRulesToMonaco(rule)).toEqual(monacoOption);
+  });
+});
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 3c26a05986f..f24419ce9cc 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -16,6 +16,44 @@ describe GitlabSchema.types['Snippet'] do
     expect(described_class).to have_graphql_fields(*expected_fields)
   end
 
+  context 'when restricted visibility level is set to public' do
+    let_it_be(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
+
+    let(:current_user) { user }
+    let(:query) do
+      %(
+        {
+          snippets {
+            nodes {
+              author {
+                id
+              }
+            }
+          }
+        }
+      )
+    end
+    let(:response) { subject.dig('data', 'snippets', 'nodes')[0] }
+
+    subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
+
+    before do
+      stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+    end
+
+    it 'returns snippet author' do
+      expect(response['author']).to be_present
+    end
+
+    context 'when user is not logged in' do
+      let(:current_user) { nil }
+
+      it 'returns snippet author as nil' do
+        expect(response['author']).to be_nil
+      end
+    end
+  end
+
   describe 'authorizations' do
     specify { expect(described_class).to require_graphql_authorizations(:read_snippet) }
   end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index d40ed2248ce..c41d4f0ede7 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -59,6 +59,22 @@ describe ClustersHelper do
     end
   end
 
+  describe '#js_clusters_list_data' do
+    it 'displays endpoint path and images' do
+      js_data = helper.js_clusters_list_data('/path')
+
+      expect(js_data[:endpoint]).to eq('/path')
+
+      expect(js_data.dig(:img_tags, :aws, :path)).to match(%r(/illustrations/logos/amazon_eks|svg))
+      expect(js_data.dig(:img_tags, :default, :path)).to match(%r(/illustrations/logos/kubernetes|svg))
+      expect(js_data.dig(:img_tags, :gcp, :path)).to match(%r(/illustrations/logos/google_gke|svg))
+
+      expect(js_data.dig(:img_tags, :aws, :text)).to eq('Amazon EKS')
+      expect(js_data.dig(:img_tags, :default, :text)).to eq('Kubernetes Cluster')
+      expect(js_data.dig(:img_tags, :gcp, :text)).to eq('Google GKE')
+    end
+  end
+
   describe '#provider_icon' do
     it 'will return GCP logo with gcp argument' do
       logo = helper.provider_icon('gcp')
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 93f24873b96..8f920bb2e01 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -48,45 +48,18 @@ describe Gitlab::Runtime do
     before do
       stub_const('::Puma', puma_type)
       allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
-      stub_config(action_cable: { in_app: false })
     end
 
     it_behaves_like "valid runtime", :puma, 3
-
-    context "when ActionCable in-app mode is enabled" do
-      before do
-        stub_config(action_cable: { in_app: true, worker_pool_size: 3 })
-      end
-
-      it_behaves_like "valid runtime", :puma, 6
-    end
-
-    context "when ActionCable standalone is run" do
-      before do
-        stub_const('ACTION_CABLE_SERVER', true)
-        stub_config(action_cable: { worker_pool_size: 8 })
-      end
-
-      it_behaves_like "valid runtime", :puma, 11
-    end
   end
 
   context "unicorn" do
     before do
       stub_const('::Unicorn', Module.new)
      stub_const('::Unicorn::HttpServer', Class.new)
-      stub_config(action_cable: { in_app: false })
     end
 
     it_behaves_like "valid runtime", :unicorn, 1
-
-    context "when ActionCable in-app mode is enabled" do
-      before do
-        stub_config(action_cable: { in_app: true, worker_pool_size: 3 })
-      end
-
-      it_behaves_like "valid runtime", :unicorn, 4
-    end
   end
 
   context "sidekiq" do
@@ -132,4 +105,17 @@ describe Gitlab::Runtime do
 
     it_behaves_like "valid runtime", :rails_runner, 1
   end
+
+  context "action_cable" do
+    before do
+      stub_const('ACTION_CABLE_SERVER', true)
+      stub_const('::Puma', Module.new)
+
+      allow(Gitlab::Application).to receive_message_chain(:config, :action_cable, :worker_pool_size).and_return(8)
+    end
+
+    it "reports its maximum concurrency based on ActionCable's worker pool size" do
+      expect(subject.max_threads).to eq(9)
+    end
+  end
 end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 787d778b483..be0d028b392 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -6064,6 +6064,14 @@ describe Project do
     it { is_expected.not_to include(user) }
   end
 
+  describe "#metrics_setting" do
+    let(:project) { build(:project) }
+
+    it 'creates setting if it does not exist' do
+      expect(project.metrics_setting).to be_an_instance_of(ProjectMetricsSetting)
+    end
+  end
+
   def finish_job(export_job)
     export_job.start
     export_job.finish
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
index 39551649ff0..b7d7307d40b 100644
--- a/spec/serializers/cluster_serializer_spec.rb
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -16,6 +16,7 @@ describe ClusterSerializer do
         :name,
         :nodes,
         :path,
+        :provider_type,
         :status)
     end
   end
diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb
index 0069ae81b76..c0c3559cca0 100644
--- a/spec/support/import_export/common_util.rb
+++ b/spec/support/import_export/common_util.rb
@@ -19,7 +19,7 @@ module ImportExport
   end
 
   def setup_reader(reader)
-    if reader == :ndjson_reader && Feature.enabled?(:project_import_ndjson)
+    if reader == :ndjson_reader && Feature.enabled?(:project_import_ndjson, default_enabled: true)
       allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(false)
       allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(true)
     else