Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
f2f748c081
commit
f6905d2ed2
|
@ -52,6 +52,9 @@
|
|||
.if-dot-com-gitlab-org-merge-request: &if-dot-com-gitlab-org-merge-request
|
||||
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_MERGE_REQUEST_IID'
|
||||
|
||||
.if-dot-com-gitlab-org-and-security-merge-request: &if-dot-com-gitlab-org-and-security-merge-request
|
||||
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/security$)/ && $CI_MERGE_REQUEST_IID'
|
||||
|
||||
.if-dot-com-gitlab-org-and-security-tag: &if-dot-com-gitlab-org-and-security-tag
|
||||
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/security$)/ && $CI_COMMIT_TAG'
|
||||
|
||||
|
@ -372,13 +375,13 @@
|
|||
|
||||
.qa:rules:package-and-qa:
|
||||
rules:
|
||||
- <<: *if-dot-com-gitlab-org-merge-request
|
||||
- <<: *if-dot-com-gitlab-org-and-security-merge-request
|
||||
changes: *ci-patterns
|
||||
allow_failure: true
|
||||
- <<: *if-dot-com-gitlab-org-merge-request
|
||||
- <<: *if-dot-com-gitlab-org-and-security-merge-request
|
||||
changes: *qa-patterns
|
||||
allow_failure: true
|
||||
- <<: *if-dot-com-gitlab-org-merge-request
|
||||
- <<: *if-dot-com-gitlab-org-and-security-merge-request
|
||||
changes: *code-patterns
|
||||
when: manual
|
||||
allow_failure: true
|
||||
|
@ -507,7 +510,7 @@
|
|||
rules:
|
||||
- <<: *if-not-ee
|
||||
when: never
|
||||
- <<: *if-dot-com-gitlab-org-merge-request
|
||||
- <<: *if-dot-com-gitlab-org-and-security-merge-request
|
||||
changes: *code-qa-patterns
|
||||
- <<: *if-dot-com-gitlab-org-schedule
|
||||
|
||||
|
|
|
@ -28,7 +28,7 @@ export default {
|
|||
tooltip,
|
||||
},
|
||||
computed: {
|
||||
...mapState(['clusters', 'clustersPerPage', 'loading', 'page', 'totalCulsters']),
|
||||
...mapState(['clusters', 'clustersPerPage', 'loading', 'page', 'providers', 'totalCulsters']),
|
||||
currentPage: {
|
||||
get() {
|
||||
return this.page;
|
||||
|
@ -102,6 +102,9 @@ export default {
|
|||
// Sentry will notify us if we are missing types.
|
||||
throw new Error(`UnknownK8sCpuQuantity:${quantity}`);
|
||||
},
|
||||
selectedProvider(provider) {
|
||||
return this.providers[provider] || this.providers.default;
|
||||
},
|
||||
statusTitle(status) {
|
||||
const iconTitle = STATUSES[status] || STATUSES.default;
|
||||
return sprintf(__('Status: %{title}'), { title: iconTitle.title }, false);
|
||||
|
@ -182,8 +185,21 @@ export default {
|
|||
<section v-else>
|
||||
<gl-table :items="clusters" :fields="fields" stacked="md" class="qa-clusters-table">
|
||||
<template #cell(name)="{ item }">
|
||||
<div class="d-flex flex-row-reverse flex-md-row js-status">
|
||||
<gl-link data-qa-selector="cluster" :data-qa-cluster-name="item.name" :href="item.path">
|
||||
<div
|
||||
class="gl-display-flex gl-align-items-center gl-justify-content-end gl-justify-content-md-start js-status"
|
||||
>
|
||||
<img
|
||||
:src="selectedProvider(item.provider_type).path"
|
||||
:alt="selectedProvider(item.provider_type).text"
|
||||
class="gl-w-6 gl-h-6 gl-display-flex gl-align-items-center"
|
||||
/>
|
||||
|
||||
<gl-link
|
||||
data-qa-selector="cluster"
|
||||
:data-qa-cluster-name="item.name"
|
||||
:href="item.path"
|
||||
class="gl-px-3"
|
||||
>
|
||||
{{ item.name }}
|
||||
</gl-link>
|
||||
|
||||
|
@ -192,7 +208,6 @@ export default {
|
|||
v-tooltip
|
||||
:title="statusTitle(item.status)"
|
||||
size="sm"
|
||||
class="mr-2 ml-md-2"
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
@ -9,12 +9,10 @@ export default () => {
|
|||
return;
|
||||
}
|
||||
|
||||
const { endpoint } = entryPoint.dataset;
|
||||
|
||||
// eslint-disable-next-line no-new
|
||||
new Vue({
|
||||
el: '#js-clusters-list-app',
|
||||
store: createStore({ endpoint }),
|
||||
store: createStore(entryPoint.dataset),
|
||||
render(createElement) {
|
||||
return createElement(Clusters);
|
||||
},
|
||||
|
|
|
@ -5,5 +5,10 @@ export default (initialState = {}) => ({
|
|||
clusters: [],
|
||||
clustersPerPage: 0,
|
||||
page: 1,
|
||||
providers: {
|
||||
aws: { path: initialState.imgTagsAwsPath, text: initialState.imgTagsAwsText },
|
||||
default: { path: initialState.imgTagsDefaultPath, text: initialState.imgTagsDefaultText },
|
||||
gcp: { path: initialState.imgTagsGcpPath, text: initialState.imgTagsGcpText },
|
||||
},
|
||||
totalCulsters: 0,
|
||||
});
|
||||
|
|
|
@ -15,6 +15,8 @@ import FileTemplatesBar from './file_templates/bar.vue';
|
|||
import { __ } from '~/locale';
|
||||
import { extractMarkdownImagesFromEntries } from '../stores/utils';
|
||||
import { getPathParent, readFileAsDataURL } from '../utils';
|
||||
import { getRulesWithTraversal } from '../lib/editorconfig/parser';
|
||||
import mapRulesToMonaco from '../lib/editorconfig/rules_mapper';
|
||||
|
||||
export default {
|
||||
components: {
|
||||
|
@ -32,6 +34,7 @@ export default {
|
|||
return {
|
||||
content: '',
|
||||
images: {},
|
||||
rules: {},
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
|
@ -195,7 +198,7 @@ export default {
|
|||
|
||||
this.editor.clearEditor();
|
||||
|
||||
this.fetchFileData()
|
||||
Promise.all([this.fetchFileData(), this.fetchEditorconfigRules()])
|
||||
.then(() => {
|
||||
this.createEditorInstance();
|
||||
})
|
||||
|
@ -254,6 +257,8 @@ export default {
|
|||
this.editor.attachModel(this.model);
|
||||
}
|
||||
|
||||
this.model.updateOptions(this.rules);
|
||||
|
||||
this.model.onChange(model => {
|
||||
const { file } = model;
|
||||
if (!file.active) return;
|
||||
|
@ -280,12 +285,29 @@ export default {
|
|||
this.setFileLanguage({
|
||||
fileLanguage: this.model.language,
|
||||
});
|
||||
|
||||
this.$emit('editorSetup');
|
||||
},
|
||||
refreshEditorDimensions() {
|
||||
if (this.showEditor) {
|
||||
this.editor.updateDimensions();
|
||||
}
|
||||
},
|
||||
fetchEditorconfigRules() {
|
||||
return getRulesWithTraversal(this.file.path, path => {
|
||||
const entry = this.entries[path];
|
||||
if (!entry) return Promise.resolve(null);
|
||||
|
||||
const content = entry.content || entry.raw;
|
||||
if (content) return Promise.resolve(content);
|
||||
|
||||
return this.getFileData({ path: entry.path, makeFileActive: false }).then(() =>
|
||||
this.getRawFileData({ path: entry.path }),
|
||||
);
|
||||
}).then(rules => {
|
||||
this.rules = mapRulesToMonaco(rules);
|
||||
});
|
||||
},
|
||||
onPaste(event) {
|
||||
const editor = this.editor.instance;
|
||||
const reImage = /^image\/(png|jpg|jpeg|gif)$/;
|
||||
|
|
|
@ -0,0 +1,55 @@
|
|||
import { parseString } from 'editorconfig/src/lib/ini';
import minimatch from 'minimatch';
import { getPathParents } from '../../utils';

// Strip the trailing ".editorconfig" filename, leaving the directory prefix
// (including its trailing slash) that the config file governs.
const dirname = path => path.replace(/\.editorconfig$/, '');

/**
 * Returns true when a parsed .editorconfig declares itself the root config.
 *
 * `config` is the output of `parseString`: an array of
 * `[sectionPattern, rules]` pairs, where the preamble (rules before any
 * `[section]` header) has a falsy pattern. Per the EditorConfig spec,
 * `root = true` in the preamble stops traversal into parent directories.
 */
function isRootConfig(config) {
  return config.some(([pattern, rules]) => !pattern && rules?.root === 'true');
}

/**
 * Returns the rules of one `[pattern]` section if `path` matches the
 * section's glob, otherwise an empty object.
 *
 * The preamble section (no pattern) never contributes rules here — it only
 * carries the `root` flag handled by `isRootConfig`.
 * `matchBase: true` lets a bare pattern like `*.js` match at any depth.
 */
function getRulesForSection(path, [pattern, rules]) {
  if (!pattern) {
    return {};
  }
  if (minimatch(path, pattern, { matchBase: true })) {
    return rules;
  }

  return {};
}

/**
 * Recursively folds a list of .editorconfig files into a single rules object
 * for `filePath`.
 *
 * `configFiles` is ordered nearest-first (deepest directory first); rules
 * already collected from nearer configs take precedence over those from
 * configs further up the tree. Recursion stops at the first config marked
 * `root = true`, or when the list is exhausted.
 *
 * @param {string} filePath - path of the file being edited
 * @param {Array<{content: string, path: string}>} configFiles
 * @param {Object} rules - accumulator of rules gathered so far
 * @returns {Object} merged EditorConfig rules applicable to `filePath`
 */
function getRulesWithConfigs(filePath, configFiles = [], rules = {}) {
  if (!configFiles.length) return rules;

  const [{ content, path: configPath }, ...nextConfigs] = configFiles;
  const configDir = dirname(configPath);

  // A config only applies to files inside its own directory subtree.
  if (!filePath.startsWith(configDir)) return rules;

  const parsed = parseString(content);
  const isRoot = isRootConfig(parsed);
  // Match section globs against the path relative to the config's directory.
  const relativeFilePath = filePath.slice(configDir.length);

  const sectionRules = parsed.reduce(
    (acc, section) => Object.assign(acc, getRulesForSection(relativeFilePath, section)),
    {},
  );

  // Rules from nearer (already-collected) configs win: copying `rules` over
  // `sectionRules` lets existing keys overwrite this config's values.
  const result = Object.assign(sectionRules, rules);

  return isRoot ? result : getRulesWithConfigs(filePath, nextConfigs, result);
}

// eslint-disable-next-line import/prefer-default-export
/**
 * Resolves the EditorConfig rules for `filePath` by probing every ancestor
 * directory (nearest first, repository root last) for a `.editorconfig`.
 *
 * @param {string} filePath - path of the file being edited
 * @param {Function} getFileContent - `path => Promise<string|null>` used to
 *   fetch candidate config contents; should resolve falsy when absent
 * @returns {Promise<Object>} merged rules from all applicable configs
 */
export function getRulesWithTraversal(filePath, getFileContent) {
  const editorconfigPaths = [
    ...getPathParents(filePath).map(x => `${x}/.editorconfig`),
    '.editorconfig',
  ];

  return Promise.all(
    editorconfigPaths.map(path => getFileContent(path).then(content => ({ path, content }))),
  ).then(results => getRulesWithConfigs(filePath, results.filter(x => x.content)));
}
|
|
@ -0,0 +1,33 @@
|
|||
import { isBoolean, isNumber } from 'lodash';

// Each mapper takes a parsed EditorConfig value and returns either a
// single-key Monaco option object or {} when the value is unsupported.

// Maps an enumerated EditorConfig value through a lookup table.
const map = (key, validValues) => value =>
  value in validValues ? { [key]: validValues[value] } : {};

// Accepts only real booleans (parseValue has already converted the strings
// 'true'/'false'); anything else is dropped.
const bool = key => value => (isBoolean(value) ? { [key]: value } : {});

// Accepts numbers passing `isValid`, truncated to an integer.
const int = (key, isValid) => value =>
  isNumber(value) && isValid(value) ? { [key]: Math.trunc(value) } : {};

// EditorConfig property -> Monaco model option translation table.
// Note both indent_size and tab_width feed Monaco's single `tabSize` option;
// endOfLine values 1/0 correspond to Monaco's CRLF/LF sequence constants.
const rulesMapper = {
  indent_style: map('insertSpaces', { tab: false, space: true }),
  indent_size: int('tabSize', n => n > 0),
  tab_width: int('tabSize', n => n > 0),
  trim_trailing_whitespace: bool('trimTrailingWhitespace'),
  end_of_line: map('endOfLine', { crlf: 1, lf: 0 }),
  insert_final_newline: bool('insertFinalNewline'),
};

// Normalizes a raw EditorConfig value: lowercases strings, converts numeric
// strings to numbers and 'true'/'false' to booleans; other values pass
// through unchanged.
const parseValue = x => {
  let value = typeof x === 'string' ? x.toLowerCase() : x;
  if (/^[0-9.-]+$/.test(value)) value = Number(value);
  if (value === 'true') value = true;
  if (value === 'false') value = false;

  return value;
};

/**
 * Converts a flat object of EditorConfig rules into Monaco text-model
 * options, silently skipping unknown properties and invalid values.
 *
 * @param {Object} rules - EditorConfig property/value pairs (values may be
 *   raw strings)
 * @returns {Object} options suitable for Monaco's `model.updateOptions`
 */
export default function mapRulesToMonaco(rules) {
  return Object.entries(rules).reduce((obj, [key, value]) => {
    return Object.assign(obj, rulesMapper[key]?.(parseValue(value)) || {});
  }, {});
}
|
|
@ -15,22 +15,23 @@ import routerModule from './modules/router';
|
|||
|
||||
Vue.use(Vuex);
|
||||
|
||||
export const createStore = () =>
|
||||
new Vuex.Store({
|
||||
state: state(),
|
||||
actions,
|
||||
mutations,
|
||||
getters,
|
||||
modules: {
|
||||
commit: commitModule,
|
||||
pipelines,
|
||||
mergeRequests,
|
||||
branches,
|
||||
fileTemplates: fileTemplates(),
|
||||
rightPane: paneModule(),
|
||||
clientside: clientsideModule(),
|
||||
router: routerModule,
|
||||
},
|
||||
});
|
||||
export const createStoreOptions = () => ({
|
||||
state: state(),
|
||||
actions,
|
||||
mutations,
|
||||
getters,
|
||||
modules: {
|
||||
commit: commitModule,
|
||||
pipelines,
|
||||
mergeRequests,
|
||||
branches,
|
||||
fileTemplates: fileTemplates(),
|
||||
rightPane: paneModule(),
|
||||
clientside: clientsideModule(),
|
||||
router: routerModule,
|
||||
},
|
||||
});
|
||||
|
||||
export const createStore = () => new Vuex.Store(createStoreOptions());
|
||||
|
||||
export default createStore();
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
<script>
import { historyPushState } from '~/lib/utils/common_utils';
import { setUrlParams } from '~/lib/utils/url_utility';

/**
 * Renderless component that mirrors its `query` prop into the browser URL.
 *
 * Whenever `query` changes (watched deeply, and applied once immediately on
 * mount), the current URL's query string is rebuilt from it via
 * `setUrlParams` and pushed onto the history stack with `historyPushState`.
 * Renders only its default slot, adding no DOM of its own.
 */
export default {
  props: {
    // Key/value pairs to serialize into the URL's query string.
    query: {
      type: Object,
      required: true,
    },
  },
  watch: {
    query: {
      // `immediate` syncs the URL on initial render, not just on changes.
      immediate: true,
      // `deep` catches mutations of nested values inside the query object.
      deep: true,
      handler(newQuery) {
        historyPushState(setUrlParams(newQuery, window.location.href, true));
      },
    },
  },
  render() {
    return this.$slots.default;
  },
};
</script>
|
|
@ -57,6 +57,8 @@ class GroupsController < Groups::ApplicationController
|
|||
@group = Groups::CreateService.new(current_user, group_params).execute
|
||||
|
||||
if @group.persisted?
|
||||
track_experiment_event(:onboarding_issues, 'created_namespace')
|
||||
|
||||
notice = if @group.chat_team.present?
|
||||
"Group '#{@group.name}' and its Mattermost team were successfully created."
|
||||
else
|
||||
|
|
|
@ -69,6 +69,7 @@ class RegistrationsController < Devise::RegistrationsController
|
|||
if result[:status] == :success
|
||||
track_experiment_event(:signup_flow, 'end') # We want this event to be tracked when the user is _in_ the experimental group
|
||||
|
||||
track_experiment_event(:onboarding_issues, 'signed_up') if ::Gitlab.com? && !helpers.in_subscription_flow? && !helpers.in_invitation_flow?
|
||||
return redirect_to new_users_sign_up_group_path if experiment_enabled?(:onboarding_issues) && !helpers.in_subscription_flow? && !helpers.in_invitation_flow?
|
||||
|
||||
set_flash_message! :notice, :signed_up
|
||||
|
|
|
@ -27,9 +27,12 @@ module Types
|
|||
authorize: :read_project,
|
||||
resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, snippet.project_id).find }
|
||||
|
||||
# Author can be nil in some scenarios. For example,
|
||||
# when the admin setting restricted visibility
|
||||
# level is set to public
|
||||
field :author, Types::UserType,
|
||||
description: 'The owner of the snippet',
|
||||
null: false,
|
||||
null: true,
|
||||
resolve: -> (snippet, args, context) { Gitlab::Graphql::Loaders::BatchModelLoader.new(User, snippet.author_id).find }
|
||||
|
||||
field :file_name, GraphQL::STRING_TYPE,
|
||||
|
|
|
@ -17,15 +17,23 @@ module ClustersHelper
|
|||
end
|
||||
end
|
||||
|
||||
def js_clusters_list_data(path = nil)
|
||||
{
|
||||
endpoint: path,
|
||||
img_tags: {
|
||||
aws: { path: image_path('illustrations/logos/amazon_eks.svg'), text: s_('ClusterIntegration|Amazon EKS') },
|
||||
default: { path: image_path('illustrations/logos/kubernetes.svg'), text: _('Kubernetes Cluster') },
|
||||
gcp: { path: image_path('illustrations/logos/google_gke.svg'), text: s_('ClusterIntegration|Google GKE') }
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
# This method is deprecated and will be removed when associated HAML files are moved to JavaScript
|
||||
def provider_icon(provider = nil)
|
||||
case provider
|
||||
when 'aws'
|
||||
image_tag 'illustrations/logos/amazon_eks.svg', alt: s_('ClusterIntegration|Amazon EKS'), class: 'gl-h-full'
|
||||
when 'gcp'
|
||||
image_tag 'illustrations/logos/google_gke.svg', alt: s_('ClusterIntegration|Google GKE'), class: 'gl-h-full'
|
||||
else
|
||||
image_tag 'illustrations/logos/kubernetes.svg', alt: _('Kubernetes Cluster'), class: 'gl-h-full'
|
||||
end
|
||||
img_data = js_clusters_list_data.dig(:img_tags, provider&.to_sym) ||
|
||||
js_clusters_list_data.dig(:img_tags, :default)
|
||||
|
||||
image_tag img_data[:path], alt: img_data[:text], class: 'gl-h-full'
|
||||
end
|
||||
|
||||
def render_gcp_signup_offer
|
||||
|
|
|
@ -60,7 +60,8 @@ module EnvironmentsHelper
|
|||
'custom-metrics-path' => project_prometheus_metrics_path(project),
|
||||
'validate-query-path' => validate_query_project_prometheus_metrics_path(project),
|
||||
'custom-metrics-available' => "#{custom_metrics_available?(project)}",
|
||||
'prometheus-alerts-available' => "#{can?(current_user, :read_prometheus_alerts, project)}"
|
||||
'prometheus-alerts-available' => "#{can?(current_user, :read_prometheus_alerts, project)}",
|
||||
'dashboard-timezone' => project.metrics_setting_dashboard_timezone.to_s.upcase
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
@ -2410,6 +2410,10 @@ class Project < ApplicationRecord
|
|||
touch(:last_activity_at, :last_repository_updated_at)
|
||||
end
|
||||
|
||||
def metrics_setting
|
||||
super || build_metrics_setting
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def find_service(services, name)
|
||||
|
|
|
@ -8,6 +8,7 @@ class ClusterEntity < Grape::Entity
|
|||
expose :environment_scope
|
||||
expose :name
|
||||
expose :nodes
|
||||
expose :provider_type
|
||||
expose :status_name, as: :status
|
||||
expose :status_reason
|
||||
expose :applications, using: ClusterApplicationEntity
|
||||
|
|
|
@ -13,6 +13,7 @@ class ClusterSerializer < BaseSerializer
|
|||
:name,
|
||||
:nodes,
|
||||
:path,
|
||||
:provider_type,
|
||||
:status
|
||||
]
|
||||
})
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
= link_to _('More information'), help_page_path('user/group/clusters/index', anchor: 'cluster-precedence')
|
||||
|
||||
- if Feature.enabled?(:clusters_list_redesign)
|
||||
#js-clusters-list-app{ data: { endpoint: clusterable.index_path(format: :json) } }
|
||||
#js-clusters-list-app{ data: js_clusters_list_data(clusterable.index_path(format: :json)) }
|
||||
- else
|
||||
.clusters-table.js-clusters-list
|
||||
.gl-responsive-table-row.table-row-header{ role: "row" }
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Add ability to filter self monitoring resource usage charts by instance name
|
||||
merge_request: 34084
|
||||
author:
|
||||
type: changed
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Support reading .editorconfig files inside of the Web IDE
|
||||
merge_request: 32378
|
||||
author:
|
||||
type: added
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Added provider type icon to cluster list
|
||||
merge_request: 33196
|
||||
author:
|
||||
type: changed
|
|
@ -0,0 +1,5 @@
|
|||
---
|
||||
title: Set author as nullable in snippet GraphQL Type
|
||||
merge_request: 34135
|
||||
author:
|
||||
type: fixed
|
|
@ -17,11 +17,16 @@ module Gitlab
|
|||
class Application < Rails::Application
|
||||
require_dependency Rails.root.join('lib/gitlab')
|
||||
require_dependency Rails.root.join('lib/gitlab/utils')
|
||||
require_dependency Rails.root.join('lib/gitlab/redis/wrapper')
|
||||
require_dependency Rails.root.join('lib/gitlab/redis/cache')
|
||||
require_dependency Rails.root.join('lib/gitlab/redis/queues')
|
||||
require_dependency Rails.root.join('lib/gitlab/redis/shared_state')
|
||||
require_dependency Rails.root.join('lib/gitlab/current_settings')
|
||||
require_dependency Rails.root.join('lib/gitlab/middleware/read_only')
|
||||
require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check')
|
||||
require_dependency Rails.root.join('lib/gitlab/middleware/same_site_cookies')
|
||||
require_dependency Rails.root.join('lib/gitlab/middleware/handle_ip_spoof_attack_error')
|
||||
require_dependency Rails.root.join('lib/gitlab/runtime')
|
||||
|
||||
# Settings in config/environments/* take precedence over those specified here.
|
||||
# Application configuration should go into files in config/initializers
|
||||
|
@ -257,6 +262,17 @@ module Gitlab
|
|||
end
|
||||
end
|
||||
|
||||
# Use caching across all environments
|
||||
# Full list of options:
|
||||
# https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html#method-c-new
|
||||
caching_config_hash = {}
|
||||
caching_config_hash[:redis] = Gitlab::Redis::Cache.pool
|
||||
caching_config_hash[:compress] = Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1'))
|
||||
caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE
|
||||
caching_config_hash[:expires_in] = 2.weeks # Cache should not grow forever
|
||||
|
||||
config.cache_store = :redis_cache_store, caching_config_hash
|
||||
|
||||
config.active_job.queue_adapter = :sidekiq
|
||||
|
||||
# This is needed for gitlab-shell
|
||||
|
|
|
@ -49,6 +49,8 @@ Rails.application.configure do
|
|||
# Do not log asset requests
|
||||
config.assets.quiet = true
|
||||
|
||||
config.allow_concurrency = Gitlab::Runtime.multi_threaded?
|
||||
|
||||
# BetterErrors live shell (REPL) on every stack frame
|
||||
BetterErrors::Middleware.allow_ip!("127.0.0.1/0")
|
||||
|
||||
|
|
|
@ -77,4 +77,6 @@ Rails.application.configure do
|
|||
config.action_mailer.raise_delivery_errors = true
|
||||
|
||||
config.eager_load = true
|
||||
|
||||
config.allow_concurrency = Gitlab::Runtime.multi_threaded?
|
||||
end
|
||||
|
|
|
@ -1075,9 +1075,6 @@ production: &base
|
|||
|
||||
## ActionCable settings
|
||||
action_cable:
|
||||
# Enables handling of ActionCable requests on the Puma web workers
|
||||
# When this is disabled, a standalone ActionCable server must be started
|
||||
in_app: true
|
||||
# Number of threads used to process ActionCable connection callbacks and channel actions
|
||||
# worker_pool_size: 4
|
||||
|
||||
|
|
|
@ -729,7 +729,6 @@ Settings.webpack.dev_server['port'] ||= 3808
|
|||
# ActionCable settings
|
||||
#
|
||||
Settings['action_cable'] ||= Settingslogic.new({})
|
||||
Settings.action_cable['in_app'] ||= false
|
||||
Settings.action_cable['worker_pool_size'] ||= 4
|
||||
|
||||
#
|
||||
|
|
|
@ -1,14 +1,3 @@
|
|||
# Use caching across all environments
|
||||
# Full list of options:
|
||||
# https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html#method-c-new
|
||||
caching_config_hash = {}
|
||||
caching_config_hash[:redis] = Gitlab::Redis::Cache.pool
|
||||
caching_config_hash[:compress] = Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1'))
|
||||
caching_config_hash[:namespace] = Gitlab::Redis::Cache::CACHE_NAMESPACE
|
||||
caching_config_hash[:expires_in] = 2.weeks # Cache should not grow forever
|
||||
|
||||
Gitlab::Application.config.cache_store = :redis_cache_store, caching_config_hash
|
||||
|
||||
# Make sure we initialize a Redis connection pool before multi-threaded
|
||||
# execution starts by
|
||||
# 1. Sidekiq
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
require 'action_cable/subscription_adapter/redis'
|
||||
|
||||
Rails.application.configure do
|
||||
# Mount the ActionCable engine when in-app mode is enabled
|
||||
config.action_cable.mount_path = Gitlab.config.action_cable.in_app ? '/-/cable' : nil
|
||||
|
||||
# We only mount the ActionCable engine in tests where we run it in-app
|
||||
# For other environments, we run it on a standalone Puma server
|
||||
config.action_cable.mount_path = Rails.env.test? ? '/-/cable' : nil
|
||||
config.action_cable.url = Gitlab::Utils.append_path(Gitlab.config.gitlab.relative_url_root, '/-/cable')
|
||||
config.action_cable.worker_pool_size = Gitlab.config.action_cable.worker_pool_size
|
||||
end
|
||||
|
|
|
@ -1,5 +1,14 @@
|
|||
dashboard: 'Default dashboard'
|
||||
priority: 1
|
||||
|
||||
templating:
|
||||
variables:
|
||||
instance:
|
||||
type: 'text'
|
||||
label: 'Instance label regex'
|
||||
options:
|
||||
default_value: '.+'
|
||||
|
||||
panel_groups:
|
||||
|
||||
- group: 'Resource usage'
|
||||
|
@ -9,7 +18,7 @@ panel_groups:
|
|||
y_label: "% memory used"
|
||||
metrics:
|
||||
- id: node_memory_usage_percentage
|
||||
query_range: '(1 - (node_memory_MemAvailable_bytes or node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes + node_memory_Slab_bytes) / node_memory_MemTotal_bytes) * 100'
|
||||
query_range: '(1 - (node_memory_MemAvailable_bytes{instance=~"{{instance}}"} or (node_memory_MemFree_bytes{instance=~"{{instance}}"} + node_memory_Buffers_bytes{instance=~"{{instance}}"} + node_memory_Cached_bytes{instance=~"{{instance}}"} + node_memory_Slab_bytes{instance=~"{{instance}}"})) / node_memory_MemTotal_bytes{instance=~"{{instance}}"}) * 100'
|
||||
unit: "%"
|
||||
label: instance
|
||||
|
||||
|
@ -18,7 +27,7 @@ panel_groups:
|
|||
y_label: "% CPU used"
|
||||
metrics:
|
||||
- id: node_cpu_usage_percentage
|
||||
query_range: '(avg without (mode,cpu) (1 - irate(node_cpu_seconds_total{mode="idle"}[5m]))) * 100'
|
||||
query_range: '(avg without (mode,cpu) (1 - irate(node_cpu_seconds_total{mode="idle",instance=~"{{instance}}"}[5m]))) * 100'
|
||||
unit: "%"
|
||||
label: instance
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@ backtrace
|
|||
backtraced
|
||||
backtraces
|
||||
backtracing
|
||||
badging
|
||||
Bamboo
|
||||
Bitbucket
|
||||
blockquote
|
||||
|
@ -121,6 +122,8 @@ Ecto
|
|||
Elasticsearch
|
||||
enablement
|
||||
enqueued
|
||||
enum
|
||||
enums
|
||||
ETag
|
||||
Excon
|
||||
expirable
|
||||
|
@ -139,6 +142,7 @@ Forgerock
|
|||
Fugit
|
||||
Gantt
|
||||
Gemnasium
|
||||
Gemojione
|
||||
gettext
|
||||
Git
|
||||
Gitaly
|
||||
|
@ -148,6 +152,7 @@ GitLab
|
|||
gitlabsos
|
||||
Gitleaks
|
||||
Gitter
|
||||
globals
|
||||
Gmail
|
||||
Google
|
||||
Gosec
|
||||
|
@ -297,6 +302,7 @@ preloading
|
|||
preloads
|
||||
prepend
|
||||
prepended
|
||||
prepending
|
||||
prepends
|
||||
Pritaly
|
||||
profiler
|
||||
|
|
|
@ -11240,7 +11240,7 @@ type Snippet implements Noteable {
|
|||
"""
|
||||
The owner of the snippet
|
||||
"""
|
||||
author: User!
|
||||
author: User
|
||||
|
||||
"""
|
||||
Snippet blob
|
||||
|
|
|
@ -33183,13 +33183,9 @@
|
|||
|
||||
],
|
||||
"type": {
|
||||
"kind": "NON_NULL",
|
||||
"name": null,
|
||||
"ofType": {
|
||||
"kind": "OBJECT",
|
||||
"name": "User",
|
||||
"ofType": null
|
||||
}
|
||||
"kind": "OBJECT",
|
||||
"name": "User",
|
||||
"ofType": null
|
||||
},
|
||||
"isDeprecated": false,
|
||||
"deprecationReason": null
|
||||
|
|
|
@ -1641,7 +1641,7 @@ Represents a snippet entry
|
|||
|
||||
| Name | Type | Description |
|
||||
| --- | ---- | ---------- |
|
||||
| `author` | User! | The owner of the snippet |
|
||||
| `author` | User | The owner of the snippet |
|
||||
| `blob` | SnippetBlob! | Snippet blob |
|
||||
| `blobs` | SnippetBlob! => Array | Snippet blobs |
|
||||
| `createdAt` | Time! | Timestamp this snippet was created |
|
||||
|
|
|
@ -594,7 +594,7 @@ There are two ways to determine the value of `DOCKER_AUTH_CONFIG`:
|
|||
```
|
||||
|
||||
- **Second way -** In some setups, it's possible that Docker client
|
||||
will use the available system keystore to store the result of `docker
|
||||
will use the available system key store to store the result of `docker
|
||||
login`. In that case, it's impossible to read `~/.docker/config.json`,
|
||||
so you will need to prepare the required base64-encoded version of
|
||||
`${username}:${password}` and create the Docker configuration JSON manually.
|
||||
|
@ -712,7 +712,7 @@ To configure credentials store, follow these steps:
|
|||
```
|
||||
|
||||
- Or, if you are running self-managed Runners, add the above JSON to
|
||||
`${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this config file
|
||||
`${GITLAB_RUNNER_HOME}/.docker/config.json`. GitLab Runner will read this configuration file
|
||||
and will use the needed helper for this specific repository.
|
||||
|
||||
NOTE: **Note:** `credsStore` is used to access ALL the registries.
|
||||
|
@ -761,7 +761,7 @@ To configure access for `aws_account_id.dkr.ecr.region.amazonaws.com`, follow th
|
|||
|
||||
- Or, if you are running self-managed Runners,
|
||||
add the above JSON to `${GITLAB_RUNNER_HOME}/.docker/config.json`.
|
||||
GitLab Runner will read this config file and will use the needed helper for this
|
||||
GitLab Runner will read this configuration file and will use the needed helper for this
|
||||
specific repository.
|
||||
|
||||
1. You can now use any private image from `aws_account_id.dkr.ecr.region.amazonaws.com` defined in
|
||||
|
|
|
@ -109,7 +109,7 @@ parameter in `.gitlab-ci.yml` to use the custom location instead of the default
|
|||
|
||||
Now it's time we set up [GitLab CI/CD](https://about.gitlab.com/stages-devops-lifecycle/continuous-integration/) to automatically build, test and deploy the dependency!
|
||||
|
||||
GitLab CI/CD uses a file in the root of the repo, named `.gitlab-ci.yml`, to read the definitions for jobs
|
||||
GitLab CI/CD uses a file in the root of the repository, named `.gitlab-ci.yml`, to read the definitions for jobs
|
||||
that will be executed by the configured GitLab Runners. You can read more about this file in the [GitLab Documentation](../../yaml/README.md).
|
||||
|
||||
First of all, remember to set up variables for your deployment. Navigate to your project's **Settings > CI/CD > Environment variables** page
|
||||
|
@ -119,7 +119,7 @@ and add the following ones (replace them with your current values, of course):
|
|||
- **MAVEN_REPO_USER**: `gitlab` (your Artifactory username)
|
||||
- **MAVEN_REPO_PASS**: `AKCp2WXr3G61Xjz1PLmYa3arm3yfBozPxSta4taP3SeNu2HPXYa7FhNYosnndFNNgoEds8BCS` (your Artifactory Encrypted Password)
|
||||
|
||||
Now it's time to define jobs in `.gitlab-ci.yml` and push it to the repo:
|
||||
Now it's time to define jobs in `.gitlab-ci.yml` and push it to the repository:
|
||||
|
||||
```yaml
|
||||
image: maven:latest
|
||||
|
@ -154,7 +154,7 @@ deploy:
|
|||
GitLab Runner will use the latest [Maven Docker image](https://hub.docker.com/_/maven/), which already contains all the tools and the dependencies you need to manage the project,
|
||||
in order to run the jobs.
|
||||
|
||||
Environment variables are set to instruct Maven to use the `homedir` of the repo instead of the user's home when searching for configuration and dependencies.
|
||||
Environment variables are set to instruct Maven to use the `homedir` of the repository instead of the user's home when searching for configuration and dependencies.
|
||||
|
||||
Caching the `.m2/repository folder` (where all the Maven files are stored), and the `target` folder (where our application will be created), is useful for speeding up the process
|
||||
by running all Maven phases in a sequential order, therefore, executing `mvn test` will automatically run `mvn compile` if necessary.
|
||||
|
@ -164,7 +164,7 @@ Both `build` and `test` jobs leverage the `mvn` command to compile the applicati
|
|||
Deploy to Artifactory is done as defined by the variables we have just set up.
|
||||
The deployment occurs only if we're pushing or merging to `master` branch, so that the development versions are tested but not published.
|
||||
|
||||
Done! Now you have all the changes in the GitLab repo, and a pipeline has already been started for this commit. In the **Pipelines** tab you can see what's happening.
|
||||
Done! Now you have all the changes in the GitLab repository, and a pipeline has already been started for this commit. In the **Pipelines** tab you can see what's happening.
|
||||
If the deployment has been successful, the deploy job log will output:
|
||||
|
||||
```plaintext
|
||||
|
@ -177,7 +177,7 @@ If the deployment has been successful, the deploy job log will output:
|
|||
>**Note**:
|
||||
the `mvn` command downloads a lot of files from the internet, so you'll see a lot of extra activity in the log the first time you run it.
|
||||
|
||||
Yay! You did it! Checking in Artifactory will confirm that you have a new artifact available in the `libs-release-local` repo.
|
||||
Yay! You did it! Checking in Artifactory will confirm that you have a new artifact available in the `libs-release-local` repository.
|
||||
|
||||
## Create the main Maven application
|
||||
|
||||
|
@ -228,7 +228,7 @@ Here is how you can get the content of the file directly from Artifactory:
|
|||
1. Click on **Generate Maven Settings**
|
||||
1. Click on **Generate Settings**
|
||||
1. Copy to clipboard the configuration file
|
||||
1. Save the file as `.m2/settings.xml` in your repo
|
||||
1. Save the file as `.m2/settings.xml` in your repository
|
||||
|
||||
Now you are ready to use the Artifactory repository to resolve dependencies and use `simple-maven-dep` in your main application!
|
||||
|
||||
|
@ -239,7 +239,7 @@ You need a last step to have everything in place: configure the `.gitlab-ci.yml`
|
|||
You want to leverage [GitLab CI/CD](https://about.gitlab.com/stages-devops-lifecycle/continuous-integration/) to automatically build, test and run your awesome application,
|
||||
and see if you can get the greeting as expected!
|
||||
|
||||
All you need to do is to add the following `.gitlab-ci.yml` to the repo:
|
||||
All you need to do is to add the following `.gitlab-ci.yml` to the repository:
|
||||
|
||||
```yaml
|
||||
image: maven:latest
|
||||
|
|
|
@ -56,7 +56,7 @@ To use different provider take a look at long list of [Supported Providers](http
|
|||
## Using Dpl with Docker
|
||||
|
||||
In most cases, you will have configured [GitLab Runner](https://docs.gitlab.com/runner/) to use your server's shell commands.
|
||||
This means that all commands are run in the context of local user (e.g. gitlab_runner or gitlab_ci_multi_runner).
|
||||
This means that all commands are run in the context of local user (e.g. `gitlab_runner` or `gitlab_ci_multi_runner`).
|
||||
It also means that most probably in your Docker container you don't have the Ruby runtime installed.
|
||||
You will have to install it:
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ All these operations will put all files into a `build` folder, which is ready to
|
|||
|
||||
## How to transfer files to a live server
|
||||
|
||||
You have multiple options: rsync, scp, sftp, and so on. For now, we will use scp.
|
||||
You have multiple options: rsync, SCP, SFTP, and so on. For now, we will use SCP.
|
||||
|
||||
To make this work, you need to add a GitLab CI/CD Variable (accessible on `gitlab.example/your-project-name/variables`). That variable will be called `STAGING_PRIVATE_KEY` and it's the **private** SSH key of your server.
|
||||
|
||||
|
@ -123,7 +123,7 @@ Therefore, for a production environment we use additional steps to ensure that a
|
|||
Since this was a WordPress project, I gave real life code snippets. Some further ideas you can pursue:
|
||||
|
||||
- Having a slightly different script for `master` branch will allow you to deploy to a production server from that branch and to a stage server from any other branches.
|
||||
- Instead of pushing it live, you can push it to WordPress official repo (with creating a SVN commit, etc.).
|
||||
- Instead of pushing it live, you can push it to WordPress official repository (with creating a SVN commit, etc.).
|
||||
- You could generate i18n text domains on the fly.
|
||||
|
||||
---
|
||||
|
|
|
@ -65,7 +65,7 @@ docker-php-ext-install pdo_mysql
|
|||
```
|
||||
|
||||
You might wonder what `docker-php-ext-install` is. In short, it is a script
|
||||
provided by the official php Docker image that you can use to easily install
|
||||
provided by the official PHP Docker image that you can use to easily install
|
||||
extensions. For more information read the documentation at
|
||||
<https://hub.docker.com/_/php>.
|
||||
|
||||
|
@ -174,7 +174,7 @@ Finally, push to GitLab and let the tests begin!
|
|||
### Test against different PHP versions in Shell builds
|
||||
|
||||
The [phpenv](https://github.com/phpenv/phpenv) project allows you to easily manage different versions of PHP
|
||||
each with its own config. This is especially useful when testing PHP projects
|
||||
each with its own configuration. This is especially useful when testing PHP projects
|
||||
with the Shell executor.
|
||||
|
||||
You will have to install it on your build machine under the `gitlab-runner`
|
||||
|
|
|
@ -44,7 +44,7 @@ You can add a command to your `.gitlab-ci.yml` file to
|
|||
| `CI_COMMIT_TITLE` | 10.8 | all | The title of the commit - the full first line of the message |
|
||||
| `CI_CONCURRENT_ID` | all | 11.10 | Unique ID of build execution within a single executor. |
|
||||
| `CI_CONCURRENT_PROJECT_ID` | all | 11.10 | Unique ID of build execution within a single executor and project. |
|
||||
| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI config file. Defaults to `.gitlab-ci.yml` |
|
||||
| `CI_CONFIG_PATH` | 9.4 | 0.5 | The path to CI configuration file. Defaults to `.gitlab-ci.yml` |
|
||||
| `CI_DEBUG_TRACE` | all | 1.7 | Whether [debug logging (tracing)](README.md#debug-logging) is enabled |
|
||||
| `CI_DEFAULT_BRANCH` | 12.4 | all | The name of the default branch for the project. |
|
||||
| `CI_DEPLOY_PASSWORD` | 10.8 | all | Authentication password of the [GitLab Deploy Token](../../user/project/deploy_tokens/index.md#gitlab-deploy-token), only present if the Project has one related. |
|
||||
|
@ -97,7 +97,7 @@ You can add a command to your `.gitlab-ci.yml` file to
|
|||
| `CI_PROJECT_DIR` | all | all | The full path where the repository is cloned and where the job is run. If the GitLab Runner `builds_dir` parameter is set, this variable is set relative to the value of `builds_dir`. For more information, see [Advanced configuration](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section) for GitLab Runner. |
|
||||
| `CI_PROJECT_ID` | all | all | The unique ID of the current project that GitLab CI/CD uses internally |
|
||||
| `CI_PROJECT_NAME` | 8.10 | 0.5 | The name of the directory for the project that is currently being built. For example, if the project URL is `gitlab.example.com/group-name/project-1`, the `CI_PROJECT_NAME` would be `project-1`. |
|
||||
| `CI_PROJECT_NAMESPACE` | 8.10 | 0.5 | The project namespace (username or groupname) that is currently being built |
|
||||
| `CI_PROJECT_NAMESPACE` | 8.10 | 0.5 | The project namespace (username or group name) that is currently being built |
|
||||
| `CI_PROJECT_PATH` | 8.10 | 0.5 | The namespace with project name |
|
||||
| `CI_PROJECT_PATH_SLUG` | 9.3 | all | `$CI_PROJECT_PATH` lowercased and with everything except `0-9` and `a-z` replaced with `-`. Use in URLs and domain names. |
|
||||
| `CI_PROJECT_REPOSITORY_LANGUAGES` | 12.3 | all | Comma-separated, lowercased list of the languages used in the repository (e.g. `ruby,javascript,html,css`) |
|
||||
|
|
|
@ -120,7 +120,7 @@ For instance:
|
|||
|
||||
In order to validate some parameters in the API request, we validate them
|
||||
before sending them further (say Gitaly). The following are the
|
||||
[custom validators](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/api/validations/validators),
|
||||
[custom validators](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/api/validations/validators),
|
||||
which we have added so far and how to use them. We also wrote a
|
||||
guide on how you can add a new custom validator.
|
||||
|
||||
|
|
|
@ -83,7 +83,7 @@ module EE
|
|||
end
|
||||
```
|
||||
|
||||
This looks working as a workaround, however, this approach has some donwside that:
|
||||
This works as a workaround; however, this approach has some downsides:
|
||||
|
||||
- Features could move from EE to FOSS or vice versa. Therefore, the offset might be mixed between FOSS and EE in the future.
|
||||
e.g. When you move `activity_limit_exceeded` to FOSS, you'll see `{ unknown_failure: 0, config_error: 1, activity_limit_exceeded: 1_000 }`.
|
||||
|
|
|
@ -27,7 +27,7 @@ process boundaries, the correlation ID is injected into the outgoing request. Th
|
|||
the propagation of the correlation ID to each downstream subsystem.
|
||||
|
||||
Correlation IDs are normally generated in the Rails application in response to
|
||||
certain webrequests. Some user facing systems don't generate correlation IDs in
|
||||
certain web requests. Some user facing systems don't generate correlation IDs in
|
||||
response to user requests (for example, Git pushes over SSH).
|
||||
|
||||
### Developer guidelines for working with correlation IDs
|
||||
|
|
|
@ -356,7 +356,7 @@ files.
|
|||
```
|
||||
|
||||
This also allows the nav to be displayed on other
|
||||
highest-level dirs (`/omnibus/`, `/runner/`, etc),
|
||||
highest-level directories (`/omnibus/`, `/runner/`, etc),
|
||||
linking them back to `/ee/`.
|
||||
|
||||
The same logic is applied to all sections (`sec[:section_url]`),
|
||||
|
|
|
@ -107,13 +107,13 @@ The pipeline in the `gitlab-docs` project:
|
|||
|
||||
Once a week on Mondays, a scheduled pipeline runs and rebuilds the Docker images
|
||||
used in various pipeline jobs, like `docs-lint`. The Docker image configuration files are
|
||||
located at <https://gitlab.com/gitlab-org/gitlab-docs/-/tree/master/dockerfiles>.
|
||||
located in the [Dockerfiles directory](https://gitlab.com/gitlab-org/gitlab-docs/-/tree/master/dockerfiles).
|
||||
|
||||
If you need to rebuild the Docker images immediately (must have maintainer level permissions):
|
||||
|
||||
CAUTION: **Caution**
|
||||
If you change the dockerfile configuration and rebuild the images, you can break the master
|
||||
pipeline in the main `gitlab` repo as well as in `gitlab-docs`. Create an image with
|
||||
pipeline in the main `gitlab` repository as well as in `gitlab-docs`. Create an image with
|
||||
a different name first and test it to ensure you do not break the pipelines.
|
||||
|
||||
1. In [`gitlab-docs`](https://gitlab.com/gitlab-org/gitlab-docs), go to **{rocket}** **CI / CD > Pipelines**.
|
||||
|
@ -207,22 +207,22 @@ If you don't specify `editor:`, the simple one is used by default.
|
|||
|
||||
## Algolia search engine
|
||||
|
||||
The docs site uses [Algolia docsearch](https://community.algolia.com/docsearch/)
|
||||
The docs site uses [Algolia DocSearch](https://community.algolia.com/docsearch/)
|
||||
for its search function. This is how it works:
|
||||
|
||||
1. GitLab is a member of the [docsearch program](https://community.algolia.com/docsearch/#join-docsearch-program),
|
||||
1. GitLab is a member of the [DocSearch program](https://community.algolia.com/docsearch/#join-docsearch-program),
|
||||
which is the free tier of [Algolia](https://www.algolia.com/).
|
||||
1. Algolia hosts a [DocSearch configuration](https://github.com/algolia/docsearch-configs/blob/master/configs/gitlab.json)
|
||||
for the GitLab docs site, and we've worked together to refine it.
|
||||
1. That [config](https://community.algolia.com/docsearch/config-file.html) is
|
||||
1. That [configuration](https://community.algolia.com/docsearch/config-file.html) is
|
||||
parsed by their [crawler](https://community.algolia.com/docsearch/crawler-overview.html)
|
||||
every 24h and [stores](https://community.algolia.com/docsearch/inside-the-engine.html)
|
||||
the [docsearch index](https://community.algolia.com/docsearch/how-do-we-build-an-index.html)
|
||||
the [DocSearch index](https://community.algolia.com/docsearch/how-do-we-build-an-index.html)
|
||||
on [Algolia's servers](https://community.algolia.com/docsearch/faq.html#where-is-my-data-hosted%3F).
|
||||
1. On the docs side, we use a [docsearch layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/docsearch.html) which
|
||||
1. On the docs side, we use a [DocSearch layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/docsearch.html) which
|
||||
is present on pretty much every page except <https://docs.gitlab.com/search/>,
|
||||
which uses its [own layout](https://gitlab.com/gitlab-org/gitlab-docs/blob/master/layouts/instantsearch.html). In those layouts,
|
||||
there's a JavaScript snippet which initiates docsearch by using an API key
|
||||
there's a JavaScript snippet which initiates DocSearch by using an API key
|
||||
and an index name (`gitlab`) that are needed for Algolia to show the results.
|
||||
|
||||
NOTE: **For GitLab employees:**
|
||||
|
|
|
@ -34,7 +34,7 @@ For additional details on each, see the [template for new docs](#template-for-ne
|
|||
below.
|
||||
|
||||
Note that you can include additional subsections, as appropriate, such as 'How it Works', 'Architecture',
|
||||
and other logical divisions such as pre- and post-deployment steps.
|
||||
and other logical divisions such as pre-deployment and post-deployment steps.
|
||||
|
||||
## Template for new docs
|
||||
|
||||
|
|
|
@ -24,19 +24,19 @@ See the [Elasticsearch GDK setup instructions](https://gitlab.com/gitlab-org/git
|
|||
- `gitlab:elastic:test:index_size`: Tells you how much space the current index is using, as well as how many documents are in the index.
|
||||
- `gitlab:elastic:test:index_size_change`: Outputs index size, reindexes, and outputs index size again. Useful when testing improvements to indexing size.
|
||||
|
||||
Additionally, if you need large repos or multiple forks for testing, please consider [following these instructions](rake_tasks.md#extra-project-seed-options)
|
||||
Additionally, if you need large repositories or multiple forks for testing, please consider [following these instructions](rake_tasks.md#extra-project-seed-options)
|
||||
|
||||
## How does it work?
|
||||
|
||||
The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a Rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [/ee/app/models/concerns/elastic/application_versioned_search.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb).
|
||||
The Elasticsearch integration depends on an external indexer. We ship an [indexer written in Go](https://gitlab.com/gitlab-org/gitlab-elasticsearch-indexer). The user must trigger the initial indexing via a Rake task but, after this is done, GitLab itself will trigger reindexing when required via `after_` callbacks on create, update, and destroy that are inherited from [`/ee/app/models/concerns/elastic/application_versioned_search.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/concerns/elastic/application_versioned_search.rb).
|
||||
|
||||
After initial indexing is complete, create, update, and delete operations for all models except projects (see [#207494](https://gitlab.com/gitlab-org/gitlab/-/issues/207494)) are tracked in a Redis [`ZSET`](https://redis.io/topics/data-types#sorted-sets). A regular `sidekiq-cron` `ElasticIndexBulkCronWorker` processes this queue, updating many Elasticsearch documents at a time with the [Bulk Request API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html).
|
||||
|
||||
Search queries are generated by the concerns found in [ee/app/models/concerns/elastic](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them!
|
||||
Search queries are generated by the concerns found in [`ee/app/models/concerns/elastic`](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/models/concerns/elastic). These concerns are also in charge of access control, and have been a historic source of security bugs so please pay close attention to them!
|
||||
|
||||
## Existing Analyzers/Tokenizers/Filters
|
||||
|
||||
These are all defined in [ee/lib/elastic/latest/config.rb](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/elastic/latest/config.rb)
|
||||
These are all defined in [`ee/lib/elastic/latest/config.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/elastic/latest/config.rb)
|
||||
|
||||
### Analyzers
|
||||
|
||||
|
@ -71,7 +71,7 @@ Not directly used for indexing, but rather used to transform a search input. Use
|
|||
|
||||
#### `sha_tokenizer`
|
||||
|
||||
This is a custom tokenizer that uses the [`edgeNGram` tokenizer](https://www.elastic.co/guide/en/elasticsearch/reference/5.5/analysis-edgengram-tokenizer.html) to allow SHAs to be searcheable by any sub-set of it (minimum of 5 chars).
|
||||
This is a custom tokenizer that uses the [`edgeNGram` tokenizer](https://www.elastic.co/guide/en/elasticsearch/reference/5.5/analysis-edgengram-tokenizer.html) to allow SHAs to be searchable by any subset of it (minimum of 5 chars).
|
||||
|
||||
Example:
|
||||
|
||||
|
@ -149,7 +149,7 @@ These proxy objects would talk to Elasticsearch server directly (see top half of
|
|||
|
||||
![Elasticsearch Architecture](img/elasticsearch_architecture.svg)
|
||||
|
||||
In the planned new design, each model would have a pair of corresponding subclassed proxy objects, in which model-specific logic is located. For example, `Snippet` would have `SnippetClassProxy` and `SnippetInstanceProxy` (being subclass of `Elasticsearch::Model::Proxy::ClassMethodsProxy` and `Elasticsearch::Model::Proxy::InstanceMethodsProxy`, respectively).
|
||||
In the planned new design, each model would have a pair of corresponding sub-classed proxy objects, in which model-specific logic is located. For example, `Snippet` would have `SnippetClassProxy` and `SnippetInstanceProxy` (being subclass of `Elasticsearch::Model::Proxy::ClassMethodsProxy` and `Elasticsearch::Model::Proxy::InstanceMethodsProxy`, respectively).
|
||||
|
||||
`__elasticsearch__` would represent another layer of proxy object, keeping track of multiple actual proxy objects. It would forward method calls to the appropriate index. For example:
|
||||
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
# Axios
|
||||
|
||||
We use [axios](https://github.com/axios/axios) to communicate with the server in Vue applications and most new code.
|
||||
We use [Axios](https://github.com/axios/axios) to communicate with the server in Vue applications and most new code.
|
||||
|
||||
In order to guarantee all defaults are set you *should not use `axios` directly*, you should import `axios` from `axios_utils`.
|
||||
In order to guarantee all defaults are set you *should not use Axios directly*, you should import Axios from `axios_utils`.
|
||||
|
||||
## CSRF token
|
||||
|
||||
All our request require a CSRF token.
|
||||
To guarantee this token is set, we are importing [axios](https://github.com/axios/axios), setting the token, and exporting `axios` .
|
||||
All our requests require a CSRF token.
|
||||
To guarantee this token is set, we are importing [Axios](https://github.com/axios/axios), setting the token, and exporting `axios` .
|
||||
|
||||
This exported module should be used instead of directly using `axios` to ensure the token is set.
|
||||
This exported module should be used instead of directly using Axios to ensure the token is set.
|
||||
|
||||
## Usage
|
||||
|
||||
|
@ -30,7 +30,7 @@ This exported module should be used instead of directly using `axios` to ensure
|
|||
});
|
||||
```
|
||||
|
||||
## Mock axios response in tests
|
||||
## Mock Axios response in tests
|
||||
|
||||
To help us mock the responses we are using [axios-mock-adapter](https://github.com/ctimmerm/axios-mock-adapter).
|
||||
|
||||
|
@ -41,7 +41,7 @@ Advantages over [`spyOn()`](https://jasmine.github.io/api/edge/global.html#spyOn
|
|||
- simple API to test error cases
|
||||
- provides `replyOnce()` to allow for different responses
|
||||
|
||||
We have also decided against using [axios interceptors](https://github.com/axios/axios#interceptors) because they are not suitable for mocking.
|
||||
We have also decided against using [Axios interceptors](https://github.com/axios/axios#interceptors) because they are not suitable for mocking.
|
||||
|
||||
### Example
|
||||
|
||||
|
@ -67,7 +67,7 @@ We have also decided against using [axios interceptors](https://github.com/axios
|
|||
});
|
||||
```
|
||||
|
||||
### Mock poll requests in tests with axios
|
||||
### Mock poll requests in tests with Axios
|
||||
|
||||
Because polling function requires a header object, we need to always include an object as the third argument:
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
Add the `Ajax` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call.
|
||||
|
||||
`Ajax` requires 2 config values, the `endpoint` and `method`.
|
||||
`Ajax` requires 2 configuration values, the `endpoint` and `method`.
|
||||
|
||||
- `endpoint` should be a URL to the request endpoint.
|
||||
- `method` should be `setData` or `addData`.
|
||||
|
|
|
@ -7,7 +7,7 @@ to the dropdown using a simple fuzzy string search of an input value.
|
|||
|
||||
Add the `Filter` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call.
|
||||
|
||||
- `Filter` requires a config value for `template`.
|
||||
- `Filter` requires a configuration value for `template`.
|
||||
- `template` should be the key of the objects within your data array that you want to compare
|
||||
to the user input string, for filtering.
|
||||
|
||||
|
|
|
@ -6,12 +6,12 @@
|
|||
|
||||
Add the `InputSetter` object to the plugins array of a `DropLab.prototype.init` or `DropLab.prototype.addHook` call.
|
||||
|
||||
- `InputSetter` requires a config value for `input` and `valueAttribute`.
|
||||
- `InputSetter` requires a configuration value for `input` and `valueAttribute`.
|
||||
- `input` should be the DOM element that you want to manipulate.
|
||||
- `valueAttribute` should be a string that is the name of an attribute on your list items that is used to get the value
|
||||
to update the `input` element with.
|
||||
|
||||
You can also set the `InputSetter` config to an array of objects, which will allow you to update multiple elements.
|
||||
You can also set the `InputSetter` configuration to an array of objects, which will allow you to update multiple elements.
|
||||
|
||||
```html
|
||||
<input id="input" value="">
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# Emojis
|
||||
|
||||
GitLab supports native unicode emojis and fallsback to image-based emojis selectively
|
||||
GitLab supports native Unicode emojis and falls back to image-based emojis selectively
|
||||
when your platform does not support it.
|
||||
|
||||
## How to update Emojis
|
||||
|
@ -21,7 +21,7 @@ when your platform does not support it.
|
|||
1. Ensure you see new individual images copied into `app/assets/images/emoji/`
|
||||
1. Ensure you can see the new emojis and their aliases in the GFM Autocomplete
|
||||
1. Ensure you can see the new emojis and their aliases in the award emoji menu
|
||||
1. You might need to add new emoji unicode support checks and rules for platforms
|
||||
1. You might need to add new emoji Unicode support checks and rules for platforms
|
||||
that do not support a certain emoji, so we need to fall back to an image.
|
||||
See `app/assets/javascripts/emoji/support/is_emoji_unicode_supported.js`
|
||||
and `app/assets/javascripts/emoji/support/unicode_support_map.js`
|
||||
|
|
|
@ -89,7 +89,7 @@ Default client accepts two parameters: `resolvers` and `config`.
|
|||
## GraphQL Queries
|
||||
|
||||
To save query compilation at runtime, webpack can directly import `.graphql`
|
||||
files. This allows webpack to preprocess the query at compile time instead
|
||||
files. This allows webpack to pre-process the query at compile time instead
|
||||
of the client doing compilation of queries.
|
||||
|
||||
To distinguish queries from mutations and fragments, the following naming convention is recommended:
|
||||
|
|
|
@ -24,7 +24,7 @@ sprite_icon(icon_name, size: nil, css_class: '')
|
|||
- **icon_name** Use the icon_name that you can find in the SVG Sprite
|
||||
([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)).
|
||||
- **size (optional)** Use one of the following sizes : 16, 24, 32, 48, 72 (this will be translated into a `s16` class)
|
||||
- **css_class (optional)** If you want to add additional css classes
|
||||
- **css_class (optional)** If you want to add additional CSS classes
|
||||
|
||||
**Example**
|
||||
|
||||
|
@ -67,8 +67,8 @@ export default {
|
|||
|
||||
- **name** Name of the Icon in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)).
|
||||
- **size (optional)** Number value for the size which is then mapped to a specific CSS class
|
||||
(Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` css classes)
|
||||
- **css-classes (optional)** Additional CSS Classes to add to the svg tag.
|
||||
(Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` CSS classes)
|
||||
- **css-classes (optional)** Additional CSS Classes to add to the SVG tag.
|
||||
|
||||
### Usage in HTML/JS
|
||||
|
||||
|
@ -91,7 +91,7 @@ Please use the class `svg-content` around it to ensure nice rendering.
|
|||
|
||||
### Usage in Vue
|
||||
|
||||
To use an SVG illustrations in a template provide the path as a property and display it through a standard img tag.
|
||||
To use an SVG illustration in a template, provide the path as a property and display it through a standard `img` tag.
|
||||
|
||||
Component:
|
||||
|
||||
|
|
|
@ -6,9 +6,9 @@ To get started with Vue, read through [their documentation](https://vuejs.org/v2
|
|||
|
||||
What is described in the following sections can be found in these examples:
|
||||
|
||||
- web ide: <https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/ide/stores>
|
||||
- security products: <https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/assets/javascripts/vue_shared/security_reports>
|
||||
- registry: <https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/registry/stores>
|
||||
- [Web IDE](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/ide/stores)
|
||||
- [Security products](https://gitlab.com/gitlab-org/gitlab/tree/master/ee/app/assets/javascripts/vue_shared/security_reports)
|
||||
- [Registry](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/app/assets/javascripts/registry/stores)
|
||||
|
||||
## Vue architecture
|
||||
|
||||
|
@ -16,7 +16,7 @@ All new features built with Vue.js must follow a [Flux architecture](https://fac
|
|||
The main goal we are trying to achieve is to have only one data flow and only one data entry.
|
||||
In order to achieve this goal we use [vuex](#vuex).
|
||||
|
||||
You can also read about this architecture in vue docs about [state management](https://vuejs.org/v2/guide/state-management.html#Simple-State-Management-from-Scratch)
|
||||
You can also read about this architecture in Vue docs about [state management](https://vuejs.org/v2/guide/state-management.html#Simple-State-Management-from-Scratch)
|
||||
and about [one way data flow](https://vuejs.org/v2/guide/components.html#One-Way-Data-Flow).
|
||||
|
||||
### Components and Store
|
||||
|
@ -59,7 +59,7 @@ To do that, provide the data through `data` attributes in the HTML element and q
|
|||
_Note:_ You should only do this while initializing the application, because the mounted element will be replaced with Vue-generated DOM.
|
||||
|
||||
The advantage of providing data from the DOM to the Vue instance through `props` in the `render` function
|
||||
instead of querying the DOM inside the main vue component is that makes tests easier by avoiding the need to
|
||||
instead of querying the DOM inside the main Vue component is that it makes tests easier by avoiding the need to
|
||||
create a fixture or an HTML element in the unit test. See the following example:
|
||||
|
||||
```javascript
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
---
|
||||
type: how-tos
|
||||
---
|
||||
|
||||
# Develop on a feature branch
|
||||
|
||||
GitLab values encourage the use of [Minimal Viable Change (MVC)](https://about.gitlab.com/handbook/values/#minimal-viable-change-mvc).
|
||||
However, viable changes are not always small. In such cases, it can help to set up a dedicated feature branch.
|
||||
People can contribute MRs to that feature branch, without affecting the functionality of the default (usually `master`) branch.
|
||||
|
||||
Once work on the development branch is complete, then the feature branch can be finally merged into the default branch.
|
||||
|
||||
GitLab frequently implements this process whenever there is an MVC that requires multiple MRs.
|
||||
|
||||
## Use case: GitLab's release posts
|
||||
|
||||
This section describes the use case with GitLab [release posts](https://about.gitlab.com/handbook/marketing/blog/release-posts/).
|
||||
Dozens of GitLab team members contribute to each monthly release post.
|
||||
In such cases, it may be more efficient to submit an MR on the release post feature branch instead of master.
|
||||
|
||||
In this case, the feature branch would be `release-X-Y`. Assuming the `release-X-Y` branch already exists, you can set up an MR against that branch, with the following steps:
|
||||
|
||||
1. Create a new branch (`test-branch`) against the feature branch (`release-X-Y`):
|
||||
|
||||
```shell
|
||||
git checkout -b test-branch release-X-Y
|
||||
```
|
||||
|
||||
You should now be on a branch named `test-branch`.
|
||||
|
||||
1. Make desired changes on the `test-branch`.
|
||||
1. Add your changes, commit, and push to the `test-branch`:
|
||||
|
||||
```shell
|
||||
git add .
|
||||
```
|
||||
|
||||
1. Commit your changes:
|
||||
|
||||
```shell
|
||||
git commit -m "Some good reason"
|
||||
```
|
||||
|
||||
1. Push your changes to the repository:
|
||||
|
||||
```shell
|
||||
git push --set-upstream origin test-branch
|
||||
```
|
||||
|
||||
1. Navigate to the URL for your repository. In this case, the repository is `www-gitlab-com`, available at `https://gitlab.com/gitlab-com/www-gitlab-com`.
|
||||
|
||||
If needed, sign in to GitLab. You should then see an option to **Create merge request**:
|
||||
|
||||
![Create merge request](img/create_merge_request_v13_1.png)
|
||||
|
||||
1. After you click **Create merge request**, you'll see an option to **Change branches**. Select that option.
|
||||
|
||||
1. In the **New Merge Request** screen, you can now select the **Source** and **Target** branches.
|
||||
In the screenshot shown,
|
||||
we have selected `test-branch` as the source, and `release-13-0` as the target.
|
||||
|
||||
![Modify branches](img/modify_branches_v13_1.png)
|
||||
|
||||
1. Once you've selected the Source and Target branches, click **Compare branches and continue**.
|
||||
You should see an entry similar to:
|
||||
|
||||
```plaintext
|
||||
New Merge Request
|
||||
|
||||
From test-branch into release-13-0
|
||||
```
|
||||
|
||||
An entry like this confirms that your MR will **not** merge into master.
|
||||
|
||||
1. Make any additional changes in the **New Merge Request** screen, and click **Submit merge request**.
|
||||
1. In the new merge request, look for **Request to merge**. You'll see an entry similar to:
|
||||
|
||||
```plaintext
|
||||
Request to merge test-branch into release-13-0
|
||||
```
|
||||
|
||||
That confirms you've set up the MR to merge into the specified branch, not master.
|
||||
|
||||
1. Proceed with the change as you would with any other MR.
|
||||
1. When your MR is approved, and an appropriate user merges that MR, you can rest assured that your work is incorporated directly into the feature branch.
|
||||
When the feature branch is ready, it can then be merged into master.
|
Binary file not shown.
After Width: | Height: | Size: 16 KiB |
Binary file not shown.
After Width: | Height: | Size: 73 KiB |
|
@ -68,6 +68,7 @@ If you have problems with Git, the following may help:
|
|||
## Branching strategies
|
||||
|
||||
- [Feature branch workflow](../../gitlab-basics/feature_branch_workflow.md)
|
||||
- [Develop on a feature branch](feature_branch_development.md)
|
||||
- [GitLab Flow](../gitlab_flow.md)
|
||||
- [Git Branching - Branches in a Nutshell](https://git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell)
|
||||
- [Git Branching - Branching Workflows](https://git-scm.com/book/en/v2/Git-Branching-Branching-Workflows)
|
||||
|
|
|
@ -59,7 +59,7 @@ After this basic package structure is created, we need to tag it in Git and push
|
|||
|
||||
```shell
|
||||
git init
|
||||
add composer.json
|
||||
git add composer.json
|
||||
git commit -m 'Composer package test'
|
||||
git tag v1.0.0
|
||||
git remote add origin git@gitlab.com:<namespace>/<project-name>.git
|
||||
|
|
|
@ -23,9 +23,6 @@ GitLab [administrators](../administration/index.md) receive all permissions.
|
|||
To add or import a user, you can follow the
|
||||
[project members documentation](project/members/index.md).
|
||||
|
||||
For information on eligible approvers for Merge Requests, see
|
||||
[Eligible approvers](project/merge_requests/merge_request_approvals.md#eligible-approvers).
|
||||
|
||||
## Principles behind permissions
|
||||
|
||||
See our [product handbook on permissions](https://about.gitlab.com/handbook/product/#permissions-in-gitlab)
|
||||
|
@ -99,6 +96,7 @@ The following table depicts the various user permission levels in a project.
|
|||
| Assign merge requests | | | ✓ | ✓ | ✓ |
|
||||
| Label merge requests | | | ✓ | ✓ | ✓ |
|
||||
| Lock merge request threads | | | ✓ | ✓ | ✓ |
|
||||
| Approve merge requests (*9*) | | | ✓ | ✓ | ✓ |
|
||||
| Manage/Accept merge requests | | | ✓ | ✓ | ✓ |
|
||||
| Create new environments | | | ✓ | ✓ | ✓ |
|
||||
| Stop environments | | | ✓ | ✓ | ✓ |
|
||||
|
@ -177,6 +175,8 @@ The following table depicts the various user permission levels in a project.
|
|||
1. Guest users can access GitLab [**Releases**](project/releases/index.md) for downloading assets but are not allowed to download the source code nor see repository information like tags and commits.
|
||||
1. Actions are limited only to records owned (referenced) by user.
|
||||
1. When [Share Group Lock](./group/index.md#share-with-group-lock) is enabled the project can't be shared with other groups. It does not affect group with group sharing.
|
||||
1. For information on eligible approvers for merge requests, see
|
||||
[Eligible approvers](project/merge_requests/merge_request_approvals.md#eligible-approvers).
|
||||
|
||||
## Project features permissions
|
||||
|
||||
|
|
|
@ -73,19 +73,27 @@ be used for merge request approvals:
|
|||
- As [merge request eligible approvers](merge_requests/merge_request_approvals.md#code-owners-as-eligible-approvers).
|
||||
- As required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium). **(PREMIUM)**
|
||||
|
||||
NOTE: **Note**:
|
||||
Developer or higher [permissions](../permissions.md) are required in order to
|
||||
approve a merge request.
|
||||
|
||||
Once set, Code Owners are displayed in merge requests widgets:
|
||||
|
||||
![MR widget - Code Owners](img/code_owners_mr_widget_v12_4.png)
|
||||
|
||||
NOTE: **Note**:
|
||||
While the`CODEOWNERS` file can be used in addition to Merge Request [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules) it can also be used as the sole driver of a Merge Request approval (without using [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules)) by simply creating the file in one of the three locations specified above, configuring the Code Owners to be required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium) and then using [the syntax of Code Owners files](code_owners.md#the-syntax-of-code-owners-files) to specify the actual owners and granular permissions.
|
||||
While the `CODEOWNERS` file can be used in addition to Merge Request [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules)
|
||||
it can also be used as the sole driver of merge request approvals
|
||||
(without using [Approval Rules](merge_requests/merge_request_approvals.md#approval-rules)).
|
||||
To do so, create the file in one of the three locations specified above and
|
||||
set the code owners as required approvers for [protected branches](protected_branches.md#protected-branches-approval-by-code-owners-premium).
|
||||
Use [the syntax of Code Owners files](code_owners.md#the-syntax-of-code-owners-files)
|
||||
to specify the actual owners and granular permissions.
|
||||
|
||||
NOTE: **Note**:
|
||||
Using Code Owners in conjunction with [Protected Branches Approvals](protected_branches.md#protected-branches-approval-by-code-owners-premium)
|
||||
will prevent any user who is not specified in the `CODEOWNERS` file from pushing changes
|
||||
Using Code Owners in conjunction with [Protected Branches Approvals](protected_branches.md#protected-branches-approval-by-code-owners-premium)
|
||||
will prevent any user who is not specified in the `CODEOWNERS` file from pushing changes
|
||||
for the specified files/paths, even if their role is included in the **Allowed to push** column.
|
||||
This allows for a more inclusive push strategy, as administrators don't have to restrict developers
|
||||
from pushing directly to the protected branch, but can restrict pushing to certain
|
||||
from pushing directly to the protected branch, but can restrict pushing to certain
|
||||
files where a review by Code Owners is required.
|
||||
|
||||
## The syntax of Code Owners files
|
||||
|
|
|
@ -1,25 +1,34 @@
|
|||
# Custom Issue Tracker Service
|
||||
# Custom Issue Tracker service
|
||||
|
||||
To enable the Custom Issue Tracker integration in a project, navigate to the
|
||||
[Integrations page](overview.md#accessing-integrations), click
|
||||
the **Customer Issue Tracker** service, and fill in the required details on the page as described
|
||||
in the table below. You will be able to edit the title and description later as well.
|
||||
To enable the Custom Issue Tracker integration in a project:
|
||||
|
||||
| Field | Description |
|
||||
| ----- | ----------- |
|
||||
| `title` | A title for the issue tracker (to differentiate between instances, for example). |
|
||||
| `description` | A name for the issue tracker (to differentiate between instances, for example) |
|
||||
| `project_url` | The URL to the project in the custom issue tracker. |
|
||||
| `issues_url` | The URL to the issue in the issue tracker project that is linked to this GitLab project. Note that the `issues_url` requires `:id` in the URL. This ID is used by GitLab as a placeholder to replace the issue number. For example, `https://customissuetracker.com/project-name/:id`. |
|
||||
| `new_issue_url` | Currently unused. Will be changed in a future release. |
|
||||
1. Go to **{settings}** **Settings > Integrations**.
|
||||
1. Click **Custom Issue Tracker**
|
||||
1. Fill in the tracker's details, such as title, description, and URLs.
|
||||
You will be able to edit these fields later as well.
|
||||
|
||||
Once you have configured and enabled Custom Issue Tracker Service you'll see a link on the GitLab project pages that takes you to that custom issue tracker.
|
||||
These are some of the required fields:
|
||||
|
||||
| Field | Description |
|
||||
| --------------- | ----------- |
|
||||
| **Title** | A title for the issue tracker (for example, to differentiate between instances). |
|
||||
| **Description** | A name for the issue tracker (for example, to differentiate between instances). |
|
||||
| **Project URL** | The URL to the project in the custom issue tracker. |
|
||||
| **Issues URL** | The URL to the issue in the issue tracker project that is linked to this GitLab project. Note that the `issues_url` requires `:id` in the URL. This ID is used by GitLab as a placeholder to replace the issue number. For example, `https://customissuetracker.com/project-name/:id`. |
|
||||
| **New issue URL** | Currently unused. Will be changed in a future release. |
|
||||
|
||||
1. Click **Test settings and save changes**.
|
||||
|
||||
After you configure and enable the Custom Issue Tracker service, you'll see a link on the GitLab
|
||||
project pages that takes you to that custom issue tracker.
|
||||
|
||||
## Referencing issues
|
||||
|
||||
- Issues are referenced with `ANYTHING-<ID>`, where `ANYTHING` can be any string in CAPS and `<ID>`
|
||||
is a number used in the target project of the custom integration (for example, `PROJECT-143`).
|
||||
- `ANYTHING` is a placeholder to differentiate against GitLab issues, which are referenced with `#<ID>`. You can use a project name or project key to replace it for example.
|
||||
- When building the hyperlink, the `ANYTHING` part is ignored, and links always point to the address
|
||||
Issues are referenced with `<ANYTHING>-<ID>` (for example, `PROJECT-143`), where `<ANYTHING>` can be any string in CAPS, and `<ID>`
|
||||
is a number used in the target project of the custom integration.
|
||||
|
||||
`<ANYTHING>` is a placeholder to differentiate against GitLab issues, which are referenced with `#<ID>`. You can use a project name or project key to replace it for example.
|
||||
|
||||
When building the hyperlink, the `<ANYTHING>` part is ignored, and links always point to the address
|
||||
specified in `issues_url`, so in the example above, `PROJECT-143` would refer to
|
||||
`https://customissuetracker.com/project-name/143`.
|
||||
|
|
|
@ -34,10 +34,12 @@ minimum number of required approvers can still be set in the [project settings f
|
|||
|
||||
### Eligible approvers
|
||||
|
||||
The following can approve merge requests:
|
||||
The following users can approve merge requests:
|
||||
|
||||
- Users being added as approvers at project or merge request level.
|
||||
- [Code owners](#code-owners-as-eligible-approvers) to the files changed by the merge request.
|
||||
- Users who have been added as approvers at the project or merge request levels with
|
||||
developer or higher [permissions](../../permissions.md).
|
||||
- [Code owners](#code-owners-as-eligible-approvers) of the files changed by the merge request
|
||||
that have developer or higher [permissions](../../permissions.md).
|
||||
|
||||
An individual user can be added as an approver for a project if they are a member of:
|
||||
|
||||
|
@ -68,7 +70,7 @@ were not explicitly listed in the approval rules.
|
|||
|
||||
If you add [Code Owners](../code_owners.md) to your repository, the owners to the
|
||||
corresponding files will become eligible approvers, together with members with Developer
|
||||
or higher permissions.
|
||||
or higher [permissions](../../permissions.md).
|
||||
|
||||
To enable this merge request approval rule:
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def puma?
|
||||
!!defined?(::Puma)
|
||||
!!defined?(::Puma) && !defined?(ACTION_CABLE_SERVER)
|
||||
end
|
||||
|
||||
# For unicorn, we need to check for actual server instances to avoid false positives.
|
||||
|
@ -70,11 +70,11 @@ module Gitlab
|
|||
end
|
||||
|
||||
def web_server?
|
||||
puma? || unicorn?
|
||||
puma? || unicorn? || action_cable?
|
||||
end
|
||||
|
||||
def action_cable?
|
||||
web_server? && (!!defined?(ACTION_CABLE_SERVER) || Gitlab.config.action_cable.in_app)
|
||||
!!defined?(ACTION_CABLE_SERVER)
|
||||
end
|
||||
|
||||
def multi_threaded?
|
||||
|
@ -82,21 +82,19 @@ module Gitlab
|
|||
end
|
||||
|
||||
def max_threads
|
||||
threads = 1 # main thread
|
||||
main_thread = 1
|
||||
|
||||
if puma?
|
||||
threads += Puma.cli_config.options[:max_threads]
|
||||
if action_cable?
|
||||
Gitlab::Application.config.action_cable.worker_pool_size
|
||||
elsif puma?
|
||||
Puma.cli_config.options[:max_threads]
|
||||
elsif sidekiq?
|
||||
# An extra thread for the poller in Sidekiq Cron:
|
||||
# https://github.com/ondrejbartas/sidekiq-cron#under-the-hood
|
||||
threads += Sidekiq.options[:concurrency] + 1
|
||||
end
|
||||
|
||||
if action_cable?
|
||||
threads += Gitlab.config.action_cable.worker_pool_size
|
||||
end
|
||||
|
||||
threads
|
||||
Sidekiq.options[:concurrency] + 1
|
||||
else
|
||||
0
|
||||
end + main_thread
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -79,6 +79,7 @@
|
|||
"diff": "^3.4.0",
|
||||
"document-register-element": "1.14.3",
|
||||
"dropzone": "^4.2.0",
|
||||
"editorconfig": "^0.15.3",
|
||||
"emoji-regex": "^7.0.3",
|
||||
"emoji-unicode-version": "^0.2.1",
|
||||
"exports-loader": "^0.7.0",
|
||||
|
@ -103,6 +104,7 @@
|
|||
"marked": "^0.3.12",
|
||||
"mermaid": "^8.5.1",
|
||||
"mersenne-twister": "1.1.0",
|
||||
"minimatch": "^3.0.4",
|
||||
"mitt": "^1.2.0",
|
||||
"monaco-editor": "^0.18.1",
|
||||
"monaco-editor-webpack-plugin": "^1.7.0",
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# Exit early if we don't want to build the image
|
||||
if [[ "${BUILD_ASSETS_IMAGE}" != "true" ]]
|
||||
then
|
||||
exit 0
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Generate the image name based on the project this is being run in
|
||||
|
|
|
@ -16,6 +16,10 @@ module Trigger
|
|||
%w[gitlab gitlab-ee].include?(ENV['CI_PROJECT_NAME'])
|
||||
end
|
||||
|
||||
def self.security?
|
||||
%r{\Agitlab-org/security(\z|/)}.match?(ENV['CI_PROJECT_NAMESPACE'])
|
||||
end
|
||||
|
||||
def self.non_empty_variable_value(variable)
|
||||
variable_value = ENV[variable]
|
||||
|
||||
|
@ -26,6 +30,9 @@ module Trigger
|
|||
|
||||
class Base
|
||||
def invoke!(post_comment: false, downstream_job_name: nil)
|
||||
# gitlab-bot's token "GitLab multi-project pipeline polling"
|
||||
Gitlab.private_token = access_token
|
||||
|
||||
pipeline_variables = variables
|
||||
|
||||
puts "Triggering downstream pipeline on #{downstream_project_path}"
|
||||
|
@ -40,7 +47,7 @@ module Trigger
|
|||
puts "Triggered downstream pipeline: #{pipeline.web_url}\n"
|
||||
puts "Waiting for downstream pipeline status"
|
||||
|
||||
Trigger::CommitComment.post!(pipeline, access_token) if post_comment
|
||||
Trigger::CommitComment.post!(pipeline) if post_comment
|
||||
downstream_job =
|
||||
if downstream_job_name
|
||||
Gitlab.pipeline_jobs(downstream_project_path, pipeline.id).auto_paginate.find do |potential_job|
|
||||
|
@ -49,9 +56,9 @@ module Trigger
|
|||
end
|
||||
|
||||
if downstream_job
|
||||
Trigger::Job.new(downstream_project_path, downstream_job.id, access_token)
|
||||
Trigger::Job.new(downstream_project_path, downstream_job.id)
|
||||
else
|
||||
Trigger::Pipeline.new(downstream_project_path, pipeline.id, access_token)
|
||||
Trigger::Pipeline.new(downstream_project_path, pipeline.id)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -140,6 +147,7 @@ module Trigger
|
|||
{
|
||||
'GITLAB_VERSION' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
|
||||
'ALTERNATIVE_SOURCES' => 'true',
|
||||
'SECURITY_SOURCES' => Trigger.security? ? 'true' : 'false',
|
||||
'ee' => Trigger.ee? ? 'true' : 'false',
|
||||
'QA_BRANCH' => ENV['QA_BRANCH'] || 'master'
|
||||
}
|
||||
|
@ -197,9 +205,7 @@ module Trigger
|
|||
end
|
||||
|
||||
class CommitComment
|
||||
def self.post!(downstream_pipeline, access_token)
|
||||
Gitlab.private_token = access_token
|
||||
|
||||
def self.post!(downstream_pipeline)
|
||||
Gitlab.create_commit_comment(
|
||||
ENV['CI_PROJECT_PATH'],
|
||||
Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
|
||||
|
@ -214,7 +220,7 @@ module Trigger
|
|||
INTERVAL = 60 # seconds
|
||||
MAX_DURATION = 3600 * 3 # 3 hours
|
||||
|
||||
attr_reader :project, :id, :api_token
|
||||
attr_reader :project, :id
|
||||
|
||||
def self.unscoped_class_name
|
||||
name.split('::').last
|
||||
|
@ -224,14 +230,10 @@ module Trigger
|
|||
unscoped_class_name.downcase
|
||||
end
|
||||
|
||||
def initialize(project, id, api_token)
|
||||
def initialize(project, id)
|
||||
@project = project
|
||||
@id = id
|
||||
@api_token = api_token
|
||||
@start = Time.now.to_i
|
||||
|
||||
# gitlab-bot's token "GitLab multi-project pipeline polling"
|
||||
Gitlab.private_token = api_token
|
||||
end
|
||||
|
||||
def wait!
|
||||
|
|
|
@ -301,6 +301,66 @@ RSpec.describe GroupsController do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'tracking group creation for onboarding issues experiment' do
|
||||
before do
|
||||
sign_in(user)
|
||||
end
|
||||
|
||||
subject(:create_namespace) { post :create, params: { group: { name: 'new_group', path: 'new_group' } } }
|
||||
|
||||
context 'experiment disabled' do
|
||||
before do
|
||||
stub_experiment(onboarding_issues: false)
|
||||
end
|
||||
|
||||
it 'does not track anything' do
|
||||
expect(Gitlab::Tracking).not_to receive(:event)
|
||||
|
||||
create_namespace
|
||||
end
|
||||
end
|
||||
|
||||
context 'experiment enabled' do
|
||||
before do
|
||||
stub_experiment(onboarding_issues: true)
|
||||
end
|
||||
|
||||
context 'and the user is part of the control group' do
|
||||
before do
|
||||
stub_experiment_for_user(onboarding_issues: false)
|
||||
end
|
||||
|
||||
it 'tracks the event with the "created_namespace" action with the "control_group" property' do
|
||||
expect(Gitlab::Tracking).to receive(:event).with(
|
||||
'Growth::Conversion::Experiment::OnboardingIssues',
|
||||
'created_namespace',
|
||||
label: anything,
|
||||
property: 'control_group'
|
||||
)
|
||||
|
||||
create_namespace
|
||||
end
|
||||
end
|
||||
|
||||
context 'and the user is part of the experimental group' do
|
||||
before do
|
||||
stub_experiment_for_user(onboarding_issues: true)
|
||||
end
|
||||
|
||||
it 'tracks the event with the "created_namespace" action with the "experimental_group" property' do
|
||||
expect(Gitlab::Tracking).to receive(:event).with(
|
||||
'Growth::Conversion::Experiment::OnboardingIssues',
|
||||
'created_namespace',
|
||||
label: anything,
|
||||
property: 'experimental_group'
|
||||
)
|
||||
|
||||
create_namespace
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET #index' do
|
||||
|
|
|
@ -9,5 +9,8 @@ import 'monaco-editor/esm/vs/language/json/monaco.contribution';
|
|||
import 'monaco-editor/esm/vs/language/html/monaco.contribution';
|
||||
import 'monaco-editor/esm/vs/basic-languages/monaco.contribution';
|
||||
|
||||
// This language starts trying to spin up web workers which obviously breaks in Jest environment
|
||||
jest.mock('monaco-editor/esm/vs/language/typescript/tsMode');
|
||||
|
||||
export * from 'monaco-editor/esm/vs/editor/editor.api';
|
||||
export default global.monaco;
|
||||
|
|
|
@ -14,6 +14,13 @@ describe('Clusters', () => {
|
|||
|
||||
const endpoint = 'some/endpoint';
|
||||
|
||||
const entryData = {
|
||||
endpoint,
|
||||
imgTagsAwsText: 'AWS Icon',
|
||||
imgTagsDefaultText: 'Default Icon',
|
||||
imgTagsGcpText: 'GCP Icon',
|
||||
};
|
||||
|
||||
const findLoader = () => wrapper.find(GlLoadingIcon);
|
||||
const findPaginatedButtons = () => wrapper.find(GlPagination);
|
||||
const findTable = () => wrapper.find(GlTable);
|
||||
|
@ -24,7 +31,7 @@ describe('Clusters', () => {
|
|||
};
|
||||
|
||||
const mountWrapper = () => {
|
||||
store = ClusterStore({ endpoint });
|
||||
store = ClusterStore(entryData);
|
||||
wrapper = mount(Clusters, { store });
|
||||
return axios.waitForAll();
|
||||
};
|
||||
|
@ -87,6 +94,23 @@ describe('Clusters', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('cluster icon', () => {
|
||||
it.each`
|
||||
providerText | lineNumber
|
||||
${'GCP Icon'} | ${0}
|
||||
${'AWS Icon'} | ${1}
|
||||
${'Default Icon'} | ${2}
|
||||
${'Default Icon'} | ${3}
|
||||
${'Default Icon'} | ${4}
|
||||
${'Default Icon'} | ${5}
|
||||
`('renders provider image and alt text for each cluster', ({ providerText, lineNumber }) => {
|
||||
const images = findTable().findAll('.js-status img');
|
||||
const image = images.at(lineNumber);
|
||||
|
||||
expect(image.attributes('alt')).toBe(providerText);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cluster status', () => {
|
||||
it.each`
|
||||
statusName | lineNumber | result
|
||||
|
|
|
@ -3,6 +3,7 @@ export const clusterList = [
|
|||
name: 'My Cluster 1',
|
||||
environment_scope: '*',
|
||||
cluster_type: 'group_type',
|
||||
provider_type: 'gcp',
|
||||
status: 'creating',
|
||||
nodes: null,
|
||||
},
|
||||
|
@ -10,6 +11,7 @@ export const clusterList = [
|
|||
name: 'My Cluster 2',
|
||||
environment_scope: 'development',
|
||||
cluster_type: 'project_type',
|
||||
provider_type: 'aws',
|
||||
status: 'unreachable',
|
||||
nodes: [
|
||||
{
|
||||
|
@ -22,6 +24,7 @@ export const clusterList = [
|
|||
name: 'My Cluster 3',
|
||||
environment_scope: 'development',
|
||||
cluster_type: 'project_type',
|
||||
provider_type: 'none',
|
||||
status: 'authentication_failure',
|
||||
nodes: [
|
||||
{
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,146 @@
|
|||
export const exampleConfigs = [
|
||||
{
|
||||
path: 'foo/bar/baz/.editorconfig',
|
||||
content: `
|
||||
[*]
|
||||
tab_width = 6
|
||||
indent_style = tab
|
||||
`,
|
||||
},
|
||||
{
|
||||
path: 'foo/bar/.editorconfig',
|
||||
content: `
|
||||
root = false
|
||||
|
||||
[*]
|
||||
indent_size = 5
|
||||
indent_style = space
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*_spec.{js,py}]
|
||||
end_of_line = crlf
|
||||
`,
|
||||
},
|
||||
{
|
||||
path: 'foo/.editorconfig',
|
||||
content: `
|
||||
[*]
|
||||
tab_width = 4
|
||||
indent_style = tab
|
||||
`,
|
||||
},
|
||||
{
|
||||
path: '.editorconfig',
|
||||
content: `
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_size = 3
|
||||
indent_style = space
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
|
||||
[*.js]
|
||||
indent_size = 2
|
||||
indent_style = space
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.txt]
|
||||
end_of_line = crlf
|
||||
`,
|
||||
},
|
||||
{
|
||||
path: 'foo/bar/root/.editorconfig',
|
||||
content: `
|
||||
root = true
|
||||
|
||||
[*]
|
||||
tab_width = 1
|
||||
indent_style = tab
|
||||
`,
|
||||
},
|
||||
];
|
||||
|
||||
export const exampleFiles = [
|
||||
{
|
||||
path: 'foo/bar/root/README.md',
|
||||
rules: {
|
||||
indent_style: 'tab', // foo/bar/root/.editorconfig
|
||||
tab_width: '1', // foo/bar/root/.editorconfig
|
||||
},
|
||||
monacoRules: {
|
||||
insertSpaces: false,
|
||||
tabSize: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
path: 'foo/bar/baz/my_spec.js',
|
||||
rules: {
|
||||
end_of_line: 'crlf', // foo/bar/.editorconfig (for _spec.js files)
|
||||
indent_size: '5', // foo/bar/.editorconfig
|
||||
indent_style: 'tab', // foo/bar/baz/.editorconfig
|
||||
insert_final_newline: 'true', // .editorconfig
|
||||
tab_width: '6', // foo/bar/baz/.editorconfig
|
||||
trim_trailing_whitespace: 'true', // .editorconfig (for .js files)
|
||||
},
|
||||
monacoRules: {
|
||||
endOfLine: 1,
|
||||
insertFinalNewline: true,
|
||||
insertSpaces: false,
|
||||
tabSize: 6,
|
||||
trimTrailingWhitespace: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
path: 'foo/my_file.js',
|
||||
rules: {
|
||||
end_of_line: 'lf', // .editorconfig
|
||||
indent_size: '2', // .editorconfig (for .js files)
|
||||
indent_style: 'tab', // foo/.editorconfig
|
||||
insert_final_newline: 'true', // .editorconfig
|
||||
tab_width: '4', // foo/.editorconfig
|
||||
trim_trailing_whitespace: 'true', // .editorconfig (for .js files)
|
||||
},
|
||||
monacoRules: {
|
||||
endOfLine: 0,
|
||||
insertFinalNewline: true,
|
||||
insertSpaces: false,
|
||||
tabSize: 4,
|
||||
trimTrailingWhitespace: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
path: 'foo/my_file.md',
|
||||
rules: {
|
||||
end_of_line: 'lf', // .editorconfig
|
||||
indent_size: '3', // .editorconfig
|
||||
indent_style: 'tab', // foo/.editorconfig
|
||||
insert_final_newline: 'true', // .editorconfig
|
||||
tab_width: '4', // foo/.editorconfig
|
||||
},
|
||||
monacoRules: {
|
||||
endOfLine: 0,
|
||||
insertFinalNewline: true,
|
||||
insertSpaces: false,
|
||||
tabSize: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
path: 'foo/bar/my_file.txt',
|
||||
rules: {
|
||||
end_of_line: 'crlf', // .editorconfig (for .txt files)
|
||||
indent_size: '5', // foo/bar/.editorconfig
|
||||
indent_style: 'space', // foo/bar/.editorconfig
|
||||
insert_final_newline: 'true', // .editorconfig
|
||||
tab_width: '4', // foo/.editorconfig
|
||||
trim_trailing_whitespace: 'true', // foo/bar/.editorconfig
|
||||
},
|
||||
monacoRules: {
|
||||
endOfLine: 1,
|
||||
insertFinalNewline: true,
|
||||
insertSpaces: true,
|
||||
tabSize: 4,
|
||||
trimTrailingWhitespace: true,
|
||||
},
|
||||
},
|
||||
];
|
|
@ -0,0 +1,18 @@
|
|||
import { getRulesWithTraversal } from '~/ide/lib/editorconfig/parser';
|
||||
import { exampleConfigs, exampleFiles } from './mock_data';
|
||||
|
||||
describe('~/ide/lib/editorconfig/parser', () => {
|
||||
const getExampleConfigContent = path =>
|
||||
Promise.resolve(exampleConfigs.find(x => x.path === path)?.content);
|
||||
|
||||
describe('getRulesWithTraversal', () => {
|
||||
it.each(exampleFiles)(
|
||||
'traverses through all editorconfig files in parent directories (until root=true is hit) and finds rules for this file (case %#)',
|
||||
({ path, rules }) => {
|
||||
return getRulesWithTraversal(path, getExampleConfigContent).then(result => {
|
||||
expect(result).toEqual(rules);
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,43 @@
|
|||
import mapRulesToMonaco from '~/ide/lib/editorconfig/rules_mapper';
|
||||
|
||||
describe('mapRulesToMonaco', () => {
|
||||
const multipleEntries = {
|
||||
input: { indent_style: 'tab', indent_size: '4', insert_final_newline: 'true' },
|
||||
output: { insertSpaces: false, tabSize: 4, insertFinalNewline: true },
|
||||
};
|
||||
|
||||
// tab width takes precedence
|
||||
const tabWidthAndIndent = {
|
||||
input: { indent_style: 'tab', indent_size: '4', tab_width: '3' },
|
||||
output: { insertSpaces: false, tabSize: 3 },
|
||||
};
|
||||
|
||||
it.each`
|
||||
rule | monacoOption
|
||||
${{ indent_style: 'tab' }} | ${{ insertSpaces: false }}
|
||||
${{ indent_style: 'space' }} | ${{ insertSpaces: true }}
|
||||
${{ indent_style: 'unset' }} | ${{}}
|
||||
${{ indent_size: '4' }} | ${{ tabSize: 4 }}
|
||||
${{ indent_size: '4.4' }} | ${{ tabSize: 4 }}
|
||||
${{ indent_size: '0' }} | ${{}}
|
||||
${{ indent_size: '-10' }} | ${{}}
|
||||
${{ indent_size: 'NaN' }} | ${{}}
|
||||
${{ tab_width: '4' }} | ${{ tabSize: 4 }}
|
||||
${{ tab_width: '5.4' }} | ${{ tabSize: 5 }}
|
||||
${{ tab_width: '-10' }} | ${{}}
|
||||
${{ trim_trailing_whitespace: 'true' }} | ${{ trimTrailingWhitespace: true }}
|
||||
${{ trim_trailing_whitespace: 'false' }} | ${{ trimTrailingWhitespace: false }}
|
||||
${{ trim_trailing_whitespace: 'unset' }} | ${{}}
|
||||
${{ end_of_line: 'lf' }} | ${{ endOfLine: 0 }}
|
||||
${{ end_of_line: 'crlf' }} | ${{ endOfLine: 1 }}
|
||||
${{ end_of_line: 'cr' }} | ${{}}
|
||||
${{ end_of_line: 'unset' }} | ${{}}
|
||||
${{ insert_final_newline: 'true' }} | ${{ insertFinalNewline: true }}
|
||||
${{ insert_final_newline: 'false' }} | ${{ insertFinalNewline: false }}
|
||||
${{ insert_final_newline: 'unset' }} | ${{}}
|
||||
${multipleEntries.input} | ${multipleEntries.output}
|
||||
${tabWidthAndIndent.input} | ${tabWidthAndIndent.output}
|
||||
`('correctly maps editorconfig rule to monaco option: $rule', ({ rule, monacoOption }) => {
|
||||
expect(mapRulesToMonaco(rule)).toEqual(monacoOption);
|
||||
});
|
||||
});
|
|
@ -16,6 +16,44 @@ describe GitlabSchema.types['Snippet'] do
|
|||
expect(described_class).to have_graphql_fields(*expected_fields)
|
||||
end
|
||||
|
||||
context 'when restricted visibility level is set to public' do
|
||||
let_it_be(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
|
||||
|
||||
let(:current_user) { user }
|
||||
let(:query) do
|
||||
%(
|
||||
{
|
||||
snippets {
|
||||
nodes {
|
||||
author {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
let(:response) { subject.dig('data', 'snippets', 'nodes')[0] }
|
||||
|
||||
subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
|
||||
|
||||
before do
|
||||
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
|
||||
end
|
||||
|
||||
it 'returns snippet author' do
|
||||
expect(response['author']).to be_present
|
||||
end
|
||||
|
||||
context 'when user is not logged in' do
|
||||
let(:current_user) { nil }
|
||||
|
||||
it 'returns snippet author as nil' do
|
||||
expect(response['author']).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'authorizations' do
|
||||
specify { expect(described_class).to require_graphql_authorizations(:read_snippet) }
|
||||
end
|
||||
|
|
|
@ -59,6 +59,22 @@ describe ClustersHelper do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#js_clusters_list_data' do
|
||||
it 'displays endpoint path and images' do
|
||||
js_data = helper.js_clusters_list_data('/path')
|
||||
|
||||
expect(js_data[:endpoint]).to eq('/path')
|
||||
|
||||
expect(js_data.dig(:img_tags, :aws, :path)).to match(%r(/illustrations/logos/amazon_eks|svg))
|
||||
expect(js_data.dig(:img_tags, :default, :path)).to match(%r(/illustrations/logos/kubernetes|svg))
|
||||
expect(js_data.dig(:img_tags, :gcp, :path)).to match(%r(/illustrations/logos/google_gke|svg))
|
||||
|
||||
expect(js_data.dig(:img_tags, :aws, :text)).to eq('Amazon EKS')
|
||||
expect(js_data.dig(:img_tags, :default, :text)).to eq('Kubernetes Cluster')
|
||||
expect(js_data.dig(:img_tags, :gcp, :text)).to eq('Google GKE')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#provider_icon' do
|
||||
it 'will return GCP logo with gcp argument' do
|
||||
logo = helper.provider_icon('gcp')
|
||||
|
|
|
@ -48,45 +48,18 @@ describe Gitlab::Runtime do
|
|||
before do
|
||||
stub_const('::Puma', puma_type)
|
||||
allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2)
|
||||
stub_config(action_cable: { in_app: false })
|
||||
end
|
||||
|
||||
it_behaves_like "valid runtime", :puma, 3
|
||||
|
||||
context "when ActionCable in-app mode is enabled" do
|
||||
before do
|
||||
stub_config(action_cable: { in_app: true, worker_pool_size: 3 })
|
||||
end
|
||||
|
||||
it_behaves_like "valid runtime", :puma, 6
|
||||
end
|
||||
|
||||
context "when ActionCable standalone is run" do
|
||||
before do
|
||||
stub_const('ACTION_CABLE_SERVER', true)
|
||||
stub_config(action_cable: { worker_pool_size: 8 })
|
||||
end
|
||||
|
||||
it_behaves_like "valid runtime", :puma, 11
|
||||
end
|
||||
end
|
||||
|
||||
context "unicorn" do
|
||||
before do
|
||||
stub_const('::Unicorn', Module.new)
|
||||
stub_const('::Unicorn::HttpServer', Class.new)
|
||||
stub_config(action_cable: { in_app: false })
|
||||
end
|
||||
|
||||
it_behaves_like "valid runtime", :unicorn, 1
|
||||
|
||||
context "when ActionCable in-app mode is enabled" do
|
||||
before do
|
||||
stub_config(action_cable: { in_app: true, worker_pool_size: 3 })
|
||||
end
|
||||
|
||||
it_behaves_like "valid runtime", :unicorn, 4
|
||||
end
|
||||
end
|
||||
|
||||
context "sidekiq" do
|
||||
|
@ -132,4 +105,17 @@ describe Gitlab::Runtime do
|
|||
|
||||
it_behaves_like "valid runtime", :rails_runner, 1
|
||||
end
|
||||
|
||||
context "action_cable" do
|
||||
before do
|
||||
stub_const('ACTION_CABLE_SERVER', true)
|
||||
stub_const('::Puma', Module.new)
|
||||
|
||||
allow(Gitlab::Application).to receive_message_chain(:config, :action_cable, :worker_pool_size).and_return(8)
|
||||
end
|
||||
|
||||
it "reports its maximum concurrency based on ActionCable's worker pool size" do
|
||||
expect(subject.max_threads).to eq(9)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -6064,6 +6064,14 @@ describe Project do
|
|||
it { is_expected.not_to include(user) }
|
||||
end
|
||||
|
||||
describe "#metrics_setting" do
|
||||
let(:project) { build(:project) }
|
||||
|
||||
it 'creates setting if it does not exist' do
|
||||
expect(project.metrics_setting).to be_an_instance_of(ProjectMetricsSetting)
|
||||
end
|
||||
end
|
||||
|
||||
def finish_job(export_job)
|
||||
export_job.start
|
||||
export_job.finish
|
||||
|
|
|
@ -16,6 +16,7 @@ describe ClusterSerializer do
|
|||
:name,
|
||||
:nodes,
|
||||
:path,
|
||||
:provider_type,
|
||||
:status)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -19,7 +19,7 @@ module ImportExport
|
|||
end
|
||||
|
||||
def setup_reader(reader)
|
||||
if reader == :ndjson_reader && Feature.enabled?(:project_import_ndjson)
|
||||
if reader == :ndjson_reader && Feature.enabled?(:project_import_ndjson, default_enabled: true)
|
||||
allow_any_instance_of(Gitlab::ImportExport::JSON::LegacyReader::File).to receive(:exist?).and_return(false)
|
||||
allow_any_instance_of(Gitlab::ImportExport::JSON::NdjsonReader).to receive(:exist?).and_return(true)
|
||||
else
|
||||
|
|
Loading…
Reference in New Issue