Add latest changes from gitlab-org/gitlab@master

commit dcc096f219 (parent 4a17cae545)
@@ -130,25 +130,23 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
   FAILED_VERIFICATION_INDEX_NAME = "index_cool_widget_states_failed_verification"
   NEEDS_VERIFICATION_INDEX_NAME = "index_cool_widget_states_needs_verification"
 
-  disable_ddl_transaction!
+  enable_lock_retries!
 
   def up
-    with_lock_retries do
-      create_table :cool_widget_states, id: false do |t|
-        t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
-        t.integer :verification_state, default: 0, limit: 2, null: false
-        t.column :verification_started_at, :datetime_with_timezone
-        t.datetime_with_timezone :verification_retry_at
-        t.datetime_with_timezone :verified_at
-        t.integer :verification_retry_count, limit: 2
-        t.binary :verification_checksum, using: 'verification_checksum::bytea'
-        t.text :verification_failure, limit: 255
-
-        t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
-        t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
-        t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
-        t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
-      end
+    create_table :cool_widget_states, id: false do |t|
+      t.datetime_with_timezone :verification_started_at
+      t.datetime_with_timezone :verification_retry_at
+      t.datetime_with_timezone :verified_at
+      t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
+      t.integer :verification_state, default: 0, limit: 2, null: false
+      t.integer :verification_retry_count, limit: 2
+      t.binary :verification_checksum, using: 'verification_checksum::bytea'
+      t.text :verification_failure, limit: 255
+
+      t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
+      t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
+      t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
+      t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
     end
   end
 
@@ -489,7 +487,7 @@ That's all of the required database changes.
 module Geo
   class CoolWidgetState < ApplicationRecord
     include EachBatch
-
+
     self.primary_key = :cool_widget_id
 
     belongs_to :cool_widget, inverse_of: :cool_widget_state
@@ -132,25 +132,23 @@ The Geo primary site needs to checksum every replicable so secondaries can verif
   FAILED_VERIFICATION_INDEX_NAME = "index_cool_widget_states_failed_verification"
   NEEDS_VERIFICATION_INDEX_NAME = "index_cool_widget_states_needs_verification"
 
-  disable_ddl_transaction!
+  enable_lock_retries!
 
   def up
-    with_lock_retries do
-      create_table :cool_widget_states, id: false do |t|
-        t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
-        t.integer :verification_state, default: 0, limit: 2, null: false
-        t.column :verification_started_at, :datetime_with_timezone
-        t.datetime_with_timezone :verification_retry_at
-        t.datetime_with_timezone :verified_at
-        t.integer :verification_retry_count, limit: 2
-        t.binary :verification_checksum, using: 'verification_checksum::bytea'
-        t.text :verification_failure, limit: 255
-
-        t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
-        t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
-        t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
-        t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
-      end
+    create_table :cool_widget_states, id: false do |t|
+      t.datetime_with_timezone :verification_started_at
+      t.datetime_with_timezone :verification_retry_at
+      t.datetime_with_timezone :verified_at
+      t.references :cool_widget, primary_key: true, null: false, foreign_key: { on_delete: :cascade }
+      t.integer :verification_state, default: 0, limit: 2, null: false
+      t.integer :verification_retry_count, limit: 2
+      t.binary :verification_checksum, using: 'verification_checksum::bytea'
+      t.text :verification_failure, limit: 255
+
+      t.index :verification_state, name: VERIFICATION_STATE_INDEX_NAME
+      t.index :verified_at, where: "(verification_state = 0)", order: { verified_at: 'ASC NULLS FIRST' }, name: PENDING_VERIFICATION_INDEX_NAME
+      t.index :verification_retry_at, where: "(verification_state = 3)", order: { verification_retry_at: 'ASC NULLS FIRST' }, name: FAILED_VERIFICATION_INDEX_NAME
+      t.index :verification_state, where: "(verification_state = 0 OR verification_state = 3)", name: NEEDS_VERIFICATION_INDEX_NAME
     end
   end
 
@@ -453,7 +451,7 @@ That's all of the required database changes.
 module Geo
   class CoolWidgetState < ApplicationRecord
     include EachBatch
-
+
     self.primary_key = :cool_widget_id
 
     belongs_to :cool_widget, inverse_of: :cool_widget_state
@@ -70,6 +70,7 @@ import {
   getInitialPageParams,
   getSortKey,
   getSortOptions,
+  isSortKey,
 } from '../utils';
 import NewIssueDropdown from './new_issue_dropdown.vue';
 
@@ -138,7 +139,8 @@ export default {
     const state = getParameterByName(PARAM_STATE);
     const defaultSortKey = state === IssuableStates.Closed ? UPDATED_DESC : CREATED_DESC;
     const dashboardSortKey = getSortKey(this.initialSort);
-    const graphQLSortKey = this.initialSort?.toUpperCase();
+    const graphQLSortKey =
+      isSortKey(this.initialSort?.toUpperCase()) && this.initialSort.toUpperCase();
 
     // The initial sort is an old enum value when it is saved on the dashboard issues page.
     // The initial sort is a GraphQL enum value when it is saved on the Vue issues list page.
@@ -50,6 +50,8 @@ export const getInitialPageParams = (sortKey) =>
 export const getSortKey = (sort) =>
   Object.keys(urlSortParams).find((key) => urlSortParams[key] === sort);
 
+export const isSortKey = (sort) => Object.keys(urlSortParams).includes(sort);
+
 export const getDueDateValue = (value) => (DUE_DATE_VALUES.includes(value) ? value : undefined);
 
 export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature) => {
@@ -0,0 +1,121 @@
+/**
+ * This file adds a merge function to be used with a yaml Document as defined by
+ * the yaml@2.x package: https://eemeli.org/yaml/#yaml
+ *
+ * Ultimately, this functionality should be merged upstream into the package,
+ * track the progress of that effort at https://github.com/eemeli/yaml/pull/347
+ * */
+
+import { visit, Scalar, isCollection, isDocument, isScalar, isNode, isMap, isSeq } from 'yaml';
+
+function getPath(ancestry) {
+  return ancestry.reduce((p, { key }) => {
+    return key !== undefined ? [...p, key.value] : p;
+  }, []);
+}
+
+function getFirstChildNode(collection) {
+  let firstChildKey;
+  let type;
+  switch (collection.constructor.name) {
+    case 'YAMLSeq': // eslint-disable-line @gitlab/require-i18n-strings
+      return collection.items.find((i) => isNode(i));
+    case 'YAMLMap': // eslint-disable-line @gitlab/require-i18n-strings
+      firstChildKey = collection.items[0]?.key;
+      if (!firstChildKey) return undefined;
+      return isScalar(firstChildKey) ? firstChildKey : new Scalar(firstChildKey);
+    default:
+      type = collection.constructor?.name || typeof collection;
+      throw Error(`Cannot identify a child Node for type ${type}`);
+  }
+}
+
+function moveMetaPropsToFirstChildNode(collection) {
+  const firstChildNode = getFirstChildNode(collection);
+  const { comment, commentBefore, spaceBefore } = collection;
+  if (!(comment || commentBefore || spaceBefore)) return;
+  if (!firstChildNode)
+    throw new Error('Cannot move meta properties to a child of an empty Collection'); // eslint-disable-line @gitlab/require-i18n-strings
+  Object.assign(firstChildNode, { comment, commentBefore, spaceBefore });
+  Object.assign(collection, {
+    comment: undefined,
+    commentBefore: undefined,
+    spaceBefore: undefined,
+  });
+}
+
+function assert(isTypeFn, node, path) {
+  if (![isSeq, isMap].includes(isTypeFn)) {
+    throw new Error('assert() can only be used with isSeq() and isMap()');
+  }
+  const expectedTypeName = isTypeFn === isSeq ? 'YAMLSeq' : 'YAMLMap'; // eslint-disable-line @gitlab/require-i18n-strings
+  if (!isTypeFn(node)) {
+    const type = node?.constructor?.name || typeof node;
+    throw new Error(
+      `Type conflict at "${path.join(
+        '.',
+      )}": Destination node is of type ${type}, the node to be merged is of type ${expectedTypeName}.`,
+    );
+  }
+}
+
+function mergeCollection(target, node, path) {
+  // In case both the source and the target node have comments or spaces
+  // We'll move them to their first child so they do not conflict
+  moveMetaPropsToFirstChildNode(node);
+  if (target.hasIn(path)) {
+    const targetNode = target.getIn(path, true);
+    assert(isSeq(node) ? isSeq : isMap, targetNode, path);
+    moveMetaPropsToFirstChildNode(targetNode);
+  }
+}
+
+function mergePair(target, node, path) {
+  if (!isScalar(node.value)) return undefined;
+  if (target.hasIn([...path, node.key.value])) {
+    target.setIn(path, node);
+  } else {
+    target.addIn(path, node);
+  }
+  return visit.SKIP;
+}
+
+function getVisitorFn(target, options) {
+  return {
+    Map: (_, node, ancestors) => {
+      mergeCollection(target, node, getPath(ancestors));
+    },
+    Pair: (_, node, ancestors) => {
+      mergePair(target, node, getPath(ancestors));
+    },
+    Seq: (_, node, ancestors) => {
+      const path = getPath(ancestors);
+      mergeCollection(target, node, path);
+      if (options.onSequence === 'replace') {
+        target.setIn(path, node);
+        return visit.SKIP;
+      }
+      node.items.forEach((item) => target.addIn(path, item));
+      return visit.SKIP;
+    },
+  };
+}
+
+/** Merge another collection into this */
+export function merge(target, source, options = {}) {
+  const opt = {
+    onSequence: 'replace',
+    ...options,
+  };
+  const sourceNode = target.createNode(isDocument(source) ? source.contents : source);
+  if (!isCollection(sourceNode)) {
+    const type = source?.constructor?.name || typeof source;
+    throw new Error(`Cannot merge type "${type}", expected a Collection`);
+  }
+  if (!isCollection(target.contents)) {
+    // If the target doc is empty add the source to it directly
+    Object.assign(target, { contents: sourceNode });
+    return;
+  }
+  visit(sourceNode, getVisitorFn(target, opt));
+}
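Editorial note: the new module ships without an inline usage example, so here is a minimal sketch (not part of the commit) of how `merge` is meant to be called, assuming only the yaml@2 `parseDocument` API and the `onSequence` option handled above:

```javascript
import { parseDocument } from 'yaml';
import { merge } from '~/lib/utils/yaml';

const target = parseDocument('stages:\n  - build\n');
const source = parseDocument('stages:\n  - test\nvariables:\n  FOO: "bar"\n');

// Sequences are replaced by default ('replace'); 'append' concatenates the
// source items onto the target sequence instead.
merge(target, source, { onSequence: 'append' });

// Expected output (sketch):
// stages:
//   - build
//   - test
// variables:
//   FOO: "bar"
console.log(target.toString());
```

The spec file added later in this commit exercises the same behavior in more depth, including comment preservation and the type-conflict error.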
@@ -15,6 +15,9 @@ module Ci
     end
 
     scope :by_namespace_id, -> (namespace_id) { where(namespace_id: namespace_id) }
+    scope :namespace_id_from_traversal_ids, -> do
+      select('ci_namespace_mirrors.traversal_ids[array_length(ci_namespace_mirrors.traversal_ids, 1)] AS namespace_id')
+    end
 
     class << self
       def sync!(event)
@@ -54,7 +54,7 @@ class ResourceLabelEvent < ResourceEvent
   end
 
   def banzai_render_context(field)
-    super.merge(pipeline: :label, only_path: true)
+    super.merge(pipeline: :label, only_path: true, label_url_method: label_url_method)
   end
 
   def refresh_invalid_reference
@@ -91,6 +91,10 @@ class ResourceLabelEvent < ResourceEvent
     end
   end
 
+  def label_url_method
+    issuable.is_a?(MergeRequest) ? :project_merge_requests_url : :project_issues_url
+  end
+
   def expire_etag_cache
     issuable.expire_note_etag_cache
   end
@@ -2230,20 +2230,32 @@ class User < ApplicationRecord
   end
 
   def ci_owned_project_runners_from_group_members
-    Ci::RunnerProject
+    cte_project_ids = Gitlab::SQL::CTE.new(
+      :cte_project_ids,
+      Ci::ProjectMirror
+        .select(:project_id)
+        .joins('JOIN ci_namespace_mirrors ON ci_namespace_mirrors.traversal_ids[array_length(ci_namespace_mirrors.traversal_ids, 1)] = ci_project_mirrors.namespace_id')
+        .merge(ci_namespace_mirrors_for_group_members(Gitlab::Access::MAINTAINER))
+    )
+
+    Ci::Runner
       .select('ci_runners.*')
-      .joins(:runner)
-      .joins('JOIN ci_project_mirrors ON ci_project_mirrors.project_id = ci_runner_projects.project_id')
-      .joins('JOIN ci_namespace_mirrors ON ci_namespace_mirrors.namespace_id = ci_project_mirrors.namespace_id')
-      .merge(ci_namespace_mirrors_for_group_members(Gitlab::Access::MAINTAINER))
+      .joins(:runner_projects)
+      .where('ci_runner_projects.project_id IN (SELECT project_id FROM cte_project_ids)')
+      .with(cte_project_ids.to_arel)
   end
 
   def ci_owned_group_runners
-    Ci::RunnerNamespace
+    cte_namespace_ids = Gitlab::SQL::CTE.new(
+      :cte_namespace_ids,
+      ci_namespace_mirrors_for_group_members(Gitlab::Access::OWNER).namespace_id_from_traversal_ids
+    )
+
+    Ci::Runner
       .select('ci_runners.*')
-      .joins(:runner)
-      .joins('JOIN ci_namespace_mirrors ON ci_namespace_mirrors.namespace_id = ci_runner_namespaces.namespace_id')
-      .merge(ci_namespace_mirrors_for_group_members(Gitlab::Access::OWNER))
+      .joins(:runner_namespaces)
+      .where('ci_runner_namespaces.namespace_id IN (SELECT namespace_id FROM cte_namespace_ids)')
+      .with(cte_namespace_ids.to_arel)
   end
 
   def ci_namespace_mirrors_for_group_members(level)
@@ -16,7 +16,7 @@
       = logo_text
     - if Gitlab.com_and_canary?
       = link_to Gitlab::Saas.canary_toggle_com_url, class: 'canary-badge bg-transparent', data: { qa_selector: 'canary_badge_link' }, target: :_blank, rel: 'noopener noreferrer' do
-        = gl_badge_tag({ variant: :success }) do
+        = gl_badge_tag({ variant: :success, size: :sm }) do
          = _('Next')
 
 - if current_user
@@ -44,7 +44,7 @@ and the documentation for it has been removed from the official page. This
 means that the original reason to use Docker Machine is no longer valid too.
 
 To keep supporting our customers and the wider community we need to design a
-new mechanism for GitLab Runner autoscaling. It not only needs to support
+new mechanism for GitLab Runner auto-scaling. It not only needs to support
 auto-scaling, but it also needs to do that in the way to enable us to build on
 top of it to improve efficiency, reliability and availability.
 
@@ -144,7 +144,7 @@ on a single machine bring. It is difficult to predict that, so ideally we
 should build a PoC that will help us to better understand what we can expect
 from this.
 
-To run this experiement we most likely we will need to build an experimental
+To run this experiment we most likely we will need to build an experimental
 plugin, that not only allows us to schedule running multiple builds on a single
 machine, but also has a set of comprehensive metrics built into it, to make it
 easier to understand how it performs.
@@ -204,7 +204,7 @@ document, define requirements and score the solution accordingly. This will
 allow us to choose a solution that will work best for us and the wider
 community.
 
-### Plugin system design principles
+### Design principles
 
 Our goal is to design a GitLab Runner plugin system interface that is flexible
 and simple for the wider community to consume. As we cannot build plugins for
@@ -215,7 +215,16 @@ To achieve this goal, we will follow a few critical design principles. These
 principles will guide our development process for the new plugin system
 abstraction.
 
-General high-level principles:
+#### General high-level principles
+
+1. Design the new auto-scaling architecture aiming for having more choices and
+   flexibility in the future, instead of imposing new constraints.
+1. Design the new auto-scaling architecture to experiment with running multiple
+   jobs in parallel, on a single machine.
+1. Design the new provisioning architecture to replace Docker Machine in a way
+   that the wider community can easily build on top of the new abstractions.
+
+#### Principles for the new plugin system
 
 1. Make the entry barrier for writing a new plugin low.
 1. Developing a new plugin should be simple and require only basic knowledge of
@@ -227,7 +236,7 @@ General high-level principles:
 1. Invest in a flexible solution, avoid one-way-door decisions, foster iteration.
 1. When in doubts err on the side of making things more simple for the wider community.
 
-A few most important technical details:
+#### The most important technical details
 
 1. Favor gRPC communication between a plugin and GitLab Runner.
 1. Make it possible to version communication interface and support many versions.
@@ -513,7 +513,7 @@ Feature.remove(:feature_flag_name)
 
 - Any change behind a feature flag **disabled** by default **should not** have a changelog entry.
   - **Exception:** database migrations **should** have a changelog entry.
-- Any change related to a feature flag itself (flag removal, default-on setting) **should** have a changelog entry.
+- Any change related to a feature flag itself (flag removal, default-on setting) **should** have [a changelog entry](../changelog.md).
 Use the flowchart to determine the changelog entry type.
 
 ```mermaid
@@ -141,6 +141,34 @@ docker stop gitlab-gitaly-cluster praefect postgres gitaly3 gitaly2 gitaly1
 docker rm gitlab-gitaly-cluster praefect postgres gitaly3 gitaly2 gitaly1
 ```
 
+## Tests that require a runner
+
+To execute tests that use a runner without errors, while creating the GitLab Docker instance the `--hostname` parameter in the Docker `run` command should be given a specific interface IP address or a non-loopback hostname accessible from the runner container. Having `localhost` (or `127.0.0.1`) as the GitLab hostname won't work (unless the GitLab Runner is created with the Docker network as `host`)
+
+Examples of tests which require a runner:
+
+- `qa/qa/specs/features/ee/browser_ui/13_secure/create_merge_request_with_secure_spec.rb`
+- `qa/qa/specs/features/browser_ui/4_verify/runner/register_runner_spec.rb`
+
+Example:
+
+```shell
+docker run \
+  --detach \
+  --hostname interface_ip_address \
+  --publish 80:80 \
+  --name gitlab \
+  --restart always \
+  --volume ~/ee_volume/config:/etc/gitlab \
+  --volume ~/ee_volume/logs:/var/log/gitlab \
+  --volume ~/ee_volume/data:/var/opt/gitlab \
+  --shm-size 256m \
+  gitlab/gitlab-ee:latest
+```
+
+Where `interface_ip_address` is your local network's interface IP, which you can find with the `ifconfig` command.
+The same would apply to GDK running with the instance address as `localhost` too.
+
 ## Guide to run and debug Monitor tests
 
 ### How to set up
@@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
 
 # Provision GitLab Cloud Native Hybrid on AWS EKS **(FREE SELF)**
 
-GitLab "Cloud Native Hybrid" is a hybrid of the cloud native technology Kubernetes (EKS) and EC2. While as much of the GitLab application as possible runs in Kubernetes or on AWS services (PaaS), the GitLab service Gitaly must still be run on Ec2. Gitaly is a layer designed to overcome limitations of the Git binaries in a horizontally scaled architecture. You can read more here about why Gitaly was built and why the limitations of Git mean that it must currently run on instance compute in [Git Characteristics That Make Horizontal Scaling Difficult](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/DESIGN.md#git-characteristics-that-make-horizontal-scaling-difficult).
+GitLab "Cloud Native Hybrid" is a hybrid of the cloud native technology Kubernetes (EKS) and EC2. While as much of the GitLab application as possible runs in Kubernetes or on AWS services (PaaS), the GitLab service Gitaly must still be run on EC2. Gitaly is a layer designed to overcome limitations of the Git binaries in a horizontally scaled architecture. You can read more here about why Gitaly was built and why the limitations of Git mean that it must currently run on instance compute in [Git Characteristics That Make Horizontal Scaling Difficult](https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/DESIGN.md#git-characteristics-that-make-horizontal-scaling-difficult).
 
 Amazon provides a managed Kubernetes service offering known as [Amazon Elastic Kubernetes Service (EKS)](https://aws.amazon.com/eks/).
 
@@ -9,7 +9,7 @@ module Gitlab
         end
 
         def satisfied_by?(pipeline, context)
-          return true if pipeline.modified_paths.nil?
+          return true unless pipeline&.modified_paths
 
           expanded_globs = expand_globs(context)
           pipeline.modified_paths.any? do |path|
@@ -8,7 +8,7 @@ module Gitlab
          @expression = expression
        end
 
-        def satisfied_by?(pipeline, context)
+        def satisfied_by?(_pipeline, context)
          ::Gitlab::Ci::Pipeline::Expression::Statement.new(
            @expression, context.variables_hash).truthful?
        end
@@ -202,7 +202,7 @@
     "webpack-stats-plugin": "^0.3.1",
     "worker-loader": "^2.0.0",
     "xterm": "3.14.5",
-    "yaml": "^2.0.0-8"
+    "yaml": "^2.0.0-10"
   },
   "devDependencies": {
     "@babel/plugin-transform-modules-commonjs": "^7.10.1",
@@ -343,6 +343,20 @@ describe('CE IssuesListApp component', () => {
       });
     });
 
+    describe('when initial sort value is invalid', () => {
+      it.each(['', 'asdf', null, undefined])(
+        'initial sort is set to value CREATED_DESC',
+        (sort) => {
+          wrapper = mountComponent({ provide: { initialSort: sort } });
+
+          expect(findIssuableList().props()).toMatchObject({
+            initialSortBy: CREATED_DESC,
+            urlParams: { sort: urlSortParams[CREATED_DESC] },
+          });
+        },
+      );
+    });
+
     describe('when sort is manual and issue repositioning is disabled', () => {
       beforeEach(() => {
         wrapper = mountComponent({
@@ -24,6 +24,7 @@ import {
   getInitialPageParams,
   getSortKey,
   getSortOptions,
+  isSortKey,
 } from '~/issues/list/utils';
 
 describe('getInitialPageParams', () => {
@@ -45,6 +46,16 @@ describe('getSortKey', () => {
   });
 });
 
+describe('isSortKey', () => {
+  it.each(Object.keys(urlSortParams))('returns true given %s', (sort) => {
+    expect(isSortKey(sort)).toBe(true);
+  });
+
+  it.each(['', 'asdf', null, undefined])('returns false given %s', (sort) => {
+    expect(isSortKey(sort)).toBe(false);
+  });
+});
+
 describe('getDueDateValue', () => {
   it.each(DUE_DATE_VALUES)('returns the argument when it is `%s`', (value) => {
     expect(getDueDateValue(value)).toBe(value);
@@ -0,0 +1,105 @@
+import { Document, parseDocument } from 'yaml';
+import { merge } from '~/lib/utils/yaml';
+
+// Mock data for Comments on pairs
+const COMMENTS_ON_PAIRS_SOURCE = `foo:
+  # barbaz
+  bar: baz
+
+  # bazboo
+  baz: boo
+`;
+
+const COMMENTS_ON_PAIRS_TARGET = `foo:
+  # abcdef
+  abc: def
+  # boobaz
+  boo: baz
+`;
+
+const COMMENTS_ON_PAIRS_EXPECTED = `foo:
+  # abcdef
+  abc: def
+  # boobaz
+  boo: baz
+  # barbaz
+  bar: baz
+
+  # bazboo
+  baz: boo
+`;
+
+// Mock data for Comments on seqs
+const COMMENTS_ON_SEQS_SOURCE = `foo:
+  # barbaz
+  - barbaz
+  # bazboo
+  - baz: boo
+`;
+
+const COMMENTS_ON_SEQS_TARGET = `foo:
+  # abcdef
+  - abcdef
+
+  # boobaz
+  - boobaz
+`;
+
+const COMMENTS_ON_SEQS_EXPECTED = `foo:
+  # abcdef
+  - abcdef
+
+  # boobaz
+  - boobaz
+  # barbaz
+  - barbaz
+  # bazboo
+  - baz: boo
+`;
+
+describe('Yaml utility functions', () => {
+  describe('merge', () => {
+    const getAsNode = (yamlStr) => {
+      return parseDocument(yamlStr).contents;
+    };
+
+    describe('Merge two Nodes', () => {
+      it.each`
+        scenario | source | target | options | expected
+        ${'merge a map'} | ${getAsNode('foo:\n bar: baz\n')} | ${'foo:\n abc: def\n'} | ${undefined} | ${'foo:\n abc: def\n bar: baz\n'}
+        ${'merge a seq'} | ${getAsNode('foo:\n - bar\n')} | ${'foo:\n - abc\n'} | ${undefined} | ${'foo:\n - bar\n'}
+        ${'merge-append seqs'} | ${getAsNode('foo:\n - bar\n')} | ${'foo:\n - abc\n'} | ${{ onSequence: 'append' }} | ${'foo:\n - abc\n - bar\n'}
+        ${'merge-replace a seq'} | ${getAsNode('foo:\n - bar\n')} | ${'foo:\n - abc\n'} | ${{ onSequence: 'replace' }} | ${'foo:\n - bar\n'}
+        ${'override existing paths'} | ${getAsNode('foo:\n bar: baz\n')} | ${'foo:\n bar: boo\n'} | ${undefined} | ${'foo:\n bar: baz\n'}
+        ${'deep maps'} | ${getAsNode('foo:\n bar:\n  abc: def\n')} | ${'foo:\n bar:\n  baz: boo\n jkl: mno\n'} | ${undefined} | ${'foo:\n bar:\n  baz: boo\n  abc: def\n jkl: mno\n'}
+        ${'append maps inside seqs'} | ${getAsNode('foo:\n - abc: def\n')} | ${'foo:\n - bar: baz\n'} | ${{ onSequence: 'append' }} | ${'foo:\n - bar: baz\n - abc: def\n'}
+        ${'inexistent paths create new nodes'} | ${getAsNode('foo:\n bar: baz\n')} | ${'abc: def\n'} | ${undefined} | ${'abc: def\nfoo:\n bar: baz\n'}
+        ${'document as source'} | ${parseDocument('foo:\n bar: baz\n')} | ${'foo:\n abc: def\n'} | ${undefined} | ${'foo:\n abc: def\n bar: baz\n'}
+        ${'object as source'} | ${{ foo: { bar: 'baz' } }} | ${'foo:\n abc: def\n'} | ${undefined} | ${'foo:\n abc: def\n bar: baz\n'}
+        ${'comments on pairs'} | ${parseDocument(COMMENTS_ON_PAIRS_SOURCE)} | ${COMMENTS_ON_PAIRS_TARGET} | ${undefined} | ${COMMENTS_ON_PAIRS_EXPECTED}
+        ${'comments on seqs'} | ${parseDocument(COMMENTS_ON_SEQS_SOURCE)} | ${COMMENTS_ON_SEQS_TARGET} | ${{ onSequence: 'append' }} | ${COMMENTS_ON_SEQS_EXPECTED}
+      `('$scenario', ({ source, target, expected, options }) => {
+        const targetDoc = parseDocument(target);
+        merge(targetDoc, source, options);
+        const expectedDoc = parseDocument(expected);
+        expect(targetDoc.toString()).toEqual(expectedDoc.toString());
+      });
+
+      it('type conflict will throw an Error', () => {
+        const sourceDoc = parseDocument('foo:\n bar:\n  - baz\n');
+        const targetDoc = parseDocument('foo:\n bar: def\n');
+        expect(() => merge(targetDoc, sourceDoc)).toThrow(
+          'Type conflict at "foo.bar": Destination node is of type Scalar, the node' +
+            ' to be merged is of type YAMLSeq',
+        );
+      });
+
+      it('merging a collection into an empty doc', () => {
+        const targetDoc = new Document();
+        merge(targetDoc, { foo: { bar: 'baz' } });
+        const expected = parseDocument('foo:\n bar: baz\n');
+        expect(targetDoc.toString()).toEqual(expected.toString());
+      });
+    });
+  });
+});
@@ -4,14 +4,23 @@ require 'spec_helper'
 
 RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
   describe '#satisfied_by?' do
-    subject { described_class.new(globs).satisfied_by?(pipeline, nil) }
+    subject { described_class.new(globs).satisfied_by?(pipeline, context) }
+
+    it_behaves_like 'a glob matching rule' do
+      let(:pipeline) { build(:ci_pipeline) }
+      let(:context) {}
+
+      before do
+        allow(pipeline).to receive(:modified_paths).and_return(files.keys)
+      end
+    end
 
     context 'when pipeline is nil' do
       let(:pipeline) {}
       let(:context) {}
       let(:globs) { [] }
 
       it { is_expected.to be_truthy }
     end
 
     context 'when using variable expansion' do
@@ -20,8 +29,6 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
       let(:globs) { ['$HELM_DIR/**/*'] }
       let(:context) { double('context') }
 
-      subject { described_class.new(globs).satisfied_by?(pipeline, context) }
-
       before do
        allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
      end
@@ -32,6 +39,12 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
         it { is_expected.to be_falsey }
       end
 
+      context 'when modified paths are nil' do
+        let(:modified_paths) {}
+
+        it { is_expected.to be_truthy }
+      end
+
       context 'when context has the specified variables' do
         let(:variables_hash) do
           { 'HELM_DIR' => 'helm' }
@@ -3,6 +3,8 @@
 require 'spec_helper'
 
 RSpec.describe LabelNote do
+  include Gitlab::Routing.url_helpers
+
   let_it_be(:project) { create(:project, :repository) }
   let_it_be(:user) { create(:user) }
   let_it_be(:label) { create(:label, project: project) }
@@ -14,11 +16,27 @@ RSpec.describe LabelNote do
     let_it_be(:resource) { create(:issue, project: project) }
 
     it_behaves_like 'label note created from events'
+
+    it 'includes a link to the list of issues filtered by the label' do
+      note = described_class.from_events([
+        create(:resource_label_event, label: label, issue: resource)
+      ])
+
+      expect(note.note_html).to include(project_issues_path(project, label_name: label.title))
+    end
   end
 
   context 'when resource is merge request' do
     let_it_be(:resource) { create(:merge_request, source_project: project, target_project: project) }
 
     it_behaves_like 'label note created from events'
+
+    it 'includes a link to the list of merge requests filtered by the label' do
+      note = described_class.from_events([
+        create(:resource_label_event, label: label, merge_request: resource)
+      ])
+
+      expect(note.note_html).to include(project_merge_requests_path(project, label_name: label.title))
+    end
   end
 end
@@ -12586,10 +12586,10 @@ yaml@^1.10.0:
   resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"
   integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==
 
-yaml@^2.0.0-8:
-  version "2.0.0-8"
-  resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.0.0-8.tgz#226365f0d804ba7fb8cc2b527a00a7a4a3d8ea5f"
-  integrity sha512-QaYgJZMfWD6fKN/EYMk6w1oLWPCr1xj9QaPSZW5qkDb3y8nGCXhy2Ono+AF4F+CSL/vGcqswcAT0BaS//pgD2A==
+yaml@^2.0.0-10:
+  version "2.0.0-10"
+  resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.0.0-10.tgz#d5b59e2d14b8683313a534f2bbc648e211a2753e"
+  integrity sha512-FHV8s5ODFFQXX/enJEU2EkanNl1UDBUz8oa4k5Qo/sR+Iq7VmhCDkRMb0/mjJCNeAWQ31W8WV6PYStDE4d9EIw==
 
 yargs-parser@^13.1.2:
   version "13.1.2"