Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-06-30 12:09:03 +00:00
parent 312ac59328
commit b0139a824f
56 changed files with 1377 additions and 885 deletions

View File

@ -58,7 +58,7 @@ export default {
},
computed: {
fields() {
const tdClass = 'gl-py-5!';
const tdClass = 'gl-pt-3! gl-pb-4! gl-vertical-align-middle!';
return [
{
key: 'name',
@ -184,7 +184,7 @@ export default {
data-testid="cluster-agent-connection-status"
>
<span :class="$options.AGENT_STATUSES[item.status].class" class="gl-mr-3">
<gl-icon :name="$options.AGENT_STATUSES[item.status].icon" :size="12" /></span
<gl-icon :name="$options.AGENT_STATUSES[item.status].icon" :size="16" /></span
>{{ $options.AGENT_STATUSES[item.status].name }}
</span>
<gl-tooltip v-if="item.status === 'active'" :target="getStatusCellId(item)" placement="right">

View File

@ -145,8 +145,8 @@ export const AGENT_STATUSES = {
},
inactive: {
name: s__('ClusterAgents|Not connected'),
icon: 'severity-critical',
class: 'text-danger-800',
icon: 'status-alert',
class: 'text-danger-500',
tooltip: {
title: s__('ClusterAgents|Agent might not be connected to GitLab'),
body: sprintf(
@ -159,7 +159,7 @@ export const AGENT_STATUSES = {
unused: {
name: s__('ClusterAgents|Never connected'),
icon: 'status-neutral',
class: 'text-secondary-400',
class: 'text-secondary-500',
tooltip: {
title: s__('ClusterAgents|Agent never connected to GitLab'),
body: s__('ClusterAgents|Make sure you are using a valid token.'),

View File

@ -0,0 +1,132 @@
<script>
import { GlButton, GlFormCheckbox, GlFormGroup, GlFormSelect } from '@gitlab/ui';
import { s__ } from '~/locale';
// User-facing strings for the CloudSeed database-instance creation form.
const i18n = {
gcpProjectLabel: s__('CloudSeed|Google Cloud project'),
gcpProjectDescription: s__(
'CloudSeed|Database instance is generated within the selected Google Cloud project',
),
refsLabel: s__('CloudSeed|Refs'),
refsDescription: s__(
'CloudSeed|Generated database instance is linked to the selected branch or tag',
),
databaseVersionLabel: s__('CloudSeed|Database version'),
tierLabel: s__('CloudSeed|Machine type'),
tierDescription: s__('CloudSeed|Determines memory and virtual cores available to your instance'),
checkboxLabel: s__(
'CloudSeed|I accept Google Cloud pricing and responsibilities involved with managing database instances',
),
cancelLabel: s__('CloudSeed|Cancel'),
submitLabel: s__('CloudSeed|Create instance'),
all: s__('CloudSeed|All'),
};
// Presentational form for creating a CloudSeed database instance.
// All option lists are supplied by the caller via props; the component
// performs no data fetching. Submission is a plain form submit (the
// selects carry `name` attributes), presumably handled by a surrounding
// server-rendered form — TODO confirm against the rendering view.
export default {
// Sentinel option value meaning "link the instance to all refs".
ALL_REFS: '*',
components: {
GlButton,
GlFormCheckbox,
GlFormGroup,
GlFormSelect,
},
props: {
// Href for the Cancel button.
cancelPath: { required: true, type: String },
// Google Cloud projects; each item exposes `project_id` and `name`.
gcpProjects: { required: true, type: Array },
// Branch/tag names (plain strings) selectable for the instance.
refs: { required: true, type: Array },
formTitle: { required: true, type: String },
formDescription: { required: true, type: String },
// Database versions; each item exposes `value` and `label`.
databaseVersions: { required: true, type: Array },
// Machine tiers; each item exposes `value` and `label`.
tiers: { required: true, type: Array },
},
i18n,
};
</script>
<template>
<div>
<header class="gl-my-5 gl-border-b-1 gl-border-b-gray-100 gl-border-b-solid">
<h2 class="gl-font-size-h1">{{ formTitle }}</h2>
<p>{{ formDescription }}</p>
</header>
<!-- Google Cloud project picker -->
<gl-form-group
data-testid="form_group_gcp_project"
label-for="gcp_project"
:label="$options.i18n.gcpProjectLabel"
:description="$options.i18n.gcpProjectDescription"
>
<gl-form-select id="gcp_project" data-testid="select_gcp_project" name="gcp_project" required>
<option
v-for="gcpProject in gcpProjects"
:key="gcpProject.project_id"
:value="gcpProject.project_id"
>
{{ gcpProject.name }}
</option>
</gl-form-select>
</gl-form-group>
<!-- Branch/tag picker; first option is the "all refs" sentinel -->
<gl-form-group
data-testid="form_group_environments"
label-for="ref"
:label="$options.i18n.refsLabel"
:description="$options.i18n.refsDescription"
>
<gl-form-select id="ref" data-testid="select_environments" name="ref" required>
<option :value="$options.ALL_REFS">{{ $options.i18n.all }}</option>
<option v-for="ref in refs" :key="ref" :value="ref">
{{ ref }}
</option>
</gl-form-select>
</gl-form-group>
<!-- Machine tier picker -->
<gl-form-group
data-testid="form_group_tier"
label-for="tier"
:label="$options.i18n.tierLabel"
:description="$options.i18n.tierDescription"
>
<gl-form-select id="tier" data-testid="select_tier" name="tier" required>
<option v-for="tier in tiers" :key="tier.value" :value="tier.value">
{{ tier.label }}
</option>
</gl-form-select>
</gl-form-group>
<!-- Database version picker -->
<gl-form-group
data-testid="form_group_database_version"
label-for="database-version"
:label="$options.i18n.databaseVersionLabel"
>
<gl-form-select
id="database-version"
data-testid="select_database_version"
name="database_version"
required
>
<option
v-for="databaseVersion in databaseVersions"
:key="databaseVersion.value"
:value="databaseVersion.value"
>
{{ databaseVersion.label }}
</option>
</gl-form-select>
</gl-form-group>
<!-- Mandatory pricing/responsibility acknowledgement -->
<gl-form-group>
<gl-form-checkbox name="confirmation" required>
{{ $options.i18n.checkboxLabel }}
</gl-form-checkbox>
</gl-form-group>
<div class="form-actions row">
<gl-button type="submit" category="primary" variant="confirm" data-testid="submit-button">
{{ $options.i18n.submitLabel }}
</gl-button>
<gl-button class="gl-ml-1" :href="cancelPath" data-testid="cancel-button">{{
$options.i18n.cancelLabel
}}</gl-button>
</div>
</div>
</template>

View File

@ -0,0 +1,75 @@
<script>
import { GlEmptyState, GlLink, GlTable } from '@gitlab/ui';
import { encodeSaferUrl, setUrlParams } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
// User-facing strings for the CloudSeed instances list.
const i18n = {
noInstancesTitle: s__('CloudSeed|No instances'),
noInstancesDescription: s__('CloudSeed|There are no instances to display.'),
title: s__('CloudSeed|Instances'),
description: s__('CloudSeed|Database instances associated with this project'),
};
// Lists a project's CloudSQL instances, linking each row out to the
// Google Cloud console. Shows an empty state when nothing is provisioned.
export default {
components: { GlEmptyState, GlLink, GlTable },
props: {
// Instances; rows are expected to expose `instance_name`, `gcp_project`,
// `ref` and `version` keys (see `fields` below).
cloudsqlInstances: {
type: Array,
required: true,
},
// SVG path for the empty-state illustration.
emptyIllustrationUrl: {
type: String,
required: true,
},
},
computed: {
tableData() {
// Drop rows without an instance name (e.g. still-provisioning entries).
return this.cloudsqlInstances.filter((instance) => instance.instance_name);
},
},
methods: {
// Link to the GCP console instance list, scoped to the given project id.
gcpProjectUrl(id) {
return setUrlParams({ project: id }, 'https://console.cloud.google.com/sql/instances');
},
// Link to a single instance's overview page; the name is sanitised
// before being interpolated into the URL path.
instanceUrl(name, id) {
const saferName = encodeSaferUrl(name);
return setUrlParams(
{ project: id },
`https://console.cloud.google.com/sql/instances/${saferName}/overview`,
);
},
},
fields: [
{ key: 'ref', label: s__('CloudSeed|Environment') },
{ key: 'gcp_project', label: s__('CloudSeed|Google Cloud Project') },
{ key: 'instance_name', label: s__('CloudSeed|CloudSQL Instance') },
{ key: 'version', label: s__('CloudSeed|Version') },
],
i18n,
};
</script>
<template>
<div class="gl-mx-3">
<gl-empty-state
v-if="tableData.length === 0"
:title="$options.i18n.noInstancesTitle"
:description="$options.i18n.noInstancesDescription"
:svg-path="emptyIllustrationUrl"
/>
<div v-else>
<h2 class="gl-font-size-h2">{{ $options.i18n.title }}</h2>
<p>{{ $options.i18n.description }}</p>
<gl-table :fields="$options.fields" :items="tableData">
<!-- Project cell links to the GCP console instance list -->
<template #cell(gcp_project)="{ value }">
<gl-link :href="gcpProjectUrl(value)">{{ value }}</gl-link>
</template>
<!-- Instance cell links to that instance's overview page -->
<template #cell(instance_name)="{ item: { instance_name, gcp_project } }">
<a :href="instanceUrl(instance_name, gcp_project)">{{ instance_name }}</a>
</template>
</gl-table>
</div>
</div>
</template>

View File

@ -0,0 +1,221 @@
<script>
import { GlAlert, GlButton, GlLink, GlSprintf, GlTable } from '@gitlab/ui';
import { helpPagePath } from '~/helpers/help_page_helper';
import { s__ } from '~/locale';
// Stable keys identifying each offered database service; used to map a
// table row's action button to the matching URL prop in `actionUrl`.
const KEY_CLOUDSQL_POSTGRES = 'cloudsql-postgres';
const KEY_CLOUDSQL_MYSQL = 'cloudsql-mysql';
const KEY_CLOUDSQL_SQLSERVER = 'cloudsql-sqlserver';
const KEY_ALLOYDB_POSTGRES = 'alloydb-postgres';
const KEY_MEMORYSTORE_REDIS = 'memorystore-redis';
const KEY_FIRESTORE = 'firestore';
// User-facing strings for the CloudSeed services catalogue.
const i18n = {
columnService: s__('CloudSeed|Service'),
columnDescription: s__('CloudSeed|Description'),
cloudsqlPostgresTitle: s__('CloudSeed|Cloud SQL for Postgres'),
cloudsqlPostgresDescription: s__(
'CloudSeed|Fully managed relational database service for PostgreSQL',
),
cloudsqlMysqlTitle: s__('CloudSeed|Cloud SQL for MySQL'),
cloudsqlMysqlDescription: s__('CloudSeed|Fully managed relational database service for MySQL'),
cloudsqlSqlserverTitle: s__('CloudSeed|Cloud SQL for SQL Server'),
cloudsqlSqlserverDescription: s__(
'CloudSeed|Fully managed relational database service for SQL Server',
),
alloydbPostgresTitle: s__('CloudSeed|AlloyDB for Postgres'),
alloydbPostgresDescription: s__(
'CloudSeed|Fully managed PostgreSQL-compatible service for high-demand workloads',
),
memorystoreRedisTitle: s__('CloudSeed|Memorystore for Redis'),
memorystoreRedisDescription: s__(
'CloudSeed|Scalable, secure, and highly available in-memory service for Redis',
),
firestoreTitle: s__('CloudSeed|Cloud Firestore'),
firestoreDescription: s__(
'CloudSeed|Flexible, scalable NoSQL cloud database for client- and server-side development',
),
createInstance: s__('CloudSeed|Create instance'),
createCluster: s__('CloudSeed|Create cluster'),
createDatabase: s__('CloudSeed|Create database'),
title: s__('CloudSeed|Services'),
description: s__('CloudSeed|Available database services through which instances may be created'),
pricingAlert: s__(
'CloudSeed|Learn more about pricing for %{cloudsqlPricingStart}Cloud SQL%{cloudsqlPricingEnd}, %{alloydbPricingStart}Alloy DB%{alloydbPricingEnd}, %{memorystorePricingStart}Memorystore%{memorystorePricingEnd} and %{firestorePricingStart}Firestore%{firestorePricingEnd}.',
),
secretManagersDescription: s__(
'CloudSeed|Enhance security by storing database variables in secret managers - learn more about %{docLinkStart}secret management with GitLab%{docLinkEnd}',
),
};
const helpUrlSecrets = helpPagePath('ee/ci/secrets');
// Static catalogue of CloudSeed database services. Each row carries a
// "create" action that links to the matching URL prop; rows whose
// `action.disabled` flag is true render a disabled button (not yet offered).
export default {
components: { GlAlert, GlButton, GlLink, GlSprintf, GlTable },
props: {
// Destination URLs for each service's create action, injected by the caller.
cloudsqlPostgresUrl: {
type: String,
required: true,
},
cloudsqlMysqlUrl: {
type: String,
required: true,
},
cloudsqlSqlserverUrl: {
type: String,
required: true,
},
alloydbPostgresUrl: {
type: String,
required: true,
},
memorystoreRedisUrl: {
type: String,
required: true,
},
firestoreUrl: {
type: String,
required: true,
},
},
methods: {
// Map a service key to its URL prop; '#' is a no-op fallback for
// unknown keys (should not occur with the static `items` below).
actionUrl(key) {
switch (key) {
case KEY_CLOUDSQL_POSTGRES:
return this.cloudsqlPostgresUrl;
case KEY_CLOUDSQL_MYSQL:
return this.cloudsqlMysqlUrl;
case KEY_CLOUDSQL_SQLSERVER:
return this.cloudsqlSqlserverUrl;
case KEY_ALLOYDB_POSTGRES:
return this.alloydbPostgresUrl;
case KEY_MEMORYSTORE_REDIS:
return this.memorystoreRedisUrl;
case KEY_FIRESTORE:
return this.firestoreUrl;
default:
return '#';
}
},
},
fields: [
{ key: 'title', label: i18n.columnService },
{ key: 'description', label: i18n.columnDescription },
{ key: 'action', label: '' },
],
items: [
{
title: i18n.cloudsqlPostgresTitle,
description: i18n.cloudsqlPostgresDescription,
action: {
key: KEY_CLOUDSQL_POSTGRES,
title: i18n.createInstance,
testId: 'button-cloudsql-postgres',
},
},
{
title: i18n.cloudsqlMysqlTitle,
description: i18n.cloudsqlMysqlDescription,
action: {
disabled: false,
key: KEY_CLOUDSQL_MYSQL,
title: i18n.createInstance,
testId: 'button-cloudsql-mysql',
},
},
{
title: i18n.cloudsqlSqlserverTitle,
description: i18n.cloudsqlSqlserverDescription,
action: {
disabled: false,
key: KEY_CLOUDSQL_SQLSERVER,
title: i18n.createInstance,
testId: 'button-cloudsql-sqlserver',
},
},
{
// AlloyDB, Memorystore and Firestore are listed but not yet available.
title: i18n.alloydbPostgresTitle,
description: i18n.alloydbPostgresDescription,
action: {
disabled: true,
key: KEY_ALLOYDB_POSTGRES,
title: i18n.createCluster,
testId: 'button-alloydb-postgres',
},
},
{
title: i18n.memorystoreRedisTitle,
description: i18n.memorystoreRedisDescription,
action: {
disabled: true,
key: KEY_MEMORYSTORE_REDIS,
title: i18n.createInstance,
testId: 'button-memorystore-redis',
},
},
{
title: i18n.firestoreTitle,
description: i18n.firestoreDescription,
action: {
disabled: true,
key: KEY_FIRESTORE,
title: i18n.createDatabase,
testId: 'button-firestore',
},
},
],
helpUrlSecrets,
i18n,
};
</script>
<template>
<div class="gl-mx-3">
<h2 class="gl-font-size-h2">{{ $options.i18n.title }}</h2>
<p>{{ $options.i18n.description }}</p>
<gl-table :fields="$options.fields" :items="$options.items">
<!-- Per-service create button; disabled for not-yet-offered services -->
<template #cell(action)="{ value }">
<gl-button
block
:disabled="value.disabled"
:href="actionUrl(value.key)"
:data-testid="value.testId"
category="secondary"
variant="confirm"
>
{{ value.title }}
</gl-button>
</template>
</gl-table>
<!-- Pricing links for each Google Cloud product -->
<gl-alert class="gl-mt-5" :dismissible="false" variant="tip">
<gl-sprintf :message="$options.i18n.pricingAlert">
<template #cloudsqlPricing="{ content }">
<gl-link href="https://cloud.google.com/sql/pricing">{{ content }}</gl-link>
</template>
<template #alloydbPricing="{ content }">
<gl-link href="https://cloud.google.com/alloydb/pricing">{{ content }}</gl-link>
</template>
<template #memorystorePricing="{ content }">
<gl-link href="https://cloud.google.com/memorystore/docs/redis/pricing">{{
content
}}</gl-link>
</template>
<template #firestorePricing="{ content }">
<gl-link href="https://cloud.google.com/firestore/pricing">{{ content }}</gl-link>
</template>
</gl-sprintf>
</gl-alert>
<!-- Pointer to GitLab's secret-management docs -->
<gl-alert class="gl-mt-5" :dismissible="false" variant="tip">
<gl-sprintf :message="$options.i18n.secretManagersDescription">
<template #docLink="{ content }">
<gl-link :href="$options.helpUrlSecrets">
{{ content }}
</gl-link>
</template>
</gl-sprintf>
</gl-alert>
</div>
</template>

View File

@ -1,5 +1,4 @@
<script>
import { escape } from 'lodash';
import { __ } from '~/locale';
export default {
@ -21,15 +20,11 @@ export default {
},
},
methods: {
getSanitizedTitle(inputEl) {
const { innerText } = inputEl;
return escape(innerText);
},
handleBlur({ target }) {
this.$emit('title-changed', this.getSanitizedTitle(target));
this.$emit('title-changed', target.innerText);
},
handleInput({ target }) {
this.$emit('title-input', this.getSanitizedTitle(target));
this.$emit('title-input', target.innerText);
},
handleSubmit() {
this.$refs.titleEl.blur();

View File

@ -142,7 +142,14 @@ export default {
:work-item-id="workItem.id"
:assignees="workItemAssignees.nodes"
/>
<work-item-weight v-if="workItemWeight" :weight="workItemWeight.weight" />
<work-item-weight
v-if="workItemWeight"
class="gl-mb-5"
:can-update="canUpdate"
:weight="workItemWeight.weight"
:work-item-id="workItem.id"
:work-item-type="workItemType"
/>
</template>
<work-item-description
v-if="hasDescriptionWidget"

View File

@ -1,28 +1,142 @@
<script>
import { GlForm, GlFormGroup, GlFormInput } from '@gitlab/ui';
import { __ } from '~/locale';
import Tracking from '~/tracking';
import { TRACKING_CATEGORY_SHOW } from '../constants';
import localUpdateWorkItemMutation from '../graphql/local_update_work_item.mutation.graphql';
/* eslint-disable @gitlab/require-i18n-strings */
const allowedKeys = [
'Alt',
'ArrowDown',
'ArrowLeft',
'ArrowRight',
'ArrowUp',
'Backspace',
'Control',
'Delete',
'End',
'Enter',
'Home',
'Meta',
'PageDown',
'PageUp',
'Tab',
'0',
'1',
'2',
'3',
'4',
'5',
'6',
'7',
'8',
'9',
];
/* eslint-enable @gitlab/require-i18n-strings */
export default {
inputId: 'weight-widget-input',
components: {
GlForm,
GlFormGroup,
GlFormInput,
},
mixins: [Tracking.mixin()],
inject: ['hasIssueWeightsFeature'],
props: {
canUpdate: {
type: Boolean,
required: false,
default: false,
},
weight: {
type: Number,
required: false,
default: undefined,
},
workItemId: {
type: String,
required: true,
},
workItemType: {
type: String,
required: true,
},
},
data() {
return {
isEditing: false,
};
},
computed: {
weightText() {
return this.weight ?? __('None');
placeholder() {
return this.canUpdate && this.isEditing ? __('Enter a number') : __('None');
},
tracking() {
return {
category: TRACKING_CATEGORY_SHOW,
label: 'item_weight',
property: `type_${this.workItemType}`,
};
},
type() {
return this.canUpdate && this.isEditing ? 'number' : 'text';
},
},
methods: {
blurInput() {
this.$refs.input.$el.blur();
},
handleFocus() {
this.isEditing = true;
},
handleKeydown(event) {
if (!allowedKeys.includes(event.key)) {
event.preventDefault();
}
},
updateWeight(event) {
this.isEditing = false;
this.track('updated_weight');
this.$apollo.mutate({
mutation: localUpdateWorkItemMutation,
variables: {
input: {
id: this.workItemId,
weight: event.target.value === '' ? null : Number(event.target.value),
},
},
});
},
},
};
</script>
<template>
<div v-if="hasIssueWeightsFeature" class="gl-mb-5 form-row">
<span class="gl-font-weight-bold col-lg-2 col-3 gl-overflow-wrap-break">{{
__('Weight')
}}</span>
<span class="gl-ml-5">{{ weightText }}</span>
</div>
<gl-form v-if="hasIssueWeightsFeature" @submit.prevent="blurInput">
<gl-form-group
class="gl-align-items-center"
:label="__('Weight')"
:label-for="$options.inputId"
label-class="gl-pb-0! gl-overflow-wrap-break"
label-cols="3"
label-cols-lg="2"
>
<gl-form-input
:id="$options.inputId"
ref="input"
min="0"
:placeholder="placeholder"
:readonly="!canUpdate"
size="sm"
:type="type"
:value="weight"
@blur="updateWeight"
@focus="handleFocus"
@keydown="handleKeydown"
@keydown.exact.esc.stop="blurInput"
/>
</gl-form-group>
</gl-form>
</template>

View File

@ -1,6 +1,6 @@
#import "./work_item.fragment.graphql"
mutation localUpdateWorkItem($input: LocalWorkItemAssigneesInput) {
mutation localUpdateWorkItem($input: LocalUpdateWorkItemInput) {
localUpdateWorkItem(input: $input) @client {
workItem {
...WorkItem

View File

@ -2,7 +2,7 @@ import produce from 'immer';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { WIDGET_TYPE_ASSIGNEE } from '../constants';
import { WIDGET_TYPE_ASSIGNEE, WIDGET_TYPE_WEIGHT } from '../constants';
import typeDefs from './typedefs.graphql';
import workItemQuery from './work_item.query.graphql';
@ -10,7 +10,7 @@ export const temporaryConfig = {
typeDefs,
cacheConfig: {
possibleTypes: {
LocalWorkItemWidget: ['LocalWorkItemAssignees'],
LocalWorkItemWidget: ['LocalWorkItemAssignees', 'LocalWorkItemWeight'],
},
typePolicies: {
WorkItem: {
@ -46,7 +46,7 @@ export const temporaryConfig = {
{
__typename: 'LocalWorkItemWeight',
type: 'WEIGHT',
weight: 0,
weight: null,
},
]
);
@ -67,10 +67,19 @@ export const resolvers = {
});
const data = produce(sourceData, (draftData) => {
const assigneesWidget = draftData.workItem.mockWidgets.find(
(widget) => widget.type === WIDGET_TYPE_ASSIGNEE,
);
assigneesWidget.nodes = [...input.assignees];
if (input.assignees) {
const assigneesWidget = draftData.workItem.mockWidgets.find(
(widget) => widget.type === WIDGET_TYPE_ASSIGNEE,
);
assigneesWidget.nodes = [...input.assignees];
}
if (input.weight != null) {
const weightWidget = draftData.workItem.mockWidgets.find(
(widget) => widget.type === WIDGET_TYPE_WEIGHT,
);
weightWidget.weight = input.weight;
}
});
cache.writeQuery({

View File

@ -21,9 +21,10 @@ extend type WorkItem {
mockWidgets: [LocalWorkItemWidget]
}
type LocalWorkItemAssigneesInput {
input LocalUpdateWorkItemInput {
id: WorkItemID!
assignees: [UserCore!]
weight: Int
}
type LocalWorkItemPayload {
@ -32,5 +33,5 @@ type LocalWorkItemPayload {
}
extend type Mutation {
localUpdateWorkItem(input: LocalWorkItemAssigneesInput!): LocalWorkItemPayload
localUpdateWorkItem(input: LocalUpdateWorkItemInput!): LocalWorkItemPayload
}

View File

@ -10,6 +10,7 @@ export const initWorkItemsRoot = () => {
return new Vue({
el,
name: 'WorkItemsRoot',
router: createRouter(el.dataset.fullPath),
apolloProvider: createApolloProvider(),
provide: {

View File

@ -32,4 +32,3 @@
@import './pages/storage_quota';
@import './pages/tree';
@import './pages/users';
@import './pages/work_items';

View File

@ -67,7 +67,7 @@
}
.user-avatar-link {
display: flow-root;
display: inline-block;
text-decoration: none;
}

View File

@ -0,0 +1,15 @@
@import 'mixins_and_variables_and_functions';
// Keep token-selector tokens (e.g. assignee chips) vertically centred.
.gl-token-selector-token-container {
display: flex;
align-items: center;
}
// Weight widget input: hide the border (paint it white) unless the field is
// hovered or focused, so it reads as plain text until interacted with.
#weight-widget-input:not(:hover, :focus),
#weight-widget-input[readonly] {
box-shadow: inset 0 0 0 $gl-border-size-1 var(--white, $white);
}
// Read-only weight input keeps a white background instead of the default grey.
#weight-widget-input[readonly] {
background-color: var(--white, $white);
}

View File

@ -1,4 +0,0 @@
.gl-token-selector-token-container {
display: flex;
align-items: center;
}

View File

@ -1,103 +0,0 @@
# frozen_string_literal: true
module Projects
# Serves pod logs for a project, either straight from the Kubernetes API
# (#k8s) or via an Elastic Stack integration (#elasticsearch). The target
# cluster is resolved from an environment name or an explicit cluster id.
class LogsController < Projects::ApplicationController
include ::Gitlab::Utils::StrongMemoize
before_action :authorize_read_pod_logs!
# Log endpoints need a deployment to resolve a namespace; see #ensure_deployments.
before_action :ensure_deployments, only: %i(k8s elasticsearch)
feature_category :logging
urgency :low
# Renders the logs UI, or an empty state when no environment/cluster exists.
# 404s when the :monitor_logging feature flag is off for the project.
def index
return render_404 unless Feature.enabled?(:monitor_logging, project)
if environment || cluster
render :index
else
render :empty_logs
end
end
# JSON logs fetched directly from the Kubernetes API.
def k8s
render_logs(::PodLogs::KubernetesService, k8s_params)
end
# JSON logs fetched from Elasticsearch (supports search/time filters/cursor).
def elasticsearch
render_logs(::PodLogs::ElasticsearchService, elasticsearch_params)
end
private
# Runs the given PodLogs service; a nil result means the reactive cache is
# still warming, so respond 202 Accepted and let the client poll again.
def render_logs(service, permitted_params)
::Gitlab::PollingInterval.set_header(response, interval: 3_000)
result = service.new(cluster, namespace, params: permitted_params).execute
if result.nil?
head :accepted
elsif result[:status] == :success
render json: result
else
render status: :bad_request, json: result
end
end
# cluster is selected either via environment or directly by id
def cluster_params
params.permit(:environment_name, :cluster_id)
end
def k8s_params
params.permit(:container_name, :pod_name)
end
def elasticsearch_params
params.permit(:container_name, :pod_name, :search, :start_time, :end_time, :cursor)
end
# Environment named in the request, falling back to the project default.
def environment
strong_memoize(:environment) do
if cluster_params.key?(:environment_name)
::Environments::EnvironmentsFinder.new(project, current_user, name: cluster_params[:environment_name]).execute.first
else
project.default_environment
end
end
end
# Cluster resolved by explicit id (managed-apps logs) or via the
# environment's deployment platform.
def cluster
strong_memoize(:cluster) do
if gitlab_managed_apps_logs?
clusters = ClusterAncestorsFinder.new(project, current_user).execute
clusters.find { |cluster| cluster.id == cluster_params[:cluster_id].to_i }
else
environment&.deployment_platform&.cluster
end
end
end
# Kubernetes namespace to read logs from: the fixed Helm namespace for
# managed apps, otherwise the environment's deployment namespace.
def namespace
if gitlab_managed_apps_logs?
Gitlab::Kubernetes::Helm::NAMESPACE
else
environment.deployment_namespace
end
end
# 400s when no cluster/namespace can be resolved (i.e. nothing deployed).
def ensure_deployments
return if gitlab_managed_apps_logs?
return if cluster && namespace.present?
render status: :bad_request, json: {
status: :error,
message: _('Environment does not have deployments')
}
end
# True when the caller addressed a cluster directly by id.
def gitlab_managed_apps_logs?
cluster_params.key?(:cluster_id)
end
end
end

View File

@ -2,6 +2,7 @@
class ProjectExportJob < ApplicationRecord
belongs_to :project
has_many :relation_exports, class_name: 'Projects::ImportExport::RelationExport'
validates :project, :jid, :status, presence: true

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Projects
module ImportExport
# Tracks the export of a single project relation (e.g. one association's
# data) within a ProjectExportJob. Each relation name is unique per job,
# and the produced file is attached via RelationExportUpload.
class RelationExport < ApplicationRecord
self.table_name = 'project_relation_exports'
belongs_to :project_export_job
has_one :upload,
class_name: 'Projects::ImportExport::RelationExportUpload',
foreign_key: :project_relation_export_id,
inverse_of: :relation_export
validates :export_error, length: { maximum: 300 }
validates :jid, length: { maximum: 255 }
validates :project_export_job, presence: true
# A given relation may only be exported once per export job.
validates :relation, presence: true, length: { maximum: 255 }, uniqueness: { scope: :project_export_job_id }
validates :status, numericality: { only_integer: true }, presence: true
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Projects
module ImportExport
# Carrier-wave backed upload holding the exported file for a single
# RelationExport; the file can be moved to object storage in the background.
class RelationExportUpload < ApplicationRecord
include WithUploads
include ObjectStorage::BackgroundMove
self.table_name = 'project_relation_export_uploads'
belongs_to :relation_export,
class_name: 'Projects::ImportExport::RelationExport',
foreign_key: :project_relation_export_id,
inverse_of: :upload
# The exported archive itself, handled by the shared import/export uploader.
mount_uploader :export_file, ImportExportUploader
end
end
end

View File

@ -66,22 +66,6 @@ class EnvironmentEntity < Grape::Entity
environment.available? && can?(current_user, :stop_environment, environment)
end
expose :logs_path, if: -> (*) { can_read_pod_logs? } do |environment|
project_logs_path(environment.project, environment_name: environment.name)
end
expose :logs_api_path, if: -> (*) { can_read_pod_logs? } do |environment|
if environment.elastic_stack_available?
elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json)
else
k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json)
end
end
expose :enable_advanced_logs_querying, if: -> (*) { can_read_pod_logs? } do |environment|
environment.elastic_stack_available?
end
expose :can_delete do |environment|
can?(current_user, :destroy_environment, environment)
end
@ -102,11 +86,6 @@ class EnvironmentEntity < Grape::Entity
can?(current_user, :update_environment, environment)
end
def can_read_pod_logs?
Feature.enabled?(:monitor_logging, environment.project) &&
can?(current_user, :read_pod_logs, environment.project)
end
def can_read_deploy_board?
can?(current_user, :read_deploy_board, environment.project)
end

View File

@ -1,91 +0,0 @@
# frozen_string_literal: true
module PodLogs
# Shared scaffolding for pod-log backends (Kubernetes, Elasticsearch).
# Runs a Stepable pipeline behind ReactiveCaching: #execute returns nil
# until the cache is warm, then the result hash from #execute_steps.
# Subclasses define the step list and implement #get_raw_pods / #pod_logs.
class BaseService < ::BaseService
include ReactiveCaching
include Stepable
attr_reader :cluster, :namespace, :params
CACHE_KEY_GET_POD_LOG = 'get_pod_log'
# Kubernetes DNS subdomain names are capped at 253 characters.
K8S_NAME_MAX_LENGTH = 253
self.reactive_cache_work_type = :external_dependency
# ReactiveCaching identity: cache entries are keyed per cluster.
def id
cluster.id
end
def initialize(cluster, namespace, params: {})
@cluster = cluster
@namespace = namespace
# Only whitelisted params (see #valid_params) are kept, with string keys.
@params = filter_params(params.dup.stringify_keys).to_hash
end
# Returns the cached log result, or nil while the cache is being computed.
def execute
with_reactive_cache(
CACHE_KEY_GET_POD_LOG,
namespace,
params
) do |result|
result
end
end
# ReactiveCaching callback: computes the value for a cache request.
def calculate_reactive_cache(request, _namespace, _params)
case request
when CACHE_KEY_GET_POD_LOG
execute_steps
else
exception = StandardError.new('Unknown reactive cache request')
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception, request: request)
error(_('Unknown cache key'))
end
end
private
# Param names accepted from callers; subclasses extend this list.
def valid_params
%w(pod_name container_name)
end
# Keys copied into the final response; subclasses extend this list.
def success_return_keys
%i(status logs pod_name container_name pods)
end
# Step: validate cluster/namespace and normalise pod/container params.
def check_arguments(result)
return error(_('Cluster does not exist')) if cluster.nil?
return error(_('Namespace is empty')) if namespace.blank?
result[:pod_name] = params['pod_name'].presence
result[:container_name] = params['container_name'].presence
return error(_('Invalid pod_name')) if result[:pod_name] && !result[:pod_name].is_a?(String)
return error(_('Invalid container_name')) if result[:container_name] && !result[:container_name].is_a?(String)
success(result)
end
# Step: subclasses must populate result[:raw_pods].
def get_raw_pods(result)
raise NotImplementedError
end
# Step: derive plain pod names from the raw pod data.
def get_pod_names(result)
result[:pods] = result[:raw_pods].map { |p| p[:name] }
success(result)
end
# Step: subclasses must populate result[:logs].
def pod_logs(result)
raise NotImplementedError
end
# Step: strip internal keys from the result before returning it.
def filter_return_keys(result)
result.slice(*success_return_keys)
end
def filter_params(params)
params.slice(*valid_params)
end
end
end

View File

@ -1,98 +0,0 @@
# frozen_string_literal: true
module PodLogs
# Pod-log backend that reads logs from the cluster's Elastic Stack
# integration, supporting full-text search, time ranges and cursor
# pagination on top of the shared BaseService pipeline.
class ElasticsearchService < PodLogs::BaseService
steps :check_arguments,
:get_raw_pods,
:get_pod_names,
:check_times,
:check_search,
:check_cursor,
:pod_logs,
:filter_return_keys
# Rebuilds the service instance inside the ReactiveCaching worker.
self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
private
def valid_params
super + %w(search start_time end_time cursor)
end
def success_return_keys
super + %i(cursor)
end
# Step: list pods for the namespace via the Elasticsearch client.
def get_raw_pods(result)
client = cluster&.elasticsearch_client
return error(_('Unable to connect to Elasticsearch')) unless client
result[:raw_pods] = ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods(namespace)
success(result)
rescue Elasticsearch::Transport::Transport::ServerError => e
::Gitlab::ErrorTracking.track_exception(e)
error(_('Elasticsearch returned status code: %{status_code}') % {
# ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
# there is no method on the exception other than the class name to determine the type of error encountered.
status_code: e.class.name.split('::').last
})
end
# Step: validate optional ISO8601 start/end times (Time.iso8601 raises
# ArgumentError on malformed input, which maps to a user-facing error).
def check_times(result)
result[:start_time] = params['start_time'] if params.key?('start_time') && Time.iso8601(params['start_time'])
result[:end_time] = params['end_time'] if params.key?('end_time') && Time.iso8601(params['end_time'])
success(result)
rescue ArgumentError
error(_('Invalid start or end time format'))
end
# Step: validate the optional search string.
def check_search(result)
result[:search] = params['search'] if params.key?('search')
return error(_('Invalid search parameter')) if result[:search] && !result[:search].is_a?(String)
success(result)
end
# Step: validate the optional pagination cursor.
def check_cursor(result)
result[:cursor] = params['cursor'] if params.key?('cursor')
return error(_('Invalid cursor parameter')) if result[:cursor] && !result[:cursor].is_a?(String)
success(result)
end
# Step: fetch the log lines (and next cursor) from Elasticsearch.
def pod_logs(result)
client = cluster&.elasticsearch_client
return error(_('Unable to connect to Elasticsearch')) unless client
response = ::Gitlab::Elasticsearch::Logs::Lines.new(client).pod_logs(
namespace,
pod_name: result[:pod_name],
container_name: result[:container_name],
search: result[:search],
start_time: result[:start_time],
end_time: result[:end_time],
cursor: result[:cursor],
chart_above_v2: cluster.elastic_stack_adapter.chart_above_v2?
)
result.merge!(response)
success(result)
rescue Elasticsearch::Transport::Transport::ServerError => e
::Gitlab::ErrorTracking.track_exception(e)
error(_('Elasticsearch returned status code: %{status_code}') % {
# ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
# there is no method on the exception other than the class name to determine the type of error encountered.
status_code: e.class.name.split('::').last
})
rescue ::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor
error(_('Invalid cursor value provided'))
end
end
end

View File

@ -1,151 +0,0 @@
# frozen_string_literal: true
module PodLogs
class KubernetesService < PodLogs::BaseService
LOGS_LIMIT = 500
REPLACEMENT_CHAR = "\u{FFFD}"
EncodingHelperError = Class.new(StandardError)
steps :check_arguments,
:get_raw_pods,
:get_pod_names,
:check_pod_name,
:check_container_name,
:pod_logs,
:encode_logs_to_utf8,
:split_logs,
:filter_return_keys
self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
private
def get_raw_pods(result)
result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace).map do |pod|
{
name: pod.metadata.name,
container_names: pod.spec.containers.map(&:name)
}
end
success(result)
end
def check_pod_name(result)
# If pod_name is not received as parameter, get the pod logs of the first
# pod of this namespace.
result[:pod_name] ||= result[:pods].first
unless result[:pod_name]
return error(_('No pods available'))
end
unless result[:pod_name].length.to_i <= K8S_NAME_MAX_LENGTH
return error(_('pod_name cannot be larger than %{max_length}'\
' chars' % { max_length: K8S_NAME_MAX_LENGTH }))
end
unless result[:pod_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex
return error(_('pod_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character'))
end
unless result[:pods].include?(result[:pod_name])
return error(_('Pod does not exist'))
end
success(result)
end
def check_container_name(result)
pod_details = result[:raw_pods].find { |p| p[:name] == result[:pod_name] }
container_names = pod_details[:container_names]
# select first container if not specified
result[:container_name] ||= container_names.first
unless result[:container_name]
return error(_('No containers available'))
end
unless result[:container_name].length.to_i <= K8S_NAME_MAX_LENGTH
return error(_('container_name cannot be larger than'\
' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH }))
end
unless result[:container_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex
return error(_('container_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character'))
end
unless container_names.include?(result[:container_name])
return error(_('Container does not exist'))
end
success(result)
end
# Reads the last LOGS_LIMIT log lines (with timestamps) for the selected
# pod/container from the Kubernetes API and stores the raw text in
# result[:logs]. Maps Kubeclient failures onto error responses.
def pod_logs(result)
  raw_log_body = cluster.kubeclient.get_pod_log(
    result[:pod_name],
    namespace,
    container: result[:container_name],
    tail_lines: LOGS_LIMIT,
    timestamps: true
  ).body

  result[:logs] = raw_log_body

  success(result)
rescue Kubeclient::ResourceNotFoundError
  error(_('Pod not found'))
rescue Kubeclient::HttpError => e
  ::Gitlab::ErrorTracking.track_exception(e)

  error(_('Kubernetes API returned status code: %{error_code}') % { error_code: e.error_code })
end
# Check https://gitlab.com/gitlab-org/gitlab/issues/34965#note_292261879
# for more details on why this is necessary.
def encode_logs_to_utf8(result)
  logs = result[:logs]

  # Nothing to do when there are no logs or they are already UTF-8 encoded.
  if logs.nil? || logs.encoding == Encoding::UTF_8
    return success(result)
  end

  result[:logs] = encode_utf8(logs)

  success(result)
rescue EncodingHelperError
  error(_('Unable to convert Kubernetes logs encoding to UTF-8'))
end
# Splits the raw multi-line log text in result[:logs] into an array of
# { timestamp:, message:, pod: } hashes, one per log line.
def split_logs(result)
  pod = result[:pod_name]

  result[:logs] = result[:logs].strip.lines(chomp: true).map do |raw_line|
    # message contains a RFC3339Nano timestamp, then a space, then the log line.
    # resolution of the nanoseconds can vary, so we split on the first space
    timestamp, message = raw_line.split(' ', 2)

    { timestamp: timestamp, message: message, pod: pod }
  end

  success(result)
end
# Re-encodes the given log text to UTF-8 (replacing invalid sequences with
# REPLACEMENT_CHAR) and raises EncodingHelperError when the conversion
# produced nothing usable.
def encode_utf8(logs)
  utf8_logs = Gitlab::EncodingHelper.encode_utf8(logs.dup, replace: REPLACEMENT_CHAR)

  # Gitlab::EncodingHelper.encode_utf8 can return '' or nil if an exception
  # is raised while encoding. We prefer to return an error rather than wrongly
  # display blank logs.
  if (logs.present? && utf8_logs.blank?) || utf8_logs&.encoding != Encoding::UTF_8
    raise EncodingHelperError, 'Could not convert Kubernetes logs to UTF-8'
  end

  utf8_logs
end
end
end

View File

@ -3,6 +3,7 @@
- breadcrumb_title @issue.to_reference
- page_title "#{@issue.title} (#{@issue.to_reference})", _("Issues")
- add_page_specific_style 'page_bundles/issues_show'
- add_page_specific_style 'page_bundles/work_items'
= render 'projects/issuable/show', issuable: @issue, api_awards_path: award_emoji_issue_api_path(@issue)
= render 'projects/invite_members_modal', project: @project

View File

@ -1,3 +1,4 @@
- page_title s_('WorkItem|Work Items')
- add_page_specific_style 'page_bundles/work_items'
#js-work-items{ data: work_items_index_data(@project) }

View File

@ -304,6 +304,7 @@ module Gitlab
config.assets.precompile << "page_bundles/terms.css"
config.assets.precompile << "page_bundles/todos.css"
config.assets.precompile << "page_bundles/wiki.css"
config.assets.precompile << "page_bundles/work_items.css"
config.assets.precompile << "page_bundles/xterm.css"
config.assets.precompile << "lazy_bundles/cropper.css"
config.assets.precompile << "lazy_bundles/select2.css"

View File

@ -0,0 +1,9 @@
---
table_name: project_relation_export_uploads
classes:
- Projects::ImportExport::RelationExportUpload
feature_categories:
- importers
description: Used to store the location of relation export files
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90624
milestone: '15.2'

View File

@ -0,0 +1,9 @@
---
table_name: project_relation_exports
classes:
- Projects::ImportExport::RelationExport
feature_categories:
- importers
description: Used to track the generation of relation export files for projects
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90624
milestone: '15.2'

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class CreateProjectRelationExports < Gitlab::Database::Migration[2.0]
enable_lock_retries!
UNIQUE_INDEX_NAME = 'index_project_export_job_relation'
def change
create_table :project_relation_exports do |t|
t.references :project_export_job, null: false, foreign_key: { on_delete: :cascade }
t.timestamps_with_timezone null: false
t.integer :status, limit: 2, null: false, default: 0
t.text :relation, null: false, limit: 255
t.text :jid, limit: 255
t.text :export_error, limit: 300
t.index [:project_export_job_id, :relation], unique: true, name: UNIQUE_INDEX_NAME
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class CreateProjectRelationExportUploads < Gitlab::Database::Migration[2.0]
enable_lock_retries!
INDEX = 'index_project_relation_export_upload_id'
def change
create_table :project_relation_export_uploads do |t|
t.references :project_relation_export, null: false, foreign_key: { on_delete: :cascade }, index: { name: INDEX }
t.timestamps_with_timezone null: false
t.text :export_file, null: false, limit: 255
end
end
end

View File

@ -0,0 +1 @@
f8830ecd0c49aea19857fec9b07d238f4bc269a758b6a3495d57222ab1604c74

View File

@ -0,0 +1 @@
2cdbc5b29e11a2ce0679f218adc57c95d483139ca0bcd1801ea97fbd4ba68ddf

View File

@ -19502,6 +19502,47 @@ CREATE TABLE project_pages_metadata (
onboarding_complete boolean DEFAULT false NOT NULL
);
CREATE TABLE project_relation_export_uploads (
id bigint NOT NULL,
project_relation_export_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
export_file text NOT NULL,
CONSTRAINT check_d8ee243e9e CHECK ((char_length(export_file) <= 255))
);
CREATE SEQUENCE project_relation_export_uploads_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE project_relation_export_uploads_id_seq OWNED BY project_relation_export_uploads.id;
CREATE TABLE project_relation_exports (
id bigint NOT NULL,
project_export_job_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
status smallint DEFAULT 0 NOT NULL,
relation text NOT NULL,
jid text,
export_error text,
CONSTRAINT check_15e644d856 CHECK ((char_length(jid) <= 255)),
CONSTRAINT check_4b5880b795 CHECK ((char_length(relation) <= 255)),
CONSTRAINT check_dbd1cf73d0 CHECK ((char_length(export_error) <= 300))
);
CREATE SEQUENCE project_relation_exports_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE project_relation_exports_id_seq OWNED BY project_relation_exports.id;
CREATE TABLE project_repositories (
id bigint NOT NULL,
shard_id integer NOT NULL,
@ -23317,6 +23358,10 @@ ALTER TABLE ONLY project_incident_management_settings ALTER COLUMN project_id SE
ALTER TABLE ONLY project_mirror_data ALTER COLUMN id SET DEFAULT nextval('project_mirror_data_id_seq'::regclass);
ALTER TABLE ONLY project_relation_export_uploads ALTER COLUMN id SET DEFAULT nextval('project_relation_export_uploads_id_seq'::regclass);
ALTER TABLE ONLY project_relation_exports ALTER COLUMN id SET DEFAULT nextval('project_relation_exports_id_seq'::regclass);
ALTER TABLE ONLY project_repositories ALTER COLUMN id SET DEFAULT nextval('project_repositories_id_seq'::regclass);
ALTER TABLE ONLY project_repository_states ALTER COLUMN id SET DEFAULT nextval('project_repository_states_id_seq'::regclass);
@ -25455,6 +25500,12 @@ ALTER TABLE ONLY project_mirror_data
ALTER TABLE ONLY project_pages_metadata
ADD CONSTRAINT project_pages_metadata_pkey PRIMARY KEY (project_id);
ALTER TABLE ONLY project_relation_export_uploads
ADD CONSTRAINT project_relation_export_uploads_pkey PRIMARY KEY (id);
ALTER TABLE ONLY project_relation_exports
ADD CONSTRAINT project_relation_exports_pkey PRIMARY KEY (id);
ALTER TABLE ONLY project_repositories
ADD CONSTRAINT project_repositories_pkey PRIMARY KEY (id);
@ -29082,6 +29133,8 @@ CREATE INDEX index_project_deploy_tokens_on_deploy_token_id ON project_deploy_to
CREATE UNIQUE INDEX index_project_deploy_tokens_on_project_id_and_deploy_token_id ON project_deploy_tokens USING btree (project_id, deploy_token_id);
CREATE UNIQUE INDEX index_project_export_job_relation ON project_relation_exports USING btree (project_export_job_id, relation);
CREATE UNIQUE INDEX index_project_export_jobs_on_jid ON project_export_jobs USING btree (jid);
CREATE INDEX index_project_export_jobs_on_project_id_and_jid ON project_export_jobs USING btree (project_id, jid);
@ -29120,6 +29173,10 @@ CREATE INDEX index_project_pages_metadata_on_pages_deployment_id ON project_page
CREATE INDEX index_project_pages_metadata_on_project_id_and_deployed_is_true ON project_pages_metadata USING btree (project_id) WHERE (deployed = true);
CREATE INDEX index_project_relation_export_upload_id ON project_relation_export_uploads USING btree (project_relation_export_id);
CREATE INDEX index_project_relation_exports_on_project_export_job_id ON project_relation_exports USING btree (project_export_job_id);
CREATE UNIQUE INDEX index_project_repositories_on_disk_path ON project_repositories USING btree (disk_path);
CREATE UNIQUE INDEX index_project_repositories_on_project_id ON project_repositories USING btree (project_id);
@ -33016,6 +33073,9 @@ ALTER TABLE ONLY design_management_versions
ALTER TABLE ONLY approval_merge_request_rules_approved_approvers
ADD CONSTRAINT fk_rails_6577725edb FOREIGN KEY (approval_merge_request_rule_id) REFERENCES approval_merge_request_rules(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_relation_export_uploads
ADD CONSTRAINT fk_rails_660ada90c9 FOREIGN KEY (project_relation_export_id) REFERENCES project_relation_exports(id) ON DELETE CASCADE;
ALTER TABLE ONLY operations_feature_flags_clients
ADD CONSTRAINT fk_rails_6650ed902c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -33826,6 +33886,9 @@ ALTER TABLE ONLY ci_daily_build_group_report_results
ALTER TABLE ONLY packages_debian_group_architectures
ADD CONSTRAINT fk_rails_ef667d1b03 FOREIGN KEY (distribution_id) REFERENCES packages_debian_group_distributions(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_relation_exports
ADD CONSTRAINT fk_rails_ef89b354fc FOREIGN KEY (project_export_job_id) REFERENCES project_export_jobs(id) ON DELETE CASCADE;
ALTER TABLE ONLY label_priorities
ADD CONSTRAINT fk_rails_ef916d14fa FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -293,6 +293,8 @@ in the `handle_event` method of the subscriber worker.
## Testing
### Testing the publisher
The publisher's responsibility is to ensure that the event is published correctly.
To test that an event has been published correctly, we can use the RSpec matcher `:publish_event`:
@ -308,6 +310,25 @@ it 'publishes a ProjectDeleted event with project id and namespace id' do
end
```
It is also possible to compose matchers inside the `:publish_event` matcher.
This could be useful when we want to assert that an event is created with a certain kind of value,
but we do not know the value in advance. An example of this is when publishing an event
after creating a new record.
```ruby
it 'publishes a ProjectCreatedEvent with project id and namespace id' do
# The project ID will only be generated when the `create_project`
# is called in the expect block.
expected_data = { project_id: kind_of(Numeric), namespace_id: group_id }
expect { create_project(user, name: 'Project', path: 'project', namespace_id: group_id) }
.to publish_event(Projects::ProjectCreatedEvent)
.with(expected_data)
end
```
### Testing the subscriber
The subscriber must ensure that a published event can be consumed correctly. For this purpose
we have added helpers and shared examples to standardize the way we test subscribers:

View File

@ -379,7 +379,7 @@ Find where your version sits in the upgrade path below, and upgrade GitLab
accordingly, while also consulting the
[version-specific upgrade instructions](#version-specific-upgrading-instructions):
`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> [`12.10.14`](#12100) -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [`14.3.6`](#1430) -> [`14.9.5`](#1490) -> [`14.10.Z`](#1410) -> [`15.0.Z`](#1500) -> [latest `15.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases)
`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> [`12.10.14`](#12100) -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [`14.3.6`](#1430) -> [`14.9.5`](#1490) -> [`14.10.Z`](#14100) -> [`15.0.Z`](#1500) -> [latest `15.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases)
The following table, while not exhaustive, shows some examples of the supported
upgrade paths.

View File

@ -824,6 +824,11 @@ The available values for `object_attributes.action` in the payload are:
- `unapproval`
- `merge`
The field `object_attributes.oldrev` is only available when there are actual code changes, like:
- New code is pushed.
- A [suggestion](../merge_requests/reviews/suggestions.md) is applied.
Request header:
```plaintext

View File

@ -425,6 +425,8 @@ project_incident_management_settings: :gitlab_main
project_metrics_settings: :gitlab_main
project_mirror_data: :gitlab_main
project_pages_metadata: :gitlab_main
project_relation_export_uploads: :gitlab_main
project_relation_exports: :gitlab_main
project_repositories: :gitlab_main
project_repository_states: :gitlab_main
project_repository_storage_moves: :gitlab_main

View File

@ -373,7 +373,7 @@ module Gitlab
end
def rendered
return unless use_semantic_ipynb_diff? && ipynb? && modified_file? && !too_large?
return unless use_semantic_ipynb_diff? && ipynb? && modified_file? && !collapsed? && !too_large?
strong_memoize(:rendered) { Rendered::Notebook::DiffFile.new(self) }
end

View File

@ -1,157 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Elasticsearch
    module Logs
      # Fetches Kubernetes pod log lines from Elasticsearch, optionally
      # filtered by pod, container, free-text search and time range, with
      # cursor-based pagination built on Elasticsearch's search_after.
      class Lines
        # Raised when a client-supplied pagination cursor cannot be decoded
        # into a valid search_after tuple.
        InvalidCursor = Class.new(RuntimeError)

        # How many log lines to fetch in a query
        LOGS_LIMIT = 500

        # client: an Elasticsearch client responding to #search(body:).
        def initialize(client)
          @client = client
        end

        # Returns { logs: [{ timestamp:, message:, pod: }, ...], cursor: String|nil }.
        # All filter arguments are optional; a nil pod_name/container_name
        # means "all pods"/"all containers" in the namespace.
        def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil, chart_above_v2: true)
          # Build the bool query by letting each filter append its clause.
          query = { bool: { must: [] } }.tap do |q|
            filter_pod_name(q, pod_name)
            filter_namespace(q, namespace)
            filter_container_name(q, container_name)
            filter_search(q, search)
            filter_times(q, start_time, end_time)
          end

          body = build_body(query, cursor, chart_above_v2)
          response = @client.search body: body

          format_response(response)
        end

        private

        def build_body(query, cursor = nil, chart_above_v2 = true)
          # The offset field name differs between chart versions:
          # "log.offset" on newer charts vs "offset" on older ones.
          offset_field = chart_above_v2 ? "log.offset" : "offset"
          body = {
            query: query,
            # reverse order so we can query N-most recent records
            sort: [
              { "@timestamp": { order: :desc } },
              { "#{offset_field}": { order: :desc } }
            ],
            # only return these fields in the response
            _source: ["@timestamp", "message", "kubernetes.pod.name"],
            # fixed limit for now, we should support paginated queries
            size: ::Gitlab::Elasticsearch::Logs::Lines::LOGS_LIMIT
          }

          unless cursor.nil?
            body[:search_after] = decode_cursor(cursor)
          end

          body
        end

        # Appends an exact-phrase match on the pod name to the bool query.
        def filter_pod_name(query, pod_name)
          # We can filter by "all pods" with a null pod_name
          return if pod_name.nil?

          query[:bool][:must] << {
            match_phrase: {
              "kubernetes.pod.name" => {
                query: pod_name
              }
            }
          }
        end

        # Namespace filtering is mandatory: it is always appended.
        def filter_namespace(query, namespace)
          query[:bool][:must] << {
            match_phrase: {
              "kubernetes.namespace" => {
                query: namespace
              }
            }
          }
        end

        def filter_container_name(query, container_name)
          # A pod can contain multiple containers.
          # By default we return logs from every container
          return if container_name.nil?

          query[:bool][:must] << {
            match_phrase: {
              "kubernetes.container.name" => {
                query: container_name
              }
            }
          }
        end

        # Appends a free-text search on the "message" field (AND semantics).
        def filter_search(query, search)
          return if search.nil?

          query[:bool][:must] << {
            simple_query_string: {
              query: search,
              fields: [:message],
              default_operator: :and
            }
          }
        end

        # Adds a half-open [start_time, end_time) range filter on @timestamp.
        # Either bound may be omitted.
        def filter_times(query, start_time, end_time)
          return unless start_time || end_time

          time_range = { range: { :@timestamp => {} } }.tap do |tr|
            tr[:range][:@timestamp][:gte] = start_time if start_time
            tr[:range][:@timestamp][:lt] = end_time if end_time
          end

          query[:bool][:filter] = [time_range]
        end

        # Converts the raw Elasticsearch response into { logs:, cursor: }.
        # The cursor is nil when the result set is empty.
        def format_response(response)
          results = response.fetch("hits", {}).fetch("hits", [])
          last_result = results.last

          results = results.map do |hit|
            {
              timestamp: hit["_source"]["@timestamp"],
              message: hit["_source"]["message"],
              pod: hit["_source"]["kubernetes"]["pod"]["name"]
            }
          end

          # we queried for the N-most recent records but we want them ordered oldest to newest
          {
            logs: results.reverse,
            cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
          }
        end

        # we want to hide the implementation details of the search_after parameter from the frontend
        # behind a single easily transmitted value
        def encode_cursor(obj)
          obj.join(',')
        end

        # NOTE(review): String#to_i maps non-numeric segments to 0, so a
        # malformed cursor such as "a,b" decodes to [0, 0] and passes
        # validation; only the shape (two integers) is enforced here.
        def decode_cursor(obj)
          cursor = obj.split(',').map(&:to_i)

          unless valid_cursor(cursor)
            raise InvalidCursor, "invalid cursor format"
          end

          cursor
        end

        # A valid cursor is exactly two Integers (timestamp + offset sort values).
        def valid_cursor(cursor)
          cursor.instance_of?(Array) &&
            cursor.length == 2 &&
            cursor.map {|i| i.instance_of?(Integer)}.reduce(:&)
        end
      end
    end
  end
end

View File

@ -1,70 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Elasticsearch
    module Logs
      # Lists the pods (and their containers) that have log entries in
      # Elasticsearch for a given Kubernetes namespace, using nested terms
      # aggregations instead of fetching any documents.
      class Pods
        # How many items to fetch in a query
        PODS_LIMIT = 500
        CONTAINERS_LIMIT = 500

        # client: an Elasticsearch client responding to #search(body:).
        def initialize(client)
          @client = client
        end

        # Returns [{ name: <pod name>, container_names: [<container>, ...] }, ...].
        def pods(namespace)
          response = @client.search body: build_body(namespace)

          format_response(response)
        end

        private

        # Builds an aggregation-only search body: pods bucketed by
        # kubernetes.pod.name, with a sub-aggregation per container name.
        def build_body(namespace)
          {
            aggs: {
              pods: {
                aggs: {
                  containers: {
                    terms: {
                      field: 'kubernetes.container.name',
                      size: CONTAINERS_LIMIT
                    }
                  }
                },
                terms: {
                  field: 'kubernetes.pod.name',
                  size: PODS_LIMIT
                }
              }
            },
            query: {
              bool: {
                must: {
                  match_phrase: {
                    "kubernetes.namespace": namespace
                  }
                }
              }
            },
            # don't populate hits, only the aggregation is needed
            size: 0
          }
        end

        # Flattens the aggregation buckets into plain name/container hashes.
        def format_response(response)
          pod_buckets = response.dig("aggregations", "pods", "buckets") || []

          pod_buckets.map do |pod_bucket|
            container_buckets = pod_bucket.dig("containers", "buckets") || []

            {
              name: pod_bucket["key"],
              container_names: container_buckets.map { |container_bucket| container_bucket["key"] }
            }
          end
        end
      end
    end
  end
end

View File

@ -708,6 +708,9 @@ msgstr ""
msgid "%{labelStart}Crash Address:%{labelEnd} %{crash_address}"
msgstr ""
msgid "%{labelStart}Crash State:%{labelEnd} %{crash_state}"
msgstr ""
msgid "%{labelStart}Crash State:%{labelEnd} %{stacktrace_snippet}"
msgstr ""
@ -8252,6 +8255,123 @@ msgstr ""
msgid "Cloud Storage"
msgstr ""
msgid "CloudSeed|All"
msgstr ""
msgid "CloudSeed|AlloyDB for Postgres"
msgstr ""
msgid "CloudSeed|Available database services through which instances may be created"
msgstr ""
msgid "CloudSeed|Cancel"
msgstr ""
msgid "CloudSeed|Cloud Firestore"
msgstr ""
msgid "CloudSeed|Cloud SQL for MySQL"
msgstr ""
msgid "CloudSeed|Cloud SQL for Postgres"
msgstr ""
msgid "CloudSeed|Cloud SQL for SQL Server"
msgstr ""
msgid "CloudSeed|CloudSQL Instance"
msgstr ""
msgid "CloudSeed|Create cluster"
msgstr ""
msgid "CloudSeed|Create database"
msgstr ""
msgid "CloudSeed|Create instance"
msgstr ""
msgid "CloudSeed|Database instance is generated within the selected Google Cloud project"
msgstr ""
msgid "CloudSeed|Database instances associated with this project"
msgstr ""
msgid "CloudSeed|Database version"
msgstr ""
msgid "CloudSeed|Description"
msgstr ""
msgid "CloudSeed|Determines memory and virtual cores available to your instance"
msgstr ""
msgid "CloudSeed|Enhance security by storing database variables in secret managers - learn more about %{docLinkStart}secret management with GitLab%{docLinkEnd}"
msgstr ""
msgid "CloudSeed|Environment"
msgstr ""
msgid "CloudSeed|Flexible, scalable NoSQL cloud database for client- and server-side development"
msgstr ""
msgid "CloudSeed|Fully managed PostgreSQL-compatible service for high-demand workloads"
msgstr ""
msgid "CloudSeed|Fully managed relational database service for MySQL"
msgstr ""
msgid "CloudSeed|Fully managed relational database service for PostgreSQL"
msgstr ""
msgid "CloudSeed|Fully managed relational database service for SQL Server"
msgstr ""
msgid "CloudSeed|Generated database instance is linked to the selected branch or tag"
msgstr ""
msgid "CloudSeed|Google Cloud Project"
msgstr ""
msgid "CloudSeed|Google Cloud project"
msgstr ""
msgid "CloudSeed|I accept Google Cloud pricing and responsibilities involved with managing database instances"
msgstr ""
msgid "CloudSeed|Instances"
msgstr ""
msgid "CloudSeed|Learn more about pricing for %{cloudsqlPricingStart}Cloud SQL%{cloudsqlPricingEnd}, %{alloydbPricingStart}Alloy DB%{alloydbPricingEnd}, %{memorystorePricingStart}Memorystore%{memorystorePricingEnd} and %{firestorePricingStart}Firestore%{firestorePricingEnd}."
msgstr ""
msgid "CloudSeed|Machine type"
msgstr ""
msgid "CloudSeed|Memorystore for Redis"
msgstr ""
msgid "CloudSeed|No instances"
msgstr ""
msgid "CloudSeed|Refs"
msgstr ""
msgid "CloudSeed|Scalable, secure, and highly available in-memory service for Redis"
msgstr ""
msgid "CloudSeed|Service"
msgstr ""
msgid "CloudSeed|Services"
msgstr ""
msgid "CloudSeed|There are no instances to display."
msgstr ""
msgid "CloudSeed|Version"
msgstr ""
msgid "Cluster"
msgstr ""
@ -8261,9 +8381,6 @@ msgstr ""
msgid "Cluster cache cleared."
msgstr ""
msgid "Cluster does not exist"
msgstr ""
msgid "Cluster is required for Stages::ClusterEndpointInserter"
msgstr ""
@ -9699,9 +9816,6 @@ msgstr ""
msgid "Container Scanning"
msgstr ""
msgid "Container does not exist"
msgstr ""
msgid "Container must be a project or a group."
msgstr ""
@ -13915,9 +14029,6 @@ msgstr ""
msgid "Elasticsearch reindexing was not started: %{errors}"
msgstr ""
msgid "Elasticsearch returned status code: %{status_code}"
msgstr ""
msgid "Elasticsearch zero-downtime reindexing"
msgstr ""
@ -14374,9 +14485,6 @@ msgstr ""
msgid "Environment"
msgstr ""
msgid "Environment does not have deployments"
msgstr ""
msgid "Environment is required for Stages::MetricEndpointInserter"
msgstr ""
@ -16889,9 +16997,6 @@ msgstr ""
msgid "Geo|Primary"
msgstr ""
msgid "Geo|Primary node"
msgstr ""
msgid "Geo|Primary site"
msgstr ""
@ -16991,9 +17096,6 @@ msgstr ""
msgid "Geo|Secondary"
msgstr ""
msgid "Geo|Secondary node"
msgstr ""
msgid "Geo|Secondary site"
msgstr ""
@ -20928,15 +21030,6 @@ msgstr ""
msgid "Invalid URL: %{url}"
msgstr ""
msgid "Invalid container_name"
msgstr ""
msgid "Invalid cursor parameter"
msgstr ""
msgid "Invalid cursor value provided"
msgstr ""
msgid "Invalid date"
msgstr ""
@ -20979,9 +21072,6 @@ msgstr ""
msgid "Invalid pin code."
msgstr ""
msgid "Invalid pod_name"
msgstr ""
msgid "Invalid policy type"
msgstr ""
@ -20994,15 +21084,9 @@ msgstr ""
msgid "Invalid rule"
msgstr ""
msgid "Invalid search parameter"
msgstr ""
msgid "Invalid server response"
msgstr ""
msgid "Invalid start or end time format"
msgstr ""
msgid "Invalid status"
msgstr ""
@ -22365,9 +22449,6 @@ msgstr ""
msgid "Kubernetes"
msgstr ""
msgid "Kubernetes API returned status code: %{error_code}"
msgstr ""
msgid "Kubernetes Cluster"
msgstr ""
@ -25251,9 +25332,6 @@ msgstr ""
msgid "Namespace ID:"
msgstr ""
msgid "Namespace is empty"
msgstr ""
msgid "Namespace:"
msgstr ""
@ -25726,9 +25804,6 @@ msgstr ""
msgid "No connection could be made to a Gitaly Server, please check your logs!"
msgstr ""
msgid "No containers available"
msgstr ""
msgid "No contributions"
msgstr ""
@ -25864,9 +25939,6 @@ msgstr ""
msgid "No plan"
msgstr ""
msgid "No pods available"
msgstr ""
msgid "No policy matches this license"
msgstr ""
@ -28875,12 +28947,6 @@ msgstr ""
msgid "Please wait while we import the repository for you. Refresh at will."
msgstr ""
msgid "Pod does not exist"
msgstr ""
msgid "Pod not found"
msgstr ""
msgid "Pods in use"
msgstr ""
@ -40769,9 +40835,6 @@ msgstr ""
msgid "Unable to collect memory info"
msgstr ""
msgid "Unable to connect to Elasticsearch"
msgstr ""
msgid "Unable to connect to Prometheus server"
msgstr ""
@ -40781,9 +40844,6 @@ msgstr ""
msgid "Unable to connect to the Jira instance. Please check your Jira integration configuration."
msgstr ""
msgid "Unable to convert Kubernetes logs encoding to UTF-8"
msgstr ""
msgid "Unable to create link to vulnerability"
msgstr ""
@ -40922,9 +40982,6 @@ msgstr ""
msgid "Unknown Error"
msgstr ""
msgid "Unknown cache key"
msgstr ""
msgid "Unknown encryption strategy: %{encrypted_strategy}!"
msgstr ""
@ -45280,12 +45337,6 @@ msgstr ""
msgid "container registry images"
msgstr ""
msgid "container_name can contain only lowercase letters, digits, '-', and '.' and must start and end with an alphanumeric character"
msgstr ""
msgid "container_name cannot be larger than %{max_length} chars"
msgstr ""
msgid "contains URLs that exceed the 1024 character limit (%{urls})"
msgstr ""
@ -46276,12 +46327,6 @@ msgstr ""
msgid "pipelineEditorWalkthrough|You can use the file tree to view your pipeline configuration files. %{linkStart}Learn more%{linkEnd}"
msgstr ""
msgid "pod_name can contain only lowercase letters, digits, '-', and '.' and must start and end with an alphanumeric character"
msgstr ""
msgid "pod_name cannot be larger than %{max_length} chars"
msgstr ""
msgid "point"
msgid_plural "points"
msgstr[0] ""

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true

# Factory for Projects::ImportExport::RelationExport — one per-relation
# export row belonging to a project export job.
FactoryBot.define do
  factory :project_relation_export, class: 'Projects::ImportExport::RelationExport' do
    project_export_job factory: :project_export_job
    # Name of the relation being exported; 'labels' is an arbitrary default.
    relation { 'labels' }
    # 0 is the initial status value (see the model's state definition).
    status { 0 }
    # Unique job id per record to satisfy any uniqueness expectations.
    sequence(:jid) { |n| "project_relation_export_#{n}" }
  end
end

Binary file not shown.

View File

@ -70,10 +70,10 @@ describe('AgentTable', () => {
});
it.each`
status | iconName | lineNumber
${'Never connected'} | ${'status-neutral'} | ${0}
${'Connected'} | ${'status-success'} | ${1}
${'Not connected'} | ${'severity-critical'} | ${2}
status | iconName | lineNumber
${'Never connected'} | ${'status-neutral'} | ${0}
${'Connected'} | ${'status-success'} | ${1}
${'Not connected'} | ${'status-alert'} | ${2}
`(
'displays agent connection status as "$status" at line $lineNumber',
({ status, iconName, lineNumber }) => {

View File

@ -0,0 +1,103 @@
import { GlFormCheckbox } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import InstanceForm from '~/google_cloud/components/cloudsql/create_instance_form.vue';

// Specs for the CloudSQL "create instance" form: asserts that each form
// group, dropdown, action button and the pricing-confirmation checkbox is
// rendered with the labels/descriptions declared in the component's i18n.
describe('google_cloud::cloudsql::create_instance_form component', () => {
  let wrapper;

  // Finder helpers; `wrapper` is (re)created in beforeEach.
  const findByTestId = (id) => wrapper.findByTestId(id);
  const findCancelButton = () => findByTestId('cancel-button');
  const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
  const findHeader = () => wrapper.find('header');
  const findSubmitButton = () => findByTestId('submit-button');

  // Minimal props: empty option lists suffice because only labels and
  // structure are asserted, not option rendering.
  const propsData = {
    gcpProjects: [],
    refs: [],
    cancelPath: '#cancel-url',
    formTitle: 'mock form title',
    formDescription: 'mock form description',
    databaseVersions: [],
    tiers: [],
  };

  beforeEach(() => {
    // GlFormCheckbox is rendered for real (not auto-stubbed) so its slot
    // text and attributes can be asserted below.
    wrapper = shallowMountExtended(InstanceForm, { propsData, stubs: { GlFormCheckbox } });
  });

  afterEach(() => {
    wrapper.destroy();
  });

  it('contains header', () => {
    expect(findHeader().exists()).toBe(true);
  });

  it('contains GCP project form group', () => {
    const formGroup = findByTestId('form_group_gcp_project');
    expect(formGroup.exists()).toBe(true);
    expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.gcpProjectLabel);
    expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.gcpProjectDescription);
  });

  it('contains GCP project dropdown', () => {
    const select = findByTestId('select_gcp_project');
    expect(select.exists()).toBe(true);
  });

  it('contains Environments form group', () => {
    const formGroup = findByTestId('form_group_environments');
    expect(formGroup.exists()).toBe(true);
    expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.refsLabel);
    expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.refsDescription);
  });

  it('contains Environments dropdown', () => {
    const select = findByTestId('select_environments');
    expect(select.exists()).toBe(true);
  });

  it('contains Tier form group', () => {
    const formGroup = findByTestId('form_group_tier');
    expect(formGroup.exists()).toBe(true);
    expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.tierLabel);
    expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.tierDescription);
  });

  it('contains Tier dropdown', () => {
    const select = findByTestId('select_tier');
    expect(select.exists()).toBe(true);
  });

  it('contains Database Version form group', () => {
    const formGroup = findByTestId('form_group_database_version');
    expect(formGroup.exists()).toBe(true);
    expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.databaseVersionLabel);
  });

  it('contains Database Version dropdown', () => {
    const select = findByTestId('select_database_version');
    expect(select.exists()).toBe(true);
  });

  it('contains Submit button', () => {
    expect(findSubmitButton().exists()).toBe(true);
    expect(findSubmitButton().text()).toBe(InstanceForm.i18n.submitLabel);
  });

  it('contains Cancel button', () => {
    expect(findCancelButton().exists()).toBe(true);
    expect(findCancelButton().text()).toBe(InstanceForm.i18n.cancelLabel);
    expect(findCancelButton().attributes('href')).toBe('#cancel-url');
  });

  it('contains Confirmation checkbox', () => {
    const checkbox = findCheckbox();
    expect(checkbox.text()).toBe(InstanceForm.i18n.checkboxLabel);
  });

  it('checkbox must be required', () => {
    const checkbox = findCheckbox();
    // vue-test-utils reports boolean attributes as the string 'true'.
    expect(checkbox.attributes('required')).toBe('true');
  });
});

View File

@ -0,0 +1,65 @@
import { shallowMount } from '@vue/test-utils';
import { GlEmptyState, GlTable } from '@gitlab/ui';
import InstanceTable from '~/google_cloud/components/cloudsql/instance_table.vue';

// Specs for the CloudSQL instance table: an empty state is shown when there
// are no instances, and a table is rendered when instances are provided.
// NOTE(review): the describe label says "service_table" but this file tests
// InstanceTable — confirm and align the label with the component under test.
describe('google_cloud::databases::service_table component', () => {
  let wrapper;

  const findEmptyState = () => wrapper.findComponent(GlEmptyState);
  const findTable = () => wrapper.findComponent(GlTable);

  afterEach(() => {
    wrapper.destroy();
  });

  describe('when there are no instances', () => {
    beforeEach(() => {
      const propsData = {
        cloudsqlInstances: [],
        emptyIllustrationUrl: '#empty-illustration-url',
      };
      wrapper = shallowMount(InstanceTable, { propsData });
    });

    it('should depict empty state', () => {
      const emptyState = findEmptyState();
      expect(emptyState.exists()).toBe(true);
      expect(emptyState.attributes('title')).toBe(InstanceTable.i18n.noInstancesTitle);
      expect(emptyState.attributes('description')).toBe(InstanceTable.i18n.noInstancesDescription);
    });
  });

  describe('when there are three instances', () => {
    beforeEach(() => {
      // Instance fixtures mirror the API payload shape (snake_case keys).
      const propsData = {
        cloudsqlInstances: [
          {
            ref: '*',
            gcp_project: 'test-gcp-project',
            instance_name: 'postgres-14-instance',
            version: 'POSTGRES_14',
          },
          {
            ref: 'production',
            gcp_project: 'prod-gcp-project',
            instance_name: 'postgres-14-instance',
            version: 'POSTGRES_14',
          },
          {
            ref: 'staging',
            gcp_project: 'test-gcp-project',
            instance_name: 'postgres-14-instance',
            version: 'POSTGRES_14',
          },
        ],
        emptyIllustrationUrl: '#empty-illustration-url',
      };
      wrapper = shallowMount(InstanceTable, { propsData });
    });

    it('should contain a table', () => {
      const table = findTable();
      expect(table.exists()).toBe(true);
    });
  });
});

View File

@ -0,0 +1,44 @@
import { GlTable } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import ServiceTable from '~/google_cloud/components/databases/service_table.vue';

// Specs for the database services table: the table renders, and each
// service row exposes a button linking to its create-instance URL prop.
describe('google_cloud::databases::service_table component', () => {
  let wrapper;

  const serviceUrls = {
    cloudsqlPostgresUrl: '#url-cloudsql-postgres',
    cloudsqlMysqlUrl: '#url-cloudsql-mysql',
    cloudsqlSqlserverUrl: '#url-cloudsql-sqlserver',
    alloydbPostgresUrl: '#url-alloydb-postgres',
    memorystoreRedisUrl: '#url-memorystore-redis',
    firestoreUrl: '#url-firestore',
  };

  const findServiceTable = () => wrapper.findComponent(GlTable);

  beforeEach(() => {
    wrapper = mountExtended(ServiceTable, { propsData: serviceUrls });
  });

  afterEach(() => {
    wrapper.destroy();
  });

  it('should contain a table', () => {
    expect(findServiceTable().exists()).toBe(true);
  });

  it.each`
    name                     | testId                          | url
    ${'cloudsql-postgres'}   | ${'button-cloudsql-postgres'}   | ${'#url-cloudsql-postgres'}
    ${'cloudsql-mysql'}      | ${'button-cloudsql-mysql'}      | ${'#url-cloudsql-mysql'}
    ${'cloudsql-sqlserver'}  | ${'button-cloudsql-sqlserver'}  | ${'#url-cloudsql-sqlserver'}
    ${'alloydb-postgres'}    | ${'button-alloydb-postgres'}    | ${'#url-alloydb-postgres'}
    ${'memorystore-redis'}   | ${'button-memorystore-redis'}   | ${'#url-memorystore-redis'}
    ${'firestore'}           | ${'button-firestore'}           | ${'#url-firestore'}
  `('renders $name button with correct url', ({ testId, url }) => {
    const serviceButton = wrapper.findByTestId(testId);

    expect(serviceButton.exists()).toBe(true);
    expect(serviceButton.attributes('href')).toBe(url);
  });
});

View File

@ -42,12 +42,36 @@ describe('App component', () => {
let wrapper;
let userCalloutDismissSpy;
const createComponent = ({ shouldShowCallout = true, ...propsData }) => {
const securityFeaturesMock = [
{
name: SAST_NAME,
shortName: SAST_SHORT_NAME,
description: SAST_DESCRIPTION,
helpPath: SAST_HELP_PATH,
configurationHelpPath: SAST_CONFIG_HELP_PATH,
type: REPORT_TYPE_SAST,
available: true,
},
];
const complianceFeaturesMock = [
{
name: LICENSE_COMPLIANCE_NAME,
description: LICENSE_COMPLIANCE_DESCRIPTION,
helpPath: LICENSE_COMPLIANCE_HELP_PATH,
type: REPORT_TYPE_LICENSE_COMPLIANCE,
configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH,
},
];
const createComponent = ({ shouldShowCallout = true, ...propsData } = {}) => {
userCalloutDismissSpy = jest.fn();
wrapper = extendedWrapper(
mount(SecurityConfigurationApp, {
propsData: {
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
securityTrainingEnabled: true,
...propsData,
},
@ -108,38 +132,13 @@ describe('App component', () => {
const findAutoDevopsEnabledAlert = () => wrapper.findComponent(AutoDevopsEnabledAlert);
const findVulnerabilityManagementTab = () => wrapper.findByTestId('vulnerability-management-tab');
const securityFeaturesMock = [
{
name: SAST_NAME,
shortName: SAST_SHORT_NAME,
description: SAST_DESCRIPTION,
helpPath: SAST_HELP_PATH,
configurationHelpPath: SAST_CONFIG_HELP_PATH,
type: REPORT_TYPE_SAST,
available: true,
},
];
const complianceFeaturesMock = [
{
name: LICENSE_COMPLIANCE_NAME,
description: LICENSE_COMPLIANCE_DESCRIPTION,
helpPath: LICENSE_COMPLIANCE_HELP_PATH,
type: REPORT_TYPE_LICENSE_COMPLIANCE,
configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH,
},
];
afterEach(() => {
wrapper.destroy();
});
describe('basic structure', () => {
beforeEach(async () => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
});
beforeEach(() => {
createComponent();
});
it('renders main-heading with correct text', () => {
@ -199,10 +198,7 @@ describe('App component', () => {
describe('Manage via MR Error Alert', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
});
createComponent();
});
describe('on initial load', () => {
@ -238,8 +234,6 @@ describe('App component', () => {
describe('given the right props', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
autoDevopsEnabled: false,
gitlabCiPresent: false,
canEnableAutoDevops: true,
@ -261,10 +255,7 @@ describe('App component', () => {
describe('given the wrong props', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
});
createComponent();
});
it('should not show AutoDevopsAlert', () => {
expect(findAutoDevopsAlert().exists()).toBe(false);
@ -289,8 +280,6 @@ describe('App component', () => {
}
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
autoDevopsEnabled,
});
});
@ -348,7 +337,6 @@ describe('App component', () => {
describe('given at least one unavailable feature', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)),
});
});
@ -369,7 +357,6 @@ describe('App component', () => {
describe('given at least one unavailable feature, but banner is already dismissed', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)),
shouldShowCallout: false,
});
@ -397,8 +384,6 @@ describe('App component', () => {
describe('when given latestPipelinePath props', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
latestPipelinePath: 'test/path',
});
});
@ -425,8 +410,6 @@ describe('App component', () => {
describe('given gitlabCiPresent & gitlabCiHistoryPath props', () => {
beforeEach(() => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
gitlabCiPresent: true,
gitlabCiHistoryPath,
});
@ -446,8 +429,6 @@ describe('App component', () => {
beforeEach(async () => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
...props,
});
});

View File

@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
import { escape } from 'lodash';
import ItemTitle from '~/work_items/components/item_title.vue';
jest.mock('lodash/escape', () => jest.fn((fn) => fn));
@ -51,6 +50,5 @@ describe('ItemTitle', () => {
await findInputEl().trigger(sourceEvent);
expect(wrapper.emitted(eventName)).toBeTruthy();
expect(escape).toHaveBeenCalledWith(mockUpdatedTitle);
});
});

View File

@ -1,21 +1,51 @@
import { shallowMount } from '@vue/test-utils';
import { GlForm, GlFormInput } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mockTracking } from 'helpers/tracking_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { __ } from '~/locale';
import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql';
describe('WorkItemAssignees component', () => {
describe('WorkItemWeight component', () => {
let wrapper;
const createComponent = ({ weight, hasIssueWeightsFeature = true } = {}) => {
wrapper = shallowMount(WorkItemWeight, {
const mutateSpy = jest.fn();
const workItemId = 'gid://gitlab/WorkItem/1';
const workItemType = 'Task';
const findForm = () => wrapper.findComponent(GlForm);
const findInput = () => wrapper.findComponent(GlFormInput);
const createComponent = ({
canUpdate = false,
hasIssueWeightsFeature = true,
isEditing = false,
weight,
} = {}) => {
wrapper = mountExtended(WorkItemWeight, {
propsData: {
canUpdate,
weight,
workItemId,
workItemType,
},
provide: {
hasIssueWeightsFeature,
},
mocks: {
$apollo: {
mutate: mutateSpy,
},
},
});
if (isEditing) {
findInput().vm.$emit('focus');
}
};
describe('weight licensed feature', () => {
describe('`issue_weights` licensed feature', () => {
describe.each`
description | hasIssueWeightsFeature | exists
${'when available'} | ${true} | ${true}
@ -24,23 +54,111 @@ describe('WorkItemAssignees component', () => {
it(hasIssueWeightsFeature ? 'renders component' : 'does not render component', () => {
createComponent({ hasIssueWeightsFeature });
expect(wrapper.find('div').exists()).toBe(exists);
expect(findForm().exists()).toBe(exists);
});
});
});
describe('weight text', () => {
describe.each`
description | weight | text
${'renders 1'} | ${1} | ${'1'}
${'renders 0'} | ${0} | ${'0'}
${'renders None'} | ${null} | ${'None'}
${'renders None'} | ${undefined} | ${'None'}
`('when weight is $weight', ({ description, weight, text }) => {
it(description, () => {
createComponent({ weight });
describe('weight input', () => {
it('has "Weight" label', () => {
createComponent();
expect(wrapper.text()).toContain(text);
expect(wrapper.findByLabelText(__('Weight')).exists()).toBe(true);
});
describe('placeholder attribute', () => {
describe.each`
description | isEditing | canUpdate | value
${'when not editing and cannot update'} | ${false} | ${false} | ${__('None')}
${'when editing and cannot update'} | ${true} | ${false} | ${__('None')}
${'when not editing and can update'} | ${false} | ${true} | ${__('None')}
${'when editing and can update'} | ${true} | ${true} | ${__('Enter a number')}
`('$description', ({ isEditing, canUpdate, value }) => {
it(`has a value of "${value}"`, async () => {
createComponent({ canUpdate, isEditing });
await nextTick();
expect(findInput().attributes('placeholder')).toBe(value);
});
});
});
describe('readonly attribute', () => {
describe.each`
description | canUpdate | value
${'when cannot update'} | ${false} | ${'readonly'}
${'when can update'} | ${true} | ${undefined}
`('$description', ({ canUpdate, value }) => {
it(`renders readonly=${value}`, () => {
createComponent({ canUpdate });
expect(findInput().attributes('readonly')).toBe(value);
});
});
});
describe('type attribute', () => {
describe.each`
description | isEditing | canUpdate | type
${'when not editing and cannot update'} | ${false} | ${false} | ${'text'}
${'when editing and cannot update'} | ${true} | ${false} | ${'text'}
${'when not editing and can update'} | ${false} | ${true} | ${'text'}
${'when editing and can update'} | ${true} | ${true} | ${'number'}
`('$description', ({ isEditing, canUpdate, type }) => {
it(`has a value of "${type}"`, async () => {
createComponent({ canUpdate, isEditing });
await nextTick();
expect(findInput().attributes('type')).toBe(type);
});
});
});
describe('value attribute', () => {
describe.each`
weight | value
${1} | ${'1'}
${0} | ${'0'}
${null} | ${''}
${undefined} | ${''}
`('when `weight` prop is "$weight"', ({ weight, value }) => {
it(`value is "${value}"`, () => {
createComponent({ weight });
expect(findInput().element.value).toBe(value);
});
});
});
describe('when blurred', () => {
it('calls a mutation to update the weight', () => {
const weight = 0;
createComponent({ isEditing: true, weight });
findInput().trigger('blur');
expect(mutateSpy).toHaveBeenCalledWith({
mutation: localUpdateWorkItemMutation,
variables: {
input: {
id: workItemId,
weight,
},
},
});
});
it('tracks updating the weight', () => {
const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
createComponent();
findInput().trigger('blur');
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_weight', {
category: TRACKING_CATEGORY_SHOW,
label: 'item_weight',
property: 'type_Task',
});
});
});
});

View File

@ -129,6 +129,14 @@ RSpec.describe Gitlab::Diff::File do
expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile)
end
context 'when collapsed' do
it 'is nil' do
expect(diff).to receive(:collapsed?).and_return(true)
expect(diff_file.rendered).to be_nil
end
end
context 'when too large' do
it 'is nil' do
expect(diff).to receive(:too_large?).and_return(true)

View File

@ -3,17 +3,14 @@
require 'spec_helper'
# Model spec for ProjectExportJob.
#
# Defect fixed: the span contained merge/diff residue — every assertion was
# present twice, once in the old `expect(job1).to ...` form and once in the
# new implicit-subject `is_expected.to ...` form, together with `let!` fixtures
# referenced only by the removed form. Keep a single copy of each assertion in
# the idiomatic implicit-subject style (shoulda-matchers work against
# `described_class.new`, so no persisted fixtures are needed here).
RSpec.describe ProjectExportJob, type: :model do
  describe 'associations' do
    it { is_expected.to belong_to(:project) }
    it { is_expected.to have_many(:relation_exports) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:project) }
    it { is_expected.to validate_presence_of(:jid) }
    it { is_expected.to validate_presence_of(:status) }
  end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
require 'spec_helper'
# Model spec for RelationExport: one exported relation (e.g. labels, issues)
# belonging to a project export job.
RSpec.describe Projects::ImportExport::RelationExport, type: :model do
  # Persisted record so validate_uniqueness_of has an existing row to compare against.
  subject { create(:project_relation_export) }

  describe 'associations' do
    it { is_expected.to belong_to(:project_export_job) }
    it { is_expected.to have_one(:upload) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:project_export_job) }
    it { is_expected.to validate_presence_of(:relation) }
    # A given relation may only be exported once per export job.
    it { is_expected.to validate_uniqueness_of(:relation).scoped_to(:project_export_job_id) }
    it { is_expected.to validate_presence_of(:status) }
    it { is_expected.to validate_numericality_of(:status).only_integer }
    it { is_expected.to validate_length_of(:relation).is_at_most(255) }
    it { is_expected.to validate_length_of(:jid).is_at_most(255) }
    it { is_expected.to validate_length_of(:export_error).is_at_most(300) }
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
require 'spec_helper'
# Model spec for RelationExportUpload: the carrierwave-backed upload record
# holding the archive produced for a single exported relation.
#
# Defect fixed: both string interpolations had been mangled to the literal
# `#(unknown)` (extraction artifact). Restore `#{filename}` — the local
# `filename = 'labels.tar.gz'` is defined and otherwise unused, and both the
# fixture path and the expected URL must end in that filename for the
# `export_file.url` expectation to hold.
RSpec.describe Projects::ImportExport::RelationExportUpload, type: :model do
  subject { described_class.new(relation_export: project_relation_export) }

  let_it_be(:project_relation_export) { create(:project_relation_export) }

  describe 'associations' do
    it { is_expected.to belong_to(:relation_export) }
  end

  it 'stores export file' do
    # Force local storage so the expected URL is the predictable /uploads/... path.
    stub_uploads_object_storage(ImportExportUploader, enabled: false)
    filename = 'labels.tar.gz'

    subject.export_file = fixture_file_upload("spec/fixtures/gitlab/import_export/#{filename}")
    subject.save!

    url = "/uploads/-/system/projects/import_export/relation_export_upload/export_file/#{subject.id}/#{filename}"

    expect(subject.export_file.url).to eq(url)
  end
end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec::Matchers.define :publish_event do |expected_event_class|
include RSpec::Matchers::Composable
supports_block_expectations
match do |proc|
@ -15,10 +17,17 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
proc.call
@events.any? do |event|
event.instance_of?(expected_event_class) && event.data == @expected_data
event.instance_of?(expected_event_class) && match_data?(event.data, @expected_data)
end
end
def match_data?(actual, expected)
values_match?(actual.keys, expected.keys) &&
actual.keys.each do |key|
values_match?(actual[key], expected[key])
end
end
chain :with do |expected_data|
@expected_data = expected_data
end