Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-02-05 12:09:15 +00:00
parent 26384c9a61
commit 20d564f106
73 changed files with 1655 additions and 363 deletions

View File

@ -29,7 +29,7 @@
extends:
- .default-tags
- .default-retry
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-qa-alpine
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-qa-alpine-ruby-2.6
services:
- docker:19.03.0-dind
tags:

View File

@ -0,0 +1,103 @@
<script>
import { GlResizeObserverDirective } from '@gitlab/ui';
import { GlStackedColumnChart } from '@gitlab/ui/dist/charts';
import { getSvgIconPathContent } from '~/lib/utils/icon_utils';
import { chartHeight } from '../../constants';
import { graphDataValidatorForValues } from '../../utils';
export default {
components: {
GlStackedColumnChart,
},
directives: {
GlResizeObserverDirective,
},
props: {
graphData: {
type: Object,
required: true,
validator: graphDataValidatorForValues.bind(null, false),
},
},
data() {
return {
width: 0,
height: chartHeight,
svgs: {},
};
},
computed: {
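// Each metric becomes one series: take the [timestamp, value] pairs of its first
// result and keep only the values.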
chartData() {
return this.graphData.metrics.map(metric => metric.result[0].values.map(val => val[1]));
},
xAxisTitle() {
return this.graphData.x_label !== undefined ? this.graphData.x_label : '';
},
yAxisTitle() {
return this.graphData.y_label !== undefined ? this.graphData.y_label : '';
},
xAxisType() {
return this.graphData.x_type !== undefined ? this.graphData.x_type : 'category';
},
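// The group-by (category) axis is derived from the timestamps of the first metric's first result.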
groupBy() {
return this.graphData.metrics[0].result[0].values.map(val => val[0]);
},
dataZoomConfig() {
const handleIcon = this.svgs['scroll-handle'];
return handleIcon ? { handleIcon } : {};
},
chartOptions() {
return {
dataZoom: this.dataZoomConfig,
};
},
seriesNames() {
return this.graphData.metrics.map(metric => metric.series_name);
},
},
created() {
this.setSvg('scroll-handle');
},
methods: {
setSvg(name) {
getSvgIconPathContent(name)
.then(path => {
if (path) {
this.$set(this.svgs, name, `path://${path}`);
}
})
.catch(e => {
// eslint-disable-next-line no-console, @gitlab/i18n/no-non-i18n-strings
console.error('SVG could not be rendered correctly: ', e);
});
},
onResize() {
if (!this.$refs.chart) return;
const { width } = this.$refs.chart.$el.getBoundingClientRect();
this.width = width;
},
},
};
</script>
<template>
<div v-gl-resize-observer-directive="onResize" class="prometheus-graph">
<div class="prometheus-graph-header">
<h5 ref="graphTitle" class="prometheus-graph-title">{{ graphData.title }}</h5>
<div ref="graphWidgets" class="prometheus-graph-widgets"><slot></slot></div>
</div>
<gl-stacked-column-chart
ref="chart"
v-bind="$attrs"
:data="chartData"
:option="chartOptions"
:x-axis-title="xAxisTitle"
:y-axis-title="yAxisTitle"
:x-axis-type="xAxisType"
:group-by="groupBy"
:width="width"
:height="height"
:series-names="seriesNames"
/>
</div>
</template>

View File

@ -15,6 +15,7 @@ import MonitorAnomalyChart from './charts/anomaly.vue';
import MonitorSingleStatChart from './charts/single_stat.vue';
import MonitorHeatmapChart from './charts/heatmap.vue';
import MonitorColumnChart from './charts/column.vue';
import MonitorStackedColumnChart from './charts/stacked_column.vue';
import MonitorEmptyChart from './charts/empty_chart.vue';
import TrackEventDirective from '~/vue_shared/directives/track_event';
import { downloadCSVOptions, generateLinkToChartOptions } from '../utils';
@ -24,6 +25,7 @@ export default {
MonitorSingleStatChart,
MonitorColumnChart,
MonitorHeatmapChart,
MonitorStackedColumnChart,
MonitorEmptyChart,
Icon,
GlDropdown,
@ -121,6 +123,10 @@ export default {
v-else-if="isPanelType('column') && graphDataHasMetrics"
:graph-data="graphData"
/>
<monitor-stacked-column-chart
v-else-if="isPanelType('stacked-column') && graphDataHasMetrics"
:graph-data="graphData"
/>
<component
:is="monitorChartComponent"
v-else-if="graphDataHasMetrics"

View File

@ -160,6 +160,11 @@ $ide-commit-header-height: 48px;
height: 0;
}
// stylelint-disable selector-class-pattern
// stylelint-disable selector-max-compound-selectors
// stylelint-disable stylelint-gitlab/duplicate-selectors
// stylelint-disable stylelint-gitlab/utility-classes
.blob-editor-container {
flex: 1;
height: 0;
@ -301,6 +306,11 @@ $ide-commit-header-height: 48px;
}
}
// stylelint-enable selector-class-pattern
// stylelint-enable selector-max-compound-selectors
// stylelint-enable stylelint-gitlab/duplicate-selectors
// stylelint-enable stylelint-gitlab/utility-classes
.preview-container {
flex-grow: 1;
position: relative;

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
module TimeFrameFilter
def by_timeframe(items)
return items unless params[:start_date] && params[:end_date]
start_date = params[:start_date].to_date
end_date = params[:end_date].to_date
items.within_timeframe(start_date, end_date)
rescue ArgumentError
items
end
end

View File

@ -11,6 +11,7 @@
class MilestonesFinder
include FinderMethods
include TimeFrameFilter
attr_reader :params
@ -24,6 +25,7 @@ class MilestonesFinder
items = by_title(items)
items = by_search_title(items)
items = by_state(items)
items = by_timeframe(items)
order(items)
end
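A minimal usage sketch (illustrative, not part of this commit; the group and date values are hypothetical). `by_timeframe` is a no-op unless both dates are supplied, and it delegates to the `Milestone.within_timeframe` scope added further down in this commit:

```ruby
# Hypothetical example: group milestones overlapping a one-week window.
finder = MilestonesFinder.new(
  group_ids: group.id,
  state: 'all',
  start_date: Date.parse('2020-02-01'),
  end_date: Date.parse('2020-02-07')
)
finder.execute # => milestones whose start_date/due_date range overlaps the window
```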

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
module TimeFrameArguments
extend ActiveSupport::Concern
included do
argument :start_date, Types::TimeType,
required: false,
description: 'List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)'
argument :end_date, Types::TimeType,
required: false,
description: 'List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)'
end
def validate_timeframe_params!(args)
return unless args[:start_date].present? || args[:end_date].present?
error_message =
if args[:start_date].nil? || args[:end_date].nil?
"Both startDate and endDate must be present."
elsif args[:start_date] > args[:end_date]
"startDate is after endDate"
end
if error_message
raise Gitlab::Graphql::Errors::ArgumentError, error_message
end
end
end

View File

@ -0,0 +1,50 @@
# frozen_string_literal: true
module Resolvers
class MilestoneResolver < BaseResolver
include Gitlab::Graphql::Authorize::AuthorizeResource
include TimeFrameArguments
argument :state, Types::MilestoneStateEnum,
required: false,
description: 'Filter milestones by state'
type Types::MilestoneType, null: true
def resolve(**args)
validate_timeframe_params!(args)
authorize!
MilestonesFinder.new(milestones_finder_params(args)).execute
end
private
def milestones_finder_params(args)
{
state: args[:state] || 'all',
start_date: args[:start_date],
end_date: args[:end_date]
}.merge(parent_id_parameter)
end
def parent
@parent ||= object.respond_to?(:sync) ? object.sync : object
end
def parent_id_parameter
if parent.is_a?(Group)
{ group_ids: parent.id }
elsif parent.is_a?(Project)
{ project_ids: parent.id }
end
end
# MilestonesFinder does not check for current_user permissions,
# so for now we need to keep it here.
def authorize!
Ability.allowed?(context[:current_user], :read_milestone, parent) || raise_resource_not_available_error!
end
end
end

View File

@ -42,6 +42,10 @@ module Types
field :parent, GroupType, null: true,
description: 'Parent group',
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchModelLoader.new(Group, obj.parent_id).find }
field :milestones, Types::MilestoneType.connection_type, null: true,
description: 'Find milestones',
resolver: Resolvers::MilestoneResolver
end
end
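A sketch of how the new field might be queried through the Ruby schema (illustrative only; the group path, dates, and `user` are hypothetical):

```ruby
query = <<~GRAPHQL
  {
    group(fullPath: "my-group") {
      milestones(state: active, startDate: "2020-01-01", endDate: "2020-03-31") {
        nodes { title state startDate dueDate webPath }
      }
    }
  }
GRAPHQL

# The resolver authorizes :read_milestone on the group and raises a
# resource-not-available error for users without access.
GitlabSchema.execute(query, context: { current_user: user })
```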

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
module Types
class MilestoneStateEnum < BaseEnum
value 'active'
value 'closed'
end
end

View File

@ -3,25 +3,36 @@
module Types
class MilestoneType < BaseObject
graphql_name 'Milestone'
description 'Represents a milestone.'
present_using MilestonePresenter
authorize :read_milestone
field :id, GraphQL::ID_TYPE, null: false,
description: 'ID of the milestone'
field :description, GraphQL::STRING_TYPE, null: true,
description: 'Description of the milestone'
field :title, GraphQL::STRING_TYPE, null: false,
description: 'Title of the milestone'
field :state, GraphQL::STRING_TYPE, null: false,
field :description, GraphQL::STRING_TYPE, null: true,
description: 'Description of the milestone'
field :state, Types::MilestoneStateEnum, null: false,
description: 'State of the milestone'
field :web_path, GraphQL::STRING_TYPE, null: false, method: :milestone_path,
description: 'Web path of the milestone'
field :due_date, Types::TimeType, null: true,
description: 'Timestamp of the milestone due date'
field :start_date, Types::TimeType, null: true,
description: 'Timestamp of the milestone start date'
field :created_at, Types::TimeType, null: false,
description: 'Timestamp of milestone creation'
field :updated_at, Types::TimeType, null: false,
description: 'Timestamp of last milestone update'
end

View File

@ -86,19 +86,6 @@ module SearchHelper
}).html_safe
end
def find_project_for_result_blob(projects, result)
@project
end
# Used in EE
def blob_projects(results)
nil
end
def parse_search_result(result)
result
end
# Overriden in EE
def search_blob_title(project, path)
path

View File

@ -59,6 +59,12 @@ class Milestone < ApplicationRecord
where(project_id: projects).or(where(group_id: groups))
end
scope :within_timeframe, -> (start_date, end_date) do
where('start_date is not NULL or due_date is not NULL')
.where('start_date is NULL or start_date <= ?', end_date)
.where('due_date is NULL or due_date >= ?', start_date)
end
scope :order_by_name_asc, -> { order(Arel::Nodes::Ascending.new(arel_table[:title].lower)) }
scope :reorder_by_due_date_asc, -> { reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC')) }

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class MilestonePresenter < Gitlab::View::Presenter::Delegated
presents :milestone
def milestone_path
url_builder.milestone_path(milestone)
end
private
def url_builder
@url_builder ||= Gitlab::UrlBuilder.new(milestone)
end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
module Ci
class CreateJobArtifactsService
ArtifactsExistError = Class.new(StandardError)
def execute(job, artifacts_file, params, metadata_file: nil)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.job_artifacts.build(
project: job.project,
file: artifacts_file,
file_type: params['artifact_type'],
file_format: params['artifact_format'],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
if metadata_file
job.job_artifacts.build(
project: job.project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
end
job.update(artifacts_expire_in: expire_in)
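# A concurrent upload of the same artifact type violates the uniqueness constraint.
# When the stored artifact has the same SHA256 the re-upload is treated as a
# successful no-op; otherwise the error is tracked and surfaced on the job.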
rescue ActiveRecord::RecordNotUnique => error
return true if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
uploading_type: params['artifact_type']
)
job.errors.add(:base, 'another artifact of the same type already exists')
false
end
private
def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
end
end

View File

@ -32,8 +32,7 @@
.term
= render 'shared/projects/list', projects: @search_objects, pipeline_status: false
- else
- locals = { projects: blob_projects(@search_objects) } if %w[blobs wiki_blobs].include?(@scope)
= render partial: "search/results/#{@scope.singularize}", collection: @search_objects, locals: locals
= render partial: "search/results/#{@scope.singularize}", collection: @search_objects
- if @scope != 'projects'
= paginate_collection(@search_objects)

View File

@ -1,7 +1,5 @@
- project = find_project_for_result_blob(projects, blob)
- project = blob.project
- return unless project
- blob = parse_search_result(blob)
- blob_link = project_blob_path(project, tree_join(blob.ref, blob.path))
= render partial: 'search/results/blob_data', locals: { blob: blob, project: project, path: blob.path, blob_link: blob_link }

View File

@ -1,5 +1,4 @@
- project = find_project_for_result_blob(projects, wiki_blob)
- wiki_blob = parse_search_result(wiki_blob)
- project = wiki_blob.project
- wiki_blob_link = project_wiki_path(project, wiki_blob.basename)
= render partial: 'search/results/blob_data', locals: { blob: wiki_blob, project: project, path: wiki_blob.path, blob_link: wiki_blob_link }

View File

@ -0,0 +1,5 @@
---
title: Add emails_disabled to projects API
merge_request: 23616
author: Mathieu Parent
type: added

View File

@ -0,0 +1,5 @@
---
title: Replace artifacts via Runner API if already exist
merge_request: 24165
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Expose group milestones on GraphQL
merge_request: 23635
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Add support for stacked column charts
merge_request: 23474
author:
type: changed

View File

@ -1735,8 +1735,8 @@ type Epic implements Noteable {
before: String
"""
List epics within a time frame where epics.end_date is between start_date
and end_date parameters (start_date parameter must be present)
List items within a time frame where items.end_date is between startDate and
endDate parameters (startDate parameter must be present)
"""
endDate: Time
@ -1776,8 +1776,8 @@ type Epic implements Noteable {
sort: EpicSort
"""
List epics within a time frame where epics.start_date is between start_date
and end_date parameters (end_date parameter must be present)
List items within a time frame where items.start_date is between startDate
and endDate parameters (endDate parameter must be present)
"""
startDate: Time
@ -2704,8 +2704,8 @@ type Group {
authorUsername: String
"""
List epics within a time frame where epics.end_date is between start_date
and end_date parameters (start_date parameter must be present)
List items within a time frame where items.end_date is between startDate and
endDate parameters (startDate parameter must be present)
"""
endDate: Time
@ -2735,8 +2735,8 @@ type Group {
sort: EpicSort
"""
List epics within a time frame where epics.start_date is between start_date
and end_date parameters (end_date parameter must be present)
List items within a time frame where items.start_date is between startDate
and endDate parameters (endDate parameter must be present)
"""
startDate: Time
@ -2766,8 +2766,8 @@ type Group {
before: String
"""
List epics within a time frame where epics.end_date is between start_date
and end_date parameters (start_date parameter must be present)
List items within a time frame where items.end_date is between startDate and
endDate parameters (startDate parameter must be present)
"""
endDate: Time
@ -2807,8 +2807,8 @@ type Group {
sort: EpicSort
"""
List epics within a time frame where epics.start_date is between start_date
and end_date parameters (end_date parameter must be present)
List items within a time frame where items.start_date is between startDate
and endDate parameters (endDate parameter must be present)
"""
startDate: Time
@ -2853,6 +2853,48 @@ type Group {
"""
mentionsDisabled: Boolean
"""
Find milestones
"""
milestones(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
List items within a time frame where items.end_date is between startDate and
endDate parameters (startDate parameter must be present)
"""
endDate: Time
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Returns the last _n_ elements from the list.
"""
last: Int
"""
List items within a time frame where items.start_date is between startDate
and endDate parameters (endDate parameter must be present)
"""
startDate: Time
"""
Filter milestones by state
"""
state: MilestoneStateEnum
): MilestoneConnection
"""
Name of the namespace
"""
@ -4457,6 +4499,9 @@ type Metadata {
version: String!
}
"""
Represents a milestone.
"""
type Milestone {
"""
Timestamp of milestone creation
@ -4486,7 +4531,7 @@ type Milestone {
"""
State of the milestone
"""
state: String!
state: MilestoneStateEnum!
"""
Title of the milestone
@ -4497,6 +4542,51 @@ type Milestone {
Timestamp of last milestone update
"""
updatedAt: Time!
"""
Web path of the milestone
"""
webPath: String!
}
"""
The connection type for Milestone.
"""
type MilestoneConnection {
"""
A list of edges.
"""
edges: [MilestoneEdge]
"""
A list of nodes.
"""
nodes: [Milestone]
"""
Information to aid in pagination.
"""
pageInfo: PageInfo!
}
"""
An edge in a connection.
"""
type MilestoneEdge {
"""
A cursor for use in pagination.
"""
cursor: String!
"""
The item at the end of the edge.
"""
node: Milestone
}
enum MilestoneStateEnum {
active
closed
}
"""

View File

@ -3150,6 +3150,26 @@
"name": "epic",
"description": "Find a single epic",
"args": [
{
"name": "startDate",
"description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "iid",
"description": "IID of the epic, e.g., \"1\"",
@ -3235,26 +3255,6 @@
}
},
"defaultValue": null
},
{
"name": "startDate",
"description": "List epics within a time frame where epics.start_date is between start_date and end_date parameters (end_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List epics within a time frame where epics.end_date is between start_date and end_date parameters (start_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
}
],
"type": {
@ -3269,6 +3269,26 @@
"name": "epics",
"description": "Find epics",
"args": [
{
"name": "startDate",
"description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "iid",
"description": "IID of the epic, e.g., \"1\"",
@ -3355,26 +3375,6 @@
},
"defaultValue": null
},
{
"name": "startDate",
"description": "List epics within a time frame where epics.start_date is between start_date and end_date parameters (end_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List epics within a time frame where epics.end_date is between start_date and end_date parameters (start_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
@ -3534,6 +3534,89 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "milestones",
"description": "Find milestones",
"args": [
{
"name": "startDate",
"description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "state",
"description": "Filter milestones by state",
"type": {
"kind": "ENUM",
"name": "MilestoneStateEnum",
"ofType": null
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "before",
"description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "first",
"description": "Returns the first _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
},
{
"name": "last",
"description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "MilestoneConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "name",
"description": "Name of the namespace",
@ -3923,6 +4006,304 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "MilestoneConnection",
"description": "The connection type for Milestone.",
"fields": [
{
"name": "edges",
"description": "A list of edges.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "MilestoneEdge",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "nodes",
"description": "A list of nodes.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "Milestone",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "pageInfo",
"description": "Information to aid in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PageInfo",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "MilestoneEdge",
"description": "An edge in a connection.",
"fields": [
{
"name": "cursor",
"description": "A cursor for use in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "node",
"description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "Milestone",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Milestone",
"description": "Represents a milestone.",
"fields": [
{
"name": "createdAt",
"description": "Timestamp of milestone creation",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "description",
"description": "Description of the milestone",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "dueDate",
"description": "Timestamp of the milestone due date",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "ID of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "startDate",
"description": "Timestamp of the milestone start date",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "state",
"description": "State of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "ENUM",
"name": "MilestoneStateEnum",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "title",
"description": "Title of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedAt",
"description": "Timestamp of last milestone update",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "webPath",
"description": "Web path of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "MilestoneStateEnum",
"description": null,
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
"name": "active",
"description": null,
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "closed",
"description": null,
"isDeprecated": false,
"deprecationReason": null
}
],
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Epic",
@ -3950,6 +4331,26 @@
"name": "children",
"description": "Children (sub-epics) of the epic",
"args": [
{
"name": "startDate",
"description": "List items within a time frame where items.start_date is between startDate and endDate parameters (endDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List items within a time frame where items.end_date is between startDate and endDate parameters (startDate parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "iid",
"description": "IID of the epic, e.g., \"1\"",
@ -4036,26 +4437,6 @@
},
"defaultValue": null
},
{
"name": "startDate",
"description": "List epics within a time frame where epics.start_date is between start_date and end_date parameters (end_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "endDate",
"description": "List epics within a time frame where epics.end_date is between start_date and end_date parameters (start_date parameter must be present)",
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
@ -9583,151 +9964,6 @@
],
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "Milestone",
"description": null,
"fields": [
{
"name": "createdAt",
"description": "Timestamp of milestone creation",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "description",
"description": "Description of the milestone",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "dueDate",
"description": "Timestamp of the milestone due date",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "ID of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "startDate",
"description": "Timestamp of the milestone start date",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "state",
"description": "State of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "title",
"description": "Title of the milestone",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedAt",
"description": "Timestamp of last milestone update",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "TaskCompletionStatus",

View File

@ -683,6 +683,8 @@ Autogenerated return type of MergeRequestSetWip
## Milestone
Represents a milestone.
| Name | Type | Description |
| --- | ---- | ---------- |
| `createdAt` | Time! | Timestamp of milestone creation |
@ -690,9 +692,10 @@ Autogenerated return type of MergeRequestSetWip
| `dueDate` | Time | Timestamp of the milestone due date |
| `id` | ID! | ID of the milestone |
| `startDate` | Time | Timestamp of the milestone start date |
| `state` | String! | State of the milestone |
| `state` | MilestoneStateEnum! | State of the milestone |
| `title` | String! | Title of the milestone |
| `updatedAt` | Time! | Timestamp of last milestone update |
| `webPath` | String! | Web path of the milestone |
## Namespace

View File

@ -1016,6 +1016,7 @@ POST /projects
| `wiki_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `snippets_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `pages_access_level` | string | no | One of `disabled`, `private`, `enabled` or `public` |
| `emails_disabled` | boolean | no | Disable email notifications |
| `resolve_outdated_diff_discussions` | boolean | no | Automatically resolve merge request diffs discussions on lines changed with a push |
| `container_registry_enabled` | boolean | no | Enable container registry for this project |
| `container_expiration_policy_attributes` | hash | no | Update the container expiration policy for this project. Accepts: `cadence` (string), `keep_n` (string), `older_than` (string), `name_regex` (string), `enabled` (boolean) |
@ -1083,6 +1084,7 @@ POST /projects/user/:user_id
| `wiki_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `snippets_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `pages_access_level` | string | no | One of `disabled`, `private`, `enabled` or `public` |
| `emails_disabled` | boolean | no | Disable email notifications |
| `resolve_outdated_diff_discussions` | boolean | no | Automatically resolve merge request diffs discussions on lines changed with a push |
| `container_registry_enabled` | boolean | no | Enable container registry for this project |
| `shared_runners_enabled` | boolean | no | Enable shared runners for this project |
@ -1149,6 +1151,7 @@ PUT /projects/:id
| `wiki_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `snippets_access_level` | string | no | One of `disabled`, `private` or `enabled` |
| `pages_access_level` | string | no | One of `disabled`, `private`, `enabled` or `public` |
| `emails_disabled` | boolean | no | Disable email notifications |
| `resolve_outdated_diff_discussions` | boolean | no | Automatically resolve merge request diffs discussions on lines changed with a push |
| `container_registry_enabled` | boolean | no | Enable container registry for this project |
| `container_expiration_policy_attributes` | hash | no | Update the container expiration policy for this project. Accepts: `cadence` (string), `keep_n` (string), `older_than` (string), `name_regex` (string), `enabled` (boolean) |
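A hedged example of the new parameter in use (the host, project ID, and token environment variable are placeholders):

```ruby
# Disable email notifications for project 42 via the REST API.
require 'net/http'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/42')
request = Net::HTTP::Put.new(uri, 'PRIVATE-TOKEN' => ENV['GITLAB_TOKEN'],
                                  'Content-Type' => 'application/json')
request.body = { emails_disabled: true }.to_json
response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts JSON.parse(response.body)['emails_disabled'] # => true
```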

View File

@ -2150,6 +2150,11 @@ dashboards. It is not available for download through the web interface.
##### `artifacts:reports:license_management` **(ULTIMATE)**
CAUTION: **Warning:**
This artifact is still valid but was **deprecated** in favor of the
[artifacts:reports:license_scanning](#artifactsreportslicense_scanning-ultimate)
introduced in GitLab 12.8.
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
The `license_management` report collects [Licenses](../../user/application_security/license_compliance/index.md)
@ -2159,6 +2164,17 @@ The collected License Compliance report will be uploaded to GitLab as an artifac
be summarized in the merge requests and pipeline view. It is also used to provide data for security
dashboards. It is not available for download through the web interface.
##### `artifacts:reports:license_scanning` **(ULTIMATE)**
> Introduced in GitLab 12.8. Requires GitLab Runner 11.5 and above.
The `license_scanning` report collects [Licenses](../../user/application_security/license_compliance/index.md)
as artifacts.
The License Compliance report will be uploaded to GitLab as an artifact and will
be automatically shown in merge requests, pipeline view and provide data for security
dashboards.
##### `artifacts:reports:performance` **(PREMIUM)**
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.

View File

@ -52,17 +52,18 @@ graph TB
Geo[GitLab Geo Node] -- TCP 22, 80, 443 --> NGINX
GitLabShell --TCP 8080 -->Unicorn["Unicorn (GitLab Rails)"]
GitLabShell --> Gitaly
GitLabShell --> Praefect
GitLabShell --> Redis
Unicorn --> PgBouncer[PgBouncer]
Unicorn --> Redis
Unicorn --> Gitaly
Unicorn --> Praefect
Sidekiq --> Redis
Sidekiq --> PgBouncer
Sidekiq --> Gitaly
Sidekiq --> Praefect
GitLabWorkhorse[GitLab Workhorse] --> Unicorn
GitLabWorkhorse --> Redis
GitLabWorkhorse --> Gitaly
GitLabWorkhorse --> Praefect
Praefect --> Gitaly
NGINX --> GitLabWorkhorse
NGINX -- TCP 8090 --> GitLabPages[GitLab Pages]
NGINX --> Grafana[Grafana]
@ -128,6 +129,7 @@ Component statuses are linked to configuration documentation for each component.
| [Unicorn (GitLab Rails)](#unicorn) | Handles requests for the web interface and API | [✅][unicorn-omnibus] | [✅][unicorn-charts] | [✅][unicorn-charts] | [](../user/gitlab_com/index.md#unicorn) | [⚙][unicorn-source] | [✅][gitlab-yml] | CE & EE |
| [Sidekiq](#sidekiq) | Background jobs processor | [✅][sidekiq-omnibus] | [✅][sidekiq-charts] | [](https://docs.gitlab.com/charts/charts/gitlab/sidekiq/index.html) | [](../user/gitlab_com/index.md#sidekiq) | [✅][gitlab-yml] | [✅][gitlab-yml] | CE & EE |
| [Gitaly](#gitaly) | Git RPC service for handling all Git calls made by GitLab | [✅][gitaly-omnibus] | [✅][gitaly-charts] | [✅][gitaly-charts] | [](https://about.gitlab.com/handbook/engineering/infrastructure/production-architecture/#service-architecture) | [⚙][gitaly-source] | ✅ | CE & EE |
| [Praefect](#praefect) | A transparent proxy between any Git client and Gitaly storage nodes. | [✅][gitaly-omnibus] | [❌][gitaly-charts] | [❌][gitaly-charts] | [](https://about.gitlab.com/handbook/engineering/infrastructure/production-architecture/#service-architecture) | [⚙][praefect-source] | ✅ | CE & EE |
| [GitLab Workhorse](#gitlab-workhorse) | Smart reverse proxy, handles large HTTP requests | [✅][workhorse-omnibus] | [✅][workhorse-charts] | [✅][workhorse-charts] | [](https://about.gitlab.com/handbook/engineering/infrastructure/production-architecture/#service-architecture) | [⚙][workhorse-source] | ✅ | CE & EE |
| [GitLab Shell](#gitlab-shell) | Handles `git` over SSH sessions | [✅][shell-omnibus] | [✅][shell-charts] | [✅][shell-charts] | [](https://about.gitlab.com/handbook/engineering/infrastructure/production-architecture/#service-architecture) | [⚙][shell-source] | [✅][gitlab-yml] | CE & EE |
| [GitLab Pages](#gitlab-pages) | Hosts static websites | [⚙][pages-omnibus] | [❌][pages-charts] | [❌][pages-charts] | [](../user/gitlab_com/index.md#gitlab-pages) | [⚙][pages-source] | [⚙][pages-gdk] | CE & EE |
@ -220,6 +222,16 @@ Elasticsearch is a distributed RESTful search engine built for the cloud.
Gitaly is a service designed by GitLab to remove our need for NFS for Git storage in distributed deployments of GitLab (think GitLab.com or High Availability Deployments). As of 11.3.0, this service handles all Git level access in GitLab. You can read more about the project [in the project's readme](https://gitlab.com/gitlab-org/gitaly).
#### Praefect
- [Project page](https://gitlab.com/gitlab-org/gitaly/blob/master/README.md)
- Configuration: [Omnibus][gitaly-omnibus], [Source][praefect-source]
- Layer: Core Service (Data)
- Process: `praefect`
Praefect is a transparent proxy between each Git client and Gitaly, coordinating the replication of
repository updates to secondary nodes.
#### GitLab Geo
- Configuration: [Omnibus][geo-omnibus], [Charts][geo-charts], [GDK][geo-gdk]
@ -641,6 +653,7 @@ We've also detailed [our architecture of GitLab.com](https://about.gitlab.com/ha
[gitaly-omnibus]: ../administration/gitaly/index.md
[gitaly-charts]: https://docs.gitlab.com/charts/charts/gitlab/gitaly/
[gitaly-source]: ../install/installation.md#install-gitaly
[praefect-source]: ../install/installation.md#install-gitaly
[workhorse-omnibus]: https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/files/gitlab-config-template/gitlab.rb.template
[workhorse-charts]: https://docs.gitlab.com/charts/charts/gitlab/unicorn/
[workhorse-source]: ../install/installation.md#install-gitlab-workhorse

Binary file not shown.

Before: Size 34 KiB  |  After: Size 36 KiB

View File

@ -26,7 +26,7 @@ licenses in your project's settings.
NOTE: **Note:**
If the license compliance report doesn't have anything to compare to, no information
will be displayed in the merge request area. That is the case when you add the
`license_management` job in your `.gitlab-ci.yml` for the first time.
`license_scanning` job in your `.gitlab-ci.yml` for the first time.
Consecutive merge requests will have something to compare to and the license
compliance report will be shown properly.
@ -70,25 +70,38 @@ To run a License Compliance scanning job, you need GitLab Runner with the
## Configuration
For GitLab 11.9 and later, to enable License Compliance, you must
For GitLab 12.8 and later, to enable License Compliance, you must
[include](../../../ci/yaml/README.md#includetemplate) the
[`License-Management.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Management.gitlab-ci.yml)
[`License-Scanning.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml)
that's provided as a part of your GitLab installation.
For older versions of GitLab from 11.9 to 12.7, you must
[include](../../../ci/yaml/README.md#includetemplate) the
[`License-Management.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/License-Management.gitlab-ci.yml).
For GitLab versions earlier than 11.9, you can copy and use the job as defined
in that template.
NOTE: **Note:**
In GitLab 13.0, the `License-Management.gitlab-ci.yml` template is scheduled to be removed.
Use `License-Scanning.gitlab-ci.yml` instead.
Add the following to your `.gitlab-ci.yml` file:
```yaml
include:
template: License-Management.gitlab-ci.yml
template: License-Scanning.gitlab-ci.yml
```
The included template will create a `license_management` job in your CI/CD pipeline
The included template will create a `license_scanning` job in your CI/CD pipeline
and scan your dependencies to find their licenses.
NOTE: **Note:**
Before GitLab 12.8, the `license_scanning` job was named `license_management`.
In GitLab 13.0, the `license_management` job is scheduled to be removed completely,
so you're advised to migrate to the `license_scanning` job and use the new
`License-Scanning.gitlab-ci.yml` template.
The results will be saved as a
[License Compliance report artifact](../../../ci/yaml/README.md#artifactsreportslicense_management-ultimate)
[License Compliance report artifact](../../../ci/yaml/README.md#artifactsreportslicense_scanning-ultimate)
that you can later download and analyze. Due to implementation limitations, we
always take the latest License Compliance artifact available. Behind the scenes, the
[GitLab License Compliance Docker image](https://gitlab.com/gitlab-org/security-products/license-management)
@ -128,7 +141,7 @@ For example:
```yaml
include:
template: License-Management.gitlab-ci.yml
template: License-Scanning.gitlab-ci.yml
variables:
LICENSE_MANAGEMENT_SETUP_CMD: sh my-custom-install-script.sh
@ -140,14 +153,14 @@ directory of your project.
### Overriding the template
If you want to override the job definition (for example, change properties like
`variables` or `dependencies`), you need to declare a `license_management` job
`variables` or `dependencies`), you need to declare a `license_scanning` job
after the template inclusion and specify any additional keys under it. For example:
```yaml
include:
template: License-Management.gitlab-ci.yml
template: License-Scanning.gitlab-ci.yml
license_management:
license_scanning:
variables:
CI_DEBUG_TRACE: "true"
```
@ -160,9 +173,9 @@ Feel free to use it for the customization of Maven execution. For example:
```yaml
include:
template: License-Management.gitlab-ci.yml
template: License-Scanning.gitlab-ci.yml
license_management:
license_scanning:
variables:
MAVEN_CLI_OPTS: --debug
```
@ -186,13 +199,48 @@ License Compliance uses Python 3.8 and pip 19.1 by default.
If your project requires Python 2, you can switch to Python 2.7 and pip 10.0
by setting the `LM_PYTHON_VERSION` environment variable to `2`.
```yaml
include:
template: License-Scanning.gitlab-ci.yml
license_scanning:
variables:
LM_PYTHON_VERSION: 2
```
### Migration from `license_management` to `license_scanning`
In GitLab 12.8, a new name for the `license_management` job was introduced. This change was made to clarify the purpose of the scan, which is to detect and collect the types of licenses present in a project's dependencies.
Support for `license_management` is scheduled to be dropped in GitLab 13.0.
If you're using a custom setup for License Compliance, you're required
to update your CI config accordingly:
1. Change the CI template to `License-Scanning.gitlab-ci.yml`.
1. Change the job name to `license_scanning` (if you mention it in `.gitlab-ci.yml`).
1. Change the artifact name to `gl-license-scanning-report.json` (if you mention it in `.gitlab-ci.yml`).
For example, the following `.gitlab-ci.yml`:
```yaml
include:
template: License-Management.gitlab-ci.yml
license_management:
variables:
LM_PYTHON_VERSION: 2
artifacts:
reports:
license_management: gl-license-management-report.json
```
Should be changed to:
```yaml
include:
template: License-Scanning.gitlab-ci.yml
license_scanning:
artifacts:
reports:
license_scanning: gl-license-scanning-report.json
```
## Project policies for License Compliance

View File

@ -336,6 +336,18 @@ error during connect: Get http://docker:2376/v1.39/info: dial tcp: lookup docker
It is possible to create a per-project expiration policy, so that you can make sure that
older tags and images are regularly removed from the Container Registry.
The expiration policy algorithm starts by collecting all the tags for a given repository in a list,
then goes through a process of excluding tags from it until only the ones to be deleted remain
(a simplified sketch follows the list):
1. Collects all the tags for a given repository in a list.
1. Excludes the tag named `latest` from the list.
1. Evaluates the `name_regex`, excluding non-matching names from the list.
1. Excludes any tags that do not have a manifest (not part of the options).
1. Orders the remaining tags by `created_date`.
1. Excludes from the list the `keep_n` most recent tags (Expiration latest).
1. Excludes from the list any tags more recent than the `older_than` value (Expiration interval), so only tags older than the interval remain.
1. Finally, deletes the remaining tags in the list from the Container Registry.
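A simplified sketch of the exclusion steps above (illustrative pseudocode, not the registry's actual implementation; the tag attributes and ActiveSupport-style durations are assumptions):

```ruby
# tags: objects responding to #name, #created_date and #has_manifest?
def tags_to_delete(tags, name_regex:, keep_n:, older_than:)
  list = tags.reject { |tag| tag.name == 'latest' }               # never delete `latest`
  list = list.select { |tag| tag.name =~ Regexp.new(name_regex) } # only names matching name_regex
  list = list.select(&:has_manifest?)                             # skip tags without a manifest
  list = list.sort_by(&:created_date).reverse                     # newest first
  list = list.drop(keep_n)                                        # always keep the newest N tags
  list.select { |tag| tag.created_date < older_than.ago }         # delete only tags older than the interval
end
```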
### Managing project expiration policy through the API
You can set, update, and disable the expiration policies using the GitLab API.

View File

@ -64,6 +64,7 @@ module API
expose(:snippets_access_level) { |project, options| project.project_feature.string_access_level(:snippets) }
expose(:pages_access_level) { |project, options| project.project_feature.string_access_level(:pages) }
expose :emails_disabled
expose :shared_runners_enabled
expose :lfs_enabled?, as: :lfs_enabled
expose :creator_id

View File

@ -29,6 +29,7 @@ module API
optional :snippets_access_level, type: String, values: %w(disabled private enabled), desc: 'Snippets access level. One of `disabled`, `private` or `enabled`'
optional :pages_access_level, type: String, values: %w(disabled private enabled public), desc: 'Pages access level. One of `disabled`, `private`, `enabled` or `public`'
optional :emails_disabled, type: Boolean, desc: 'Disable email notifications'
optional :shared_runners_enabled, type: Boolean, desc: 'Flag indicating if shared runners are enabled for that project'
optional :resolve_outdated_diff_discussions, type: Boolean, desc: 'Automatically resolve merge request diffs discussions on lines changed with a push'
optional :remove_source_branch_after_merge, type: Boolean, desc: 'Remove the source branch by default after merge'
@ -87,6 +88,7 @@ module API
def self.update_params_at_least_one_of
[
:autoclose_referenced_issues,
:auto_devops_enabled,
:auto_devops_deploy_strategy,
:auto_cancel_pending_pipelines,
@ -100,7 +102,7 @@ module API
:container_expiration_policy_attributes,
:default_branch,
:description,
:autoclose_referenced_issues,
:emails_disabled,
:issues_access_level,
:lfs_enabled,
:merge_requests_access_level,

View File

@ -276,29 +276,8 @@ module API
bad_request!('Missing artifacts file!') unless artifacts
file_too_large! unless artifacts.size < max_artifacts_size(job)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.job_artifacts.build(
project: job.project,
file: artifacts,
file_type: params['artifact_type'],
file_format: params['artifact_format'],
file_sha256: artifacts.sha256,
expire_in: expire_in)
if metadata
job.job_artifacts.build(
project: job.project,
file: metadata,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata.sha256,
expire_in: expire_in)
end
if job.update(artifacts_expire_in: expire_in)
present Ci::BuildRunnerPresenter.new(job), with: Entities::JobRequest::Response
if Ci::CreateJobArtifactsService.new.execute(job, artifacts, params, metadata_file: metadata)
status :created
else
render_validation_error!(job)
end

View File

@ -32,10 +32,6 @@ module API
results = SearchService.new(current_user, search_params).search_objects
process_results(results)
end
def process_results(results)
paginate(results)
end

View File

@ -7,6 +7,7 @@ module Gitlab
include Presentable
include BlobLanguageFromGitAttributes
include Gitlab::Utils::StrongMemoize
include BlobActiveModel
attr_reader :project, :content_match, :blob_path

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Wraps a Gitlab::Search::FoundBlob that represents a wiki page search result. It:
# - renders by using data purely from Elasticsearch and does not trigger Gitaly calls
# - allows policy checks
module Gitlab
module Search
class FoundWikiPage < SimpleDelegator
attr_reader :wiki
def self.declarative_policy_class
'WikiPagePolicy'
end
# @param found_blob [Gitlab::Search::FoundBlob]
def initialize(found_blob)
super
@wiki = found_blob.project.wiki
end
def to_ability_name
'wiki_page'
end
end
end
end

View File

@ -146,7 +146,7 @@
},
"devDependencies": {
"@babel/plugin-transform-modules-commonjs": "^7.5.0",
"@gitlab/eslint-config": "^2.1.1",
"@gitlab/eslint-config": "^2.1.2",
"@gitlab/eslint-plugin-i18n": "^1.1.0",
"@gitlab/eslint-plugin-vue-i18n": "^1.2.0",
"@vue/test-utils": "^1.0.0-beta.30",

View File

@ -3,13 +3,14 @@
require 'spec_helper'
describe MilestonesFinder do
let(:now) { Time.now }
let(:group) { create(:group) }
let(:project_1) { create(:project, namespace: group) }
let(:project_2) { create(:project, namespace: group) }
let!(:milestone_1) { create(:milestone, group: group, title: 'one test', due_date: Date.today) }
let!(:milestone_2) { create(:milestone, group: group) }
let!(:milestone_3) { create(:milestone, project: project_1, state: 'active', due_date: Date.tomorrow) }
let!(:milestone_4) { create(:milestone, project: project_2, state: 'active') }
let!(:milestone_1) { create(:milestone, group: group, title: 'one test', start_date: now - 1.day, due_date: now) }
let!(:milestone_2) { create(:milestone, group: group, start_date: now + 1.day, due_date: now + 2.days) }
let!(:milestone_3) { create(:milestone, project: project_1, state: 'active', start_date: now + 2.days, due_date: now + 3.days) }
let!(:milestone_4) { create(:milestone, project: project_2, state: 'active', start_date: now + 4.days, due_date: now + 5.days) }
it 'returns milestones for projects' do
result = described_class.new(project_ids: [project_1.id, project_2.id], state: 'all').execute
@ -33,8 +34,11 @@ describe MilestonesFinder do
end
it 'orders milestones by due date' do
expect(result.first).to eq(milestone_1)
expect(result.second).to eq(milestone_3)
milestone = create(:milestone, group: group, due_date: now - 2.days)
expect(result.first).to eq(milestone)
expect(result.second).to eq(milestone_1)
expect(result.third).to eq(milestone_2)
end
end
@ -77,6 +81,34 @@ describe MilestonesFinder do
expect(result.to_a).to contain_exactly(milestone_1)
end
context 'by timeframe' do
it 'returns milestones with start_date and due_date between timeframe' do
params.merge!(start_date: now - 1.day, end_date: now + 3.days)
milestones = described_class.new(params).execute
expect(milestones).to match_array([milestone_1, milestone_2, milestone_3])
end
it 'returns milestones which starts before the timeframe' do
milestone = create(:milestone, project: project_2, start_date: now - 5.days)
params.merge!(start_date: now - 3.days, end_date: now - 2.days)
milestones = described_class.new(params).execute
expect(milestones).to match_array([milestone])
end
it 'returns milestones which ends after the timeframe' do
milestone = create(:milestone, project: project_2, due_date: now + 6.days)
params.merge!(start_date: now + 6.days, end_date: now + 7.days)
milestones = described_class.new(params).execute
expect(milestones).to match_array([milestone])
end
end
end
describe '#find_by' do

View File

@ -0,0 +1,45 @@
import { shallowMount } from '@vue/test-utils';
import { GlStackedColumnChart } from '@gitlab/ui/dist/charts';
import StackedColumnChart from '~/monitoring/components/charts/stacked_column.vue';
import { stackedColumnMockedData } from '../../mock_data';
jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockResolvedValue('mockSvgPathContent'),
}));
describe('Stacked column chart component', () => {
let wrapper;
const glStackedColumnChart = () => wrapper.find(GlStackedColumnChart);
beforeEach(() => {
wrapper = shallowMount(StackedColumnChart, {
propsData: {
graphData: stackedColumnMockedData,
},
});
});
afterEach(() => {
wrapper.destroy();
});
describe('with graphData present', () => {
it('is a Vue instance', () => {
expect(glStackedColumnChart().exists()).toBe(true);
});
it('should contain the same number of elements in the seriesNames computed prop as the graphData metrics prop', () =>
wrapper.vm
.$nextTick()
.then(() =>
expect(wrapper.vm.seriesNames).toHaveLength(stackedColumnMockedData.metrics.length),
));
it('should contain the same number of elements in the groupBy computed prop as the graphData result prop', () =>
wrapper.vm
.$nextTick()
.then(() =>
expect(wrapper.vm.groupBy).toHaveLength(
stackedColumnMockedData.metrics[0].result[0].values.length,
),
));
});
});

View File

@ -665,3 +665,50 @@ export const graphDataPrometheusQueryRangeMultiTrack = {
},
],
};
export const stackedColumnMockedData = {
title: 'memories',
type: 'stacked-column',
x_label: 'x label',
y_label: 'y label',
metrics: [
{
label: 'memory_1024',
unit: 'count',
series_name: 'group 1',
prometheus_endpoint_path:
'/root/autodevops-deploy-6/-/environments/24/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
metric_id: 'undefined_metric_of_ages_1024',
metricId: 'undefined_metric_of_ages_1024',
result: [
{
metric: {},
values: [
['2020-01-30 12:00:00', '5'],
['2020-01-30 12:01:00', '10'],
['2020-01-30 12:02:00', '15'],
],
},
],
},
{
label: 'memory_1000',
unit: 'count',
series_name: 'group 2',
prometheus_endpoint_path:
'/root/autodevops-deploy-6/-/environments/24/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
metric_id: 'undefined_metric_of_ages_1000',
metricId: 'undefined_metric_of_ages_1000',
result: [
{
metric: {},
values: [
['2020-01-30 12:00:00', '20'],
['2020-01-30 12:01:00', '25'],
['2020-01-30 12:02:00', '30'],
],
},
],
},
],
};

View File

@ -0,0 +1,93 @@
# frozen_string_literal: true
require 'spec_helper'
describe Resolvers::MilestoneResolver do
include GraphqlHelpers
describe '#resolve' do
let_it_be(:current_user) { create(:user) }
context 'for group milestones' do
let_it_be(:now) { Time.now }
let_it_be(:group) { create(:group, :private) }
def resolve_group_milestones(args = {}, context = { current_user: current_user })
resolve(described_class, obj: group, args: args, ctx: context)
end
before do
group.add_developer(current_user)
end
it 'calls MilestonesFinder#execute' do
expect_next_instance_of(MilestonesFinder) do |finder|
expect(finder).to receive(:execute)
end
resolve_group_milestones
end
context 'without parameters' do
it 'calls MilestonesFinder to retrieve all milestones' do
expect(MilestonesFinder).to receive(:new)
.with(group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
.and_call_original
resolve_group_milestones
end
end
context 'with parameters' do
it 'calls MilestonesFinder with correct parameters' do
start_date = now
end_date = start_date + 1.hour
expect(MilestonesFinder).to receive(:new)
.with(group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date)
.and_call_original
resolve_group_milestones(start_date: start_date, end_date: end_date, state: 'closed')
end
end
context 'by timeframe' do
context 'when start_date and end_date are present' do
context 'when start date is after end_date' do
it 'raises error' do
expect do
resolve_group_milestones(start_date: now, end_date: now - 2.days)
end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
end
end
end
context 'when only start_date is present' do
it 'raises error' do
expect do
resolve_group_milestones(start_date: now)
end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
end
end
context 'when only end_date is present' do
it 'raises error' do
expect do
resolve_group_milestones(end_date: now)
end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
end
end
end
context 'when user cannot read milestones' do
it 'raises error' do
unauthorized_user = create(:user)
expect do
resolve_group_milestones({}, { current_user: unauthorized_user })
end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
end
end

View File

@ -156,4 +156,14 @@ describe Gitlab::Search::FoundBlob do
end
end
end
describe 'policy' do
let(:project) { build(:project, :repository) }
subject { described_class.new(project: project) }
it 'works with policy' do
expect(Ability.allowed?(project.creator, :read_blob, subject)).to be_truthy
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Search::FoundWikiPage do
let(:project) { create(:project, :public, :repository) }
describe 'policy' do
let(:project) { build(:project, :repository) }
let(:found_blob) { Gitlab::Search::FoundBlob.new(project: project) }
subject { described_class.new(found_blob) }
it 'works with policy' do
expect(Ability.allowed?(project.creator, :read_wiki_page, subject)).to be_truthy
end
end
end

View File

@ -3,8 +3,8 @@
require 'spec_helper'
describe AbuseReport do
set(:report) { create(:abuse_report) }
set(:user) { create(:admin) }
let_it_be(:report, reload: true) { create(:abuse_report) }
let_it_be(:user, reload: true) { create(:admin) }
subject { report }
it { expect(subject).to be_valid }

View File

@ -45,8 +45,8 @@ describe AwardEmoji do
end
describe 'scopes' do
set(:thumbsup) { create(:award_emoji, name: 'thumbsup') }
set(:thumbsdown) { create(:award_emoji, name: 'thumbsdown') }
let_it_be(:thumbsup) { create(:award_emoji, name: 'thumbsup') }
let_it_be(:thumbsdown) { create(:award_emoji, name: 'thumbsdown') }
describe '.upvotes' do
it { expect(described_class.upvotes).to contain_exactly(thumbsup) }

View File

@ -6,9 +6,8 @@ describe BlobViewer::GitlabCiYml do
include FakeBlobHelpers
include RepoHelpers
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) }
let(:sha) { sample_commit.id }

View File

@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::ArtifactBlob do
set(:project) { create(:project, :public) }
set(:build) { create(:ci_build, :artifacts, project: project) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:build) { create(:ci_build, :artifacts, project: project) }
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }
subject { described_class.new(entry) }

View File

@ -7,7 +7,7 @@ describe Ci::Pipeline, :mailer do
include StubRequests
let(:user) { create(:user) }
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:pipeline) do
create(:ci_empty_pipeline, status: :created, project: project)
@ -231,7 +231,7 @@ describe Ci::Pipeline, :mailer do
describe '#legacy_detached_merge_request_pipeline?' do
subject { pipeline.legacy_detached_merge_request_pipeline? }
set(:merge_request) { create(:merge_request) }
let_it_be(:merge_request) { create(:merge_request) }
let(:ref) { 'feature' }
let(:target_sha) { nil }

View File

@ -544,7 +544,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
describe '#applications' do
set(:cluster) { create(:cluster) }
let_it_be(:cluster, reload: true) { create(:cluster) }
subject { cluster.applications }

View File

@ -17,7 +17,7 @@ describe Commit do
end
describe '.lazy' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
context 'when the commits are found' do
let(:oids) do

View File

@ -3,9 +3,9 @@
require 'spec_helper'
describe CommitStatus do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
set(:pipeline) do
let_it_be(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id)
end

View File

@ -15,7 +15,7 @@ describe BatchDestroyDependentAssociations do
end
describe '#dependent_associations_to_destroy' do
set(:project) { TestProject.new }
let_it_be(:project) { TestProject.new }
it 'returns the right associations' do
expect(project.dependent_associations_to_destroy.map(&:name)).to match_array([:builds])
@ -23,9 +23,9 @@ describe BatchDestroyDependentAssociations do
end
describe '#destroy_dependent_associations_in_batches' do
set(:project) { create(:project) }
set(:build) { create(:ci_build, project: project) }
set(:notification_setting) { create(:notification_setting, project: project) }
let_it_be(:project) { create(:project) }
let_it_be(:build) { create(:ci_build, project: project) }
let_it_be(:notification_setting) { create(:notification_setting, project: project) }
let!(:todos) { create(:todo, project: project) }
it 'destroys multiple builds' do

View File

@ -13,7 +13,7 @@ describe Identity do
end
describe 'validations' do
set(:user) { create(:user) }
let_it_be(:user) { create(:user) }
context 'with existing user and provider' do
before do

View File

@ -3,20 +3,20 @@
require 'spec_helper'
describe LabelNote do
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
set(:label) { create(:label, project: project) }
set(:label2) { create(:label, project: project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let(:resource_parent) { project }
context 'when resource is issue' do
set(:resource) { create(:issue, project: project) }
let_it_be(:resource) { create(:issue, project: project) }
it_behaves_like 'label note created from events'
end
context 'when resource is merge request' do
set(:resource) { create(:merge_request, source_project: project, target_project: project) }
let_it_be(:resource) { create(:merge_request, source_project: project, target_project: project) }
it_behaves_like 'label note created from events'
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
describe LfsFileLock do
set(:lfs_file_lock) { create(:lfs_file_lock) }
let_it_be(:lfs_file_lock, reload: true) { create(:lfs_file_lock) }
subject { lfs_file_lock }
it { is_expected.to belong_to(:project) }

View File

@ -44,8 +44,8 @@ describe LfsObject do
end
describe '#project_allowed_access?' do
set(:lfs_object) { create(:lfs_objects_project).lfs_object }
set(:project) { create(:project) }
let_it_be(:lfs_object) { create(:lfs_objects_project).lfs_object }
let_it_be(:project, reload: true) { create(:project) }
it 'returns true when project is linked' do
create(:lfs_objects_project, lfs_object: lfs_object, project: project)
@ -58,9 +58,9 @@ describe LfsObject do
end
context 'when project is a member of a fork network' do
set(:fork_network) { create(:fork_network) }
set(:fork_network_root_project) { fork_network.root_project }
set(:fork_network_membership) { create(:fork_network_member, project: project, fork_network: fork_network) }
let_it_be(:fork_network) { create(:fork_network) }
let_it_be(:fork_network_root_project, reload: true) { fork_network.root_project }
let_it_be(:fork_network_membership) { create(:fork_network_member, project: project, fork_network: fork_network) }
it 'returns true for all members when forked project is linked' do
create(:lfs_objects_project, lfs_object: lfs_object, project: project)

View File

@ -3,7 +3,7 @@
require 'spec_helper'
describe LfsObjectsProject do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
subject do
create(:lfs_objects_project, project: project)

View File

@ -54,20 +54,20 @@ describe MergeRequestDiff do
end
describe '.ids_for_external_storage_migration' do
set(:merge_request) { create(:merge_request) }
set(:outdated) { merge_request.merge_request_diff }
set(:latest) { merge_request.create_merge_request_diff }
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:outdated) { merge_request.merge_request_diff }
let_it_be(:latest) { merge_request.create_merge_request_diff }
set(:closed_mr) { create(:merge_request, :closed_last_month) }
let_it_be(:closed_mr) { create(:merge_request, :closed_last_month) }
let(:closed) { closed_mr.merge_request_diff }
set(:merged_mr) { create(:merge_request, :merged_last_month) }
let_it_be(:merged_mr) { create(:merge_request, :merged_last_month) }
let(:merged) { merged_mr.merge_request_diff }
set(:recently_closed_mr) { create(:merge_request, :closed) }
let_it_be(:recently_closed_mr) { create(:merge_request, :closed) }
let(:closed_recently) { recently_closed_mr.merge_request_diff }
set(:recently_merged_mr) { create(:merge_request, :merged) }
let_it_be(:recently_merged_mr) { create(:merge_request, :merged) }
let(:merged_recently) { recently_merged_mr.merge_request_diff }
before do

View File

@ -1091,8 +1091,8 @@ describe MergeRequest do
end
describe '#can_remove_source_branch?' do
set(:user) { create(:user) }
set(:merge_request) { create(:merge_request, :simple) }
let_it_be(:user) { create(:user) }
let_it_be(:merge_request, reload: true) { create(:merge_request, :simple) }
subject { merge_request }

View File

@ -197,6 +197,15 @@ describe Milestone do
end
end
it_behaves_like 'within_timeframe scope' do
let_it_be(:now) { Time.now }
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:resource_1) { create(:milestone, project: project, start_date: now - 1.day, due_date: now + 1.day) }
let_it_be(:resource_2) { create(:milestone, project: project, start_date: now + 2.days, due_date: now + 3.days) }
let_it_be(:resource_3) { create(:milestone, project: project, due_date: now) }
let_it_be(:resource_4) { create(:milestone, project: project, start_date: now) }
end
describe "#percent_complete" do
it "does not count open issues" do
milestone.issues << issue
@ -517,9 +526,9 @@ describe Milestone do
end
describe '.sort_by_attribute' do
set(:milestone_1) { create(:milestone, title: 'Foo') }
set(:milestone_2) { create(:milestone, title: 'Bar') }
set(:milestone_3) { create(:milestone, title: 'Zoo') }
let_it_be(:milestone_1) { create(:milestone, title: 'Foo') }
let_it_be(:milestone_2) { create(:milestone, title: 'Bar') }
let_it_be(:milestone_3) { create(:milestone, title: 'Zoo') }
context 'ordering by name ascending' do
it 'sorts by title ascending' do
@ -555,7 +564,7 @@ describe Milestone do
end
it 'returns the quantity of milestones in each possible state' do
expected_count = { opened: 5, closed: 6, all: 11 }
expected_count = { opened: 2, closed: 6, all: 8 }
count = described_class.states_count(Project.all, Group.all)
expect(count).to eq(expected_count)

View File

@ -3,7 +3,7 @@
require 'spec_helper'
describe ProjectAutoDevops do
set(:project) { build(:project) }
let_it_be(:project) { build(:project) }
it_behaves_like 'having unique enum values'

View File

@ -40,7 +40,7 @@ describe Repository do
end
describe '#branch_names_contains' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.branch_names_contains(sample_commit.id) }
@ -328,7 +328,7 @@ describe Repository do
end
describe '#new_commits' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.new_commits(rev) }
@ -356,7 +356,7 @@ describe Repository do
end
describe '#commits_by' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:oids) { TestEnv::BRANCH_SHA.values }
subject { project.repository.commits_by(oids: oids) }
@ -2575,7 +2575,7 @@ describe Repository do
end
describe 'commit cache' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
it 'caches based on SHA' do
# Gets the commit oid, and warms the cache
@ -2723,7 +2723,7 @@ describe Repository do
end
describe '#merge_base' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
subject(:repository) { project.repository }
it 'only makes one gitaly call' do
@ -2782,7 +2782,7 @@ describe Repository do
end
describe "#blobs_metadata" do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:repository) { project.repository }
def expect_metadata_blob(thing)

View File

@ -3,8 +3,8 @@
require 'spec_helper'
describe SentNotification do
set(:user) { create(:user) }
set(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
describe 'validation' do
describe 'note validity' do

View File

@ -2196,7 +2196,7 @@ describe User, :do_not_mock_admin_mode do
describe '.find_by_private_commit_email' do
context 'with email' do
set(:user) { create(:user) }
let_it_be(:user) { create(:user) }
it 'returns user through private commit email' do
expect(described_class.find_by_private_commit_email(user.private_commit_email)).to eq(user)

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
require 'spec_helper'
describe MilestonePresenter do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:milestone) { create(:milestone, group: group) }
let_it_be(:presenter) { described_class.new(milestone, current_user: user) }
before do
group.add_developer(user)
end
describe '#milestone_path' do
it 'returns correct path' do
expect(presenter.milestone_path).to eq("/groups/#{group.full_path}/-/milestones/#{milestone.iid}")
end
end
end

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true
require 'spec_helper'
describe 'Milestones through GroupQuery' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
let_it_be(:now) { Time.now }
let_it_be(:group) { create(:group, :private) }
let_it_be(:milestone_1) { create(:milestone, group: group) }
let_it_be(:milestone_2) { create(:milestone, group: group, state: :closed, start_date: now, due_date: now + 1.day) }
let_it_be(:milestone_3) { create(:milestone, group: group, start_date: now, due_date: now + 2.days) }
let_it_be(:milestone_4) { create(:milestone, group: group, state: :closed, start_date: now - 2.days, due_date: now - 1.day) }
let_it_be(:milestone_from_other_group) { create(:milestone, group: create(:group)) }
let(:milestone_data) { graphql_data['group']['milestones']['edges'] }
describe 'Get list of milestones from a group' do
before do
group.add_developer(user)
end
context 'when the request is correct' do
before do
fetch_milestones(user)
end
it_behaves_like 'a working graphql query'
it 'returns milestones successfully' do
expect(response).to have_gitlab_http_status(200)
expect(graphql_errors).to be_nil
expect_array_response(milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s, milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s)
end
end
context 'when filtering by timeframe' do
it 'fetches milestones between start_date and due_date' do
fetch_milestones(user, { start_date: now.to_s, end_date: (now + 2.days).to_s })
expect_array_response(milestone_2.to_global_id.to_s, milestone_3.to_global_id.to_s)
end
end
context 'when filtering by state' do
it 'returns milestones with given state' do
fetch_milestones(user, { state: :active })
expect_array_response(milestone_1.to_global_id.to_s, milestone_3.to_global_id.to_s)
end
end
def fetch_milestones(user = nil, args = {})
post_graphql(milestones_query(args), current_user: user)
end
def milestones_query(args = {})
milestone_node = <<~NODE
edges {
node {
id
title
state
}
}
NODE
graphql_query_for("group",
{ full_path: group.full_path },
[query_graphql_field("milestones", args, milestone_node)]
)
end
def expect_array_response(*items)
expect(response).to have_gitlab_http_status(:success)
expect(milestone_data).to be_an Array
expect(milestone_node_array('id')).to match_array(items)
end
def milestone_node_array(extract_attribute = nil)
node_array(milestone_data, extract_attribute)
end
end
end

View File

@ -1402,6 +1402,7 @@ describe API::Projects do
expect(json_response['merge_requests_access_level']).to be_present
expect(json_response['wiki_access_level']).to be_present
expect(json_response['builds_access_level']).to be_present
expect(json_response).to have_key('emails_disabled')
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
expect(json_response['container_registry_enabled']).to be_present
@ -1412,18 +1413,18 @@ describe API::Projects do
expect(json_response['namespace']).to be_present
expect(json_response['import_status']).to be_present
expect(json_response).to include("import_error")
expect(json_response['avatar_url']).to be_nil
expect(json_response).to have_key('avatar_url')
expect(json_response['star_count']).to be_present
expect(json_response['forks_count']).to be_present
expect(json_response['public_jobs']).to be_present
expect(json_response['ci_config_path']).to be_nil
expect(json_response).to have_key('ci_config_path')
expect(json_response['shared_with_groups']).to be_an Array
expect(json_response['shared_with_groups'].length).to eq(1)
expect(json_response['shared_with_groups'][0]['group_id']).to eq(group.id)
expect(json_response['shared_with_groups'][0]['group_name']).to eq(group.name)
expect(json_response['shared_with_groups'][0]['group_full_path']).to eq(group.full_path)
expect(json_response['shared_with_groups'][0]['group_access_level']).to eq(link.group_access)
expect(json_response['shared_with_groups'][0]['expires_at']).to be_nil
expect(json_response['shared_with_groups'][0]).to have_key('expires_at')
expect(json_response['only_allow_merge_if_pipeline_succeeds']).to eq(project.only_allow_merge_if_pipeline_succeeds)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
@ -2243,6 +2244,16 @@ describe API::Projects do
expect(json_response['pages_access_level']).to eq('private')
end
it 'updates emails_disabled' do
project_param = { emails_disabled: true }
put api("/projects/#{project3.id}", user), params: project_param
expect(response).to have_gitlab_http_status(200)
expect(json_response['emails_disabled']).to eq(true)
end
it 'updates build_git_strategy' do
project_param = { build_git_strategy: 'clone' }

View File

@ -1607,7 +1607,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it_behaves_like 'successful artifacts upload'
end
context 'for file stored remotelly' do
context 'for file stored remotely' do
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
@ -1894,6 +1894,46 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
context 'when artifacts already exist for the job' do
let(:params) do
{
artifact_type: :archive,
artifact_format: :zip,
'file.sha256' => uploaded_sha256
}
end
let(:existing_sha256) { '0' * 64 }
let!(:existing_artifact) do
create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
end
context 'when sha256 is the same as the existing artifact' do
let(:uploaded_sha256) { existing_sha256 }
it 'ignores the new artifact' do
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:created)
expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
end
end
context 'when sha256 is different from the existing artifact' do
let(:uploaded_sha256) { '1' * 64 }
it 'logs and returns an error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
upload_artifacts(file_upload, headers_with_token, params)
expect(response).to have_gitlab_http_status(:bad_request)
expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
end
end
end
context 'when artifacts are being stored outside of tmp path' do
let(:new_tmpdir) { Dir.mktmpdir }

View File

@ -0,0 +1,121 @@
# frozen_string_literal: true
require 'spec_helper'
describe Ci::CreateJobArtifactsService do
let(:service) { described_class.new }
let(:job) { create(:ci_build) }
let(:artifacts_sha256) { '0' * 64 }
let(:metadata_file) { nil }
let(:artifacts_file) do
file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256)
end
let(:params) do
{
'artifact_type' => 'archive',
'artifact_format' => 'zip'
}
end
def file_to_upload(path, params = {})
upload = Tempfile.new('upload')
FileUtils.copy(path, upload.path)
UploadedFile.new(upload.path, params)
end
describe '#execute' do
subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
context 'when artifacts file is uploaded' do
it 'saves artifact for the given type' do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
new_artifact = job.job_artifacts.last
expect(new_artifact.project).to eq(job.project)
expect(new_artifact.file).to be_present
expect(new_artifact.file_type).to eq(params['artifact_type'])
expect(new_artifact.file_format).to eq(params['artifact_format'])
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
end
context 'when metadata file is also uploaded' do
let(:metadata_file) do
file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
end
before do
stub_application_setting(default_artifacts_expire_in: '1 day')
end
it 'saves metadata artifact' do
expect { subject }.to change { Ci::JobArtifact.count }.by(2)
new_artifact = job.job_artifacts.last
expect(new_artifact.project).to eq(job.project)
expect(new_artifact.file).to be_present
expect(new_artifact.file_type).to eq('metadata')
expect(new_artifact.file_format).to eq('gzip')
expect(new_artifact.file_sha256).to eq(artifacts_sha256)
end
it 'sets expiration date according to application settings' do
expected_expire_at = 1.day.from_now
expect(subject).to be_truthy
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
end
context 'when expire_in params is set' do
before do
params.merge!('expire_in' => '2 hours')
end
it 'sets expiration date according to the parameter' do
expected_expire_at = 2.hours.from_now
expect(subject).to be_truthy
archive_artifact, metadata_artifact = job.job_artifacts.last(2)
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at)
expect(archive_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
end
end
end
end
context 'when artifacts file already exists' do
let!(:existing_artifact) do
create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
end
context 'when sha256 of the uploaded artifact is the same as the existing one' do
let(:existing_sha256) { artifacts_sha256 }
it 'ignores the changes' do
expect { subject }.not_to change { Ci::JobArtifact.count }
expect(subject).to be_truthy
end
end
context 'when sha256 of the uploaded artifact is different from the existing one' do
let(:existing_sha256) { '1' * 64 }
it 'returns false and logs the error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
expect { subject }.not_to change { Ci::JobArtifact.count }
expect(subject).to be_falsey
expect(job.errors[:base]).to contain_exactly('another artifact of the same type already exists')
end
end
end
end
end
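As a reading aid, a hypothetical sketch of the duplicate handling these contexts assert; the method name and its return values are invented for illustration, and the real service code is not part of this diff:
  # Same sha256 on an existing artifact of the same type => treat the upload as a no-op;
  # a different sha256 => track the error and reject, as the contexts above expect.
  def duplicate_artifact_outcome(job, artifact_type, uploaded_sha256)
    existing = job.job_artifacts.find_by(file_type: artifact_type)
    return :create unless existing
    return :ignore if existing.file_sha256 == uploaded_sha256

    Gitlab::ErrorTracking.track_exception(
      ArgumentError.new('another artifact of the same type already exists'),
      job_id: job.id
    )
    :error
  end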

View File

@ -185,12 +185,13 @@ module GraphqlHelpers
end
# Fairly dumb Ruby => GraphQL rendering function. Only suitable for testing.
# Missing support for Enums (feel free to add if you need it).
# Use symbols for Enum values
def as_graphql_literal(value)
case value
when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]"
when Integer, Float then value.to_s
when String then "\"#{value.gsub(/"/, '\\"')}\""
when Symbol then value
when nil then 'null'
when true then 'true'
when false then 'false'

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
RSpec.shared_examples 'within_timeframe scope' do
describe '.within_timeframe' do
it 'returns resources whose start_date and/or end_date fall within the timeframe' do
resources = described_class.within_timeframe(now + 2.days, now + 3.days)
expect(resources).to match_array([resource_2, resource_4])
end
it 'returns resources which start before the timeframe' do
resources = described_class.within_timeframe(now, now + 1.day)
expect(resources).to match_array([resource_1, resource_3, resource_4])
end
it 'returns resources which end after the timeframe' do
resources = described_class.within_timeframe(now + 3.days, now + 5.days)
expect(resources).to match_array([resource_2, resource_4])
end
end
end
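One scope shape that satisfies these shared examples is sketched below. It is an illustrative guess at the intended semantics (a resource matches when its start_date/due_date range overlaps the timeframe, a missing boundary is treated as open-ended, and resources with no dates are excluded); the module name Timeboxable is invented here and does not come from this diff:
  module Timeboxable
    extend ActiveSupport::Concern

    included do
      # Overlap test: starts no later than the window end, ends no earlier than the window start.
      scope :within_timeframe, ->(start_date, end_date) do
        where('start_date IS NOT NULL OR due_date IS NOT NULL')
          .where('start_date IS NULL OR start_date <= ?', end_date)
          .where('due_date IS NULL OR due_date >= ?', start_date)
      end
    end
  end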

View File

@ -705,10 +705,10 @@
exec-sh "^0.3.2"
minimist "^1.2.0"
"@gitlab/eslint-config@^2.1.1":
version "2.1.1"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-config/-/eslint-config-2.1.1.tgz#64fcc8135f1a6055181fd64b991e33eb43913153"
integrity sha512-+rQA+gIcZbkaQ7GIjDjfMnYz41fFtsEaF0cRmk0KSqXWTKmOi4gcYZppIPdRvJWKhNPRS735Y5Of3gdIObINYQ==
"@gitlab/eslint-config@^2.1.2":
version "2.1.2"
resolved "https://registry.yarnpkg.com/@gitlab/eslint-config/-/eslint-config-2.1.2.tgz#9f4011d3bf15f3e2668a1faa754f0b9804f23f8f"
integrity sha512-+9yd5PKyipUVngEtKOdBxq7C6tXsUNdaGVD+SLBDqX0VaCNxQVWJvmQ2FPxb9gOLZsSAnP5Yl2Rj7dY0fJV4Gw==
dependencies:
"@gitlab/eslint-plugin-i18n" "^1.1.0"
"@gitlab/eslint-plugin-vue-i18n" "^1.2.0"
@ -718,7 +718,6 @@
eslint-plugin-babel "^5.3.0"
eslint-plugin-filenames "^1.3.2"
eslint-plugin-import "^2.20.0"
eslint-plugin-no-jquery "^2.3.1"
eslint-plugin-promise "^4.2.1"
eslint-plugin-vue "^5.2.3"
@ -4350,7 +4349,7 @@ eslint-plugin-jest@^22.3.0:
resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-22.3.0.tgz#a10f10dedfc92def774ec9bb5bfbd2fb8e1c96d2"
integrity sha512-P1mYVRNlOEoO5T9yTqOfucjOYf1ktmJ26NjwjH8sxpCFQa6IhBGr5TpKl3hcAAT29hOsRJVuMWmTsHoUVo9FoA==
eslint-plugin-no-jquery@^2.3.0, eslint-plugin-no-jquery@^2.3.1:
eslint-plugin-no-jquery@^2.3.0:
version "2.3.1"
resolved "https://registry.yarnpkg.com/eslint-plugin-no-jquery/-/eslint-plugin-no-jquery-2.3.1.tgz#1c364cb863a38cc1570c8020155b6004cca62178"
integrity sha512-/fiQUBSOMUETnfBuiK5ewvtRbek1IRTy5ov/6RZ6nlybvZ337vyGaNPWM1KgaIoIeN7dairNrPfq0h7A0tpT3A==