Add latest changes from gitlab-org/gitlab@master
Parent: a4c6555151
Commit: ae1efa2e1d
85 changed files with 1440 additions and 985 deletions
@@ -74,9 +74,8 @@ export default {
{
key: 'title',
label: s__('AlertManagement|Alert'),
thClass: `${thClass} gl-pointer-events-none`,
thClass: `gl-pointer-events-none`,
tdClass,
sortable: false,
},
{
key: 'eventCount',

@@ -88,7 +87,7 @@ export default {
{
key: 'assignees',
label: s__('AlertManagement|Assignees'),
thClass: 'gl-w-eighth',
thClass: 'gl-w-eighth gl-pointer-events-none',
tdClass,
},
{
@@ -17,7 +17,7 @@ import UnmetPrerequisitesBlock from './unmet_prerequisites_block.vue';
import Sidebar from './sidebar.vue';
import { sprintf } from '~/locale';
import delayedJobMixin from '../mixins/delayed_job_mixin';
import { isNewJobLogActive } from '../store/utils';
import Log from './log/log.vue';

export default {
name: 'JobPageApp',

@@ -28,7 +28,7 @@ export default {
EnvironmentsBlock,
ErasedBlock,
Icon,
Log: () => (isNewJobLogActive() ? import('./log/log.vue') : import('./job_log.vue')),
Log,
LogTopBar,
StuckBlock,
UnmetPrerequisitesBlock,
@@ -1,59 +0,0 @@
<script>
import { mapState, mapActions } from 'vuex';

export default {
name: 'JobLog',
props: {
trace: {
type: String,
required: true,
},
isComplete: {
type: Boolean,
required: true,
},
},
computed: {
...mapState(['isScrolledToBottomBeforeReceivingTrace']),
},
updated() {
this.$nextTick(() => {
this.handleScrollDown();
});
},
mounted() {
this.$nextTick(() => {
this.handleScrollDown();
});
},
methods: {
...mapActions(['scrollBottom']),
/**
* The job log is sent in HTML, which means we need to use `v-html` to render it
* Using the updated hook with $nextTick is not enough to wait for the DOM to be updated
* in this case because it runs before `v-html` has finished running, since there's no
* Vue binding.
* In order to scroll the page down after `v-html` has finished, we need to use setTimeout
*/
handleScrollDown() {
if (this.isScrolledToBottomBeforeReceivingTrace) {
setTimeout(() => {
this.scrollBottom();
}, 0);
}
},
},
};
</script>
<template>
<pre class="js-build-trace build-trace qa-build-trace">
<code class="bash" v-html="trace">
</code>

<div v-if="!isComplete" class="js-log-animation build-loader-animation">
<div class="dot"></div>
<div class="dot"></div>
<div class="dot"></div>
</div>
</pre>
</template>
@@ -2,9 +2,7 @@
import LineNumber from './line_number.vue';

export default {
components: {
LineNumber,
},
functional: true,
props: {
line: {
type: Object,

@@ -15,18 +13,28 @@ export default {
required: true,
},
},
render(h, { props }) {
const { line, path } = props;

const chars = line.content.map(content => {
return h(
'span',
{
class: ['ws-pre-wrap', content.style],
},
content.text,
);
});

return h('div', { class: 'js-line log-line' }, [
h(LineNumber, {
props: {
lineNumber: line.lineNumber,
path,
},
}),
...chars,
]);
},
};
</script>

<template>
<div class="js-line log-line">
<line-number :line-number="line.lineNumber" :path="path" />
<span
v-for="(content, i) in line.content"
:key="i"
:class="content.style"
class="ws-pre-wrap"
>{{ content.text }}</span
>
</div>
</template>
@@ -1,10 +1,6 @@
<script>
import { GlLink } from '@gitlab/ui';

export default {
components: {
GlLink,
},
functional: true,
props: {
lineNumber: {
type: Number,

@@ -15,41 +11,24 @@ export default {
required: true,
},
},
computed: {
/**
* Builds the url for each line number
*
* @returns {String}
*/
buildLineNumber() {
return `${this.path}#${this.lineNumberId}`;
},
/**
* Array indexes start with 0, so we add 1
* to create the line number
*
* @returns {Number} the line number
*/
parsedLineNumber() {
return this.lineNumber + 1;
},
render(h, { props }) {
const { lineNumber, path } = props;

/**
* Creates the anchor for each link
*
* @returns {String}
*/
lineNumberId() {
return `L${this.parsedLineNumber}`;
},
const parsedLineNumber = lineNumber + 1;
const lineId = `L${parsedLineNumber}`;
const lineHref = `${path}#${lineId}`;

return h(
'a',
{
class: 'gl-link d-inline-block text-right line-number flex-shrink-0',
attrs: {
id: lineId,
href: lineHref,
},
},
parsedLineNumber,
);
},
};
</script>
<template>
<gl-link
:id="lineNumberId"
class="d-inline-block text-right line-number flex-shrink-0"
:href="buildLineNumber"
>{{ parsedLineNumber }}</gl-link
>
</template>
@@ -1,6 +1,6 @@
import Vue from 'vue';
import * as types from './mutation_types';
import { logLinesParser, updateIncrementalTrace, isNewJobLogActive } from './utils';
import { logLinesParser, updateIncrementalTrace } from './utils';

export default {
[types.SET_JOB_ENDPOINT](state, endpoint) {

@@ -25,22 +25,16 @@ export default {
}

if (log.append) {
if (isNewJobLogActive()) {
state.trace = log.lines ? updateIncrementalTrace(log.lines, state.trace) : state.trace;
} else {
state.trace += log.html;
}
state.trace = log.lines ? updateIncrementalTrace(log.lines, state.trace) : state.trace;

state.traceSize += log.size;
} else {
// When the job still does not have a trace
// the trace response will not have a defined
// html or size. We keep the old value otherwise these
// will be set to `null`
if (isNewJobLogActive()) {
state.trace = log.lines ? logLinesParser(log.lines) : state.trace;
} else {
state.trace = log.html || state.trace;
}
state.trace = log.lines ? logLinesParser(log.lines) : state.trace;

state.traceSize = log.size || state.traceSize;
}
@@ -1,5 +1,3 @@
import { isNewJobLogActive } from './utils';

export default () => ({
jobEndpoint: null,
traceEndpoint: null,

@@ -18,7 +16,7 @@ export default () => ({
// Used to check if we should keep the automatic scroll
isScrolledToBottomBeforeReceivingTrace: true,

trace: isNewJobLogActive() ? [] : '',
trace: [],
isTraceComplete: false,
traceSize: 0,
isTraceSizeVisible: false,
@@ -177,5 +177,3 @@ export const updateIncrementalTrace = (newLog = [], oldParsed = []) => {

return logLinesParser(newLog, parsedLog);
};

export const isNewJobLogActive = () => gon && gon.features && gon.features.jobLogJson;
@@ -8,14 +8,9 @@
outline: none;
}

> :not([aria-sort='none']).b-table-sort-icon-left:hover::before {
content: '' !important;
}

td,
th {
// TODO: There is no gl-pl-9 utlity for this padding, to be done and then removed.
padding-left: 1.25rem;
@include gl-pl-9;
@include gl-py-5;
@include gl-outline-none;
@include gl-relative;

@@ -26,24 +21,8 @@
font-weight: $gl-font-weight-bold;
color: $gl-gray-600;

&:hover::before {
left: 3%;
top: 34%;
@include gl-absolute;
content: url("data:image/svg+xml,%3Csvg \
xmlns='http://www.w3.org/2000/svg' \
width='14' height='14' viewBox='0 0 16 \
16'%3E%3Cpath fill='%23BABABA' fill-rule='evenodd' \
d='M11.707085,11.7071 L7.999975,15.4142 L4.292875,11.7071 \
C3.902375,11.3166 3.902375,10.6834 \
4.292875,10.2929 C4.683375,9.90237 \
5.316575,9.90237 5.707075,10.2929 \
L6.999975,11.5858 L6.999975,2 C6.999975,1.44771 \
7.447695,1 7.999975,1 C8.552255,1 8.999975,1.44771 \
8.999975,2 L8.999975,11.5858 L10.292865,10.2929 \
C10.683395,9.90237 11.316555,9.90237 11.707085,10.2929 \
C12.097605,10.6834 12.097605,11.3166 11.707085,11.7071 \
Z'/%3E%3C/svg%3E%0A");
&[aria-sort='none']:hover {
background-image: url('data:image/svg+xml, %3csvg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="4 0 8 16"%3e %3cpath style="fill: %23BABABA;" fill-rule="evenodd" d="M11.707085,11.7071 L7.999975,15.4142 L4.292875,11.7071 C3.902375,11.3166 3.902375, 10.6834 4.292875,10.2929 C4.683375,9.90237 5.316575,9.90237 5.707075,10.2929 L6.999975, 11.5858 L6.999975,2 C6.999975,1.44771 7.447695,1 7.999975,1 C8.552255,1 8.999975,1.44771 8.999975,2 L8.999975,11.5858 L10.292865,10.2929 C10.683395 ,9.90237 11.316555,9.90237 11.707085,10.2929 C12.097605,10.6834 12.097605,11.3166 11.707085,11.7071 Z"/%3e %3c/svg%3e');
}
}
}
@@ -5,7 +5,8 @@ class Projects::ImportsController < Projects::ApplicationController
include ImportUrlParams

# Authorize
before_action :authorize_admin_project!
before_action :authorize_admin_project!, only: [:new, :create]
before_action :require_namespace_project_creation_permission, only: :show
before_action :require_no_repo, only: [:new, :create]
before_action :redirect_if_progress, only: [:new, :create]
before_action :redirect_if_no_import, only: :show

@@ -51,6 +52,10 @@ class Projects::ImportsController < Projects::ApplicationController
end
end

def require_namespace_project_creation_permission
render_404 unless current_user.can?(:admin_project, @project) || current_user.can?(:create_projects, @project.namespace)
end

def redirect_if_progress
if @project.import_in_progress?
redirect_to project_import_path(@project)
@@ -11,9 +11,6 @@ class Projects::JobsController < Projects::ApplicationController
before_action :authorize_erase_build!, only: [:erase]
before_action :authorize_use_build_terminal!, only: [:terminal, :terminal_websocket_authorize]
before_action :verify_api_request!, only: :terminal_websocket_authorize
before_action only: [:show] do
push_frontend_feature_flag(:job_log_json, project, default_enabled: true)
end
before_action :authorize_create_proxy_build!, only: :proxy_websocket_authorize
before_action :verify_proxy_request!, only: :proxy_websocket_authorize

@@ -55,15 +52,10 @@ class Projects::JobsController < Projects::ApplicationController
format.json do
build.trace.being_watched!

# TODO: when the feature flag is removed we should not pass
# content_format to serialize method.
content_format = Feature.enabled?(:job_log_json, @project, default_enabled: true) ? :json : :html

build_trace = Ci::BuildTrace.new(
build: @build,
stream: stream,
state: params[:state],
content_format: content_format)
state: params[:state])

render json: BuildTraceSerializer
.new(project: @project, current_user: @current_user)
@@ -109,8 +109,8 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
# or from cache if already merged
@commits =
set_commits_for_rendering(
@merge_request.recent_commits.with_latest_pipeline(@merge_request.source_branch),
commits_count: @merge_request.commits_count
@merge_request.recent_commits.with_latest_pipeline(@merge_request.source_branch).with_markdown_cache,
commits_count: @merge_request.commits_count
)

render json: { html: view_to_html_string('projects/merge_requests/_commits') }
app/finders/projects/integrations/jira/issues_finder.rb (new file, 50 lines)

@@ -0,0 +1,50 @@
# frozen_string_literal: true

module Projects
module Integrations
module Jira
IntegrationError = Class.new(StandardError)
RequestError = Class.new(StandardError)

class IssuesFinder
attr_reader :issues, :total_count

def initialize(project, params = {})
@project = project
@jira_service = project.jira_service
@page = params[:page].presence || 1
@params = params
end

def execute
return [] unless Feature.enabled?(:jira_integration, project)

raise IntegrationError, _('Jira service not configured.') unless jira_service&.active?

project_key = jira_service.project_key
raise IntegrationError, _('Jira project key is not configured') if project_key.blank?

fetch_issues(project_key)
end

private

attr_reader :project, :jira_service, :page, :params

# rubocop: disable CodeReuse/ServiceClass
def fetch_issues(project_key)
jql = ::Jira::JqlBuilderService.new(project_key, params).execute
response = ::Jira::Requests::Issues::ListService.new(jira_service, { jql: jql, page: page }).execute

if response.success?
@total_count = response.payload[:total_count]
@issues = response.payload[:issues]
else
raise RequestError, response.message
end
end
# rubocop: enable CodeReuse/ServiceClass
end
end
end
end
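A minimal sketch of how a caller might drive the finder added above. The variable names and logging calls are illustrative only; the rescued classes are the error classes the finder itself defines.

```ruby
# Hypothetical caller: fetch one page of Jira issues for a project.
finder = Projects::Integrations::Jira::IssuesFinder.new(project, page: 1)

begin
  finder.execute
  finder.issues       # JIRA::Resource::Issue objects for the requested page
  finder.total_count  # total number of matching issues reported by Jira
rescue Projects::Integrations::Jira::IntegrationError => e
  # Jira service inactive or project key missing
  Rails.logger.warn(e.message)
rescue Projects::Integrations::Jira::RequestError => e
  # the Jira API request itself failed
  Rails.logger.error(e.message)
end
```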
@@ -37,7 +37,7 @@ module Resolvers
def jira_projects(name:)
args = { query: name }.compact

return Jira::Requests::Projects.new(project.jira_service, args).execute
return Jira::Requests::Projects::ListService.new(project.jira_service, args).execute
end
end
end
@@ -271,6 +271,20 @@ module GitlabRoutingHelper
end
end

def gitlab_raw_snippet_blob_url(snippet, path, ref = nil)
params = {
snippet_id: snippet,
ref: ref || snippet.repository.root_ref,
path: path
}

if snippet.is_a?(ProjectSnippet)
project_snippet_blob_raw_url(snippet.project, params)
else
snippet_blob_raw_url(params)
end
end

def gitlab_snippet_notes_path(snippet, *args)
new_args = snippet_query_params(snippet, *args)
snippet_notes_path(snippet, *new_args)
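A short usage sketch for the new helper; the file path and ref below are illustrative.

```ruby
# Raw blob URL for a file inside a snippet's repository.
gitlab_raw_snippet_blob_url(snippet, 'README.md')          # defaults to the repository root ref
gitlab_raw_snippet_blob_url(snippet, 'README.md', 'v1.0')  # pin to a specific ref
```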
@@ -2,40 +2,22 @@

module Ci
class BuildTrace
CONVERTERS = {
html: Gitlab::Ci::Ansi2html,
json: Gitlab::Ci::Ansi2json
}.freeze

attr_reader :trace, :build

delegate :state, :append, :truncated, :offset, :size, :total, to: :trace, allow_nil: true
delegate :id, :status, :complete?, to: :build, prefix: true

def initialize(build:, stream:, state:, content_format:)
def initialize(build:, stream:, state:)
@build = build
@content_format = content_format

if stream.valid?
stream.limit
@trace = CONVERTERS.fetch(content_format).convert(stream.stream, state)
@trace = Gitlab::Ci::Ansi2json.convert(stream.stream, state)
end
end

def json?
@content_format == :json
end

def html?
@content_format == :html
end

def json_lines
@trace&.lines if json?
end

def html_lines
@trace&.html if html?
def lines
@trace&.lines
end
end
end
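A sketch of the slimmed-down interface, assuming a trace stream obtained the same way the jobs controller above obtains one; `build` stands in for a `Ci::Build` and the block form is illustrative.

```ruby
# Illustrative only: the trace is now always converted with Gitlab::Ci::Ansi2json.
build.trace.read do |stream|
  build_trace = Ci::BuildTrace.new(build: build, stream: stream, state: params[:state])

  build_trace.lines     # parsed JSON log lines
  build_trace.append    # delegated to the underlying trace object
  build_trace.build_id  # delegated to the build (prefixed delegation)
end
```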
@@ -53,6 +53,17 @@ class CommitCollection
self
end

# Returns the collection with markdown fields preloaded.
#
# Get the markdown cache from redis using pipeline to prevent n+1 requests
# when rendering the markdown of an attribute (e.g. title, full_title,
# description).
def with_markdown_cache
Commit.preload_markdown_cache!(commits)

self
end

def unenriched
commits.reject(&:gitaly_commit?)
end
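For reference, this is the kind of chain the merge requests controller above now uses; preloading the cache avoids one Redis round trip per commit when commit markdown is rendered.

```ruby
# Illustrative: returns the same CommitCollection with markdown caches warmed.
commits = merge_request.recent_commits
  .with_latest_pipeline(merge_request.source_branch)
  .with_markdown_cache
```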
@@ -23,7 +23,7 @@ class JiraService < IssueTrackerService

# TODO: we can probably just delegate as part of
# https://gitlab.com/gitlab-org/gitlab/issues/29404
data_field :username, :password, :url, :api_url, :jira_issue_transition_id
data_field :username, :password, :url, :api_url, :jira_issue_transition_id, :project_key

before_update :reset_password
@@ -334,7 +334,13 @@ class Snippet < ApplicationRecord
def file_name_on_repo
return if repository.empty?

repository.ls_files(repository.root_ref).first
list_files(repository.root_ref).first
end

def list_files(ref = nil)
return [] if repository.empty?

repository.ls_files(ref)
end

class << self
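Illustrative calls against the methods above:

```ruby
snippet.file_name_on_repo                        # first file on the root ref, nil if the repository is empty
snippet.list_files(snippet.repository.root_ref)  # all files on a given ref, [] if the repository is empty
```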
@@ -12,6 +12,5 @@ class BuildTraceEntity < Grape::Entity
expose :size
expose :total

expose :json_lines, as: :lines, if: ->(*) { object.json? }
expose :html_lines, as: :html, if: ->(*) { object.html? }
expose :lines
end
app/services/jira/jql_builder_service.rb (new file, 30 lines)

@@ -0,0 +1,30 @@
# frozen_string_literal: true

module Jira
class JqlBuilderService
DEFAULT_SORT = "created"
DEFAULT_SORT_DIRECTION = "DESC"

def initialize(jira_project_key, params = {})
@jira_project_key = jira_project_key
@sort = params[:sort] || DEFAULT_SORT
@sort_direction = params[:sort_direction] || DEFAULT_SORT_DIRECTION
end

def execute
[by_project, order_by].join(' ')
end

private

attr_reader :jira_project_key, :sort, :sort_direction

def by_project
"project = #{jira_project_key}"
end

def order_by
"order by #{sort} #{sort_direction}"
end
end
end
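The JQL produced by the builder above, shown for an illustrative project key:

```ruby
Jira::JqlBuilderService.new('GL').execute
# => "project = GL order by created DESC"

Jira::JqlBuilderService.new('GL', sort: 'updated', sort_direction: 'ASC').execute
# => "project = GL order by updated ASC"
```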
@@ -5,12 +5,11 @@ module Jira
class Base
include ProjectServicesLoggable

attr_reader :jira_service, :project, :query
JIRA_API_VERSION = 2

def initialize(jira_service, query: nil)
def initialize(jira_service, params = {})
@project = jira_service&.project
@jira_service = jira_service
@query = query
end

def execute

@@ -19,8 +18,19 @@ module Jira
request
end

def base_api_url
"/rest/api/#{api_version}"
end

private

attr_reader :jira_service, :project

# override this method in the specific request class implementation if a differnt API version is required
def api_version
JIRA_API_VERSION
end

def client
@client ||= jira_service.client
end
app/services/jira/requests/issues/list_service.rb (new file, 56 lines)

@@ -0,0 +1,56 @@
# frozen_string_literal: true

module Jira
module Requests
module Issues
class ListService < Base
extend ::Gitlab::Utils::Override

PER_PAGE = 100

def initialize(jira_service, params = {})
super(jira_service, params)

@jql = params[:jql].to_s
@page = params[:page].to_i || 1
end

private

attr_reader :jql, :page

override :url
def url
"#{base_api_url}/search?jql=#{CGI.escape(jql)}&startAt=#{start_at}&maxResults=#{PER_PAGE}&fields=*all"
end

override :build_service_response
def build_service_response(response)
return ServiceResponse.success(payload: empty_payload) if response.blank? || response["issues"].blank?

ServiceResponse.success(payload: {
issues: map_issues(response["issues"]),
is_last: last?(response),
total_count: response["total"].to_i
})
end

def map_issues(response)
response.map { |v| JIRA::Resource::Issue.build(client, v) }
end

def empty_payload
{ issues: [], is_last: true, total_count: 0 }
end

def last?(response)
response["total"].to_i <= response["startAt"].to_i + response["issues"].size
end

def start_at
(page - 1) * PER_PAGE
end
end
end
end
end
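A sketch of how the list service above paginates (the JQL string is illustrative): `startAt` is derived from the requested page and the fixed `PER_PAGE` of 100, so page 1 maps to `startAt=0`, page 2 to `startAt=100`, and so on.

```ruby
service = Jira::Requests::Issues::ListService.new(jira_service, { jql: 'project = GL', page: 2 })
response = service.execute

if response.success?
  response.payload[:issues]       # JIRA::Resource::Issue objects for this page
  response.payload[:total_count]  # total matches reported by Jira
  response.payload[:is_last]      # true when startAt + issues.size >= total
end
```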
@@ -1,37 +0,0 @@
# frozen_string_literal: true

module Jira
module Requests
class Projects < Base
extend ::Gitlab::Utils::Override

private

override :url
def url
'/rest/api/2/project'
end

override :build_service_response
def build_service_response(response)
return ServiceResponse.success(payload: empty_payload) unless response.present?

ServiceResponse.success(payload: { projects: map_projects(response), is_last: true })
end

def map_projects(response)
response.map { |v| JIRA::Resource::Project.build(client, v) }.select(&method(:match_query?))
end

def match_query?(jira_project)
query = self.query.to_s.downcase

jira_project&.key&.downcase&.include?(query) || jira_project&.name&.downcase&.include?(query)
end

def empty_payload
{ projects: [], is_last: true }
end
end
end
end
app/services/jira/requests/projects/list_service.rb (new file, 47 lines)

@@ -0,0 +1,47 @@
# frozen_string_literal: true

module Jira
module Requests
module Projects
class ListService < Base
extend ::Gitlab::Utils::Override

def initialize(jira_service, params: {})
super(jira_service, params)

@query = params[:query]
end

private

attr_reader :query

override :url
def url
"#{base_api_url}/project"
end

override :build_service_response
def build_service_response(response)
return ServiceResponse.success(payload: empty_payload) unless response.present?

ServiceResponse.success(payload: { projects: map_projects(response), is_last: true })
end

def map_projects(response)
response.map { |v| JIRA::Resource::Project.build(client, v) }.select(&method(:match_query?))
end

def match_query?(jira_project)
query = query.to_s.downcase

jira_project&.key&.downcase&.include?(query) || jira_project&.name&.downcase&.include?(query)
end

def empty_payload
{ projects: [], is_last: true }
end
end
end
end
end
@@ -8,10 +8,10 @@
= f.hidden_field :remember_me, value: resource_params.fetch(:remember_me, 0)
%div
= f.label 'Two-Factor Authentication code', name: :otp_attempt
= f.text_field :otp_attempt, class: 'form-control', required: true, autofocus: true, autocomplete: 'off', title: 'This field is required.'
= f.text_field :otp_attempt, class: 'form-control', required: true, autofocus: true, autocomplete: 'off', title: 'This field is required.', data: { qa_selector: 'two_fa_code_field' }
%p.form-text.text-muted.hint Enter the code from the two-factor app on your mobile device. If you've lost your device, you may enter one of your recovery codes.
.prepend-top-20
= f.submit "Verify code", class: "btn btn-success"
= f.submit "Verify code", class: "btn btn-success", data: { qa_selector: 'verify_code_button' }

- if @user.two_factor_u2f_enabled?
= render "u2f/authenticate", params: params, resource: resource, resource_name: resource_name, render_remember_me: true, target_path: new_user_session_path
@@ -9,5 +9,5 @@
%span.monospace= code

.d-flex
= link_to _('Proceed'), profile_account_path, class: 'btn btn-success append-right-10'
= link_to _('Proceed'), profile_account_path, class: 'btn btn-success append-right-10', data: { qa_selector: 'proceed_button' }
= link_to _('Download codes'), "data:text/plain;charset=utf-8,#{CGI.escape(@codes.join("\n"))}", download: "gitlab-recovery-codes.txt", class: 'btn btn-default'
@@ -39,7 +39,7 @@
= _('To add the entry manually, provide the following details to the application on your phone.')
%p.gl-mt-0.gl-mb-0
= _('Account: %{account}') % { account: @account_string }
%p.gl-mt-0.gl-mb-0
%p.gl-mt-0.gl-mb-0{ data: { qa_selector: 'otp_secret_content' } }
= _('Key: %{key}') %{ key: current_user.otp_secret.scan(/.{4}/).join(' ') }
%p.two-factor-new-manual-content
= _('Time based: Yes')

@@ -49,9 +49,9 @@
= @error
.form-group
= label_tag :pin_code, _('Pin code'), class: "label-bold"
= text_field_tag :pin_code, nil, class: "form-control", required: true
= text_field_tag :pin_code, nil, class: "form-control", required: true, data: { qa_selector: 'pin_code_field' }
.gl-mt-3
= submit_tag _('Register with two-factor app'), class: 'btn btn-success'
= submit_tag _('Register with two-factor app'), class: 'btn btn-success', data: { qa_selector: 'register_2fa_app_button' }

%hr
@@ -1,25 +0,0 @@
# frozen_string_literal: true

module ProjectExportOptions
extend ActiveSupport::Concern

EXPORT_RETRY_COUNT = 3

included do
sidekiq_options retry: EXPORT_RETRY_COUNT, status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION

# We mark the project export as failed once we have exhausted all retries
sidekiq_retries_exhausted do |job|
project = Project.find(job['args'][1])
# rubocop: disable CodeReuse/ActiveRecord
job = project.export_jobs.find_by(jid: job["jid"])
# rubocop: enable CodeReuse/ActiveRecord

if job&.fail_op
Sidekiq.logger.info "Job #{job['jid']} for project #{project.id} has been set to failed state"
else
Sidekiq.logger.error "Failed to set Job #{job['jid']} for project #{project.id} to failed state"
end
end
end
end
@@ -6,6 +6,7 @@ class GroupExportWorker # rubocop:disable Scalability/IdempotentWorker

feature_category :importers
loggable_arguments 2
sidekiq_options retry: false

def perform(current_user_id, group_id, params = {})
current_user = User.find(current_user_id)
@@ -3,12 +3,13 @@
class ProjectExportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
include ProjectExportOptions

feature_category :importers
worker_resource_boundary :memory
urgency :throttled
loggable_arguments 2, 3
sidekiq_options retry: false
sidekiq_options status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION

def perform(current_user_id, project_id, after_export_strategy = {}, params = {})
current_user = User.find(current_user_id)
@@ -4,10 +4,11 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
include ExceptionBacktrace
include ProjectStartImport
include ProjectImportOptions

feature_category :importers
worker_has_external_dependencies!
sidekiq_options retry: false
sidekiq_options status_expiration: Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION

# technical debt: https://gitlab.com/gitlab-org/gitlab/issues/33991
sidekiq_options memory_killer_memory_growth_kb: ENV.fetch('MEMORY_KILLER_REPOSITORY_IMPORT_WORKER_MEMORY_GROWTH_KB', 50).to_i
@@ -0,0 +1,5 @@
---
title: Preload commits markdown cache
merge_request: 35314
author:
type: performance
changelogs/unreleased/221242-fix-alerts-list-sorting.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Fix alert sort styling issues
merge_request: 35741
author:
type: fixed
changelogs/unreleased/psi-funtional-line-component.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Performance improvement for job logs
merge_request: 35504
author:
type: performance
changelogs/unreleased/remove-ff-job-log-json.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Remove legacy job log rendering
merge_request: 34538
author:
type: other
changelogs/unreleased/xanf-fix-404-on-import.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
---
title: Fix 404 when importing project with developer permission
merge_request: 35667
author:
type: fixed
@@ -72,6 +72,7 @@ exceptions:
- USB
- URI
- URL
- UUID
- VPC
- WIP
- XML
@@ -789,207 +789,212 @@ Though the name of the Prometheus metric contains `rate_limiting`, it is a concu
a rate limiter. If a Gitaly client makes 1000 requests in a row very quickly, concurrency will not
exceed 1 and the concurrency limiter has no effect.

## Rotating a Gitaly authentication token
## Rotate Gitaly authentication token

Rotating credentials in a production environment often either requires
downtime, or causes outages, or both. If you are careful, though, you
*can* rotate Gitaly credentials without a service interruption.
Rotating credentials in a production environment often requires downtime, causes outages, or both.

This procedure also works if you are running GitLab on a single server.
In that case, "Gitaly server" and "Gitaly client" refers to the same
machine.
However, you can rotate Gitaly credentials without a service interruption. Rotating a Gitaly
authentication token involves:

### 1. Monitor current authentication behavior
- [Verifying authentication monitoring](#verify-authentication-monitoring).
- [Enabling "auth transitioning" mode](#enable-auth-transitioning-mode).
- [Updating Gitaly authentication tokens](#update-gitaly-authentication-token).
- [Ensuring there are no authentication failures](#ensure-there-are-no-authentication-failures).
- [Disabling "auth transitioning" mode](#disable-auth-transitioning-mode).
- [Verifying authentication is enforced](#verify-authentication-is-enforced).

Use Prometheus to see what the current authentication behavior of your
GitLab installation is.
This procedure also works if you are running GitLab on a single server. In that case, "Gitaly
server" and "Gitaly client" refers to the same machine.

### Verify authentication monitoring

Before rotating a Gitaly authentication token, verify that you can monitor the authentication
behavior of your GitLab installation using Prometheus. Use the following Prometheus query:

```prometheus
sum(rate(gitaly_authentications_total[5m])) by (enforced, status)
```

In a system where authentication is configured correctly, and where you
have live traffic, you will see something like this:
In a system where authentication is configured correctly and where you have live traffic, you will
see something like this:

```prometheus
{enforced="true",status="ok"} 4424.985419441742
```

There may also be other numbers with rate 0. We only care about the
non-zero numbers.
There may also be other numbers with rate 0. We only care about the non-zero numbers.

The only non-zero number should have `enforced="true",status="ok"`. If
you have other non-zero numbers, something is wrong in your
configuration.
The only non-zero number should have `enforced="true",status="ok"`. If you have other non-zero
numbers, something is wrong in your configuration.

The `status="ok"` number reflects your current request rate. In the example
above, Gitaly is handling about 4000 requests per second.
The `status="ok"` number reflects your current request rate. In the example above, Gitaly is
handling about 4000 requests per second.

Now you have established that you can monitor the Gitaly authentication
behavior of your GitLab installation.
Now that you have established that you can monitor the Gitaly authentication behavior of your GitLab
installation, you can begin the rest of the procedure.

### 2. Reconfigure all Gitaly servers to be in "auth transitioning" mode
### Enable "auth transitioning" mode

The second step is to temporarily disable authentication on the Gitaly servers.
Temporarily disable Gitaly authentication on the Gitaly servers by putting them into "auth
transitioning" mode as follows:

```ruby
# in /etc/gitlab/gitlab.rb
gitaly['auth_transitioning'] = true
```

After you have applied this, your Prometheus query should return
something like this:
After you have made this change, your [Prometheus query](#verify-authentication-monitoring)
should return something like:

```prometheus
{enforced="false",status="would be ok"} 4424.985419441742
```

Because `enforced="false"`, it will be safe to start rolling out the new
token.
Because `enforced="false"`, it is safe to start rolling out the new token.

### 3. Update Gitaly token on all clients and servers
### Update Gitaly authentication token

```ruby
# in /etc/gitlab/gitlab.rb
To update to a new Gitaly authentication token, on each Gitaly client **and** Gitaly server:

gitaly['auth_token'] = 'my new secret token'
```
1. Update the configuration:

Remember to apply this on both your Gitaly clients *and* servers. If you
check your Prometheus query while this change is being rolled out, you
will see non-zero values for the `enforced="false",status="denied"` counter.
```ruby
# in /etc/gitlab/gitlab.rb

### 4. Use Prometheus to ensure there are no authentication failures
gitaly['auth_token'] = '<new secret token>'
```

After you applied the Gitaly token change everywhere, and all services
involved have been restarted, you should will temporarily see a mix of
`status="would be ok"` and `status="denied"`.
1. Restart Gitaly:

After the new token has been picked up by all Gitaly clients and
servers, the **only non-zero rate** should be
`enforced="false",status="would be ok"`.
```shell
gitlab-ctl restart gitaly
```

### 5. Disable "auth transitioning" Mode
If you run your [Prometheus query](#verify-authentication-monitoring) while this change is
being rolled out, you will see non-zero values for the `enforced="false",status="denied"` counter.

Now we turn off the 'auth transitioning' mode. These final steps are
important: without them, you have **no authentication**.
### Ensure there are no authentication failures

Update the configuration on your Gitaly servers:
After the new token is set, and all services involved have been restarted, you will
[temporarily see](#verify-authentication-monitoring) a mix of:

- `status="would be ok"`.
- `status="denied"`.

After the new token has been picked up by all Gitaly clients and Gitaly servers, the
**only non-zero rate** should be `enforced="false",status="would be ok"`.

### Disable "auth transitioning" mode

To re-enable Gitaly authentication, disable "auth transitioning" mode. Update the configuration on
your Gitaly servers as follows:

```ruby
# in /etc/gitlab/gitlab.rb
gitaly['auth_transitioning'] = false
```

### 6. Verify that authentication is enforced again
CAUTION: **Caution:**
Without completing this step, you have **no Gitaly authentication**.

Refresh your Prometheus query. You should now see the same kind of
result as you did in the beginning:
### Verify authentication is enforced

Refresh your [Prometheus query](#verify-authentication-monitoring). You should now see a similar
result as you did at the start. For example:

```prometheus
{enforced="true",status="ok"} 4424.985419441742
```

Note that `enforced="true"`, meaning that authentication is being enforced.
Note that `enforced="true"` means that authentication is being enforced.

## Direct Git access in GitLab Rails
## Direct access to Git in GitLab

Also known as "the Rugged patches".
Direct access to Git uses code in GitLab known as the "Rugged patches".

### History

Before Gitaly existed, the things that are now Gitaly clients used to
access Git repositories directly. Either on a local disk in the case of
e.g. a single-machine Omnibus GitLab installation, or via NFS in the
case of a horizontally scaled GitLab installation.
Before Gitaly existed, what are now Gitaly clients used to access Git repositories directly, either:

Besides running plain `git` commands, in GitLab Rails we also used to
use a Ruby gem (library) called
- On a local disk in the case of a single-machine Omnibus GitLab installation
- Using NFS in the case of a horizontally-scaled GitLab installation.

Besides running plain `git` commands, GitLab used to use a Ruby library called
[Rugged](https://github.com/libgit2/rugged). Rugged is a wrapper around
[libgit2](https://libgit2.org/), a stand-alone implementation of Git in
the form of a C library.
[libgit2](https://libgit2.org/), a stand-alone implementation of Git in the form of a C library.

Over time it has become clear to use that Rugged, and particularly
Rugged in combination with the [Unicorn](https://yhbt.net/unicorn/)
web server, is extremely efficient. Because libgit2 is a *library* and
not an external process, there was very little overhead between GitLab
application code that tried to look up data in Git repositories, and the
Git implementation itself.
Over time it became clear that Rugged, particularly in combination with
[Unicorn](https://yhbt.net/unicorn/), is extremely efficient. Because `libgit2` is a library and
not an external process, there was very little overhead between:

Because Rugged+Unicorn was so efficient, GitLab's application code ended
up with lots of duplicate Git object lookups (like looking up the
`master` commit a dozen times in one request). We could write
inefficient code without being punished for it.
- GitLab application code that tried to look up data in Git repositories.
- The Git implementation itself.

When we migrated these Git lookups to Gitaly calls, we were suddenly
getting a much higher fixed cost per Git lookup. Even when Gitaly is
able to re-use an already-running `git` process to look up e.g. a commit
you still have the cost of a network roundtrip to Gitaly, and within
Gitaly a write/read roundtrip on the Unix pipes that connect Gitaly to
the `git` process.
Because the combination of Rugged and Unicorn was so efficient, GitLab's application code ended up with lots of
duplicate Git object lookups. For example, looking up the `master` commit a dozen times in one
request. We could write inefficient code without poor performance.

Using GitLab.com performance as our yardstick, we pushed down the number
of Gitaly calls per request until the loss of Rugged's efficiency was no
longer felt. It also helped that we run Gitaly itself directly on the
Git file severs, rather than via NFS mounts: this gave us a speed boost
that counteracted the negative effect of not using Rugged anymore.
When we migrated these Git lookups to Gitaly calls, we suddenly had a much higher fixed cost per Git
lookup. Even when Gitaly is able to re-use an already-running `git` process (for example, to look up
a commit), you still have:

Unfortunately, some *other* deployments of GitLab could not ditch NFS
like we did on GitLab.com and they got the worst of both worlds: the
slowness of NFS and the increased inherent overhead of Gitaly.
- The cost of a network roundtrip to Gitaly.
- Within Gitaly, a write/read roundtrip on the Unix pipes that connect Gitaly to the `git` process.

As a performance band-aid for these stuck-on-NFS deployments, we
re-introduced some of the old Rugged code that got deleted from
GitLab Rails during the Gitaly migration project. These pieces of
re-introduced code are informally referred to as "the Rugged patches".
Using GitLab.com to measure, we reduced the number of Gitaly calls per request until the loss of
Rugged's efficiency was no longer felt. It also helped that we run Gitaly itself directly on the Git
file severs, rather than via NFS mounts. This gave us a speed boost that counteracted the negative
effect of not using Rugged anymore.

### Activation of direct Git access in GitLab Rails
Unfortunately, other deployments of GitLab could not remove NFS like we did on GitLab.com, and they
got the worst of both worlds:

The Ruby methods that perform direct Git access are hidden behind [feature
flags](../../development/gitaly.md#legacy-rugged-code). These feature
flags are off by default. It is not good if you need to know about
feature flags to get the best performance so in a second iteration, we
added an automatic mechanism that will enable direct Git access.
- The slowness of NFS.
- The increased inherent overhead of Gitaly.

When GitLab Rails calls a function that has a Rugged patch it performs
two checks. The result of both of these checks is cached.
The code removed from GitLab during the Gitaly migration project affected these deployments. As a
performance workaround for these NFS-based deployments, we re-introduced some of the old Rugged
code. This re-introduced code is informally referred to as the "Rugged patches".

1. Is the feature flag for this patch set in the database? If so, do
what the feature flag says.
1. If the feature flag is not set (i.e. neither true nor false), try to
see if we can access filesystem underneath the Gitaly server
directly. If so, use the Rugged patch.
### How it works

To see if GitLab Rails can access the repository filesystem directly, we use
the following heuristic:
The Ruby methods that perform direct Git access are behind
[feature flags](../../development/gitaly.md#legacy-rugged-code), disabled by default. It wasn't
convenient to set feature flags to get the best performance, so we added an automatic mechanism that
enables direct Git access.

- Gitaly ensures that the filesystem has a metadata file in its root
with a UUID in it.
- Gitaly reports this UUID to GitLab Rails via the `ServerInfo` RPC.
- GitLab Rails tries to read the metadata file directly. If it exists,
and if the UUID's match, assume we have direct access.
When GitLab calls a function that has a "Rugged patch", it performs two checks:

Because of the way the UUID check works, and because Omnibus GitLab will
fill in the correct repository paths in the GitLab Rails config file
`config/gitlab.yml`, **direct Git access in GitLab Rails is on by default in
Omnibus**.
- Is the feature flag for this patch set in the database? If so, the feature flag setting controls
GitLab's use of "Rugged patch" code.
- If the feature flag is not set, GitLab tries accessing the filesystem underneath the
Gitaly server directly. If it can, it will use the "Rugged patch".

### Plans to remove direct Git access in GitLab Rails
The result of both of these checks is cached.

For the sake of removing complexity it is desirable that we get rid of
direct Git access in GitLab Rails. For as long as some GitLab installations are stuck
with Git repositories on slow NFS, however, we cannot just remove them.
To see if GitLab can access the repository filesystem directly, we use the following heuristic:

There are two prongs to our efforts to remove direct Git access in GitLab Rails:
- Gitaly ensures that the filesystem has a metadata file in its root with a UUID in it.
- Gitaly reports this UUID to GitLab via the `ServerInfo` RPC.
- GitLab Rails tries to read the metadata file directly. If it exists, and if the UUID's match,
assume we have direct access.

1. Reduce the number of (inefficient) Gitaly queries made by
GitLab Rails.
1. Persuade everybody who runs a Highly Available / horizontally scaled
GitLab installation to move off of NFS.
Direct Git access is enable by default in Omnibus GitLab because it fills in the correct repository
paths in the GitLab configuration file `config/gitlab.yml`. This satisfies the UUID check.

The second prong is the only real solution. For this we need [Gitaly
HA](https://gitlab.com/groups/gitlab-org/-/epics?scope=all&utf8=%E2%9C%93&state=opened&label_name[]=Gitaly%20HA),
which is still under development as of December 2019.
### Transition to Gitaly Cluster

For the sake of removing complexity, we must remove direct Git access in GitLab. However, we can't
remove it as long some GitLab installations require Git repositories on NFS.

There are two facets to our efforts to remove direct Git access in GitLab:

- Reduce the number of inefficient Gitaly queries made by GitLab.
- Persuade administrators of fault-tolerant or horizontally-scaled GitLab instances to migrate off
NFS.

The second facet presents the only real solution. For this, we developed
[Gitaly Cluster](praefect.md).

## Troubleshooting Gitaly
@@ -1,11 +1,11 @@
# Switching to Puma

## Puma

As of GitLab 12.9, [Puma](https://github.com/puma/puma) has replaced [Unicorn](https://yhbt.net/unicorn/).
as the default web server. Starting with 13.0, both all-in-one package based
installations as well as Helm chart based installations will run Puma instead of
Unicorn unless explicitly specified not to.
as the default web server. From GitLab 13.0, the following run Puma instead of Unicorn unless
explicitly configured not to:

- All-in-one package-based installations.
- Helm chart-based installations.

## Why switch to Puma?
@@ -32,10 +32,12 @@ Additionally we strongly recommend that multi-node deployments [configure their
## Performance caveat when using Puma with Rugged

For deployments where NFS is used to store Git repository, we allow GitLab to use
[Direct Git Access](../gitaly/#direct-git-access-in-gitlab-rails) to improve performance via usage of [Rugged](https://github.com/libgit2/rugged).
[direct Git access](../gitaly/index.md#direct-access-to-git-in-gitlab) to improve performance using
[Rugged](https://github.com/libgit2/rugged).

Rugged usage is automatically enabled if Direct Git Access is present, unless it
is disabled by [feature flags](../../development/gitaly.md#legacy-rugged-code).
Rugged usage is automatically enabled if direct Git access
[is available](../gitaly/index.md#how-it-works), unless it is disabled by
[feature flags](../../development/gitaly.md#legacy-rugged-code).

MRI Ruby uses a GVL. This allows MRI Ruby to be multi-threaded, but running at
most on a single core. Since Rugged can use a thread for long periods of
@@ -265,10 +265,6 @@ For example, to add support for files referenced by a `Widget` model with a
MODEL_FOREIGN_KEY = :widget_id

belongs_to :widget, class_name: 'Widget'

def self.has_create_events?
true
end
end
```
@ -716,6 +716,10 @@ management project. Refer to the
|
|||
[chart](https://github.com/helm/charts/tree/master/stable/nginx-ingress)
|
||||
for the available configuration options.
|
||||
|
||||
NOTE: **Note:**
|
||||
Support for installing the Ingress managed application is provided by the GitLab Configure group.
|
||||
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Configure group](https://about.gitlab.com/handbook/product/categories/#configure-group).
|
||||
|
||||
### Install cert-manager using GitLab CI/CD
|
||||
|
||||
cert-manager is installed using GitLab CI/CD by defining configuration in
|
||||
|
@ -753,6 +757,10 @@ management project. Refer to the
|
|||
[chart](https://hub.helm.sh/charts/jetstack/cert-manager) for the
|
||||
available configuration options.
|
||||
|
||||
NOTE: **Note:**
|
||||
Support for installing the Cert Manager managed application is provided by the GitLab Configure group.
|
||||
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Configure group](https://about.gitlab.com/handbook/product/categories/#configure-group).
|
||||
|
||||
### Install Sentry using GitLab CI/CD
|
||||
|
||||
NOTE: **Note:**
|
||||
|
@ -814,6 +822,10 @@ postgresql:
|
|||
postgresqlPassword: example-postgresql-password
|
||||
```
|
||||
|
||||
NOTE: **Note:**
|
||||
Support for installing the Sentry managed application is provided by the GitLab Health group.
|
||||
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Health group](https://about.gitlab.com/handbook/product/product-categories/#health-group).
|
||||
|
||||
### Install PostHog using GitLab CI/CD
|
||||
|
||||
[PostHog](https://www.posthog.com) 🦔 is a developer-friendly, open-source product analytics platform.
|
||||
|
@ -885,6 +897,10 @@ project. Refer to the
|
|||
[Configuration section of the Prometheus chart's README](https://github.com/helm/charts/tree/master/stable/prometheus#configuration)
|
||||
for the available configuration options.
|
||||
|
||||
NOTE: **Note:**
|
||||
Support for installing the Prometheus managed application is provided by the GitLab APM group.
|
||||
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [APM group](https://about.gitlab.com/handbook/product/product-categories/#apm-group).
|
||||
|
||||
### Install GitLab Runner using GitLab CI/CD
|
||||
|
||||
GitLab Runner is installed using GitLab CI/CD by defining configuration in
|
||||
|
@ -916,6 +932,10 @@ management project. Refer to the
|
|||
[chart](https://gitlab.com/gitlab-org/charts/gitlab-runner) for the
|
||||
available configuration options.
|
||||
|
||||
NOTE: **Note:**
|
||||
Support for installing the Elastic Stack managed application is provided by the GitLab Runner group.
|
||||
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Runner group](https://about.gitlab.com/handbook/product/product-categories/#runner-group).

### Install Cilium using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/22) in GitLab 12.8.

@@ -1019,6 +1039,10 @@ metrics:
  - 'flow:sourceContext=namespace;destinationContext=namespace'
```

NOTE: **Note:**
Support for installing the Cilium managed application is provided by the GitLab Container Security group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Container Security group](https://about.gitlab.com/handbook/product/product-categories/#container-security-group).

### Install Falco using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/91) in GitLab 13.1.

@@ -1106,6 +1130,10 @@ You can check these logs with the following command:
kubectl logs -l app=falco -n gitlab-managed-apps
```

NOTE: **Note:**
Support for installing the Falco managed application is provided by the GitLab Container Security group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Container Security group](https://about.gitlab.com/handbook/product/product-categories/#container-security-group).

### Install Vault using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/9982) in GitLab 12.9.

@@ -1195,6 +1223,10 @@ kubectl -n gitlab-managed-apps exec -it vault-0 sh
This should give you your unseal keys and initial root token. Make sure to note these down
and keep these safe as you will need them to unseal the Vault throughout its lifecycle.

NOTE: **Note:**
Support for installing the Vault managed application is provided by the GitLab Release Management group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Release Management group](https://about.gitlab.com/handbook/product/product-categories/#release-management-group).

### Install JupyterHub using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/40) in GitLab 12.8.

@@ -1244,6 +1276,10 @@ Refer to the
[chart reference](https://zero-to-jupyterhub.readthedocs.io/en/stable/reference/reference.html) for the
available configuration options.

NOTE: **Note:**
Support for installing the JupyterHub managed application is provided by the GitLab Configure group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Configure group](https://about.gitlab.com/handbook/product/categories/#configure-group).

### Install Elastic Stack using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25138) in GitLab 12.8.

@@ -1271,6 +1307,10 @@ available configuration options.
NOTE: **Note:**
In this alpha implementation of installing Elastic Stack through CI, reading the environment logs through Elasticsearch is unsupported. This is supported if [installed via the UI](#elastic-stack).

NOTE: **Note:**
Support for installing the Elastic Stack managed application is provided by the GitLab APM group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [APM group](https://about.gitlab.com/handbook/product/product-categories/#apm-group).

### Install Crossplane using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/35675) in GitLab 12.9.

@@ -1297,6 +1337,10 @@ management project. Refer to the
[chart](https://github.com/crossplane/crossplane/tree/master/cluster/charts/crossplane#configuration) for the
available configuration options. Note that this link points to the documentation for the current development release, which may differ from the version you have installed.

NOTE: **Note:**
Support for the Crossplane managed application is provided by the Crossplane team.
If you run into issues, please [open a support ticket](https://github.com/crossplane/crossplane/issues/new/choose) directly.

### Install Fluentd using GitLab CI/CD

> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/76) in GitLab 12.10.

@@ -1321,6 +1365,10 @@ The configuration chart link points to the current development release, which
may differ from the version you have installed. To ensure compatibility, switch
to the specific branch or tag you are using.

NOTE: **Note:**
Support for installing the Fluentd managed application is provided by the GitLab Container Security group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Container Security group](https://about.gitlab.com/handbook/product/product-categories/#container-security-group).

### Install Knative using GitLab CI/CD

To install Knative, define the `.gitlab/managed-apps/config.yaml` file

@@ -1343,6 +1391,10 @@ domain: 'my.wildcard.A.record.dns'

If you plan to use GitLab Serverless capabilities, be sure to set an A record wildcard domain on your custom configuration.
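
For illustration, a minimal `.gitlab/managed-apps/config.yaml` entry for Knative could look like the sketch below; the `knative` and `installed` keys follow the same pattern as the other CI/CD-managed applications and are assumptions here, while the `domain` value simply mirrors the line shown in the hunk above.

```yaml
# .gitlab/managed-apps/config.yaml -- illustrative sketch, key names assumed
knative:
  installed: true
  # Wildcard DNS A record used by GitLab Serverless for function URLs
  domain: 'my.wildcard.A.record.dns'
```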

NOTE: **Note:**
Support for installing the Knative managed application is provided by the GitLab Configure group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Configure group](https://about.gitlab.com/handbook/product/categories/#configure-group).

#### Knative Metrics

GitLab provides [Invocation Metrics](../project/clusters/serverless/index.md#invocation-metrics) for your functions. To collect these metrics, you must have:

@@ -1400,6 +1452,10 @@ podAnnotations:

The only information to be changed here is the profile name, which is `profile-one` in this example. Refer to the [AppArmor tutorial](https://kubernetes.io/docs/tutorials/clusters/apparmor/#securing-a-pod) for more information on how AppArmor is integrated in Kubernetes.
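
For context, the kind of Kubernetes annotation such a `podAnnotations` entry maps to is shown below; the container name `nginx` is a placeholder, and the annotation key follows the upstream AppArmor tutorial linked above rather than anything confirmed by this commit.

```yaml
# Illustrative pod metadata for an AppArmor profile named profile-one.
# The container name (nginx) is a placeholder.
metadata:
  annotations:
    container.apparmor.security.beta.kubernetes.io/nginx: localhost/profile-one
```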

NOTE: **Note:**
Support for installing the AppArmor managed application is provided by the GitLab Container Security group.
If you run into unknown issues, please [open a new issue](https://gitlab.com/gitlab-org/gitlab/-/issues/new) and ping at least 2 people from the [Container Security group](https://about.gitlab.com/handbook/product/product-categories/#container-security-group).

## Upgrading applications

> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/24789) in GitLab 11.8.

@ -17,6 +17,18 @@ module API
|
|||
expose :file_name do |snippet|
|
||||
snippet.file_name_on_repo || snippet.file_name
|
||||
end
|
||||
expose :files, if: ->(snippet, options) { snippet_multiple_files?(snippet, options[:current_user]) } do |snippet, options|
|
||||
snippet.list_files.map do |file|
|
||||
{
|
||||
path: file,
|
||||
raw_url: Gitlab::UrlBuilder.build(snippet, file: file, ref: snippet.repository.root_ref)
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
def snippet_multiple_files?(snippet, current_user)
|
||||
::Feature.enabled?(:snippet_multiple_files, current_user) && snippet.repository_exists?
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -37,7 +37,7 @@ module API
|
|||
use :pagination
|
||||
end
|
||||
get ":id/snippets" do
|
||||
present paginate(snippets_for_current_user), with: Entities::ProjectSnippet
|
||||
present paginate(snippets_for_current_user), with: Entities::ProjectSnippet, current_user: current_user
|
||||
end
|
||||
|
||||
desc 'Get a single project snippet' do
|
||||
|
@ -48,7 +48,7 @@ module API
|
|||
end
|
||||
get ":id/snippets/:snippet_id" do
|
||||
snippet = snippets_for_current_user.find(params[:snippet_id])
|
||||
present snippet, with: Entities::ProjectSnippet
|
||||
present snippet, with: Entities::ProjectSnippet, current_user: current_user
|
||||
end
|
||||
|
||||
desc 'Create a new project snippet' do
|
||||
|
@ -71,7 +71,7 @@ module API
|
|||
snippet = service_response.payload[:snippet]
|
||||
|
||||
if service_response.success?
|
||||
present snippet, with: Entities::ProjectSnippet
|
||||
present snippet, with: Entities::ProjectSnippet, current_user: current_user
|
||||
else
|
||||
render_spam_error! if snippet.spam?
|
||||
|
||||
|
@ -107,7 +107,7 @@ module API
|
|||
snippet = service_response.payload[:snippet]
|
||||
|
||||
if service_response.success?
|
||||
present snippet, with: Entities::ProjectSnippet
|
||||
present snippet, with: Entities::ProjectSnippet, current_user: current_user
|
||||
else
|
||||
render_spam_error! if snippet.spam?
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ module API
|
|||
use :pagination
|
||||
end
|
||||
get do
|
||||
present paginate(snippets_for_current_user), with: Entities::Snippet
|
||||
present paginate(snippets_for_current_user), with: Entities::Snippet, current_user: current_user
|
||||
end
|
||||
|
||||
desc 'List all public personal snippets current_user has access to' do
|
||||
|
@ -42,7 +42,7 @@ module API
|
|||
use :pagination
|
||||
end
|
||||
get 'public' do
|
||||
present paginate(public_snippets), with: Entities::PersonalSnippet
|
||||
present paginate(public_snippets), with: Entities::PersonalSnippet, current_user: current_user
|
||||
end
|
||||
|
||||
desc 'Get a single snippet' do
|
||||
|
@ -57,7 +57,7 @@ module API
|
|||
|
||||
break not_found!('Snippet') unless snippet
|
||||
|
||||
present snippet, with: Entities::PersonalSnippet
|
||||
present snippet, with: Entities::PersonalSnippet, current_user: current_user
|
||||
end
|
||||
|
||||
desc 'Create new snippet' do
|
||||
|
@ -82,7 +82,7 @@ module API
|
|||
snippet = service_response.payload[:snippet]
|
||||
|
||||
if service_response.success?
|
||||
present snippet, with: Entities::PersonalSnippet
|
||||
present snippet, with: Entities::PersonalSnippet, current_user: current_user
|
||||
else
|
||||
render_spam_error! if snippet.spam?
|
||||
|
||||
|
@ -116,7 +116,7 @@ module API
|
|||
snippet = service_response.payload[:snippet]
|
||||
|
||||
if service_response.success?
|
||||
present snippet, with: Entities::PersonalSnippet
|
||||
present snippet, with: Entities::PersonalSnippet, current_user: current_user
|
||||
else
|
||||
render_spam_error! if snippet.spam?
|
||||
|
||||
|
|
|
@ -71,7 +71,11 @@ module Gitlab
|
|||
end
|
||||
|
||||
def snippet_url(snippet, **options)
|
||||
if options.delete(:raw).present?
|
||||
if options[:file].present?
|
||||
file, ref = options.values_at(:file, :ref)
|
||||
|
||||
instance.gitlab_raw_snippet_blob_url(snippet, file, ref)
|
||||
elsif options.delete(:raw).present?
|
||||
instance.gitlab_raw_snippet_url(snippet, **options)
|
||||
else
|
||||
instance.gitlab_snippet_url(snippet, **options)
|
||||
|
|
|
@ -12731,6 +12731,9 @@ msgstr ""
|
|||
msgid "Jira integration not configured."
|
||||
msgstr ""
|
||||
|
||||
msgid "Jira project key is not configured"
|
||||
msgstr ""
|
||||
|
||||
msgid "Jira project: %{importProject}"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ gem 'rspec_junit_formatter', '~> 0.4.1'
|
|||
gem 'faker', '~> 1.6', '>= 1.6.6'
|
||||
gem 'knapsack', '~> 1.17'
|
||||
gem 'parallel_tests', '~> 2.29'
|
||||
gem 'rotp', '~> 3.1.0'
|
||||
|
||||
group :test do
|
||||
gem 'pry-byebug', '~> 3.5.1', platform: :mri
|
||||
|
|
|
@ -78,6 +78,7 @@ GEM
|
|||
http-cookie (>= 1.0.2, < 2.0)
|
||||
mime-types (>= 1.16, < 4.0)
|
||||
netrc (~> 0.8)
|
||||
rotp (3.1.0)
|
||||
rspec (3.9.0)
|
||||
rspec-core (~> 3.9.0)
|
||||
rspec-expectations (~> 3.9.0)
|
||||
|
@ -129,6 +130,7 @@ DEPENDENCIES
|
|||
pry-byebug (~> 3.5.1)
|
||||
rake (~> 12.3.0)
|
||||
rest-client (~> 2.1.0)
|
||||
rotp (~> 3.1.0)
|
||||
rspec (~> 3.7)
|
||||
rspec-retry (~> 0.6.1)
|
||||
rspec_junit_formatter (~> 0.4.1)
|
||||
|
|
qa/qa.rb
|
@ -182,6 +182,7 @@ module QA
|
|||
autoload :Login, 'qa/page/main/login'
|
||||
autoload :Menu, 'qa/page/main/menu'
|
||||
autoload :OAuth, 'qa/page/main/oauth'
|
||||
autoload :TwoFactorAuth, 'qa/page/main/two_factor_auth'
|
||||
autoload :SignUp, 'qa/page/main/sign_up'
|
||||
autoload :Terms, 'qa/page/main/terms'
|
||||
end
|
||||
|
@ -564,6 +565,7 @@ module QA
|
|||
autoload :Retrier, 'qa/support/retrier'
|
||||
autoload :Waiter, 'qa/support/waiter'
|
||||
autoload :WaitForRequests, 'qa/support/wait_for_requests'
|
||||
autoload :OTP, 'qa/support/otp'
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -22,9 +22,9 @@ module QA
|
|||
end
|
||||
end
|
||||
|
||||
def sign_in(as: nil, address: :gitlab)
|
||||
def sign_in(as: nil, address: :gitlab, skip_page_validation: false)
|
||||
Runtime::Browser.visit(address, Page::Main::Login)
|
||||
Page::Main::Login.perform { |login| login.sign_in_using_credentials(user: as) }
|
||||
Page::Main::Login.perform { |login| login.sign_in_using_credentials(user: as, skip_page_validation: skip_page_validation) }
|
||||
end
|
||||
|
||||
def sign_in_as_admin(address: :gitlab)
|
||||
|
|
qa/qa/page/main/two_factor_auth.rb (new file)

@@ -0,0 +1,22 @@
# frozen_string_literal: true

module QA
  module Page
    module Main
      class TwoFactorAuth < Page::Base
        view 'app/views/devise/sessions/two_factor.html.haml' do
          element :verify_code_button
          element :two_fa_code_field
        end

        def click_verify_code_button
          click_element :verify_code_button
        end

        def set_2fa_code(code)
          fill_element(:two_fa_code_field, code)
        end
      end
    end
  end
end
|
@ -8,9 +8,35 @@ module QA
|
|||
element :configure_it_later_button
|
||||
end
|
||||
|
||||
view 'app/views/profiles/two_factor_auths/show.html.haml' do
|
||||
element :otp_secret_content
|
||||
element :pin_code_field
|
||||
element :register_2fa_app_button
|
||||
end
|
||||
|
||||
view 'app/views/profiles/two_factor_auths/_codes.html.haml' do
|
||||
element :proceed_button
|
||||
end
|
||||
|
||||
def click_configure_it_later_button
|
||||
click_element :configure_it_later_button
|
||||
end
|
||||
|
||||
def otp_secret_content
|
||||
find_element(:otp_secret_content).text.gsub('Key:', '').delete(' ')
|
||||
end
|
||||
|
||||
def set_pin_code(pin_code)
|
||||
fill_element(:pin_code_field, pin_code)
|
||||
end
|
||||
|
||||
def click_register_2fa_app_button
|
||||
click_element :register_2fa_app_button
|
||||
end
|
||||
|
||||
def click_proceed_button
|
||||
click_element :proceed_button
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -59,6 +59,10 @@ module QA
|
|||
"/groups/#{CGI.escape("#{sandbox.path}/#{path}")}"
|
||||
end
|
||||
|
||||
def api_put_path
|
||||
"/groups/#{id}"
|
||||
end
|
||||
|
||||
def api_post_path
|
||||
'/groups'
|
||||
end
|
||||
|
@ -75,6 +79,15 @@ module QA
|
|||
def api_delete_path
|
||||
"/groups/#{id}"
|
||||
end
|
||||
|
||||
def set_require_two_factor_authentication(value:)
|
||||
put_body = { require_two_factor_authentication: value }
|
||||
response = put Runtime::API::Request.new(api_client, api_put_path).url, put_body
|
||||
|
||||
unless response.code == HTTP_STATUS_OK
|
||||
raise ResourceUpdateFailedError, "Could not update require_two_factor_authentication to #{value}. Request returned (#{response.code}): `#{response}`."
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -8,10 +8,14 @@ module QA
|
|||
#
|
||||
module Members
|
||||
def add_member(user, access_level = AccessLevel::DEVELOPER)
|
||||
QA::Runtime::Logger.debug(%Q[Adding user #{user.username} to #{full_path} #{self.class.name}])
|
||||
|
||||
post Runtime::API::Request.new(api_client, api_members_path).url, { user_id: user.id, access_level: access_level }
|
||||
end
|
||||
|
||||
def remove_member(user)
|
||||
QA::Runtime::Logger.debug(%Q[Removing user #{user.username} from #{full_path} #{self.class.name}])
|
||||
|
||||
delete Runtime::API::Request.new(api_client, "#{api_members_path}/#{user.id}").url
|
||||
end
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ module QA
|
|||
attribute :id
|
||||
attribute :runners_token
|
||||
attribute :name
|
||||
attribute :full_path
|
||||
|
||||
def initialize
|
||||
@path = Runtime::Namespace.sandbox_name
|
||||
|
|
|
@ -117,7 +117,10 @@ module QA
|
|||
user.password = password
|
||||
end
|
||||
else
|
||||
self.fabricate!
|
||||
self.fabricate! do |user|
|
||||
user.username = username if username
|
||||
user.password = password if password
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -194,6 +194,14 @@ module QA
|
|||
ENV['GITLAB_QA_PASSWORD_6']
|
||||
end
|
||||
|
||||
def gitlab_qa_2fa_owner_username_1
|
||||
ENV['GITLAB_QA_2FA_OWNER_USERNAME_1'] || 'gitlab-qa-2fa-owner-user1'
|
||||
end
|
||||
|
||||
def gitlab_qa_2fa_owner_password_1
|
||||
ENV['GITLAB_QA_2FA_OWNER_PASSWORD_1']
|
||||
end
|
||||
|
||||
def gitlab_qa_1p_email
|
||||
ENV['GITLAB_QA_1P_EMAIL']
|
||||
end
|
||||
|
|
|
@ -0,0 +1,110 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module QA
|
||||
context 'Manage', :requires_admin, :skip_live_env do
|
||||
describe '2FA' do
|
||||
let(:owner_user) do
|
||||
Resource::User.fabricate_or_use(Runtime::Env.gitlab_qa_2fa_owner_username_1, Runtime::Env.gitlab_qa_2fa_owner_password_1)
|
||||
end
|
||||
|
||||
let(:sandbox_group) do
|
||||
Resource::Sandbox.fabricate! do |sandbox_group|
|
||||
sandbox_group.path = "gitlab-qa-2fa-sandbox-group"
|
||||
sandbox_group.api_client = owner_api_client
|
||||
end
|
||||
end
|
||||
|
||||
let(:group) do
|
||||
QA::Resource::Group.fabricate_via_api! do |group|
|
||||
group.sandbox = sandbox_group
|
||||
group.api_client = owner_api_client
|
||||
group.name = 'group-with-2fa'
|
||||
end
|
||||
end
|
||||
|
||||
let(:developer_user) do
|
||||
Resource::User.fabricate_via_api! do |resource|
|
||||
resource.api_client = admin_api_client
|
||||
end
|
||||
end
|
||||
|
||||
let(:two_fa_expected_text) { /The group settings for.*require you to enable Two-Factor Authentication for your account.*You need to do this before/ }
|
||||
|
||||
before do
|
||||
group.add_member(developer_user, Resource::Members::AccessLevel::DEVELOPER)
|
||||
end
|
||||
|
||||
it 'allows enforcing 2FA via UI and logging in with 2FA' do
|
||||
enforce_two_factor_authentication_on_group(group)
|
||||
|
||||
enable_two_factor_authentication_for_user(developer_user)
|
||||
|
||||
Flow::Login.sign_in(as: developer_user, skip_page_validation: true)
|
||||
|
||||
Page::Main::TwoFactorAuth.perform do |two_fa_auth|
|
||||
two_fa_auth.set_2fa_code('000000')
|
||||
two_fa_auth.click_verify_code_button
|
||||
end
|
||||
|
||||
expect(page).to have_text('Invalid two-factor code')
|
||||
|
||||
Page::Main::TwoFactorAuth.perform do |two_fa_auth|
|
||||
two_fa_auth.set_2fa_code(@otp.fresh_otp)
|
||||
two_fa_auth.click_verify_code_button
|
||||
end
|
||||
|
||||
expect(Page::Main::Menu.perform(&:signed_in?)).to be_truthy
|
||||
end
|
||||
|
||||
after do
|
||||
group.set_require_two_factor_authentication(value: 'false')
|
||||
group.remove_via_api! do |resource|
|
||||
resource.api_client = admin_api_client
|
||||
end
|
||||
developer_user.remove_via_api!
|
||||
end
|
||||
|
||||
def admin_api_client
|
||||
@admin_api_client ||= Runtime::API::Client.as_admin
|
||||
end
|
||||
|
||||
def owner_api_client
|
||||
@owner_api_client ||= Runtime::API::Client.new(:gitlab, user: owner_user)
|
||||
end
|
||||
|
||||
# We are intentionally using the UI to enforce 2FA to exercise the flow with UI.
|
||||
# Any future tests should use the API for this purpose.
|
||||
def enforce_two_factor_authentication_on_group(group)
|
||||
Flow::Login.while_signed_in(as: owner_user) do
|
||||
group.visit!
|
||||
|
||||
Page::Group::Menu.perform(&:click_group_general_settings_item)
|
||||
Page::Group::Settings::General.perform(&:set_require_2fa_enabled)
|
||||
|
||||
expect(page).to have_text(two_fa_expected_text)
|
||||
|
||||
Page::Profile::TwoFactorAuth.perform(&:click_configure_it_later_button)
|
||||
|
||||
expect(page).not_to have_text(two_fa_expected_text)
|
||||
end
|
||||
end
|
||||
|
||||
def enable_two_factor_authentication_for_user(user)
|
||||
Flow::Login.while_signed_in(as: user) do
|
||||
expect(page).to have_text(two_fa_expected_text)
|
||||
|
||||
Page::Profile::TwoFactorAuth.perform do |two_fa_auth|
|
||||
@otp = QA::Support::OTP.new(two_fa_auth.otp_secret_content)
|
||||
|
||||
two_fa_auth.set_pin_code(@otp.fresh_otp)
|
||||
two_fa_auth.click_register_2fa_app_button
|
||||
|
||||
expect(two_fa_auth).to have_text('Congratulations! You have enabled Two-factor Authentication!')
|
||||
|
||||
two_fa_auth.click_proceed_button
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
qa/qa/support/otp.rb (new file)

@@ -0,0 +1,25 @@
# frozen_string_literal: true
require 'rotp'

module QA
  module Support
    class OTP
      def initialize(secret)
        @rotp = ROTP::TOTP.new(secret)
      end

      def fresh_otp
        otps = []

        # Fetches a fresh OTP and returns it only after rotp provides the same OTP twice
        # An OTP is valid for 30 seconds so 70 attempts with 0.5 interval would ensure we complete 1 cycle
        Support::Retrier.retry_until(max_attempts: 70, sleep_interval: 0.5) do
          otps << @rotp.now
          otps.size >= 3 && otps[-1] == otps[-2] && otps[-1] != otps[-3]
        end

        otps.last
      end
    end
  end
end
|
|
@ -8,33 +8,15 @@ RSpec.describe Projects::ImportsController do
|
|||
|
||||
before do
|
||||
sign_in(user)
|
||||
project.add_maintainer(user)
|
||||
end
|
||||
|
||||
describe 'GET #show' do
|
||||
context 'when repository does not exist' do
|
||||
it 'renders template' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(response).to render_template :show
|
||||
context 'when the user has maintainer rights' do
|
||||
before do
|
||||
project.add_maintainer(user)
|
||||
end
|
||||
|
||||
it 'sets flash.now if params is present' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: { to: '/', notice_now: 'Started' } }
|
||||
|
||||
expect(flash.now[:notice]).to eq 'Started'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when repository exists' do
|
||||
let(:project) { create(:project_empty_repo, import_url: 'https://github.com/vim/vim.git') }
|
||||
let(:import_state) { project.import_state }
|
||||
|
||||
context 'when import is in progress' do
|
||||
before do
|
||||
import_state.update(status: :started)
|
||||
end
|
||||
|
||||
context 'when repository does not exist' do
|
||||
it 'renders template' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
|
@ -42,82 +24,138 @@ RSpec.describe Projects::ImportsController do
|
|||
end
|
||||
|
||||
it 'sets flash.now if params is present' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: { to: '/', notice_now: 'In progress' } }
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: { to: '/', notice_now: 'Started' } }
|
||||
|
||||
expect(flash.now[:notice]).to eq 'In progress'
|
||||
expect(flash.now[:notice]).to eq 'Started'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import failed' do
|
||||
before do
|
||||
import_state.update(status: :failed)
|
||||
end
|
||||
context 'when repository exists' do
|
||||
let(:project) { create(:project_empty_repo, import_url: 'https://github.com/vim/vim.git') }
|
||||
let(:import_state) { project.import_state }
|
||||
|
||||
it 'redirects to new_namespace_project_import_path' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(response).to redirect_to new_project_import_path(project)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import finished' do
|
||||
before do
|
||||
import_state.update(status: :finished)
|
||||
end
|
||||
|
||||
context 'when project is a fork' do
|
||||
it 'redirects to namespace_project_path' do
|
||||
allow_any_instance_of(Project).to receive(:forked?).and_return(true)
|
||||
context 'when import is in progress' do
|
||||
before do
|
||||
import_state.update(status: :started)
|
||||
end
|
||||
|
||||
it 'renders template' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(flash[:notice]).to eq 'The project was successfully forked.'
|
||||
expect(response).to redirect_to project_path(project)
|
||||
expect(response).to render_template :show
|
||||
end
|
||||
|
||||
it 'sets flash.now if params is present' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: { to: '/', notice_now: 'In progress' } }
|
||||
|
||||
expect(flash.now[:notice]).to eq 'In progress'
|
||||
end
|
||||
end
|
||||
|
||||
context 'when project is external' do
|
||||
context 'when import failed' do
|
||||
before do
|
||||
import_state.update(status: :failed)
|
||||
end
|
||||
|
||||
it 'redirects to new_namespace_project_import_path' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(response).to redirect_to new_project_import_path(project)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import finished' do
|
||||
before do
|
||||
import_state.update(status: :finished)
|
||||
end
|
||||
|
||||
context 'when project is a fork' do
|
||||
it 'redirects to namespace_project_path' do
|
||||
allow_any_instance_of(Project).to receive(:forked?).and_return(true)
|
||||
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(flash[:notice]).to eq 'The project was successfully forked.'
|
||||
expect(response).to redirect_to project_path(project)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when project is external' do
|
||||
it 'redirects to namespace_project_path' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(flash[:notice]).to eq 'The project was successfully imported.'
|
||||
expect(response).to redirect_to project_path(project)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when continue params is present' do
|
||||
let(:params) do
|
||||
{
|
||||
to: project_path(project),
|
||||
notice: 'Finished'
|
||||
}
|
||||
end
|
||||
|
||||
it 'redirects to internal params[:to]' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: params }
|
||||
|
||||
expect(flash[:notice]).to eq params[:notice]
|
||||
expect(response).to redirect_to params[:to]
|
||||
end
|
||||
|
||||
it 'does not redirect to external params[:to]' do
|
||||
params[:to] = "//google.com"
|
||||
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: params }
|
||||
expect(response).not_to redirect_to params[:to]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import never happened' do
|
||||
before do
|
||||
import_state.update(status: :none)
|
||||
end
|
||||
|
||||
it 'redirects to namespace_project_path' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(flash[:notice]).to eq 'The project was successfully imported.'
|
||||
expect(response).to redirect_to project_path(project)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when continue params is present' do
|
||||
let(:params) do
|
||||
{
|
||||
to: project_path(project),
|
||||
notice: 'Finished'
|
||||
}
|
||||
end
|
||||
|
||||
it 'redirects to internal params[:to]' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: params }
|
||||
|
||||
expect(flash[:notice]).to eq params[:notice]
|
||||
expect(response).to redirect_to params[:to]
|
||||
end
|
||||
|
||||
it 'does not redirect to external params[:to]' do
|
||||
params[:to] = "//google.com"
|
||||
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project, continue: params }
|
||||
expect(response).not_to redirect_to params[:to]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when project is in group' do
|
||||
let(:project) { create(:project_empty_repo, import_url: 'https://github.com/vim/vim.git', namespace: group) }
|
||||
|
||||
context 'when user has developer access to group and import is in progress' do
|
||||
let(:import_state) { project.import_state }
|
||||
|
||||
context 'when import never happened' do
|
||||
before do
|
||||
import_state.update(status: :none)
|
||||
group.add_developer(user)
|
||||
import_state.update!(status: :started)
|
||||
end
|
||||
|
||||
it 'redirects to namespace_project_path' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
context 'when group allows developers to create projects' do
|
||||
let(:group) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
|
||||
|
||||
expect(response).to redirect_to project_path(project)
|
||||
it 'renders template' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(response).to render_template :show
|
||||
end
|
||||
end
|
||||
|
||||
context 'when group prohibits developers to create projects' do
|
||||
let(:group) { create(:group, project_creation_level: Gitlab::Access::MAINTAINER_PROJECT_ACCESS) }
|
||||
|
||||
it 'returns 404 response' do
|
||||
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
||||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -128,6 +166,7 @@ RSpec.describe Projects::ImportsController do
|
|||
let(:project) { create(:project) }
|
||||
|
||||
before do
|
||||
project.add_maintainer(user)
|
||||
allow(RepositoryImportWorker).to receive(:perform_async)
|
||||
|
||||
post :create, params: { project: params, namespace_id: project.namespace.to_param, project_id: project }
|
||||
|
|
|
@ -646,109 +646,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
|
|||
end
|
||||
end
|
||||
|
||||
describe 'GET legacy trace.json' do
|
||||
before do
|
||||
stub_feature_flags(job_log_json: false)
|
||||
get_trace
|
||||
end
|
||||
|
||||
context 'when job has a trace artifact' do
|
||||
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
|
||||
|
||||
it 'returns a trace' do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['id']).to eq job.id
|
||||
expect(json_response['status']).to eq job.status
|
||||
expect(json_response['state']).to be_present
|
||||
expect(json_response['append']).not_to be_nil
|
||||
expect(json_response['truncated']).not_to be_nil
|
||||
expect(json_response['size']).to be_present
|
||||
expect(json_response['total']).to be_present
|
||||
expect(json_response['html']).to eq(job.trace.html)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when job has a trace' do
|
||||
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
|
||||
|
||||
it 'returns a trace' do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['id']).to eq job.id
|
||||
expect(json_response['status']).to eq job.status
|
||||
expect(json_response['html']).to eq('<span>BUILD TRACE</span>')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when job has no traces' do
|
||||
let(:job) { create(:ci_build, pipeline: pipeline) }
|
||||
|
||||
it 'returns no traces' do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['id']).to eq job.id
|
||||
expect(json_response['status']).to eq job.status
|
||||
expect(json_response['html']).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when job has a trace with ANSI sequence and Unicode' do
|
||||
let(:job) { create(:ci_build, :unicode_trace_live, pipeline: pipeline) }
|
||||
|
||||
it 'returns a trace with Unicode' do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['id']).to eq job.id
|
||||
expect(json_response['status']).to eq job.status
|
||||
expect(json_response['html']).to include("ヾ(´༎ຶД༎ຶ`)ノ")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when trace artifact is in ObjectStorage' do
|
||||
let(:url) { 'http://object-storage/trace' }
|
||||
let(:file_path) { expand_fixture_path('trace/sample_trace') }
|
||||
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
|
||||
allow_any_instance_of(JobArtifactUploader).to receive(:url) { url }
|
||||
allow_any_instance_of(JobArtifactUploader).to receive(:size) { File.size(file_path) }
|
||||
end
|
||||
|
||||
context 'when there are no network issues' do
|
||||
before do
|
||||
stub_remote_url_206(url, file_path)
|
||||
|
||||
get_trace
|
||||
end
|
||||
|
||||
it 'returns a trace' do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(json_response['id']).to eq job.id
|
||||
expect(json_response['status']).to eq job.status
|
||||
expect(json_response['html']).to eq(job.trace.html)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there is a network issue' do
|
||||
before do
|
||||
stub_remote_url_500(url)
|
||||
end
|
||||
|
||||
it 'returns a trace' do
|
||||
expect { get_trace }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def get_trace
|
||||
get :trace,
|
||||
params: {
|
||||
namespace_id: project.namespace,
|
||||
project_id: project,
|
||||
id: job.id
|
||||
},
|
||||
format: :json
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET status.json' do
|
||||
let(:job) { create(:ci_build, pipeline: pipeline) }
|
||||
let(:status) { job.detailed_status(double('user')) }
|
||||
|
|
|
@ -837,7 +837,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
|
||||
it 'renders empty state' do
|
||||
expect(page).to have_content(job.detailed_status(user).illustration[:title])
|
||||
expect(page).not_to have_selector('.js-build-trace')
|
||||
expect(page).not_to have_selector('.job-log')
|
||||
expect(page).to have_content('This job has been canceled')
|
||||
end
|
||||
end
|
||||
|
@ -852,7 +852,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
|
||||
it 'renders empty state' do
|
||||
expect(page).to have_content(job.detailed_status(user).illustration[:title])
|
||||
expect(page).not_to have_selector('.js-build-trace')
|
||||
expect(page).not_to have_selector('.job-log')
|
||||
expect(page).to have_content('This job has been skipped')
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,81 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Projects::Integrations::Jira::IssuesFinder do
|
||||
let_it_be(:project, reload: true) { create(:project) }
|
||||
let_it_be(:jira_service, reload: true) { create(:jira_service, project: project) }
|
||||
let(:params) { {} }
|
||||
let(:service) { described_class.new(project, params) }
|
||||
|
||||
describe '#execute' do
|
||||
subject(:issues) { service.execute }
|
||||
|
||||
context 'when jira_integration feature flag is not enabled' do
|
||||
before do
|
||||
stub_feature_flags(jira_integration: false)
|
||||
end
|
||||
|
||||
it 'exits early and returns no issues' do
|
||||
expect(issues.size).to eq 0
|
||||
expect(service.total_count).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when jira service integration does not have project_key' do
|
||||
it 'raises error' do
|
||||
expect { subject }.to raise_error(Projects::Integrations::Jira::IntegrationError, 'Jira project key is not configured')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when jira service integration is not active' do
|
||||
before do
|
||||
jira_service.update!(active: false)
|
||||
end
|
||||
|
||||
it 'raises error' do
|
||||
expect { subject }.to raise_error(Projects::Integrations::Jira::IntegrationError, 'Jira service not configured.')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when jira service integration has project_key' do
|
||||
let(:params) { {} }
|
||||
let(:client) { double(options: { site: 'https://jira.example.com' }) }
|
||||
|
||||
before do
|
||||
jira_service.update!(project_key: 'TEST')
|
||||
expect_next_instance_of(Jira::Requests::Issues::ListService) do |instance|
|
||||
expect(instance).to receive(:client).at_least(:once).and_return(client)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Jira API request fails' do
|
||||
before do
|
||||
expect(client).to receive(:get).and_raise(Timeout::Error)
|
||||
end
|
||||
|
||||
it 'raises error', :aggregate_failures do
|
||||
expect { subject }.to raise_error(Projects::Integrations::Jira::RequestError)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Jira API request succeeds' do
|
||||
before do
|
||||
expect(client).to receive(:get).and_return(
|
||||
{
|
||||
"total" => 375,
|
||||
"startAt" => 0,
|
||||
"issues" => [{ "key" => 'TEST-1' }, { "key" => 'TEST-2' }]
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns service response with issues', :aggregate_failures do
|
||||
expect(issues.size).to eq 2
|
||||
expect(service.total_count).to eq 375
|
||||
expect(issues.map(&:key)).to eq(%w[TEST-1 TEST-2])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -7,6 +7,16 @@
|
|||
"project_id": { "type": ["integer", "null"] },
|
||||
"title": { "type": "string" },
|
||||
"file_name": { "type": ["string", "null"] },
|
||||
"files" : {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": { "type": "string" },
|
||||
"raw_url": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": { "type": ["string", "null"] },
|
||||
"visibility": { "type": "string" },
|
||||
"web_url": { "type": "string" },
|
||||
|
|
|
@ -397,132 +397,31 @@ describe('Job App', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('trace output', () => {
|
||||
describe('with append flag', () => {
|
||||
it('appends the log content to the existing one', () =>
|
||||
setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>More<span>',
|
||||
status: 'running',
|
||||
state: 'newstate',
|
||||
append: true,
|
||||
complete: true,
|
||||
},
|
||||
})
|
||||
.then(() => {
|
||||
store.state.trace = 'Update';
|
||||
describe('trace controls', () => {
|
||||
beforeEach(() =>
|
||||
setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>Update</span>',
|
||||
status: 'success',
|
||||
append: false,
|
||||
size: 50,
|
||||
total: 100,
|
||||
complete: true,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
return wrapper.vm.$nextTick();
|
||||
})
|
||||
.then(() => {
|
||||
expect(
|
||||
wrapper
|
||||
.find('.js-build-trace')
|
||||
.text()
|
||||
.trim(),
|
||||
).toEqual('Update');
|
||||
}));
|
||||
it('should render scroll buttons', () => {
|
||||
expect(wrapper.find('.js-scroll-top').exists()).toBe(true);
|
||||
expect(wrapper.find('.js-scroll-bottom').exists()).toBe(true);
|
||||
});
|
||||
|
||||
describe('without append flag', () => {
|
||||
it('replaces the trace', () =>
|
||||
setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>Different<span>',
|
||||
status: 'running',
|
||||
append: false,
|
||||
complete: true,
|
||||
},
|
||||
}).then(() => {
|
||||
expect(
|
||||
wrapper
|
||||
.find('.js-build-trace')
|
||||
.text()
|
||||
.trim(),
|
||||
).toEqual('Different');
|
||||
}));
|
||||
it('should render link to raw output', () => {
|
||||
expect(wrapper.find('.js-raw-link-controller').exists()).toBe(true);
|
||||
});
|
||||
|
||||
describe('truncated information', () => {
|
||||
describe('when size is less than total', () => {
|
||||
it('shows information about truncated log', () => {
|
||||
mock.onGet(`${props.pagePath}/trace.json`).reply(200, {
|
||||
html: '<span>Update</span>',
|
||||
status: 'success',
|
||||
append: false,
|
||||
size: 50,
|
||||
total: 100,
|
||||
complete: true,
|
||||
});
|
||||
|
||||
return setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>Update</span>',
|
||||
status: 'success',
|
||||
append: false,
|
||||
size: 50,
|
||||
total: 100,
|
||||
complete: true,
|
||||
},
|
||||
}).then(() => {
|
||||
expect(
|
||||
wrapper
|
||||
.find('.js-truncated-info')
|
||||
.text()
|
||||
.trim(),
|
||||
).toContain('Showing last 50 bytes');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when size is equal to total', () => {
|
||||
it('does not show the truncated information', () =>
|
||||
setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>Update</span>',
|
||||
status: 'success',
|
||||
append: false,
|
||||
size: 100,
|
||||
total: 100,
|
||||
complete: true,
|
||||
},
|
||||
}).then(() => {
|
||||
expect(
|
||||
wrapper
|
||||
.find('.js-truncated-info')
|
||||
.text()
|
||||
.trim(),
|
||||
).toEqual('');
|
||||
}));
|
||||
});
|
||||
});
|
||||
|
||||
describe('trace controls', () => {
|
||||
beforeEach(() =>
|
||||
setupAndMount({
|
||||
traceData: {
|
||||
html: '<span>Update</span>',
|
||||
status: 'success',
|
||||
append: false,
|
||||
size: 50,
|
||||
total: 100,
|
||||
complete: true,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
it('should render scroll buttons', () => {
|
||||
expect(wrapper.find('.js-scroll-top').exists()).toBe(true);
|
||||
expect(wrapper.find('.js-scroll-bottom').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render link to raw output', () => {
|
||||
expect(wrapper.find('.js-raw-link-controller').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render link to erase job', () => {
|
||||
expect(wrapper.find('.js-erase-link').exists()).toBe(true);
|
||||
});
|
||||
it('should render link to erase job', () => {
|
||||
expect(wrapper.find('.js-erase-link').exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,65 +0,0 @@
|
|||
import Vue from 'vue';
|
||||
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
|
||||
import component from '~/jobs/components/job_log.vue';
|
||||
import createStore from '~/jobs/store';
|
||||
import { resetStore } from '../store/helpers';
|
||||
|
||||
describe('Job Log', () => {
|
||||
const Component = Vue.extend(component);
|
||||
let store;
|
||||
let vm;
|
||||
|
||||
const trace =
|
||||
'<span>Running with gitlab-runner 12.1.0 (de7731dd)<br/></span><span> on docker-auto-scale-com d5ae8d25<br/></span><div class="gl-mr-3" data-timestamp="1565502765" data-section="prepare-executor" role="button"></div><span class="section section-header js-s-prepare-executor">Using Docker executor with image ruby:2.6 ...<br/></span>';
|
||||
|
||||
beforeEach(() => {
|
||||
store = createStore();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetStore(store);
|
||||
vm.$destroy();
|
||||
});
|
||||
|
||||
it('renders provided trace', () => {
|
||||
vm = mountComponentWithStore(Component, {
|
||||
props: {
|
||||
trace,
|
||||
isComplete: true,
|
||||
},
|
||||
store,
|
||||
});
|
||||
|
||||
expect(vm.$el.querySelector('code').textContent).toContain(
|
||||
'Running with gitlab-runner 12.1.0 (de7731dd)',
|
||||
);
|
||||
});
|
||||
|
||||
describe('while receiving trace', () => {
|
||||
it('renders animation', () => {
|
||||
vm = mountComponentWithStore(Component, {
|
||||
props: {
|
||||
trace,
|
||||
isComplete: false,
|
||||
},
|
||||
store,
|
||||
});
|
||||
|
||||
expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when the build trace has finished', () => {
|
||||
it('does not render animation', () => {
|
||||
vm = mountComponentWithStore(Component, {
|
||||
props: {
|
||||
trace,
|
||||
isComplete: true,
|
||||
},
|
||||
store,
|
||||
});
|
||||
|
||||
expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -76,28 +76,15 @@ describe('Jobs Store Mutations', () => {
|
|||
lines: [],
|
||||
});
|
||||
|
||||
expect(stateCopy.trace).toEqual(html);
|
||||
expect(stateCopy.traceSize).toEqual(511846);
|
||||
expect(stateCopy.isTraceComplete).toEqual(true);
|
||||
});
|
||||
|
||||
describe('with new job log', () => {
|
||||
let stateWithNewLog;
|
||||
beforeEach(() => {
|
||||
gon.features = gon.features || {};
|
||||
gon.features.jobLogJson = true;
|
||||
|
||||
stateWithNewLog = state();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
gon.features.jobLogJson = false;
|
||||
});
|
||||
|
||||
describe('log.lines', () => {
|
||||
describe('when append is true', () => {
|
||||
it('sets the parsed log ', () => {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateWithNewLog, {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
|
||||
append: true,
|
||||
size: 511846,
|
||||
complete: true,
|
||||
|
@ -109,7 +96,7 @@ describe('Jobs Store Mutations', () => {
|
|||
],
|
||||
});
|
||||
|
||||
expect(stateWithNewLog.trace).toEqual([
|
||||
expect(stateCopy.trace).toEqual([
|
||||
{
|
||||
offset: 1,
|
||||
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
|
||||
|
@ -121,7 +108,7 @@ describe('Jobs Store Mutations', () => {
|
|||
|
||||
describe('when it is defined', () => {
|
||||
it('sets the parsed log ', () => {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateWithNewLog, {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
|
||||
append: false,
|
||||
size: 511846,
|
||||
complete: true,
|
||||
|
@ -130,7 +117,7 @@ describe('Jobs Store Mutations', () => {
|
|||
],
|
||||
});
|
||||
|
||||
expect(stateWithNewLog.trace).toEqual([
|
||||
expect(stateCopy.trace).toEqual([
|
||||
{
|
||||
offset: 0,
|
||||
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
|
||||
|
@ -142,7 +129,7 @@ describe('Jobs Store Mutations', () => {
|
|||
|
||||
describe('when it is null', () => {
|
||||
it('sets the default value', () => {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateWithNewLog, {
|
||||
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
|
||||
append: true,
|
||||
html,
|
||||
size: 511846,
|
||||
|
@ -150,7 +137,7 @@ describe('Jobs Store Mutations', () => {
|
|||
lines: null,
|
||||
});
|
||||
|
||||
expect(stateWithNewLog.trace).toEqual([]);
|
||||
expect(stateCopy.trace).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -147,8 +147,8 @@ RSpec.describe GitlabRoutingHelper do
|
|||
end
|
||||
|
||||
context 'snippets' do
|
||||
let_it_be(:personal_snippet) { create(:personal_snippet) }
|
||||
let_it_be(:project_snippet) { create(:project_snippet) }
|
||||
let_it_be(:personal_snippet) { create(:personal_snippet, :repository) }
|
||||
let_it_be(:project_snippet) { create(:project_snippet, :repository) }
|
||||
let_it_be(:note) { create(:note_on_personal_snippet, noteable: personal_snippet) }
|
||||
|
||||
describe '#gitlab_snippet_path' do
|
||||
|
@ -191,6 +191,32 @@ RSpec.describe GitlabRoutingHelper do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#gitlab_raw_snippet_blob_url' do
|
||||
let(:blob) { snippet.blobs.first }
|
||||
let(:ref) { 'snippet-test-ref' }
|
||||
|
||||
context 'for a PersonalSnippet' do
|
||||
let(:snippet) { personal_snippet }
|
||||
|
||||
it { expect(gitlab_raw_snippet_blob_url(snippet, blob.path, ref)).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}") }
|
||||
end
|
||||
|
||||
context 'for a ProjectSnippet' do
|
||||
let(:snippet) { project_snippet }
|
||||
|
||||
it { expect(gitlab_raw_snippet_blob_url(snippet, blob.path, ref)).to eq("http://test.host/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}") }
|
||||
end
|
||||
|
||||
context 'without a ref' do
|
||||
let(:snippet) { personal_snippet }
|
||||
let(:ref) { snippet.repository.root_ref }
|
||||
|
||||
it 'uses the root ref' do
|
||||
expect(gitlab_raw_snippet_blob_url(snippet, blob.path)).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#gitlab_snippet_notes_path' do
|
||||
it 'returns the notes path for the personal snippet' do
|
||||
expect(gitlab_snippet_notes_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/notes")
|
||||
|
|
|
@ -21,6 +21,16 @@ RSpec.describe ::API::Entities::Snippet do
|
|||
it { expect(subject[:visibility]).to eq snippet.visibility }
|
||||
it { expect(subject).to include(:author) }
|
||||
|
||||
context 'with snippet_multiple_files feature disabled' do
|
||||
before do
|
||||
stub_feature_flags(snippet_multiple_files: false)
|
||||
end
|
||||
|
||||
it 'does not return files' do
|
||||
expect(subject).not_to include(:files)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'file_name' do
|
||||
it 'returns attribute from repository' do
|
||||
expect(subject[:file_name]).to eq snippet.blobs.first.path
|
||||
|
@ -62,6 +72,49 @@ RSpec.describe ::API::Entities::Snippet do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'files' do
|
||||
let(:blob) { snippet.blobs.first }
|
||||
let(:ref) { blob.repository.root_ref }
|
||||
|
||||
context 'when repository does not exist' do
|
||||
it 'does not include the files attribute' do
|
||||
allow(snippet).to receive(:repository_exists?).and_return(false)
|
||||
|
||||
expect(subject).not_to include(:files)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'snippet files' do
|
||||
let(:file) { subject[:files].first }
|
||||
|
||||
it 'returns all snippet files' do
|
||||
expect(subject[:files].count).to eq snippet.blobs.count
|
||||
end
|
||||
|
||||
it 'has the file path' do
|
||||
expect(file[:path]).to eq blob.path
|
||||
end
|
||||
|
||||
it 'has the raw url' do
|
||||
expect(file[:raw_url]).to match(raw_url)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with PersonalSnippet' do
|
||||
it_behaves_like 'snippet files' do
|
||||
let(:snippet) { personal_snippet }
|
||||
let(:raw_url) { "/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}" }
|
||||
end
|
||||
end
|
||||
|
||||
context 'with ProjectSnippet' do
|
||||
it_behaves_like 'snippet files' do
|
||||
let(:snippet) { project_snippet }
|
||||
let(:raw_url) { "#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}" }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with PersonalSnippet' do
|
||||
|
|
|
@ -87,12 +87,41 @@ RSpec.describe Gitlab::UrlBuilder do
|
|||
end
|
||||
|
||||
context 'when passing a Snippet' do
|
||||
let(:snippet) { build_stubbed(:personal_snippet) }
|
||||
let_it_be(:personal_snippet) { create(:personal_snippet, :repository) }
|
||||
let_it_be(:project_snippet) { create(:project_snippet, :repository) }
|
||||
let(:blob) { snippet.blobs.first }
|
||||
let(:ref) { blob.repository.root_ref }
|
||||
|
||||
it 'returns a raw snippet URL if requested' do
|
||||
url = subject.build(snippet, raw: true)
|
||||
context 'for a PersonalSnippet' do
|
||||
let(:snippet) { personal_snippet }
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}/snippets/#{snippet.id}/raw"
|
||||
it 'returns a raw snippet URL if requested' do
|
||||
url = subject.build(snippet, raw: true)
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}/snippets/#{snippet.id}/raw"
|
||||
end
|
||||
|
||||
it 'returns a raw snippet blob URL if requested' do
|
||||
url = subject.build(snippet, file: blob.path, ref: ref)
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'for a ProjectSnippet' do
|
||||
let(:snippet) { project_snippet }
|
||||
|
||||
it 'returns a raw snippet URL if requested' do
|
||||
url = subject.build(snippet, raw: true)
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}/#{snippet.project.full_path}/snippets/#{snippet.id}/raw"
|
||||
end
|
||||
|
||||
it 'returns a raw snippet blob URL if requested' do
|
||||
url = subject.build(snippet, file: blob.path, ref: ref)
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ RSpec.describe Ci::BuildTrace do
|
|||
Gitlab::Ci::Trace::Stream.new { data }
|
||||
end
|
||||
|
||||
subject { described_class.new(build: build, stream: stream, state: state, content_format: content_format) }
|
||||
subject { described_class.new(build: build, stream: stream, state: state) }
|
||||
|
||||
shared_examples 'delegates methods' do
|
||||
it { is_expected.to delegate_method(:state).to(:trace) }
|
||||
|
@ -25,29 +25,11 @@ RSpec.describe Ci::BuildTrace do
|
|||
it { is_expected.to delegate_method(:complete?).to(:build).with_prefix }
|
||||
end
|
||||
|
||||
context 'with :json content format' do
|
||||
let(:content_format) { :json }
|
||||
it_behaves_like 'delegates methods'
|
||||
|
||||
it_behaves_like 'delegates methods'
|
||||
|
||||
it { is_expected.to be_json }
|
||||
|
||||
it 'returns formatted trace' do
|
||||
expect(subject.trace.lines).to eq([
|
||||
{ offset: 0, content: [{ text: 'the-stream' }] }
|
||||
])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with :html content format' do
|
||||
let(:content_format) { :html }
|
||||
|
||||
it_behaves_like 'delegates methods'
|
||||
|
||||
it { is_expected.to be_html }
|
||||
|
||||
it 'returns formatted trace' do
|
||||
expect(subject.trace.html).to eq('<span>the-stream</span>')
|
||||
end
|
||||
it 'returns formatted trace' do
|
||||
expect(subject.lines).to eq([
|
||||
{ offset: 0, content: [{ text: 'the-stream' }] }
|
||||
])
|
||||
end
|
||||
end
|
||||
|
|
|
@ -75,6 +75,18 @@ RSpec.describe CommitCollection do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#with_markdown_cache' do
|
||||
let(:commits) { [commit] }
|
||||
let(:collection) { described_class.new(project, commits) }
|
||||
|
||||
it 'preloads commits cache markdown' do
|
||||
aggregate_failures do
|
||||
expect(Commit).to receive(:preload_markdown_cache!).with(commits)
|
||||
expect(collection.with_markdown_cache).to eq(collection)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'enrichment methods' do
|
||||
let(:gitaly_commit) { commit }
|
||||
let(:hash_commit) { Commit.from_hash(gitaly_commit.to_hash, project) }
|
||||
|
|
|
@ -762,4 +762,29 @@ RSpec.describe Snippet do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#list_files' do
|
||||
let_it_be(:snippet) { create(:snippet, :repository) }
|
||||
let(:ref) { 'test-ref' }
|
||||
|
||||
subject { snippet.list_files(ref) }
|
||||
|
||||
context 'when snippet has a repository' do
|
||||
it 'lists files from the repository with the ref' do
|
||||
expect(snippet.repository).to receive(:ls_files).with(ref)
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
|
||||
context 'when snippet does not have a repository' do
|
||||
before do
|
||||
allow(snippet.repository).to receive(:empty?).and_return(true)
|
||||
end
|
||||
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq []
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe API::ProjectSnippets do
|
||||
include SnippetHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :public) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:admin) { create(:admin) }
|
||||
|
@ -84,19 +86,22 @@ RSpec.describe API::ProjectSnippets do
|
|||
end
|
||||
|
||||
describe 'GET /projects/:project_id/snippets/:id' do
|
||||
let(:user) { create(:user) }
|
||||
let(:snippet) { create(:project_snippet, :public, :repository, project: project) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }
|
||||
|
||||
it 'returns snippet json' do
|
||||
get api("/projects/#{project.id}/snippets/#{snippet.id}", user)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
aggregate_failures do
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
|
||||
expect(json_response['title']).to eq(snippet.title)
|
||||
expect(json_response['description']).to eq(snippet.description)
|
||||
expect(json_response['file_name']).to eq(snippet.file_name_on_repo)
|
||||
expect(json_response['ssh_url_to_repo']).to eq(snippet.ssh_url_to_repo)
|
||||
expect(json_response['http_url_to_repo']).to eq(snippet.http_url_to_repo)
|
||||
expect(json_response['title']).to eq(snippet.title)
|
||||
expect(json_response['description']).to eq(snippet.description)
|
||||
expect(json_response['file_name']).to eq(snippet.file_name_on_repo)
|
||||
expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) } )
|
||||
expect(json_response['ssh_url_to_repo']).to eq(snippet.ssh_url_to_repo)
|
||||
expect(json_response['http_url_to_repo']).to eq(snippet.http_url_to_repo)
|
||||
end
|
||||
end
|
||||
|
||||
it 'returns 404 for invalid snippet id' do
|
||||
|
@ -111,6 +116,10 @@ RSpec.describe API::ProjectSnippets do
|
|||
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123", user) }
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'snippet_multiple_files feature disabled' do
|
||||
subject { get api("/projects/#{project.id}/snippets/#{snippet.id}", user) }
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST /projects/:project_id/snippets/' do
|
||||
|
@ -443,7 +452,7 @@ RSpec.describe API::ProjectSnippets do
|
|||
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response.content_type).to eq 'text/plain'
|
||||
expect(response.media_type).to eq 'text/plain'
|
||||
end
|
||||
|
||||
it 'returns 404 for invalid snippet id' do
|
||||
|
|
|
@ -3,13 +3,15 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe API::Snippets do
|
||||
include SnippetHelpers
|
||||
|
||||
let_it_be(:user) { create(:user) }
|
||||
|
||||
describe 'GET /snippets/' do
|
||||
it 'returns snippets available' do
|
||||
public_snippet = create(:personal_snippet, :public, author: user)
|
||||
private_snippet = create(:personal_snippet, :private, author: user)
|
||||
internal_snippet = create(:personal_snippet, :internal, author: user)
|
||||
public_snippet = create(:personal_snippet, :repository, :public, author: user)
|
||||
private_snippet = create(:personal_snippet, :repository, :private, author: user)
|
||||
internal_snippet = create(:personal_snippet, :repository, :internal, author: user)
|
||||
|
||||
get api("/snippets/", user)
|
||||
|
||||
|
@@ -22,6 +24,7 @@ RSpec.describe API::Snippets do
private_snippet.id)
expect(json_response.last).to have_key('web_url')
expect(json_response.last).to have_key('raw_url')
expect(json_response.last).to have_key('files')
expect(json_response.last).to have_key('visibility')
end

@@ -59,32 +62,33 @@
end

describe 'GET /snippets/public' do
let!(:other_user) { create(:user) }
let!(:public_snippet) { create(:personal_snippet, :public, author: user) }
let!(:private_snippet) { create(:personal_snippet, :private, author: user) }
let!(:internal_snippet) { create(:personal_snippet, :internal, author: user) }
let!(:public_snippet_other) { create(:personal_snippet, :public, author: other_user) }
let!(:private_snippet_other) { create(:personal_snippet, :private, author: other_user) }
let!(:internal_snippet_other) { create(:personal_snippet, :internal, author: other_user) }
let!(:public_snippet_project) { create(:project_snippet, :public, author: user) }
let!(:private_snippet_project) { create(:project_snippet, :private, author: user) }
let!(:internal_snippet_project) { create(:project_snippet, :internal, author: user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:public_snippet) { create(:personal_snippet, :repository, :public, author: user) }
let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
let_it_be(:public_snippet_other) { create(:personal_snippet, :repository, :public, author: other_user) }
let_it_be(:private_snippet_other) { create(:personal_snippet, :repository, :private, author: other_user) }
let_it_be(:internal_snippet_other) { create(:personal_snippet, :repository, :internal, author: other_user) }
let_it_be(:public_snippet_project) { create(:project_snippet, :repository, :public, author: user) }
let_it_be(:private_snippet_project) { create(:project_snippet, :repository, :private, author: user) }
let_it_be(:internal_snippet_project) { create(:project_snippet, :repository, :internal, author: user) }

it 'returns all snippets with public visibility from all users' do
get api("/snippets/public", user)

expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
public_snippet.id,
public_snippet_other.id)
expect(json_response.map { |snippet| snippet['web_url']} ).to contain_exactly(
"http://localhost/snippets/#{public_snippet.id}",
"http://localhost/snippets/#{public_snippet_other.id}")
expect(json_response.map { |snippet| snippet['raw_url']} ).to contain_exactly(
"http://localhost/snippets/#{public_snippet.id}/raw",
"http://localhost/snippets/#{public_snippet_other.id}/raw")
aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
public_snippet.id,
public_snippet_other.id)
expect(json_response.map { |snippet| snippet['web_url']} ).to contain_exactly(
"http://localhost/snippets/#{public_snippet.id}",
"http://localhost/snippets/#{public_snippet_other.id}")
expect(json_response[0]['files'].first).to eq snippet_blob_file(public_snippet_other.blobs.first)
expect(json_response[1]['files'].first).to eq snippet_blob_file(public_snippet.blobs.first)
end
end
end

@@ -102,7 +106,7 @@ RSpec.describe API::Snippets do
get api("/snippets/#{snippet.id}/raw", author)

expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain'
expect(response.media_type).to eq 'text/plain'
end

it 'forces attachment content disposition' do
@@ -146,51 +150,75 @@ RSpec.describe API::Snippets do
let_it_be(:author) { create(:user) }
let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: author) }
let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: author) }
let(:snippet) { private_snippet }

it 'requires authentication' do
get api("/snippets/#{private_snippet.id}", nil)

expect(response).to have_gitlab_http_status(:unauthorized)
end

it 'returns snippet json' do
get api("/snippets/#{private_snippet.id}", author)

expect(response).to have_gitlab_http_status(:ok)

expect(json_response['title']).to eq(private_snippet.title)
expect(json_response['description']).to eq(private_snippet.description)
expect(json_response['file_name']).to eq(private_snippet.file_name_on_repo)
expect(json_response['visibility']).to eq(private_snippet.visibility)
expect(json_response['ssh_url_to_repo']).to eq(private_snippet.ssh_url_to_repo)
expect(json_response['http_url_to_repo']).to eq(private_snippet.http_url_to_repo)
end

it 'shows private snippets to an admin' do
get api("/snippets/#{private_snippet.id}", admin)

expect(response).to have_gitlab_http_status(:ok)
end
subject { get api("/snippets/#{snippet.id}", user) }

it 'hides private snippets from an ordinary user' do
get api("/snippets/#{private_snippet.id}", user)
subject

expect(response).to have_gitlab_http_status(:not_found)
end

it 'shows internal snippets to an ordinary user' do
get api("/snippets/#{internal_snippet.id}", user)
context 'without a user' do
let(:user) { nil }

expect(response).to have_gitlab_http_status(:ok)
it 'requires authentication' do
subject

expect(response).to have_gitlab_http_status(:unauthorized)
end
end

it 'returns 404 for invalid snippet id' do
private_snippet.destroy
context 'with the author' do
let(:user) { author }

get api("/snippets/#{private_snippet.id}", admin)
it 'returns snippet json' do
subject

expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
expect(response).to have_gitlab_http_status(:ok)

expect(json_response['title']).to eq(private_snippet.title)
expect(json_response['description']).to eq(private_snippet.description)
expect(json_response['file_name']).to eq(private_snippet.file_name_on_repo)
expect(json_response['files']).to eq(private_snippet.blobs.map { |blob| snippet_blob_file(blob) })
expect(json_response['visibility']).to eq(private_snippet.visibility)
expect(json_response['ssh_url_to_repo']).to eq(private_snippet.ssh_url_to_repo)
expect(json_response['http_url_to_repo']).to eq(private_snippet.http_url_to_repo)
end
end

context 'with an admin' do
let(:user) { admin }

it 'shows private snippets to an admin' do
subject

expect(response).to have_gitlab_http_status(:ok)
end

it 'returns 404 for invalid snippet id' do
private_snippet.destroy

subject

expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end

context 'with an internal snippet' do
let(:snippet) { internal_snippet }

it 'shows internal snippets to an ordinary user' do
subject

expect(response).to have_gitlab_http_status(:ok)
end
end

it_behaves_like 'snippet_multiple_files feature disabled' do
let(:user) { author }
end
end

@@ -221,6 +249,7 @@ RSpec.describe API::Snippets do
expect(json_response['title']).to eq(params[:title])
expect(json_response['description']).to eq(params[:description])
expect(json_response['file_name']).to eq(params[:file_name])
expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) })
expect(json_response['visibility']).to eq(params[:visibility])
end

@@ -251,6 +280,10 @@ RSpec.describe API::Snippets do
it_behaves_like 'snippet creation'

it_behaves_like 'snippet_multiple_files feature disabled' do
let(:snippet) { Snippet.find(json_response["id"]) }
end

context 'with an external user' do
let(:user) { create(:user, :external) }

@@ -13,7 +13,7 @@ RSpec.describe BuildTraceEntity do
end

let(:build_trace) do
Ci::BuildTrace.new(build: build, stream: stream, content_format: content_format, state: nil)
Ci::BuildTrace.new(build: build, stream: stream, state: nil)
end

let(:entity) do
@@ -22,42 +22,24 @@ RSpec.describe BuildTraceEntity do

subject { entity.as_json }

shared_examples 'includes build and trace metadata' do
it 'includes build attributes' do
expect(subject[:id]).to eq(build.id)
expect(subject[:status]).to eq(build.status)
expect(subject[:complete]).to eq(build.complete?)
end

it 'includes trace metadata' do
expect(subject).to include(:state)
expect(subject).to include(:append)
expect(subject).to include(:truncated)
expect(subject).to include(:offset)
expect(subject).to include(:size)
expect(subject).to include(:total)
end
it 'includes build attributes' do
expect(subject[:id]).to eq(build.id)
expect(subject[:status]).to eq(build.status)
expect(subject[:complete]).to eq(build.complete?)
end

context 'when content format is :json' do
let(:content_format) { :json }

it_behaves_like 'includes build and trace metadata'

it 'includes the trace content in json' do
expect(subject[:lines]).to eq([
{ offset: 0, content: [{ text: 'the-trace' }] }
])
end
it 'includes trace metadata' do
expect(subject).to include(:state)
expect(subject).to include(:append)
expect(subject).to include(:truncated)
expect(subject).to include(:offset)
expect(subject).to include(:size)
expect(subject).to include(:total)
end

context 'when content format is :html' do
let(:content_format) { :html }

it_behaves_like 'includes build and trace metadata'

it 'includes the trace content in json' do
expect(subject[:html]).to eq('<span>the-trace</span>')
end
it 'includes the trace content in json' do
expect(subject[:lines]).to eq([
{ offset: 0, content: [{ text: 'the-trace' }] }
])
end
end

25  spec/services/jira/jql_builder_service_spec.rb  Normal file

@@ -0,0 +1,25 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Jira::JqlBuilderService do
describe '#execute' do
subject { described_class.new('PROJECT_KEY', params).execute }

context 'when no params' do
let(:params) { {} }

it 'builds jql with default ordering' do
expect(subject).to eq("project = PROJECT_KEY order by created DESC")
end
end

context 'with sort params' do
let(:params) { { sort: 'updated', sort_direction: 'ASC' } }

it 'builds jql' do
expect(subject).to eq("project = PROJECT_KEY order by updated ASC")
end
end
end
end
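The Jira::JqlBuilderService implementation is not part of this excerpt. A minimal sketch that would satisfy the new spec above, assuming only the constructor arguments and #execute behaviour the spec exercises, could look like this:

module Jira
  class JqlBuilderService
    DEFAULT_SORT = 'created'
    DEFAULT_SORT_DIRECTION = 'DESC'

    # jira_project_key: the Jira project key, e.g. 'PROJECT_KEY' in the spec above
    def initialize(jira_project_key, params = {})
      @jira_project_key = jira_project_key
      @sort = params[:sort] || DEFAULT_SORT
      @sort_direction = params[:sort_direction] || DEFAULT_SORT_DIRECTION
    end

    # Builds the JQL string, e.g. "project = PROJECT_KEY order by created DESC"
    def execute
      [by_project, order_by].join(' ')
    end

    private

    def by_project
      "project = #{@jira_project_key}"
    end

    def order_by
      "order by #{@sort} #{@sort_direction}"
    end
  end
end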
89  spec/services/jira/requests/issues/list_service_spec.rb  Normal file

@@ -0,0 +1,89 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Jira::Requests::Issues::ListService do
let(:jira_service) { create(:jira_service) }
let(:params) { {} }

describe '#execute' do
let(:service) { described_class.new(jira_service, params) }

subject { service.execute }

context 'without jira_service' do
before do
jira_service.update!(active: false)
end

it 'returns an error response' do
expect(subject.error?).to be_truthy
expect(subject.message).to eq('Jira service not configured.')
end
end

context 'when jira_service is nil' do
let(:jira_service) { nil }

it 'returns an error response' do
expect(subject.error?).to be_truthy
expect(subject.message).to eq('Jira service not configured.')
end
end

context 'with jira_service' do
context 'when validations and params are ok' do
let(:client) { double(options: { site: 'https://jira.example.com' }) }

before do
expect(service).to receive(:client).at_least(:once).and_return(client)
end

context 'when the request to Jira returns an error' do
before do
expect(client).to receive(:get).and_raise(Timeout::Error)
end

it 'returns an error response' do
expect(subject.error?).to be_truthy
expect(subject.message).to eq('Jira request error: Timeout::Error')
end
end

context 'when the request does not return any values' do
before do
expect(client).to receive(:get).and_return([])
end

it 'returns a payload with no issues' do
payload = subject.payload

expect(subject.success?).to be_truthy
expect(payload[:issues]).to be_empty
expect(payload[:is_last]).to be_truthy
end
end

context 'when the request returns values' do
before do
expect(client).to receive(:get).and_return(
{
"total" => 375,
"startAt" => 0,
"issues" => [{ "key" => 'TST-1' }, { "key" => 'TST-2' }]
}
)
end

it 'returns a payload with jira issues' do
payload = subject.payload

expect(subject.success?).to be_truthy
expect(payload[:issues].map(&:key)).to eq(%w[TST-1 TST-2])
expect(payload[:is_last]).to be_falsy
end
end
end
end
end
end
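The list service implementation itself is not shown in this diff. The payload shape the spec above exercises (issues responding to #key plus an :is_last flag) could be produced roughly like the sketch below; map_issues is a hypothetical helper standing in for whatever wraps the raw Jira hashes in issue objects:

def build_payload(response)
  # The stubbed client returns [] in the "no values" case, so fall back to an empty Hash.
  response = {} unless response.is_a?(Hash)
  issues = map_issues(response['issues'].to_a)
  # is_last: the page offset plus this page's size reaches the reported total.
  is_last = response['startAt'].to_i + issues.size >= response['total'].to_i

  { issues: issues, is_last: is_last }
end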
@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Jira::Requests::Projects do
RSpec.describe Jira::Requests::Projects::ListService do
let(:jira_service) { create(:jira_service) }
let(:params) { {} }

@@ -4,4 +4,11 @@ module SnippetHelpers
def sign_in_as(user)
sign_in(public_send(user)) if user
end

def snippet_blob_file(blob)
{
"path" => blob.path,
"raw_url" => gitlab_raw_snippet_blob_url(blob.container, blob.path)
}
end
end

@@ -98,3 +98,15 @@ RSpec.shared_examples 'snippet blob content' do
end
end
end

RSpec.shared_examples 'snippet_multiple_files feature disabled' do
before do
stub_feature_flags(snippet_multiple_files: false)

subject
end

it 'does not return files attributes' do
expect(json_response).not_to have_key('files')
end
end

@@ -1,41 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ProjectExportOptions do
let(:project) { create(:project) }
let(:project_export_job) { create(:project_export_job, project: project, jid: '123', status: 1) }
let(:job) { { 'args' => [project.owner.id, project.id, nil, nil], 'jid' => '123' } }
let(:worker_class) do
Class.new do
include Sidekiq::Worker
include ProjectExportOptions
end
end

it 'sets default retry limit' do
expect(worker_class.sidekiq_options['retry']).to eq(ProjectExportOptions::EXPORT_RETRY_COUNT)
end

it 'sets default status expiration' do
expect(worker_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
end

describe '.sidekiq_retries_exhausted' do
it 'marks status as failed' do
expect { worker_class.sidekiq_retries_exhausted_block.call(job) }.to change { project_export_job.reload.status }.from(1).to(3)
end

context 'when status update fails' do
before do
project_export_job.update(status: 2)
end

it 'logs an error' do
expect(Sidekiq.logger).to receive(:error).with("Failed to set Job #{job['jid']} for project #{project.id} to failed state")

worker_class.sidekiq_retries_exhausted_block.call(job)
end
end
end
end
@@ -69,4 +69,14 @@ RSpec.describe ProjectExportWorker do
end
end
end

describe 'sidekiq options' do
it 'disables retry' do
expect(described_class.sidekiq_options['retry']).to eq(false)
end

it 'sets default status expiration' do
expect(described_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
end
end
end
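The worker file itself is not part of this excerpt; the two examples above imply a declaration in ProjectExportWorker roughly like the following (a sketch, not the actual diff):

sidekiq_options retry: false, status_expiration: StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION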
@@ -3,12 +3,6 @@
require 'spec_helper'

RSpec.describe RepositoryImportWorker do
describe 'modules' do
it 'includes ProjectImportOptions' do
expect(described_class).to include_module(ProjectImportOptions)
end
end

describe '#perform' do
let(:project) { create(:project, :import_scheduled) }
let(:import_state) { project.import_state }