Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2020-05-25 15:07:58 +00:00
parent b5249f2d99
commit 0d8e625e4c
84 changed files with 2916 additions and 310 deletions

View File

@ -21,11 +21,12 @@ import getAlerts from '../graphql/queries/get_alerts.query.graphql';
import getAlertsCountByStatus from '../graphql/queries/get_count_by_status.query.graphql'; import getAlertsCountByStatus from '../graphql/queries/get_count_by_status.query.graphql';
import { ALERTS_STATUS, ALERTS_STATUS_TABS, ALERTS_SEVERITY_LABELS } from '../constants'; import { ALERTS_STATUS, ALERTS_STATUS_TABS, ALERTS_SEVERITY_LABELS } from '../constants';
import updateAlertStatus from '../graphql/mutations/update_alert_status.graphql'; import updateAlertStatus from '../graphql/mutations/update_alert_status.graphql';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility'; import { capitalizeFirstCharacter, convertToSnakeCase } from '~/lib/utils/text_utility';
const tdClass = 'table-col d-flex d-md-table-cell align-items-center'; const tdClass = 'table-col d-flex d-md-table-cell align-items-center';
const bodyTrClass = const bodyTrClass =
'gl-border-1 gl-border-t-solid gl-border-gray-100 hover-bg-blue-50 hover-gl-cursor-pointer hover-gl-border-b-solid hover-gl-border-blue-200'; 'gl-border-1 gl-border-t-solid gl-border-gray-100 hover-bg-blue-50 hover-gl-cursor-pointer hover-gl-border-b-solid hover-gl-border-blue-200';
const findDefaultSortColumn = () => document.querySelector('.js-started-at');
export default { export default {
i18n: { i18n: {
@ -41,34 +42,41 @@ export default {
key: 'severity', key: 'severity',
label: s__('AlertManagement|Severity'), label: s__('AlertManagement|Severity'),
tdClass: `${tdClass} rounded-top text-capitalize`, tdClass: `${tdClass} rounded-top text-capitalize`,
sortable: true,
}, },
{ {
key: 'startedAt', key: 'startTime',
label: s__('AlertManagement|Start time'), label: s__('AlertManagement|Start time'),
thClass: 'js-started-at',
tdClass, tdClass,
sortable: true,
}, },
{ {
key: 'endedAt', key: 'endTime',
label: s__('AlertManagement|End time'), label: s__('AlertManagement|End time'),
tdClass, tdClass,
sortable: true,
}, },
{ {
key: 'title', key: 'title',
label: s__('AlertManagement|Alert'), label: s__('AlertManagement|Alert'),
thClass: 'w-30p', thClass: 'w-30p alert-title',
tdClass, tdClass,
sortable: false,
}, },
{ {
key: 'eventCount', key: 'eventsCount',
label: s__('AlertManagement|Events'), label: s__('AlertManagement|Events'),
thClass: 'text-right gl-pr-9', thClass: 'text-right gl-pr-9 w-3rem',
tdClass: `${tdClass} text-md-right`, tdClass: `${tdClass} text-md-right`,
sortable: true,
}, },
{ {
key: 'status', key: 'status',
thClass: 'w-15p', thClass: 'w-15p',
label: s__('AlertManagement|Status'), label: s__('AlertManagement|Status'),
tdClass: `${tdClass} rounded-bottom`, tdClass: `${tdClass} rounded-bottom`,
sortable: true,
}, },
], ],
statuses: { statuses: {
@ -122,6 +130,7 @@ export default {
return { return {
projectPath: this.projectPath, projectPath: this.projectPath,
statuses: this.statusFilter, statuses: this.statusFilter,
sort: this.sort,
}; };
}, },
update(data) { update(data) {
@ -148,6 +157,7 @@ export default {
errored: false, errored: false,
isAlertDismissed: false, isAlertDismissed: false,
isErrorAlertDismissed: false, isErrorAlertDismissed: false,
sort: 'START_TIME_ASC',
statusFilter: this.$options.statusTabs[4].filters, statusFilter: this.$options.statusTabs[4].filters,
}; };
}, },
@ -170,10 +180,22 @@ export default {
return !this.loading && this.hasAlerts ? bodyTrClass : ''; return !this.loading && this.hasAlerts ? bodyTrClass : '';
}, },
}, },
mounted() {
findDefaultSortColumn().ariaSort = 'ascending';
},
methods: { methods: {
filterAlertsByStatus(tabIndex) { filterAlertsByStatus(tabIndex) {
this.statusFilter = this.$options.statusTabs[tabIndex].filters; this.statusFilter = this.$options.statusTabs[tabIndex].filters;
}, },
fetchSortedData({ sortBy, sortDesc }) {
const sortDirection = sortDesc ? 'DESC' : 'ASC';
const sortColumn = convertToSnakeCase(sortBy).toUpperCase();
if (sortBy !== 'startTime') {
findDefaultSortColumn().ariaSort = 'none';
}
this.sort = `${sortColumn}_${sortDirection}`;
},
capitalizeFirstCharacter, capitalizeFirstCharacter,
updateAlertStatus(status, iid) { updateAlertStatus(status, iid) {
this.$apollo this.$apollo
@ -235,7 +257,10 @@ export default {
:busy="loading" :busy="loading"
stacked="md" stacked="md"
:tbody-tr-class="tbodyTrClass" :tbody-tr-class="tbodyTrClass"
:no-local-sorting="true"
sort-icon-left
@row-clicked="navigateToAlertDetails" @row-clicked="navigateToAlertDetails"
@sort-changed="fetchSortedData"
> >
<template #cell(severity)="{ item }"> <template #cell(severity)="{ item }">
<div <div
@ -252,13 +277,17 @@ export default {
</div> </div>
</template> </template>
<template #cell(startedAt)="{ item }"> <template #cell(startTime)="{ item }">
<time-ago v-if="item.startedAt" :time="item.startedAt" /> <time-ago v-if="item.startedAt" :time="item.startedAt" />
</template> </template>
<template #cell(endedAt)="{ item }"> <template #cell(endTime)="{ item }">
<time-ago v-if="item.endedAt" :time="item.endedAt" /> <time-ago v-if="item.endedAt" :time="item.endedAt" />
</template> </template>
<!-- TODO: Remove after: https://gitlab.com/gitlab-org/gitlab/-/issues/218467 -->
<template #cell(eventsCount)="{ item }">
{{ item.eventCount }}
</template>
<template #cell(title)="{ item }"> <template #cell(title)="{ item }">
<div class="gl-max-w-full text-truncate">{{ item.title }}</div> <div class="gl-max-w-full text-truncate">{{ item.title }}</div>

View File

@ -4,6 +4,7 @@ mutation ($projectPath: ID!, $status: AlertManagementStatus!, $iid: String!) {
alert { alert {
iid, iid,
status, status,
endedAt
} }
} }
} }

View File

@ -1,8 +1,8 @@
#import "../fragments/list_item.fragment.graphql" #import "../fragments/list_item.fragment.graphql"
query getAlerts($projectPath: ID!, $statuses: [AlertManagementStatus!]) { query getAlerts($projectPath: ID!, $statuses: [AlertManagementStatus!], $sort: AlertManagementAlertSort ) {
project(fullPath: $projectPath) { project(fullPath: $projectPath) {
alertManagementAlerts(statuses: $statuses) { alertManagementAlerts(statuses: $statuses, sort: $sort) {
nodes { nodes {
...AlertListItem ...AlertListItem
} }

View File

@ -55,3 +55,26 @@ $tooltip-padding-y: 0.5rem;
$tooltip-padding-x: 0.75rem; $tooltip-padding-x: 0.75rem;
$tooltip-arrow-height: 0.5rem; $tooltip-arrow-height: 0.5rem;
$tooltip-arrow-width: 1rem; $tooltip-arrow-width: 1rem;
$b-table-sort-icon-bg-ascending: url('data:image/svg+xml, <svg \
xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="4 0 8 16"> \
<path style="fill: #666;" fill-rule="evenodd" d="M11.707085,11.7071 \
L7.999975,15.4142 L4.292875,11.7071 C3.902375,11.3166 3.902375, \
10.6834 4.292875,10.2929 C4.683375,9.90237 \
5.316575,9.90237 5.707075,10.2929 L6.999975, \
11.5858 L6.999975,2 C6.999975,1.44771 \
7.447695,1 7.999975,1 C8.552255,1 8.999975,1.44771 \
8.999975,2 L8.999975,11.5858 L10.292865,10.2929 C10.683395 \
,9.90237 11.316555,9.90237 11.707085,10.2929 \
C12.097605,10.6834 12.097605,11.3166 11.707085,11.7071 Z"/> \
</svg>') !default;
$b-table-sort-icon-bg-descending: url('data:image/svg+xml,<svg \
xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="4 0 8 16"> \
<path style="fill: #666;" fill-rule="evenodd" d="M4.29289,4.2971 L8,0.59 \
L11.7071,4.2971 C12.0976,4.6876 \
12.0976,5.3208 11.7071,5.7113 C11.3166,6.10183 10.6834, \
6.10183 10.2929,5.7113 L9,4.4184 L9,14.0042 C9,14.55649 \
8.55228,15.0042 8,15.0042 C7.44772,15.0042 7,14.55649 \
7,14.0042 L7,4.4184 L5.70711,5.7113 C5.31658,6.10183 4.68342,6.10183 4.29289,5.7113 \
C3.90237,5.3208 3.90237,4.6876 4.29289,4.2971 Z"/> \
</svg> ') !default;
$b-table-sort-icon-bg-not-sorted: '';

View File

@ -28,8 +28,19 @@
td, td,
th { th {
@include gl-p-5; // TODO: There is no gl-pl-9 utlity for this padding, to be done and then removed.
padding-left: 1.25rem;
@include gl-py-5;
@include gl-outline-none;
border: 0; // Remove cell border styling so that we can set border styling per row border: 0; // Remove cell border styling so that we can set border styling per row
&.event-count {
@include gl-pr-9;
}
&.alert-title {
@include gl-pointer-events-none;
}
} }
th { th {

View File

@ -6,9 +6,11 @@ class IdeController < ApplicationController
include ClientsidePreviewCSP include ClientsidePreviewCSP
include StaticObjectExternalStorageCSP include StaticObjectExternalStorageCSP
before_action do
push_frontend_feature_flag(:build_service_proxy)
end
def index def index
Gitlab::UsageDataCounters::WebIdeCounter.increment_views_count Gitlab::UsageDataCounters::WebIdeCounter.increment_views_count
end end
end end
IdeController.prepend_if_ee('EE::IdeController')

View File

@ -14,6 +14,8 @@ class Projects::JobsController < Projects::ApplicationController
before_action only: [:show] do before_action only: [:show] do
push_frontend_feature_flag(:job_log_json, project, default_enabled: true) push_frontend_feature_flag(:job_log_json, project, default_enabled: true)
end end
before_action :authorize_create_proxy_build!, only: :proxy_websocket_authorize
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
layout 'project' layout 'project'
@ -151,6 +153,10 @@ class Projects::JobsController < Projects::ApplicationController
render json: Gitlab::Workhorse.channel_websocket(@build.terminal_specification) render json: Gitlab::Workhorse.channel_websocket(@build.terminal_specification)
end end
def proxy_websocket_authorize
render json: proxy_websocket_service(build_service_specification)
end
private private
def authorize_update_build! def authorize_update_build!
@ -165,10 +171,19 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :create_build_terminal, build) return access_denied! unless can?(current_user, :create_build_terminal, build)
end end
def authorize_create_proxy_build!
return access_denied! unless can?(current_user, :create_build_service_proxy, build)
end
def verify_api_request! def verify_api_request!
Gitlab::Workhorse.verify_api_request!(request.headers) Gitlab::Workhorse.verify_api_request!(request.headers)
end end
def verify_proxy_request!
verify_api_request!
set_workhorse_internal_api_content_type
end
def raw_send_params def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' } { type: 'text/plain; charset=utf-8', disposition: 'inline' }
end end
@ -202,6 +217,27 @@ class Projects::JobsController < Projects::ApplicationController
'attachment' 'attachment'
end end
end
Projects::JobsController.prepend_if_ee('EE::Projects::JobsController') def build_service_specification
build.service_specification(service: params['service'],
port: params['port'],
path: params['path'],
subprotocols: proxy_subprotocol)
end
def proxy_subprotocol
# This will allow to reuse the same subprotocol set
# in the original websocket connection
request.headers['HTTP_SEC_WEBSOCKET_PROTOCOL'].presence || ::Ci::BuildRunnerSession::TERMINAL_SUBPROTOCOL
end
# This method provides the information to Workhorse
# about the service we want to proxy to.
# For security reasons, in case this operation is started by JS,
# it's important to use only sourced GitLab JS code
def proxy_websocket_service(service)
service[:url] = ::Gitlab::UrlHelpers.as_wss(service[:url])
::Gitlab::Workhorse.channel_websocket(service)
end
end

View File

@ -0,0 +1,98 @@
# frozen_string_literal: true
class Projects::WebIdeTerminalsController < Projects::ApplicationController
before_action :authenticate_user!
before_action :build, except: [:check_config, :create]
before_action :authorize_create_web_ide_terminal!
before_action :authorize_read_web_ide_terminal!, except: [:check_config, :create]
before_action :authorize_update_web_ide_terminal!, only: [:cancel, :retry]
def check_config
return respond_422 unless branch_sha
result = ::Ci::WebIdeConfigService.new(project, current_user, sha: branch_sha).execute
if result[:status] == :success
head :ok
else
respond_422
end
end
def show
render_terminal(build)
end
def create
result = ::Ci::CreateWebIdeTerminalService.new(project,
current_user,
ref: params[:branch])
.execute
if result[:status] == :error
render status: :bad_request, json: result[:message]
else
pipeline = result[:pipeline]
current_build = pipeline.builds.last
if current_build
Gitlab::UsageDataCounters::WebIdeCounter.increment_terminals_count
render_terminal(current_build)
else
render status: :bad_request, json: pipeline.errors.full_messages
end
end
end
def cancel
return respond_422 unless build.cancelable?
build.cancel
head :ok
end
def retry
return respond_422 unless build.retryable?
new_build = Ci::Build.retry(build, current_user)
render_terminal(new_build)
end
private
def authorize_create_web_ide_terminal!
return access_denied! unless can?(current_user, :create_web_ide_terminal, project)
end
def authorize_read_web_ide_terminal!
authorize_build_ability!(:read_web_ide_terminal)
end
def authorize_update_web_ide_terminal!
authorize_build_ability!(:update_web_ide_terminal)
end
def authorize_build_ability!(ability)
return access_denied! unless can?(current_user, ability, build)
end
def build
@build ||= project.builds.find(params[:id])
end
def branch_sha
return unless params[:branch].present?
project.commit(params[:branch])&.id
end
def render_terminal(current_build)
render json: WebIdeTerminalSerializer
.new(project: project, current_user: current_user)
.represent(current_build)
end
end

View File

@ -55,6 +55,7 @@ module Ci
delegate :url, to: :runner_session, prefix: true, allow_nil: true delegate :url, to: :runner_session, prefix: true, allow_nil: true
delegate :terminal_specification, to: :runner_session, allow_nil: true delegate :terminal_specification, to: :runner_session, allow_nil: true
delegate :service_specification, to: :runner_session, allow_nil: true
delegate :gitlab_deploy_token, to: :project delegate :gitlab_deploy_token, to: :project
delegate :trigger_short_token, to: :trigger_request, allow_nil: true delegate :trigger_short_token, to: :trigger_request, allow_nil: true

View File

@ -7,6 +7,8 @@ module Ci
extend Gitlab::Ci::Model extend Gitlab::Ci::Model
TERMINAL_SUBPROTOCOL = 'terminal.gitlab.com' TERMINAL_SUBPROTOCOL = 'terminal.gitlab.com'
DEFAULT_SERVICE_NAME = 'build'.freeze
DEFAULT_PORT_NAME = 'default_port'.freeze
self.table_name = 'ci_builds_runner_session' self.table_name = 'ci_builds_runner_session'
@ -23,6 +25,17 @@ module Ci
channel_specification(wss_url, TERMINAL_SUBPROTOCOL) channel_specification(wss_url, TERMINAL_SUBPROTOCOL)
end end
def service_specification(service: nil, path: nil, port: nil, subprotocols: nil)
return {} unless url.present?
port = port.presence || DEFAULT_PORT_NAME
service = service.presence || DEFAULT_SERVICE_NAME
url = "#{self.url}/proxy/#{service}/#{port}/#{path}"
subprotocols = subprotocols.presence || ::Ci::BuildRunnerSession::TERMINAL_SUBPROTOCOL
channel_specification(url, subprotocols)
end
private private
def channel_specification(url, subprotocol) def channel_specification(url, subprotocol)
@ -37,5 +50,3 @@ module Ci
end end
end end
end end
Ci::BuildRunnerSession.prepend_if_ee('EE::Ci::BuildRunnerSession')

View File

@ -27,6 +27,7 @@ module Ci
# https://gitlab.com/gitlab-org/gitlab/issues/195991 # https://gitlab.com/gitlab-org/gitlab/issues/195991
pipeline: 7, pipeline: 7,
chat: 8, chat: 8,
webide: 9,
merge_request_event: 10, merge_request_event: 10,
external_pull_request_event: 11, external_pull_request_event: 11,
parent_pipeline: 12 parent_pipeline: 12
@ -40,6 +41,7 @@ module Ci
unknown_source: nil, unknown_source: nil,
repository_source: 1, repository_source: 1,
auto_devops_source: 2, auto_devops_source: 2,
webide_source: 3,
remote_source: 4, remote_source: 4,
external_project_source: 5, external_project_source: 5,
bridge_source: 6 bridge_source: 6

View File

@ -328,6 +328,8 @@ class Project < ApplicationRecord
has_many :repository_storage_moves, class_name: 'ProjectRepositoryStorageMove' has_many :repository_storage_moves, class_name: 'ProjectRepositoryStorageMove'
has_many :webide_pipelines, -> { webide_source }, class_name: 'Ci::Pipeline', inverse_of: :project
accepts_nested_attributes_for :variables, allow_destroy: true accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :project_feature, update_only: true accepts_nested_attributes_for :project_feature, update_only: true
accepts_nested_attributes_for :project_setting, update_only: true accepts_nested_attributes_for :project_setting, update_only: true
@ -733,6 +735,10 @@ class Project < ApplicationRecord
end end
end end
def active_webide_pipelines(user:)
webide_pipelines.running_or_pending.for_user(user)
end
def autoclose_referenced_issues def autoclose_referenced_issues
return true if super.nil? return true if super.nil?

View File

@ -84,7 +84,11 @@ class ProjectImportState < ApplicationRecord
update_column(:last_error, sanitized_message) update_column(:last_error, sanitized_message)
rescue ActiveRecord::ActiveRecordError => e rescue ActiveRecord::ActiveRecordError => e
Gitlab::AppLogger.error("Error setting import status to failed: #{e.message}. Original error: #{sanitized_message}") Gitlab::Import::Logger.error(
message: 'Error setting import status to failed',
error: e.message,
original_error: sanitized_message
)
ensure ensure
@errors = original_errors @errors = original_errors
end end

View File

@ -69,7 +69,6 @@ class User < ApplicationRecord
MINIMUM_INACTIVE_DAYS = 180 MINIMUM_INACTIVE_DAYS = 180
ignore_column :bot_type, remove_with: '13.1', remove_after: '2020-05-22'
ignore_column :ghost, remove_with: '13.2', remove_after: '2020-06-22' ignore_column :ghost, remove_with: '13.2', remove_after: '2020-06-22'
# Override Devise::Models::Trackable#update_tracked_fields! # Override Devise::Models::Trackable#update_tracked_fields!

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
class WebIdeTerminal
include ::Gitlab::Routing
attr_reader :build, :project
delegate :id, :status, to: :build
def initialize(build)
@build = build
@project = build.project
end
def show_path
web_ide_terminal_route_generator(:show)
end
def retry_path
web_ide_terminal_route_generator(:retry)
end
def cancel_path
web_ide_terminal_route_generator(:cancel)
end
def terminal_path
terminal_project_job_path(project, build, format: :ws)
end
def proxy_websocket_path
proxy_project_job_path(project, build, format: :ws)
end
def services
build.services.map(&:alias).compact + Array(build.image&.alias)
end
private
def web_ide_terminal_route_generator(action, options = {})
options.reverse_merge!(action: action,
controller: 'projects/web_ide_terminals',
namespace_id: project.namespace.to_param,
project_id: project.to_param,
id: build.id,
only_path: true)
url_for(options)
end
end

View File

@ -36,6 +36,10 @@ module Ci
@subject.has_terminal? @subject.has_terminal?
end end
condition(:is_web_ide_terminal, scope: :subject) do
@subject.pipeline.webide?
end
rule { protected_ref | archived }.policy do rule { protected_ref | archived }.policy do
prevent :update_build prevent :update_build
prevent :update_commit_status prevent :update_commit_status
@ -50,6 +54,24 @@ module Ci
end end
rule { can?(:update_build) & terminal }.enable :create_build_terminal rule { can?(:update_build) & terminal }.enable :create_build_terminal
rule { is_web_ide_terminal & can?(:create_web_ide_terminal) & (admin | owner_of_job) }.policy do
enable :read_web_ide_terminal
enable :update_web_ide_terminal
end
rule { is_web_ide_terminal & ~can?(:update_web_ide_terminal) }.policy do
prevent :create_build_terminal
end
rule { can?(:update_web_ide_terminal) & terminal }.policy do
enable :create_build_terminal
enable :create_build_service_proxy
end
rule { ~can?(:build_service_proxy_enabled) }.policy do
prevent :create_build_service_proxy
end
end end
end end

View File

@ -147,6 +147,10 @@ class ProjectPolicy < BasePolicy
@user && @user.confirmed? @user && @user.confirmed?
end end
condition(:build_service_proxy_enabled) do
::Feature.enabled?(:build_service_proxy, @subject)
end
features = %w[ features = %w[
merge_requests merge_requests
issues issues
@ -559,6 +563,10 @@ class ProjectPolicy < BasePolicy
enable :read_project enable :read_project
end end
rule { can?(:create_pipeline) & can?(:maintainer_access) }.enable :create_web_ide_terminal
rule { build_service_proxy_enabled }.enable :build_service_proxy_enabled
private private
def team_member? def team_member?

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
class WebIdeTerminalEntity < Grape::Entity
expose :id
expose :status
expose :show_path
expose :cancel_path
expose :retry_path
expose :terminal_path
expose :services
expose :proxy_websocket_path, if: ->(_) { Feature.enabled?(:build_service_proxy) }
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
class WebIdeTerminalSerializer < BaseSerializer
entity WebIdeTerminalEntity
def represent(resource, opts = {})
resource = WebIdeTerminal.new(resource) if resource.is_a?(Ci::Build)
super
end
end

View File

@ -0,0 +1,123 @@
# frozen_string_literal: true
module Ci
class CreateWebIdeTerminalService < ::BaseService
include ::Gitlab::Utils::StrongMemoize
TerminalCreationError = Class.new(StandardError)
TERMINAL_NAME = 'terminal'.freeze
attr_reader :terminal
def execute
check_access!
validate_params!
load_terminal_config!
pipeline = create_pipeline!
success(pipeline: pipeline)
rescue TerminalCreationError => e
error(e.message)
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e.message}")
end
private
def create_pipeline!
build_pipeline.tap do |pipeline|
pipeline.stages << terminal_stage_seed(pipeline).to_resource
pipeline.save!
Ci::ProcessPipelineService
.new(pipeline)
.execute(nil, initial_process: true)
pipeline_created_counter.increment(source: :webide)
end
end
def build_pipeline
Ci::Pipeline.new(
project: project,
user: current_user,
source: :webide,
config_source: :webide_source,
ref: ref,
sha: sha,
tag: false,
before_sha: Gitlab::Git::BLANK_SHA
)
end
def terminal_stage_seed(pipeline)
attributes = {
name: TERMINAL_NAME,
index: 0,
builds: [terminal_build_seed]
}
Gitlab::Ci::Pipeline::Seed::Stage.new(pipeline, attributes, [])
end
def terminal_build_seed
terminal.merge(
name: TERMINAL_NAME,
stage: TERMINAL_NAME,
user: current_user,
scheduling_type: :stage)
end
def load_terminal_config!
result = ::Ci::WebIdeConfigService.new(project, current_user, sha: sha).execute
raise TerminalCreationError, result[:message] if result[:status] != :success
@terminal = result[:terminal]
raise TerminalCreationError, 'Terminal is not configured' unless terminal
end
def validate_params!
unless sha
raise TerminalCreationError, 'Ref does not exist'
end
unless branch_exists?
raise TerminalCreationError, 'Ref needs to be a branch'
end
end
def check_access!
unless can?(current_user, :create_web_ide_terminal, project)
raise TerminalCreationError, 'Insufficient permissions to create a terminal'
end
if terminal_active?
raise TerminalCreationError, 'There is already a terminal running'
end
end
def pipeline_created_counter
@pipeline_created_counter ||= Gitlab::Metrics
.counter(:pipelines_created_total, "Counter of pipelines created")
end
def terminal_active?
project.active_webide_pipelines(user: current_user).exists?
end
def ref
strong_memoize(:ref) do
Gitlab::Git.ref_name(params[:ref])
end
end
def branch_exists?
project.repository.branch_exists?(ref)
end
def sha
project.commit(params[:ref]).try(:id)
end
end
end

View File

@ -0,0 +1,59 @@
# frozen_string_literal: true
module Ci
class WebIdeConfigService < ::BaseService
include ::Gitlab::Utils::StrongMemoize
ValidationError = Class.new(StandardError)
WEBIDE_CONFIG_FILE = '.gitlab/.gitlab-webide.yml'.freeze
attr_reader :config, :config_content
def execute
check_access!
load_config_content!
load_config!
success(terminal: config.terminal_value)
rescue ValidationError => e
error(e.message)
end
private
def check_access!
unless can?(current_user, :download_code, project)
raise ValidationError, 'Insufficient permissions to read configuration'
end
end
def load_config_content!
@config_content = webide_yaml_from_repo
unless config_content
raise ValidationError, "Failed to load Web IDE config file '#{WEBIDE_CONFIG_FILE}' for #{params[:sha]}"
end
end
def load_config!
@config = Gitlab::WebIde::Config.new(config_content)
unless @config.valid?
raise ValidationError, @config.errors.first
end
rescue Gitlab::WebIde::Config::ConfigError => e
raise ValidationError, e.message
end
def webide_yaml_from_repo
gitlab_webide_yml_for(params[:sha])
rescue GRPC::NotFound, GRPC::Internal
nil
end
def gitlab_webide_yml_for(sha)
project.repository.blob_data_at(sha, WEBIDE_CONFIG_FILE)
end
end
end

View File

@ -18,13 +18,9 @@ module Clusters
raise ArgumentError, 'Artifact is not cluster_applications file type' unless artifact&.cluster_applications? raise ArgumentError, 'Artifact is not cluster_applications file type' unless artifact&.cluster_applications?
unless artifact.file.size < MAX_ACCEPTABLE_ARTIFACT_SIZE return error(too_big_error_message, :bad_request) unless artifact.file.size < MAX_ACCEPTABLE_ARTIFACT_SIZE
return error(too_big_error_message, :bad_request) return error(no_deployment_message, :bad_request) unless job.deployment
end return error(no_deployment_cluster_message, :bad_request) unless cluster
unless cluster
return error(s_('ClusterIntegration|No deployment cluster found for this job'))
end
parse!(artifact) parse!(artifact)
@ -61,7 +57,8 @@ module Clusters
Clusters::Cluster.transaction do Clusters::Cluster.transaction do
RELEASE_NAMES.each do |release_name| RELEASE_NAMES.each do |release_name|
application = find_or_build_application(release_name) application_class = Clusters::Cluster::APPLICATIONS[release_name]
application = cluster.find_or_build_application(application_class)
release = release_by_name[release_name] release = release_by_name[release_name]
@ -80,16 +77,18 @@ module Clusters
end end
end end
def find_or_build_application(application_name)
application_class = Clusters::Cluster::APPLICATIONS[application_name]
cluster.find_or_build_application(application_class)
end
def too_big_error_message def too_big_error_message
human_size = ActiveSupport::NumberHelper.number_to_human_size(MAX_ACCEPTABLE_ARTIFACT_SIZE) human_size = ActiveSupport::NumberHelper.number_to_human_size(MAX_ACCEPTABLE_ARTIFACT_SIZE)
s_('ClusterIntegration|Cluster_applications artifact too big. Maximum allowable size: %{human_size}') % { human_size: human_size } s_('ClusterIntegration|Cluster_applications artifact too big. Maximum allowable size: %{human_size}') % { human_size: human_size }
end end
def no_deployment_message
s_('ClusterIntegration|No deployment found for this job')
end
def no_deployment_cluster_message
s_('ClusterIntegration|No deployment cluster found for this job')
end
end end
end end

View File

@ -4,10 +4,11 @@ module Groups
module ImportExport module ImportExport
class ExportService class ExportService
def initialize(group:, user:, params: {}) def initialize(group:, user:, params: {})
@group = group @group = group
@current_user = user @current_user = user
@params = params @params = params
@shared = @params[:shared] || Gitlab::ImportExport::Shared.new(@group) @shared = @params[:shared] || Gitlab::ImportExport::Shared.new(@group)
@logger = Gitlab::Export::Logger.build
end end
def async_execute def async_execute
@ -91,21 +92,21 @@ module Groups
end end
def notify_success def notify_success
@shared.logger.info( @logger.info(
group_id: @group.id, message: 'Group Export succeeded',
group_name: @group.name, group_id: @group.id,
message: 'Group Import/Export: Export succeeded' group_name: @group.name
) )
notification_service.group_was_exported(@group, @current_user) notification_service.group_was_exported(@group, @current_user)
end end
def notify_error def notify_error
@shared.logger.error( @logger.error(
group_id: @group.id, message: 'Group Export failed',
group_id: @group.id,
group_name: @group.name, group_name: @group.name,
error: @shared.errors.join(', '), errors: @shared.errors.join(', ')
message: 'Group Import/Export: Export failed'
) )
notification_service.group_was_not_exported(@group, @current_user, @shared.errors) notification_service.group_was_not_exported(@group, @current_user, @shared.errors)

View File

@ -9,6 +9,7 @@ module Groups
@group = group @group = group
@current_user = user @current_user = user
@shared = Gitlab::ImportExport::Shared.new(@group) @shared = Gitlab::ImportExport::Shared.new(@group)
@logger = Gitlab::Import::Logger.build
end end
def async_execute def async_execute
@ -81,7 +82,7 @@ module Groups
end end
def notify_success def notify_success
@shared.logger.info( @logger.info(
group_id: @group.id, group_id: @group.id,
group_name: @group.name, group_name: @group.name,
message: 'Group Import/Export: Import succeeded' message: 'Group Import/Export: Import succeeded'
@ -89,7 +90,7 @@ module Groups
end end
def notify_error def notify_error
@shared.logger.error( @logger.error(
group_id: @group.id, group_id: @group.id,
group_name: @group.name, group_name: @group.name,
message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details" message: "Group Import/Export: Errors occurred, see '#{Gitlab::ErrorTracking::Logger.file_name}' for details"

View File

@ -22,8 +22,12 @@ module Projects
# causing GC to run every time. # causing GC to run every time.
service.increment! service.increment!
rescue Projects::HousekeepingService::LeaseTaken => e rescue Projects::HousekeepingService::LeaseTaken => e
Gitlab::AppLogger.info( Gitlab::Import::Logger.info(
"Could not perform housekeeping for project #{@project.full_path} (#{@project.id}): #{e}") message: 'Project housekeeping failed',
project_full_path: @project.full_path,
project_id: @project.id,
error: e.message
)
end end
private private

View File

@ -9,6 +9,7 @@ module Projects
super super
@shared = project.import_export_shared @shared = project.import_export_shared
@logger = Gitlab::Export::Logger.build
end end
def execute(after_export_strategy = nil) def execute(after_export_strategy = nil)
@ -115,11 +116,20 @@ module Projects
end end
def notify_success def notify_success
Gitlab::AppLogger.info("Import/Export - Project #{project.name} with ID: #{project.id} successfully exported") @logger.info(
message: 'Project successfully exported',
project_name: project.name,
project_id: project.id
)
end end
def notify_error def notify_error
Gitlab::AppLogger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{shared.errors.join(', ')}") @logger.error(
message: 'Project export error',
export_errors: shared.errors.join(', '),
project_name: project.name,
project_id: project.id
)
notification_service.project_not_exported(project, current_user, shared.errors) notification_service.project_not_exported(project, current_user, shared.errors)
end end

View File

@ -43,7 +43,12 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
def start_import def start_import
return true if start(project.import_state) return true if start(project.import_state)
Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while importing.") # rubocop:disable Gitlab/RailsLogger Gitlab::Import::Logger.info(
message: 'Project was in inconsistent state while importing',
project_full_path: project.full_path,
project_import_status: project.import_status
)
false false
end end

View File

@ -45,7 +45,11 @@ class StuckImportJobsWorker # rubocop:disable Scalability/IdempotentWorker
completed_import_states = enqueued_import_states_with_jid.where(id: completed_import_state_ids) completed_import_states = enqueued_import_states_with_jid.where(id: completed_import_state_ids)
completed_import_state_jids = completed_import_states.map { |import_state| import_state.jid }.join(', ') completed_import_state_jids = completed_import_states.map { |import_state| import_state.jid }.join(', ')
Rails.logger.info("Marked stuck import jobs as failed. JIDs: #{completed_import_state_jids}") # rubocop:disable Gitlab/RailsLogger
Gitlab::Import::Logger.info(
message: 'Marked stuck import jobs as failed',
job_ids: completed_import_state_jids
)
completed_import_states.each do |import_state| completed_import_states.each do |import_state|
import_state.mark_as_failed(error_message) import_state.mark_as_failed(error_message)

20
bin/web
View File

@ -5,16 +5,16 @@ set -e
cd $(dirname $0)/.. cd $(dirname $0)/..
case "$USE_WEB_SERVER" in case "$USE_WEB_SERVER" in
puma|"") # and the "" defines default puma|"") # and the "" defines default
exec bin/web_puma "$@" exec bin/web_puma "$@"
;; ;;
unicorn) unicorn)
exec bin/web_unicorn "$@" exec bin/web_unicorn "$@"
;; ;;
*) *)
echo "Unkown web server used by USE_WEB_SERVER: $USE_WEB_SERVER." echo "Unkown web server used by USE_WEB_SERVER: $USE_WEB_SERVER."
exit 1 exit 1
;; ;;
esac esac

View File

@ -0,0 +1,5 @@
---
title: Adds sorting by column to alert management list
merge_request: 32478
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Tidy
merge_request: 32759
author: Lee Tickett
type: other

View File

@ -0,0 +1,5 @@
---
title: Avoid refresh to show endedAt after mutation
merge_request: 32636
author:
type: fixed

View File

@ -49,9 +49,11 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
get :trace, defaults: { format: 'json' } get :trace, defaults: { format: 'json' }
get :raw get :raw
get :terminal get :terminal
get :proxy
# This route is also defined in gitlab-workhorse. Make sure to update accordingly. # These routes are also defined in gitlab-workhorse. Make sure to update accordingly.
get '/terminal.ws/authorize', to: 'jobs#terminal_websocket_authorize', format: false get '/terminal.ws/authorize', to: 'jobs#terminal_websocket_authorize', format: false
get '/proxy.ws/authorize', to: 'jobs#proxy_websocket_authorize', format: false
end end
resource :artifacts, only: [] do resource :artifacts, only: [] do
@ -472,6 +474,17 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
post :web_ide_pipelines_count post :web_ide_pipelines_count
end end
resources :web_ide_terminals, path: :ide_terminals, only: [:create, :show], constraints: { id: /\d+/, format: :json } do # rubocop: disable Cop/PutProjectRoutesUnderScope
member do
post :cancel
post :retry
end
collection do
post :check_config
end
end
# Deprecated unscoped routing. # Deprecated unscoped routing.
# Issue https://gitlab.com/gitlab-org/gitlab/issues/118849 # Issue https://gitlab.com/gitlab-org/gitlab/issues/118849
scope as: 'deprecated' do scope as: 'deprecated' do

View File

@ -597,6 +597,16 @@ installations from source.
It logs the progress of the import process. It logs the progress of the import process.
## `exporter.log`
> Introduced in GitLab 13.1.
This file lives in `/var/log/gitlab/gitlab-rails/exporter.log` for
Omnibus GitLab packages or in `/home/git/gitlab/log/exporter.log` for
installations from source.
It logs the progress of the export process.
## `auth.log` ## `auth.log`
> Introduced in GitLab 12.0. > Introduced in GitLab 12.0.

View File

@ -1943,6 +1943,7 @@ GET /projects/:id/hooks/:hook_id
"merge_requests_events": true, "merge_requests_events": true,
"tag_push_events": true, "tag_push_events": true,
"note_events": true, "note_events": true,
"confidential_note_events": true,
"job_events": true, "job_events": true,
"pipeline_events": true, "pipeline_events": true,
"wiki_page_events": true, "wiki_page_events": true,
@ -1970,6 +1971,7 @@ POST /projects/:id/hooks
| `merge_requests_events` | boolean | no | Trigger hook on merge requests events | | `merge_requests_events` | boolean | no | Trigger hook on merge requests events |
| `tag_push_events` | boolean | no | Trigger hook on tag push events | | `tag_push_events` | boolean | no | Trigger hook on tag push events |
| `note_events` | boolean | no | Trigger hook on note events | | `note_events` | boolean | no | Trigger hook on note events |
| `confidential_note_events` | boolean | no | Trigger hook on confidential note events |
| `job_events` | boolean | no | Trigger hook on job events | | `job_events` | boolean | no | Trigger hook on job events |
| `pipeline_events` | boolean | no | Trigger hook on pipeline events | | `pipeline_events` | boolean | no | Trigger hook on pipeline events |
| `wiki_page_events` | boolean | no | Trigger hook on wiki events | | `wiki_page_events` | boolean | no | Trigger hook on wiki events |
@ -1996,6 +1998,7 @@ PUT /projects/:id/hooks/:hook_id
| `merge_requests_events` | boolean | no | Trigger hook on merge requests events | | `merge_requests_events` | boolean | no | Trigger hook on merge requests events |
| `tag_push_events` | boolean | no | Trigger hook on tag push events | | `tag_push_events` | boolean | no | Trigger hook on tag push events |
| `note_events` | boolean | no | Trigger hook on note events | | `note_events` | boolean | no | Trigger hook on note events |
| `confidential_note_events` | boolean | no | Trigger hook on confidential note events |
| `job_events` | boolean | no | Trigger hook on job events | | `job_events` | boolean | no | Trigger hook on job events |
| `pipeline_events` | boolean | no | Trigger hook on pipeline events | | `pipeline_events` | boolean | no | Trigger hook on pipeline events |
| `wiki_events` | boolean | no | Trigger hook on wiki events | | `wiki_events` | boolean | no | Trigger hook on wiki events |

View File

@ -65,18 +65,18 @@ subgraph "gitlab-qa-mirror pipeline"
end end
``` ```
1. Developer triggers a manual action, that can be found in CE / EE merge 1. Developer triggers a manual action, that can be found in GitLab merge
requests. This starts a chain of pipelines in multiple projects. requests. This starts a chain of pipelines in multiple projects.
1. The script being executed triggers a pipeline in 1. The script being executed triggers a pipeline in
[Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/omnibus-gitlab-mirror) [Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror)
and waits for the resulting status. We call this a _status attribution_. and waits for the resulting status. We call this a _status attribution_.
1. GitLab packages are being built in the [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) 1. GitLab packages are being built in the [Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror)
pipeline. Packages are then pushed to its Container Registry. pipeline. Packages are then pushed to its Container Registry.
1. When packages are ready, and available in the registry, a final step in the 1. When packages are ready, and available in the registry, a final step in the
[Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) pipeline, triggers a new [Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror) pipeline, triggers a new
GitLab QA pipeline (those with access can view them at `https://gitlab.com/gitlab-org/gitlab-qa-mirror/pipelines`). It also waits for a resulting status. GitLab QA pipeline (those with access can view them at `https://gitlab.com/gitlab-org/gitlab-qa-mirror/pipelines`). It also waits for a resulting status.
1. GitLab QA pulls images from the registry, spins-up containers and runs tests 1. GitLab QA pulls images from the registry, spins-up containers and runs tests
@ -84,7 +84,7 @@ subgraph "gitlab-qa-mirror pipeline"
tool. tool.
1. The result of the GitLab QA pipeline is being 1. The result of the GitLab QA pipeline is being
propagated upstream, through Omnibus, back to the CE / EE merge request. propagated upstream, through Omnibus, back to the GitLab merge request.
Please note, we plan to [add more specific information](https://gitlab.com/gitlab-org/quality/team-tasks/issues/156) Please note, we plan to [add more specific information](https://gitlab.com/gitlab-org/quality/team-tasks/issues/156)
about the tests included in each job/scenario that runs in `gitlab-qa-mirror`. about the tests included in each job/scenario that runs in `gitlab-qa-mirror`.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 29 KiB

View File

@ -280,7 +280,10 @@ We need a security group for our database that will allow inbound traffic from t
1. From the EC2 dashboard, select **Security Groups** from the left menu bar. 1. From the EC2 dashboard, select **Security Groups** from the left menu bar.
1. Click **Create security group**. 1. Click **Create security group**.
1. Give it a name (we'll use `gitlab-rds-sec-group`), a description, and select the `gitlab-vpc` from the **VPC** dropdown. 1. Give it a name (we'll use `gitlab-rds-sec-group`), a description, and select the `gitlab-vpc` from the **VPC** dropdown.
1. In the **Inbound rules** section, click **Add rule** and add a **PostgreSQL** rule, and set the "Custom" source as the `gitlab-loadbalancer-sec-group` we created earlier. The default PostgreSQL port is `5432`, which we'll also use when creating our database below. 1. In the **Inbound rules** section, click **Add rule** and set the following:
1. **Type:** search for and select the **PostgreSQL** rule.
1. **Source type:** set as "Custom".
1. **Source:** select the `gitlab-loadbalancer-sec-group` we created earlier.
1. When done, click **Create security group**. 1. When done, click **Create security group**.
### RDS Subnet Group ### RDS Subnet Group
@ -288,11 +291,10 @@ We need a security group for our database that will allow inbound traffic from t
1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu. 1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu.
1. Click on **Create DB Subnet Group**. 1. Click on **Create DB Subnet Group**.
1. Under **Subnet group details**, enter a name (we'll use `gitlab-rds-group`), a description, and choose the `gitlab-vpc` from the VPC dropdown. 1. Under **Subnet group details**, enter a name (we'll use `gitlab-rds-group`), a description, and choose the `gitlab-vpc` from the VPC dropdown.
1. Under **Add subnets**, click **Add all the subnets related to this VPC** and remove the public ones, we only want the **private subnets**. In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as we defined them in the [subnets section](#subnets)). 1. From the **Availability Zones** dropdown, select the Availability Zones that include the subnets you've configured. In our case, we'll add `eu-west-2a` and `eu-west-2b`.
1. From the **Subnets** dropdown, select the two private subnets (`10.0.1.0/24` and `10.0.3.0/24`) as we defined them in the [subnets section](#subnets).
1. Click **Create** when ready. 1. Click **Create** when ready.
![RDS Subnet Group](img/rds_subnet_group.png)
### Create the database ### Create the database
DANGER: **Danger:** Avoid using burstable instances (t class instances) for the database as this could lead to performance issues due to CPU credits running out during sustained periods of high load. DANGER: **Danger:** Avoid using burstable instances (t class instances) for the database as this could lead to performance issues due to CPU credits running out during sustained periods of high load.
@ -301,7 +303,7 @@ Now, it's time to create the database:
1. Navigate to the RDS dashboard, select **Databases** from the left menu, and click **Create database**. 1. Navigate to the RDS dashboard, select **Databases** from the left menu, and click **Create database**.
1. Select **Standard Create** for the database creation method. 1. Select **Standard Create** for the database creation method.
1. Select **PostgreSQL** as the database engine and select **PostgreSQL 10.9-R1** from the version dropdown menu (check the [database requirements](../../install/requirements.md#postgresql-requirements) to see if there are any updates on this for your chosen version of GitLab). 1. Select **PostgreSQL** as the database engine and select the minimum PostgreSQL version as defined for your GitLab version in our [database requirements](../../install/requirements.md#postgresql-requirements).
1. Since this is a production server, let's choose **Production** from the **Templates** section. 1. Since this is a production server, let's choose **Production** from the **Templates** section.
1. Under **Settings**, set a DB instance identifier, a master username, and a master password. We'll use `gitlab-db-ha`, `gitlab`, and a very secure password respectively. Make a note of these as we'll need them later. 1. Under **Settings**, set a DB instance identifier, a master username, and a master password. We'll use `gitlab-db-ha`, `gitlab`, and a very secure password respectively. Make a note of these as we'll need them later.
1. For the DB instance size, select **Standard classes** and select an instance size that meets your requirements from the dropdown menu. We'll use a `db.m4.large` instance. 1. For the DB instance size, select **Standard classes** and select an instance size that meets your requirements from the dropdown menu. We'll use a `db.m4.large` instance.
@ -329,7 +331,7 @@ Now that the database is created, let's move on to setting up Redis with ElastiC
## Redis with ElastiCache ## Redis with ElastiCache
ElastiCache is an in-memory hosted caching solution. Redis maintains its own ElastiCache is an in-memory hosted caching solution. Redis maintains its own
persistence and is used for certain types of the GitLab application. persistence and is used to store session data, temporary cache information, and background job queues for the GitLab application.
### Create a Redis Security Group ### Create a Redis Security Group

View File

@ -1,6 +1,7 @@
# Service Desk **(STARTER)** # Service Desk **(STARTER)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/149) in [GitLab Premium 9.1](https://about.gitlab.com/releases/2017/04/22/gitlab-9-1-released/#service-desk-eep). > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/149) in [GitLab Premium](https://about.gitlab.com/pricing/) 9.1.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/214839) to [GitLab Starter](https://about.gitlab.com/pricing/) in 13.0.
## Overview ## Overview
@ -28,14 +29,19 @@ with GitLab CI/CD.
Here's how Service Desk will work for you: Here's how Service Desk will work for you:
1. You'll provide a project-specific email address to your paying customers, who can email you directly from within the app 1. You provide a project-specific email address to your paying customers, who can email you directly
1. Each email they send creates an issue in the appropriate project from within the app.
1. Your team members navigate to the Service Desk issue tracker, where they can see new support requests and respond inside associated issues 1. Each email they send creates an issue in the appropriate project.
1. Your team communicates back and forth with the customer to understand the request 1. Your team members navigate to the Service Desk issue tracker, where they can see new support
1. Your team starts working on implementing code to solve your customer's problem requests and respond inside associated issues.
1. When your team finishes the implementation, whereupon the merge request is merged and the issue is closed automatically 1. Your team communicates back and forth with the customer to understand the request.
1. The customer will have been attended successfully via email, without having real access to your GitLab instance 1. Your team starts working on implementing code to solve your customer's problem.
1. Your team saved time by not having to leave GitLab (or setup any integrations) to follow up with your customer 1. When your team finishes the implementation, whereupon the merge request is merged and the issue
is closed automatically.
1. The customer will have been attended successfully via email, without having real access to your
GitLab instance.
1. Your team saved time by not having to leave GitLab (or setup any integrations) to follow up with
your customer.
## How it works ## How it works

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
module Gitlab
module Export
class Logger < ::Gitlab::JsonLogger
def self.file_name_noext
'exporter'
end
end
end
end

View File

@ -29,7 +29,10 @@ module Gitlab
yield object yield object
end end
rescue StandardError => e rescue StandardError => e
Rails.logger.error("The Lfs import process failed. #{e.message}") # rubocop:disable Gitlab/RailsLogger Gitlab::Import::Logger.error(
message: 'The Lfs import process failed',
error: e.message
)
end end
end end
end end

View File

@ -40,8 +40,10 @@ module Gitlab
pname = project.path_with_namespace pname = project.path_with_namespace
Rails.logger # rubocop:disable Gitlab/RailsLogger Gitlab::Import::Logger.info(
.info("GitHub importer finished updating repository for #{pname}") message: 'GitHub importer finished updating repository',
project_name: pname
)
repository_updates_counter.increment repository_updates_counter.increment
end end

View File

@ -3,6 +3,8 @@
module Gitlab module Gitlab
module ImportExport module ImportExport
class AttributesFinder class AttributesFinder
attr_reader :tree, :included_attributes, :excluded_attributes, :methods, :preloads
def initialize(config:) def initialize(config:)
@tree = config[:tree] || {} @tree = config[:tree] || {}
@included_attributes = config[:included_attributes] || {} @included_attributes = config[:included_attributes] || {}

View File

@ -0,0 +1,105 @@
# frozen_string_literal: true
# AttributesPermitter builds a hash of permitted attributes for
# every model defined in import_export.yml that is used to validate and
# filter out any attributes that are not permitted when doing Project/Group Import
#
# Each model's list includes:
# - attributes defined under included_attributes section
# - associations defined under project/group tree
# - methods defined under methods section
#
# Given the following import_export.yml example:
# ```
# tree:
# project:
# - labels:
# - :priorities
# included_attributes:
# labels:
# - :title
# - :description
# methods:
# labels:
# - :type
# ```
#
# Produces a list of permitted attributes:
# ```
# Gitlab::ImportExport::AttributesPermitter.new.permitted_attributes
#
# => { labels: [:priorities, :title, :description, :type] }
# ```
#
# Filters out any other attributes from specific relation hash:
# ```
# Gitlab::ImportExport::AttributesPermitter.new.permit(:labels, {id: 5, type: 'opened', description: 'test', sensitive_attribute: 'my_sensitive_attribute'})
#
# => {:type=>"opened", :description=>"test"}
# ```
module Gitlab
module ImportExport
class AttributesPermitter
attr_reader :permitted_attributes
def initialize(config: ImportExport::Config.new.to_h)
@config = config
@attributes_finder = Gitlab::ImportExport::AttributesFinder.new(config: @config)
@permitted_attributes = {}
build_permitted_attributes
end
def permit(relation_name, relation_hash)
permitted_attributes = permitted_attributes_for(relation_name)
relation_hash.select do |key, _|
permitted_attributes.include?(key)
end
end
def permitted_attributes_for(relation_name)
@permitted_attributes[relation_name] || []
end
private
def build_permitted_attributes
build_associations
build_attributes
build_methods
end
# Deep traverse relations tree to build a list of allowed model relations
def build_associations
stack = @attributes_finder.tree.to_a
while stack.any?
model_name, relations = stack.pop
if relations.is_a?(Hash)
add_permitted_attributes(model_name, relations.keys)
stack.concat(relations.to_a)
end
end
@permitted_attributes
end
def build_attributes
@attributes_finder.included_attributes.each(&method(:add_permitted_attributes))
end
def build_methods
@attributes_finder.methods.each(&method(:add_permitted_attributes))
end
def add_permitted_attributes(model_name, attributes)
@permitted_attributes[model_name] ||= []
@permitted_attributes[model_name].concat(attributes) if attributes.any?
end
end
end
end

View File

@ -41,7 +41,13 @@ module Gitlab
def create_source_branch def create_source_branch
@project.repository.create_branch(@merge_request.source_branch, @diff_head_sha) @project.repository.create_branch(@merge_request.source_branch, @diff_head_sha)
rescue => err rescue => err
Rails.logger.warn("Import/Export warning: Failed to create source branch #{@merge_request.source_branch} => #{@diff_head_sha} for MR #{@merge_request.iid}: #{err}") # rubocop:disable Gitlab/RailsLogger Gitlab::Import::Logger.warn(
message: 'Import warning: Failed to create source branch',
source_branch: @merge_request.source_branch,
diff_head_sha: @diff_head_sha,
merge_request_iid: @merge_request.iid,
error: err.message
)
end end
def create_target_branch def create_target_branch

View File

@ -11,14 +11,18 @@ module Gitlab
def initialize(exportable:, shared:) def initialize(exportable:, shared:)
@exportable = exportable @exportable = exportable
@shared = shared @shared = shared
end end
def save def save
if compress_and_save if compress_and_save
remove_export_path remove_export_path
Rails.logger.info("Saved #{@exportable.class} export #{archive_file}") # rubocop:disable Gitlab/RailsLogger Gitlab::Export::Logger.info(
message: 'Export archive saved',
exportable_class: @exportable.class.to_s,
archive_file: archive_file
)
save_upload save_upload
else else

View File

@ -36,7 +36,11 @@ module Gitlab
def different_version?(version) def different_version?(version)
Gem::Version.new(version) != Gem::Version.new(Gitlab::ImportExport.version) Gem::Version.new(version) != Gem::Version.new(Gitlab::ImportExport.version)
rescue => e rescue => e
Rails.logger.error("Import/Export error: #{e.message}") # rubocop:disable Gitlab/RailsLogger Gitlab::Import::Logger.error(
message: 'Import error',
error: e.message
)
raise Gitlab::ImportExport::Error.new('Incorrect VERSION format') raise Gitlab::ImportExport::Error.new('Incorrect VERSION format')
end end
end end

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true
module Gitlab
module WebIde
#
# Base GitLab WebIde Configuration facade
#
class Config
ConfigError = Class.new(StandardError)
def initialize(config, opts = {})
@config = build_config(config, opts)
@global = Entry::Global.new(@config,
with_image_ports: true)
@global.compose!
rescue Gitlab::Config::Loader::FormatError => e
raise Config::ConfigError, e.message
end
def valid?
@global.valid?
end
def errors
@global.errors
end
def to_hash
@config
end
def terminal_value
@global.terminal_value
end
private
def build_config(config, opts = {})
Gitlab::Config::Loader::Yaml.new(config).load!
end
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
module Gitlab
module WebIde
class Config
module Entry
##
# This class represents a global entry - root Entry for entire
# GitLab WebIde Configuration file.
#
class Global < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[terminal].freeze
validations do
validates :config, allowed_keys: ALLOWED_KEYS
end
entry :terminal, Entry::Terminal,
description: 'Configuration of the webide terminal.'
attributes :terminal
end
end
end
end
end

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
module Gitlab
module WebIde
class Config
module Entry
##
# Entry that represents a concrete CI/CD job.
#
class Terminal < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Attributable
# By default the build will finish in a few seconds, not giving the webide
# enough time to connect to the terminal. This default script provides
# those seconds blocking the build from finishing inmediately.
DEFAULT_SCRIPT = ['sleep 60'].freeze
ALLOWED_KEYS = %i[image services tags before_script script variables].freeze
validations do
validates :config, allowed_keys: ALLOWED_KEYS
validates :config, job_port_unique: { data: ->(record) { record.ports } }
with_options allow_nil: true do
validates :tags, array_of_strings: true
end
end
entry :before_script, ::Gitlab::Ci::Config::Entry::Script,
description: 'Global before script overridden in this job.'
entry :script, ::Gitlab::Ci::Config::Entry::Commands,
description: 'Commands that will be executed in this job.'
entry :image, ::Gitlab::Ci::Config::Entry::Image,
description: 'Image that will be used to execute this job.'
entry :services, ::Gitlab::Ci::Config::Entry::Services,
description: 'Services that will be used to execute this job.'
entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
description: 'Environment variables available for this job.'
attributes :tags
def value
to_hash.compact
end
private
def to_hash
{ tag_list: tags || [],
yaml_variables: yaml_variables,
options: {
image: image_value,
services: services_value,
before_script: before_script_value,
script: script_value || DEFAULT_SCRIPT
}.compact }
end
def yaml_variables
return unless variables_value
variables_value.map do |key, value|
{ key: key.to_s, value: value, public: true }
end
end
end
end
end
end
end

View File

@ -73,7 +73,7 @@ fi
# Switch to the gitlab path, exit on failure. # Switch to the gitlab path, exit on failure.
if ! cd "$app_root" ; then if ! cd "$app_root" ; then
echo "Failed to cd into $app_root, exiting!"; exit 1 echo "Failed to cd into $app_root, exiting!"; exit 1
fi fi
if [ -z "$SIDEKIQ_WORKERS" ]; then if [ -z "$SIDEKIQ_WORKERS" ]; then
@ -341,7 +341,7 @@ start_gitlab() {
echo "Gitaly is already running with pid $gapid, not restarting" echo "Gitaly is already running with pid $gapid, not restarting"
else else
$app_root/bin/daemon_with_pidfile $gitaly_pid_path \ $app_root/bin/daemon_with_pidfile $gitaly_pid_path \
$gitaly_dir/gitaly $gitaly_dir/config.toml >> $gitaly_log 2>&1 & $gitaly_dir/gitaly $gitaly_dir/config.toml >> $gitaly_log 2>&1 &
fi fi
fi fi
@ -413,39 +413,39 @@ print_status() {
return return
fi fi
if [ "$web_status" = "0" ]; then if [ "$web_status" = "0" ]; then
echo "The GitLab web server with pid $wpid is running." echo "The GitLab web server with pid $wpid is running."
else else
printf "The GitLab web server is \033[31mnot running\033[0m.\n" printf "The GitLab web server is \033[31mnot running\033[0m.\n"
fi fi
if [ "$sidekiq_status" = "0" ]; then if [ "$sidekiq_status" = "0" ]; then
echo "The GitLab Sidekiq job dispatcher with pid $spid is running." echo "The GitLab Sidekiq job dispatcher with pid $spid is running."
else else
printf "The GitLab Sidekiq job dispatcher is \033[31mnot running\033[0m.\n" printf "The GitLab Sidekiq job dispatcher is \033[31mnot running\033[0m.\n"
fi fi
if [ "$gitlab_workhorse_status" = "0" ]; then if [ "$gitlab_workhorse_status" = "0" ]; then
echo "The GitLab Workhorse with pid $hpid is running." echo "The GitLab Workhorse with pid $hpid is running."
else else
printf "The GitLab Workhorse is \033[31mnot running\033[0m.\n" printf "The GitLab Workhorse is \033[31mnot running\033[0m.\n"
fi fi
if [ "$mail_room_enabled" = true ]; then if [ "$mail_room_enabled" = true ]; then
if [ "$mail_room_status" = "0" ]; then if [ "$mail_room_status" = "0" ]; then
echo "The GitLab MailRoom email processor with pid $mpid is running." echo "The GitLab MailRoom email processor with pid $mpid is running."
else else
printf "The GitLab MailRoom email processor is \033[31mnot running\033[0m.\n" printf "The GitLab MailRoom email processor is \033[31mnot running\033[0m.\n"
fi fi
fi fi
if [ "$gitlab_pages_enabled" = true ]; then if [ "$gitlab_pages_enabled" = true ]; then
if [ "$gitlab_pages_status" = "0" ]; then if [ "$gitlab_pages_status" = "0" ]; then
echo "The GitLab Pages with pid $gppid is running." echo "The GitLab Pages with pid $gppid is running."
else else
printf "The GitLab Pages is \033[31mnot running\033[0m.\n" printf "The GitLab Pages is \033[31mnot running\033[0m.\n"
fi fi
fi fi
if [ "$gitaly_enabled" = true ]; then if [ "$gitaly_enabled" = true ]; then
if [ "$gitaly_status" = "0" ]; then if [ "$gitaly_status" = "0" ]; then
echo "Gitaly with pid $gapid is running." echo "Gitaly with pid $gapid is running."
else else
printf "Gitaly is \033[31mnot running\033[0m.\n" printf "Gitaly is \033[31mnot running\033[0m.\n"
fi fi
fi fi
if [ "$web_status" = "0" ] && [ "$sidekiq_status" = "0" ] && [ "$gitlab_workhorse_status" = "0" ] && { [ "$mail_room_enabled" != true ] || [ "$mail_room_status" = "0" ]; } && { [ "$gitlab_pages_enabled" != true ] || [ "$gitlab_pages_status" = "0" ]; } && { [ "$gitaly_enabled" != true ] || [ "$gitaly_status" = "0" ]; }; then if [ "$web_status" = "0" ] && [ "$sidekiq_status" = "0" ] && [ "$gitlab_workhorse_status" = "0" ] && { [ "$mail_room_enabled" != true ] || [ "$mail_room_status" = "0" ]; } && { [ "$gitlab_pages_enabled" != true ] || [ "$gitlab_pages_status" = "0" ]; } && { [ "$gitaly_enabled" != true ] || [ "$gitaly_status" = "0" ]; }; then
@ -490,25 +490,25 @@ restart_gitlab(){
case "$1" in case "$1" in
start) start)
start_gitlab start_gitlab
;; ;;
stop) stop)
stop_gitlab stop_gitlab
;; ;;
restart) restart)
restart_gitlab restart_gitlab
;; ;;
reload|force-reload) reload|force-reload)
reload_gitlab reload_gitlab
;; ;;
status) status)
print_status print_status
exit $gitlab_status exit $gitlab_status
;; ;;
*) *)
echo "Usage: service gitlab {start|stop|restart|reload|status}" echo "Usage: service gitlab {start|stop|restart|reload|status}"
exit 1 exit 1
;; ;;
esac esac
exit exit

View File

@ -4977,6 +4977,9 @@ msgstr ""
msgid "ClusterIntegration|No deployment cluster found for this job" msgid "ClusterIntegration|No deployment cluster found for this job"
msgstr "" msgstr ""
msgid "ClusterIntegration|No deployment found for this job"
msgstr ""
msgid "ClusterIntegration|No instance type found" msgid "ClusterIntegration|No instance type found"
msgstr "" msgstr ""

View File

@ -40,8 +40,8 @@
"@babel/plugin-syntax-import-meta": "^7.8.3", "@babel/plugin-syntax-import-meta": "^7.8.3",
"@babel/preset-env": "^7.8.4", "@babel/preset-env": "^7.8.4",
"@gitlab/at.js": "1.5.5", "@gitlab/at.js": "1.5.5",
"@gitlab/svgs": "1.128.0", "@gitlab/svgs": "1.130.0",
"@gitlab/ui": "14.14.2", "@gitlab/ui": "14.17.0",
"@gitlab/visual-review-tools": "1.6.1", "@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3", "@rails/actioncable": "^6.0.3",
"@sentry/browser": "^5.10.2", "@sentry/browser": "^5.10.2",

View File

@ -2,7 +2,7 @@
gemfile_lock_changed() { gemfile_lock_changed() {
if [ -n "$(git diff --name-only -- Gemfile.lock)" ]; then if [ -n "$(git diff --name-only -- Gemfile.lock)" ]; then
cat << EOF cat <<EOF
Gemfile was updated but Gemfile.lock was not updated. Gemfile was updated but Gemfile.lock was not updated.
Usually, when Gemfile is updated, you should run Usually, when Gemfile is updated, you should run

View File

@ -6,7 +6,7 @@ lint_paths="changelogs/unreleased"
invalid_files=$(find $lint_paths -type f -not -name "*.yml" -not -name ".gitkeep") invalid_files=$(find $lint_paths -type f -not -name "*.yml" -not -name ".gitkeep")
if [ -n "$invalid_files" ]; then if [ -n "$invalid_files" ]; then
echo "Changelog files must end in .yml, but these did not:" echo "Changelog files must end in .yml, but these did not:"
echo "$invalid_files" | sed -e "s/^/* /" echo "$invalid_files" | sed -e "s/^/* /"
exit 1 exit 1
fi fi

View File

@ -7,56 +7,56 @@ set -e
if [[ "$MERGE_TRAIN_TRIGGER_TOKEN" == '' ]] if [[ "$MERGE_TRAIN_TRIGGER_TOKEN" == '' ]]
then then
echo 'The variable MERGE_TRAIN_TRIGGER_TOKEN must be set to a non-empty value' echo 'The variable MERGE_TRAIN_TRIGGER_TOKEN must be set to a non-empty value'
exit 1 exit 1
fi fi
if [[ "$MERGE_TRAIN_TRIGGER_URL" == '' ]] if [[ "$MERGE_TRAIN_TRIGGER_URL" == '' ]]
then then
echo 'The variable MERGE_TRAIN_TRIGGER_URL must be set to a non-empty value' echo 'The variable MERGE_TRAIN_TRIGGER_URL must be set to a non-empty value'
exit 1 exit 1
fi fi
if [[ "$CI_COMMIT_REF_NAME" == '' ]] if [[ "$CI_COMMIT_REF_NAME" == '' ]]
then then
echo 'The variable CI_COMMIT_REF_NAME must be set to a non-empty value' echo 'The variable CI_COMMIT_REF_NAME must be set to a non-empty value'
exit 1 exit 1
fi fi
if [[ "$SOURCE_PROJECT" == '' ]] if [[ "$SOURCE_PROJECT" == '' ]]
then then
echo 'The variable SOURCE_PROJECT must be set to a non-empty value' echo 'The variable SOURCE_PROJECT must be set to a non-empty value'
exit 1 exit 1
fi fi
if [[ "$TARGET_PROJECT" == '' ]] if [[ "$TARGET_PROJECT" == '' ]]
then then
echo 'The variable TARGET_PROJECT must be set to a non-empty value' echo 'The variable TARGET_PROJECT must be set to a non-empty value'
exit 1 exit 1
fi fi
if [[ "$TARGET_PROJECT" != "gitlab-org/gitlab-foss" ]] if [[ "$TARGET_PROJECT" != "gitlab-org/gitlab-foss" ]]
then then
echo 'This is a security FOSS merge train' echo 'This is a security FOSS merge train'
echo "Checking if $CI_COMMIT_SHA is available on canonical" echo "Checking if $CI_COMMIT_SHA is available on canonical"
gitlab_com_commit_status=$(curl -s "https://gitlab.com/api/v4/projects/278964/repository/commits/$CI_COMMIT_SHA" | jq -M .status) gitlab_com_commit_status=$(curl -s "https://gitlab.com/api/v4/projects/278964/repository/commits/$CI_COMMIT_SHA" | jq -M .status)
if [[ "$gitlab_com_commit_status" != "null" ]] if [[ "$gitlab_com_commit_status" != "null" ]]
then then
echo 'Commit available on canonical, skipping merge train' echo 'Commit available on canonical, skipping merge train'
exit 0 exit 0
fi fi
echo 'Commit not available, triggering a merge train' echo 'Commit not available, triggering a merge train'
fi fi
curl -X POST \ curl -X POST \
-F token="$MERGE_TRAIN_TRIGGER_TOKEN" \ -F token="$MERGE_TRAIN_TRIGGER_TOKEN" \
-F ref=master \ -F ref=master \
-F "variables[MERGE_FOSS]=1" \ -F "variables[MERGE_FOSS]=1" \
-F "variables[SOURCE_BRANCH]=$CI_COMMIT_REF_NAME" \ -F "variables[SOURCE_BRANCH]=$CI_COMMIT_REF_NAME" \
-F "variables[TARGET_BRANCH]=${CI_COMMIT_REF_NAME/-ee/}" \ -F "variables[TARGET_BRANCH]=${CI_COMMIT_REF_NAME/-ee/}" \
-F "variables[SOURCE_PROJECT]=$SOURCE_PROJECT" \ -F "variables[SOURCE_PROJECT]=$SOURCE_PROJECT" \
-F "variables[TARGET_PROJECT]=$TARGET_PROJECT" \ -F "variables[TARGET_PROJECT]=$TARGET_PROJECT" \
"$MERGE_TRAIN_TRIGGER_URL" "$MERGE_TRAIN_TRIGGER_URL"

View File

@ -1,25 +1,25 @@
function retry() { function retry() {
if eval "$@"; then if eval "$@"; then
return 0 return 0
fi fi
for i in 2 1; do for i in 2 1; do
sleep 3s sleep 3s
echo "Retrying $i..." echo "Retrying $i..."
if eval "$@"; then if eval "$@"; then
return 0 return 0
fi fi
done done
return 1 return 1
} }
function setup_db_user_only() { function setup_db_user_only() {
source scripts/create_postgres_user.sh source scripts/create_postgres_user.sh
} }
function setup_db() { function setup_db() {
run_timed_command "setup_db_user_only" run_timed_command "setup_db_user_only"
run_timed_command "bundle exec rake db:drop db:create db:structure:load db:migrate gitlab:db:setup_ee" run_timed_command "bundle exec rake db:drop db:create db:structure:load db:migrate gitlab:db:setup_ee"
} }
function install_api_client_dependencies_with_apk() { function install_api_client_dependencies_with_apk() {

View File

@ -1225,4 +1225,198 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
get :terminal_websocket_authorize, params: params.merge(extra_params) get :terminal_websocket_authorize, params: params.merge(extra_params)
end end
end end
describe 'GET #proxy_websocket_authorize' do
let_it_be(:owner) { create(:owner) }
let_it_be(:admin) { create(:admin) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository, namespace: owner.namespace) }
let(:user) { maintainer }
let(:pipeline) { create(:ci_pipeline, project: project, source: :webide, config_source: :webide_source, user: user) }
let(:job) { create(:ci_build, :running, :with_runner_session, pipeline: pipeline, user: user) }
let(:extra_params) { { id: job.id } }
let(:path) { :proxy_websocket_authorize }
let(:render_method) { :channel_websocket }
let(:expected_data) do
{
'Channel' => {
'Subprotocols' => ["terminal.gitlab.com"],
'Url' => 'wss://localhost/proxy/build/default_port/',
'Header' => {
'Authorization' => [nil]
},
'MaxSessionTime' => nil,
'CAPem' => nil
}
}.to_json
end
before do
stub_feature_flags(build_service_proxy: true)
allow(job).to receive(:has_terminal?).and_return(true)
project.add_maintainer(maintainer)
project.add_developer(developer)
project.add_reporter(reporter)
project.add_guest(guest)
sign_in(user)
end
context 'access rights' do
before do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_return(nil)
make_request
end
context 'with admin' do
let(:user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when admin mode is disabled' do
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'with owner' do
let(:user) { owner }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'with maintainer' do
let(:user) { maintainer }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'with developer' do
let(:user) { developer }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with reporter' do
let(:user) { reporter }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with guest' do
let(:user) { guest }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with non member' do
let(:user) { create(:user) }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when pipeline is not from a webide source' do
context 'with admin' do
let(:user) { admin }
let(:pipeline) { create(:ci_pipeline, project: project, source: :chat, user: user) }
before do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_return(nil)
make_request
end
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'when workhorse signature is valid' do
before do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_return(nil)
end
context 'and the id is valid' do
it 'returns the proxy data for the service running in the job' do
make_request
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
expect(response.body).to eq(expected_data)
end
end
context 'and the id is invalid' do
let(:extra_params) { { id: non_existing_record_id } }
it 'returns 404' do
make_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'with invalid workhorse signature' do
it 'aborts with an exception' do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_raise(JWT::DecodeError)
expect { make_request }.to raise_error(JWT::DecodeError)
end
end
context 'when feature flag :build_service_proxy is disabled' do
let(:user) { admin }
it 'returns 404' do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_return(nil)
stub_feature_flags(build_service_proxy: false)
make_request
expect(response).to have_gitlab_http_status(:not_found)
end
end
it 'converts the url scheme into wss' do
allow(Gitlab::Workhorse).to receive(:verify_api_request!).and_return(nil)
expect(job.runner_session_url).to start_with('https://')
expect(Gitlab::Workhorse).to receive(:channel_websocket).with(a_hash_including(url: "wss://localhost/proxy/build/default_port/"))
make_request
end
def make_request
params = {
namespace_id: project.namespace.to_param,
project_id: project
}
get path, params: params.merge(extra_params)
end
end
end end

View File

@ -718,7 +718,7 @@ describe Projects::PipelinesController do
end end
shared_examples 'creates a pipeline' do shared_examples 'creates a pipeline' do
it do specify do
expect { post_request }.to change { project.ci_pipelines.count }.by(1) expect { post_request }.to change { project.ci_pipelines.count }.by(1)
pipeline = project.ci_pipelines.last pipeline = project.ci_pipelines.last

View File

@ -0,0 +1,304 @@
# frozen_string_literal: true
require 'spec_helper'
describe Projects::WebIdeTerminalsController do
let_it_be(:owner) { create(:owner) }
let_it_be(:admin) { create(:admin) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository, namespace: owner.namespace) }
let(:pipeline) { create(:ci_pipeline, project: project, source: :webide, config_source: :webide_source, user: user) }
let(:job) { create(:ci_build, pipeline: pipeline, user: user, project: project) }
let(:user) { maintainer }
before do
project.add_maintainer(maintainer)
project.add_developer(developer)
project.add_reporter(reporter)
project.add_guest(guest)
sign_in(user)
end
shared_examples 'terminal access rights' do
context 'with admin' do
let(:user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when admin mode is disabled' do
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
context 'with owner' do
let(:user) { owner }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'with maintainer' do
let(:user) { maintainer }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'with developer' do
let(:user) { developer }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with reporter' do
let(:user) { reporter }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with guest' do
let(:user) { guest }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with non member' do
let(:user) { create(:user) }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
shared_examples 'when pipeline is not from a webide source' do
context 'with admin' do
let(:user) { admin }
let(:pipeline) { create(:ci_pipeline, project: project, source: :chat, user: user) }
it 'returns 404' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
describe 'GET show' do
before do
get(:show, params: { namespace_id: project.namespace.to_param, project_id: project, id: job.id })
end
it_behaves_like 'terminal access rights'
it_behaves_like 'when pipeline is not from a webide source'
end
describe 'POST check_config' do
let(:result) { { status: :success } }
before do
allow_next_instance_of(::Ci::WebIdeConfigService) do |instance|
allow(instance).to receive(:execute).and_return(result)
end
post :check_config, params: {
namespace_id: project.namespace.to_param,
project_id: project.to_param,
branch: 'master'
}
end
it_behaves_like 'terminal access rights'
context 'when invalid config file' do
let(:user) { admin }
let(:result) { { status: :error } }
it 'returns 422', :enable_admin_mode do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
describe 'POST create' do
let(:branch) { 'master' }
subject do
post :create, params: {
namespace_id: project.namespace.to_param,
project_id: project.to_param,
branch: branch
}
end
context 'when terminal job is created successfully' do
let(:build) { create(:ci_build, project: project) }
let(:pipeline) { build.pipeline }
before do
allow_next_instance_of(::Ci::CreateWebIdeTerminalService) do |instance|
allow(instance).to receive(:execute).and_return(status: :success, pipeline: pipeline)
end
end
context 'access rights' do
before do
subject
end
it_behaves_like 'terminal access rights'
end
it 'increases the web ide terminal counter' do
expect(Gitlab::UsageDataCounters::WebIdeCounter).to receive(:increment_terminals_count)
subject
end
end
shared_examples 'web ide terminal usage counter' do
it 'does not increase', :enable_admin_mode do
expect(Gitlab::UsageDataCounters::WebIdeCounter).not_to receive(:increment_terminals_count)
subject
end
end
context 'when branch does not exist' do
let(:user) { admin }
let(:branch) { 'foobar' }
it 'returns 400', :enable_admin_mode do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'web ide terminal usage counter'
end
context 'when there is an error creating the job' do
let(:user) { admin }
before do
allow_next_instance_of(::Ci::CreateWebIdeTerminalService) do |instance|
allow(instance).to receive(:execute).and_return(status: :error, message: 'foobar')
end
end
it 'returns 400', :enable_admin_mode do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'web ide terminal usage counter'
end
context 'when the current build is nil' do
let(:user) { admin }
before do
allow(pipeline).to receive(:builds).and_return([])
allow_next_instance_of(::Ci::CreateWebIdeTerminalService) do |instance|
allow(instance).to receive(:execute).and_return(status: :success, pipeline: pipeline)
end
end
it 'returns 400', :enable_admin_mode do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'web ide terminal usage counter'
end
end
describe 'POST cancel' do
let(:job) { create(:ci_build, :running, pipeline: pipeline, user: user, project: project) }
before do
post(:cancel, params: {
namespace_id: project.namespace.to_param,
project_id: project.to_param,
id: job.id
})
end
it_behaves_like 'terminal access rights'
it_behaves_like 'when pipeline is not from a webide source'
context 'when job is not cancelable' do
let!(:job) { create(:ci_build, :failed, pipeline: pipeline, user: user) }
it 'returns 422' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
describe 'POST retry' do
let(:status) { :failed }
let(:job) { create(:ci_build, status, pipeline: pipeline, user: user, project: project) }
before do
post(:retry, params: {
namespace_id: project.namespace.to_param,
project_id: project.to_param,
id: job.id
})
end
it_behaves_like 'terminal access rights'
it_behaves_like 'when pipeline is not from a webide source'
context 'when job is not retryable' do
let(:status) { :running }
it 'returns 422' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
context 'when job is cancelled' do
let(:status) { :canceled }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when job fails' do
let(:status) { :failed }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when job is successful' do
let(:status) { :success }
it 'returns 200' do
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end

View File

@ -155,6 +155,11 @@ FactoryBot.define do
source_sha { merge_request.source_branch_sha } source_sha { merge_request.source_branch_sha }
target_sha { merge_request.target_branch_sha } target_sha { merge_request.target_branch_sha }
end end
trait :webide do
source { :webide }
config_source { :webide_source }
end
end end
end end
end end

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
require 'spec_helper'
describe 'Group value stream analytics' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
RSpec::Matchers.define :have_pushed_frontend_feature_flags do |expected|
def to_js(key, value)
"\"#{key}\":#{value}"
end
match do |actual|
expected.all? do |feature_flag_name, enabled|
page.html.include?(to_js(feature_flag_name, enabled))
end
end
failure_message do |actual|
missing = expected.select do |feature_flag_name, enabled|
!page.html.include?(to_js(feature_flag_name, enabled))
end
formatted_missing_flags = missing.map { |feature_flag_name, enabled| to_js(feature_flag_name, enabled) }.join("\n")
"The following feature flag(s) cannot be found in the frontend HTML source: #{formatted_missing_flags}"
end
end
before do
stub_licensed_features(cycle_analytics_for_groups: true)
group.add_owner(user)
sign_in(user)
end
it 'pushes frontend feature flags' do
visit group_analytics_cycle_analytics_path(group)
expect(page).to have_pushed_frontend_feature_flags(
cycleAnalyticsScatterplotEnabled: true,
cycleAnalyticsScatterplotMedianEnabled: true,
valueStreamAnalyticsPathNavigation: true
)
end
context 'when `value_stream_analytics_path_navigation` is disabled for a group' do
before do
stub_feature_flags(value_stream_analytics_path_navigation: false, thing: group)
end
it 'pushes disabled feature flag to the frontend' do
visit group_analytics_cycle_analytics_path(group)
expect(page).to have_pushed_frontend_feature_flags(valueStreamAnalyticsPathNavigation: false)
end
end
end

View File

@ -38,6 +38,7 @@ describe('AlertManagementList', () => {
const findDateFields = () => wrapper.findAll(TimeAgo); const findDateFields = () => wrapper.findAll(TimeAgo);
const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem); const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]'); const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]');
const findSeverityColumnHeader = () => wrapper.findAll('th').at(0);
const alertsCount = { const alertsCount = {
acknowledged: 6, acknowledged: 6,
@ -80,7 +81,10 @@ describe('AlertManagementList', () => {
}); });
} }
const mockStartedAtCol = {};
beforeEach(() => { beforeEach(() => {
jest.spyOn(document, 'querySelector').mockReturnValue(mockStartedAtCol);
mountComponent(); mountComponent();
}); });
@ -284,6 +288,34 @@ describe('AlertManagementList', () => {
}); });
}); });
describe('sorting the alert list by column', () => {
beforeEach(() => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: { alerts: mockAlerts, errored: false, sort: 'START_TIME_ASC', alertsCount },
loading: false,
});
});
it('updates sort with new direction and column key', () => {
findSeverityColumnHeader().trigger('click');
expect(wrapper.vm.$data.sort).toEqual('SEVERITY_ASC');
findSeverityColumnHeader().trigger('click');
expect(wrapper.vm.$data.sort).toEqual('SEVERITY_DESC');
});
it('updates the `ariaSort` attribute so the sort icon appears in the proper column', () => {
expect(mockStartedAtCol.ariaSort).toEqual('ascending');
findSeverityColumnHeader().trigger('click');
expect(mockStartedAtCol.ariaSort).toEqual('none');
});
});
describe('updating the alert status', () => { describe('updating the alert status', () => {
const iid = '1527542'; const iid = '1527542';
const mockUpdatedMutationResult = { const mockUpdatedMutationResult = {

View File

@ -154,9 +154,11 @@ describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.to receive(:fetch_remote) .to receive(:fetch_remote)
.with('github', forced: false) .with('github', forced: false)
expect(Rails.logger) expect_next_instance_of(Gitlab::Import::Logger) do |logger|
.to receive(:info) expect(logger)
.with(an_instance_of(String)) .to receive(:info)
.with(an_instance_of(Hash))
end
expect(importer.repository_updates_counter) expect(importer.repository_updates_counter)
.to receive(:increment) .to receive(:increment)

View File

@ -0,0 +1,77 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::AttributesPermitter do
let(:yml_config) do
<<-EOF
tree:
project:
- labels:
- :priorities
- milestones:
- events:
- :push_event_payload
included_attributes:
labels:
- :title
- :description
methods:
labels:
- :type
EOF
end
let(:file) { Tempfile.new(%w(import_export .yml)) }
let(:config_hash) { Gitlab::ImportExport::Config.new(config: file.path).to_h }
before do
file.write(yml_config)
file.rewind
end
after do
file.close
file.unlink
end
subject { described_class.new(config: config_hash) }
describe '#permitted_attributes' do
it 'builds permitted attributes hash' do
expect(subject.permitted_attributes).to match(
a_hash_including(
project: [:labels, :milestones],
labels: [:priorities, :title, :description, :type],
events: [:push_event_payload],
milestones: [:events],
priorities: [],
push_event_payload: []
)
)
end
end
describe '#permit' do
let(:unfiltered_hash) do
{
title: 'Title',
description: 'Description',
undesired_attribute: 'Undesired Attribute',
another_attribute: 'Another Attribute'
}
end
it 'only allows permitted attributes' do
expect(subject.permit(:labels, unfiltered_hash)).to eq(title: 'Title', description: 'Description')
end
end
describe '#permitted_attributes_for' do
it 'returns an array of permitted attributes for a relation' do
expect(subject.permitted_attributes_for(:labels)).to contain_exactly(:title, :description, :type, :priorities)
end
end
end

View File

@ -0,0 +1,164 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::WebIde::Config::Entry::Global do
let(:global) { described_class.new(hash) }
describe '.nodes' do
it 'returns a hash' do
expect(described_class.nodes).to be_a(Hash)
end
context 'when filtering all the entry/node names' do
it 'contains the expected node names' do
expect(described_class.nodes.keys)
.to match_array(%i[terminal])
end
end
end
context 'when configuration is valid' do
context 'when some entries defined' do
let(:hash) do
{ terminal: { before_script: ['ls'], variables: {}, script: 'sleep 10s', services: ['mysql'] } }
end
describe '#compose!' do
before do
global.compose!
end
it 'creates nodes hash' do
expect(global.descendants).to be_an Array
end
it 'creates node object for each entry' do
expect(global.descendants.count).to eq 1
end
it 'creates node object using valid class' do
expect(global.descendants.first)
.to be_an_instance_of Gitlab::WebIde::Config::Entry::Terminal
end
it 'sets correct description for nodes' do
expect(global.descendants.first.description)
.to eq 'Configuration of the webide terminal.'
end
describe '#leaf?' do
it 'is not leaf' do
expect(global).not_to be_leaf
end
end
end
context 'when not composed' do
describe '#terminal_value' do
it 'returns nil' do
expect(global.terminal_value).to be nil
end
end
describe '#leaf?' do
it 'is leaf' do
expect(global).to be_leaf
end
end
end
context 'when composed' do
before do
global.compose!
end
describe '#errors' do
it 'has no errors' do
expect(global.errors).to be_empty
end
end
describe '#terminal_value' do
it 'returns correct script' do
expect(global.terminal_value).to eq({
tag_list: [],
yaml_variables: [],
options: {
before_script: ['ls'],
script: ['sleep 10s'],
services: [{ name: "mysql" }]
}
})
end
end
end
end
end
context 'when configuration is not valid' do
before do
global.compose!
end
context 'when job does not have valid before script' do
let(:hash) do
{ terminal: { before_script: 100 } }
end
describe '#errors' do
it 'reports errors about missing script' do
expect(global.errors)
.to include "terminal:before_script config should be an array containing strings and arrays of strings"
end
end
end
end
context 'when value is not a hash' do
let(:hash) { [] }
describe '#valid?' do
it 'is not valid' do
expect(global).not_to be_valid
end
end
describe '#errors' do
it 'returns error about invalid type' do
expect(global.errors.first).to match /should be a hash/
end
end
end
describe '#specified?' do
it 'is concrete entry that is defined' do
expect(global.specified?).to be true
end
end
describe '#[]' do
before do
global.compose!
end
let(:hash) do
{ terminal: { before_script: ['ls'] } }
end
context 'when entry exists' do
it 'returns correct entry' do
expect(global[:terminal])
.to be_an_instance_of Gitlab::WebIde::Config::Entry::Terminal
expect(global[:terminal][:before_script].value).to eq ['ls']
end
end
context 'when entry does not exist' do
it 'always return unspecified node' do
expect(global[:some][:unknown][:node])
.not_to be_specified
end
end
end
end

View File

@ -0,0 +1,156 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::WebIde::Config::Entry::Terminal do
let(:entry) { described_class.new(config, with_image_ports: true) }
describe '.nodes' do
context 'when filtering all the entry/node names' do
subject { described_class.nodes.keys }
let(:result) do
%i[before_script script image services variables]
end
it { is_expected.to match_array result }
end
end
describe 'validations' do
before do
entry.compose!
end
context 'when entry config value is correct' do
let(:config) { { script: 'rspec' } }
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when the same port is not duplicated' do
let(:config) do
{
image: { name: "ruby", ports: [80] },
services: [{ name: "mysql", alias: "service1", ports: [81] }, { name: "mysql", alias: "service2", ports: [82] }]
}
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when unknown port keys detected' do
let(:config) do
{
image: { name: "ruby", ports: [80] },
services: [{ name: "mysql", alias: "service2", ports: [{ number: 81, invalid_key: 'foobar' }] }]
}
end
it 'is not valid' do
expect(entry).not_to be_valid
expect(entry.errors.first)
.to match /port config contains unknown keys: invalid_key/
end
end
end
context 'when entry value is not correct' do
context 'incorrect config value type' do
let(:config) { ['incorrect'] }
describe '#errors' do
it 'reports error about a config type' do
expect(entry.errors)
.to include 'terminal config should be a hash'
end
end
end
context 'when config is empty' do
let(:config) { {} }
describe '#valid' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when unknown keys detected' do
let(:config) { { unknown: true } }
describe '#valid' do
it 'is not valid' do
expect(entry).not_to be_valid
end
end
end
context 'when the same port is duplicated' do
let(:config) do
{
image: { name: "ruby", ports: [80] },
services: [{ name: "mysql", ports: [80] }, { name: "mysql", ports: [81] }]
}
end
describe '#valid?' do
it 'is invalid' do
expect(entry).not_to be_valid
expect(entry.errors.count).to eq 1
expect(entry.errors.first).to match "each port number can only be referenced once"
end
end
end
end
end
describe '#relevant?' do
it 'is a relevant entry' do
entry = described_class.new({ script: 'rspec' })
expect(entry).to be_relevant
end
end
context 'when composed' do
before do
entry.compose!
end
describe '#value' do
context 'when entry is correct' do
let(:config) do
{ before_script: %w[ls pwd],
script: 'sleep 100',
tags: ['webide'],
image: 'ruby:2.5',
services: ['mysql'],
variables: { KEY: 'value' } }
end
it 'returns correct value' do
expect(entry.value)
.to eq(
tag_list: ['webide'],
yaml_variables: [{ key: 'KEY', value: 'value', public: true }],
options: {
image: { name: "ruby:2.5" },
services: [{ name: "mysql" }],
before_script: %w[ls pwd],
script: ['sleep 100']
}
)
end
end
end
end
end

View File

@ -0,0 +1,78 @@
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::WebIde::Config do
let(:config) do
described_class.new(yml)
end
context 'when config is valid' do
let(:yml) do
<<-EOS
terminal:
image: ruby:2.7
before_script:
- gem install rspec
EOS
end
describe '#to_hash' do
it 'returns hash created from string' do
hash = {
terminal: {
image: 'ruby:2.7',
before_script: ['gem install rspec']
}
}
expect(config.to_hash).to eq hash
end
describe '#valid?' do
it 'is valid' do
expect(config).to be_valid
end
it 'has no errors' do
expect(config.errors).to be_empty
end
end
end
end
context 'when config is invalid' do
context 'when yml is incorrect' do
let(:yml) { '// invalid' }
describe '.new' do
it 'raises error' do
expect { config }.to raise_error(
described_class::ConfigError,
/Invalid configuration format/
)
end
end
end
context 'when config logic is incorrect' do
let(:yml) { 'terminal: { before_script: "ls" }' }
describe '#valid?' do
it 'is not valid' do
expect(config).not_to be_valid
end
it 'has errors' do
expect(config.errors).not_to be_empty
end
end
describe '#errors' do
it 'returns an array of strings' do
expect(config.errors).to all(be_an_instance_of(String))
end
end
end
end
end

View File

@ -63,4 +63,64 @@ describe Ci::BuildRunnerSession, model: true do
end end
end end
end end
describe '#service_specification' do
let(:service) { 'foo'}
let(:port) { 80 }
let(:path) { 'path' }
let(:subprotocols) { nil }
let(:specification) { subject.service_specification(service: service, port: port, path: path, subprotocols: subprotocols) }
it 'returns service proxy url' do
expect(specification[:url]).to eq "https://localhost/proxy/#{service}/#{port}/#{path}"
end
it 'returns default service proxy websocket subprotocol' do
expect(specification[:subprotocols]).to eq %w[terminal.gitlab.com]
end
it 'returns empty hash if no url' do
subject.url = ''
expect(specification).to be_empty
end
context 'when port is not present' do
let(:port) { nil }
it 'uses the default port name' do
expect(specification[:url]).to eq "https://localhost/proxy/#{service}/default_port/#{path}"
end
end
context 'when the service is not present' do
let(:service) { '' }
it 'uses the service name "build" as default' do
expect(specification[:url]).to eq "https://localhost/proxy/build/#{port}/#{path}"
end
end
context 'when url is present' do
it 'returns ca_pem nil if empty certificate' do
subject.certificate = ''
expect(specification[:ca_pem]).to be_nil
end
it 'adds Authorization header if authorization is present' do
subject.authorization = 'foobar'
expect(specification[:headers]).to include(Authorization: ['foobar'])
end
end
context 'when subprotocol is present' do
let(:subprotocols) { 'foobar' }
it 'returns the new subprotocol' do
expect(specification[:subprotocols]).to eq [subprotocols]
end
end
end
end end

View File

@ -62,11 +62,16 @@ describe ProjectImportState, type: :model do
it 'logs error when update column fails' do it 'logs error when update column fails' do
allow(import_state).to receive(:update_column).and_raise(ActiveRecord::ActiveRecordError) allow(import_state).to receive(:update_column).and_raise(ActiveRecord::ActiveRecordError)
allow(Gitlab::AppLogger).to receive(:error)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:error).with(
error: 'ActiveRecord::ActiveRecordError',
message: 'Error setting import status to failed',
original_error: error_message
)
end
import_state.mark_as_failed(error_message) import_state.mark_as_failed(error_message)
expect(Gitlab::AppLogger).to have_received(:error)
end end
it 'updates last_error with error message' do it 'updates last_error with error message' do

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
require 'spec_helper'
describe WebIdeTerminal do
  let(:build) { create(:ci_build) }

  subject(:terminal) { described_class.new(build) }

  it 'returns the show_path of the build' do
    expect(terminal.show_path).to end_with("/ide_terminals/#{build.id}")
  end

  it 'returns the retry_path of the build' do
    expect(terminal.retry_path).to end_with("/ide_terminals/#{build.id}/retry")
  end

  it 'returns the cancel_path of the build' do
    expect(terminal.cancel_path).to end_with("/ide_terminals/#{build.id}/cancel")
  end

  it 'returns the terminal_path of the build' do
    expect(terminal.terminal_path).to end_with("/jobs/#{build.id}/terminal.ws")
  end

  it 'returns the proxy_websocket_path of the build' do
    expect(terminal.proxy_websocket_path).to end_with("/jobs/#{build.id}/proxy.ws")
  end

  describe 'services' do
    # Two services that both declare an explicit alias.
    let(:services_with_aliases) do
      {
        services: [
          { name: 'postgres', alias: 'postgres' },
          { name: 'docker:stable-dind', alias: 'docker' }
        ]
      }
    end

    before do
      # Drive #services purely from the stubbed build options.
      allow(build).to receive(:options).and_return(config)
    end

    context 'when image does not have an alias' do
      let(:config) do
        services_with_aliases.merge(image: 'ruby:2.7')
      end

      it 'returns services aliases' do
        expect(terminal.services).to eq(%w[postgres docker])
      end
    end

    context 'when both image and services have aliases' do
      let(:config) do
        services_with_aliases.merge(image: { name: 'ruby:2.7', alias: 'ruby' })
      end

      it 'returns all aliases' do
        expect(terminal.services).to eq(%w[postgres docker ruby])
      end
    end

    context 'when image and services does not have any alias' do
      let(:config) do
        { image: 'ruby:2.7', services: ['postgres'] }
      end

      it 'returns an empty array' do
        expect(terminal.services).to be_empty
      end
    end

    context 'when no image nor services' do
      let(:config) do
        { script: %w[echo] }
      end

      it 'returns an empty array' do
        expect(terminal.services).to be_empty
      end
    end
  end
end

View File

@ -249,4 +249,129 @@ describe Ci::BuildPolicy do
end end
end end
end end
# Permissions for managing a web IDE terminal attached to a CI build.
# Fix: the original declared `let_it_be(:maintainer)` and then immediately
# shadowed it with `let(:maintainer)`; the `let_it_be` record was never used
# but was still created once for the whole group. The dead declaration is
# removed here.
describe 'manage a web ide terminal' do
  let(:build_permissions) { %i[read_web_ide_terminal create_build_terminal update_web_ide_terminal create_build_service_proxy] }

  let(:owner) { create(:owner) }
  let(:admin) { create(:admin) }
  let(:maintainer) { create(:user) }
  let(:developer) { create(:user) }
  let(:reporter) { create(:user) }
  let(:guest) { create(:user) }
  let(:project) { create(:project, :public, namespace: owner.namespace) }
  # Web IDE terminal builds come from pipelines with the `webide` source.
  let(:pipeline) { create(:ci_empty_pipeline, project: project, source: :webide) }
  let(:build) { create(:ci_build, pipeline: pipeline) }

  before do
    # Pretend the runner exposes a terminal unless a context opts out.
    allow(build).to receive(:has_terminal?).and_return(true)

    project.add_maintainer(maintainer)
    project.add_developer(developer)
    project.add_reporter(reporter)
    project.add_guest(guest)
  end

  subject { described_class.new(current_user, build) }

  context 'when create_web_ide_terminal access enabled' do
    context 'with admin' do
      let(:current_user) { admin }

      context 'when admin mode enabled', :enable_admin_mode do
        it { expect_allowed(*build_permissions) }
      end

      context 'when admin mode disabled' do
        it { expect_disallowed(*build_permissions) }
      end

      context 'when build is not from a webide pipeline' do
        let(:pipeline) { create(:ci_empty_pipeline, project: project, source: :chat) }

        it { expect_disallowed(:read_web_ide_terminal, :update_web_ide_terminal, :create_build_service_proxy) }
      end

      context 'when build has no runner terminal' do
        before do
          allow(build).to receive(:has_terminal?).and_return(false)
        end

        context 'when admin mode enabled', :enable_admin_mode do
          it { expect_allowed(:read_web_ide_terminal, :update_web_ide_terminal) }
          it { expect_disallowed(:create_build_terminal, :create_build_service_proxy) }
        end

        context 'when admin mode disabled' do
          it { expect_disallowed(:read_web_ide_terminal, :update_web_ide_terminal) }
          it { expect_disallowed(:create_build_terminal, :create_build_service_proxy) }
        end
      end

      context 'feature flag "build_service_proxy" is disabled' do
        before do
          stub_feature_flags(build_service_proxy: false)
        end

        it { expect_disallowed(:create_build_service_proxy) }
      end
    end

    # Roles that may manage a terminal, but only on jobs they own.
    shared_examples 'allowed build owner access' do
      it { expect_disallowed(*build_permissions) }

      context 'when user is the owner of the job' do
        let(:build) { create(:ci_build, pipeline: pipeline, user: current_user) }

        it { expect_allowed(*build_permissions) }
      end
    end

    # Roles that may never manage a terminal, even on their own jobs.
    shared_examples 'forbidden access' do
      it { expect_disallowed(*build_permissions) }

      context 'when user is the owner of the job' do
        let(:build) { create(:ci_build, pipeline: pipeline, user: current_user) }

        it { expect_disallowed(*build_permissions) }
      end
    end

    context 'with owner' do
      let(:current_user) { owner }

      it_behaves_like 'allowed build owner access'
    end

    context 'with maintainer' do
      let(:current_user) { maintainer }

      it_behaves_like 'allowed build owner access'
    end

    context 'with developer' do
      let(:current_user) { developer }

      it_behaves_like 'forbidden access'
    end

    context 'with reporter' do
      let(:current_user) { reporter }

      it_behaves_like 'forbidden access'
    end

    context 'with guest' do
      let(:current_user) { guest }

      it_behaves_like 'forbidden access'
    end

    context 'with non member' do
      let(:current_user) { create(:user) }

      it_behaves_like 'forbidden access'
    end
  end
end
end end

View File

@ -742,4 +742,62 @@ describe ProjectPolicy do
it { is_expected.to be_disallowed(:destroy_package) } it { is_expected.to be_disallowed(:destroy_package) }
end end
end end
# Authorization matrix for :create_web_ide_terminal on a project.
describe 'create_web_ide_terminal' do
  subject { described_class.new(current_user, project) }

  # Admins are only allowed while admin mode is active.
  context 'with admin' do
    let(:current_user) { admin }

    context 'when admin mode enabled', :enable_admin_mode do
      it { is_expected.to be_allowed(:create_web_ide_terminal) }
    end

    context 'when admin mode disabled' do
      it { is_expected.to be_disallowed(:create_web_ide_terminal) }
    end
  end

  # Owners and maintainers may create Web IDE terminals.
  %w[owner maintainer].each do |role|
    context "with #{role}" do
      let(:current_user) { public_send(role) }

      it { is_expected.to be_allowed(:create_web_ide_terminal) }
    end
  end

  # Everyone below maintainer is denied.
  %w[developer reporter guest].each do |role|
    context "with #{role}" do
      let(:current_user) { public_send(role) }

      it { is_expected.to be_disallowed(:create_web_ide_terminal) }
    end
  end

  context 'with non member' do
    let(:current_user) { create(:user) }

    it { is_expected.to be_disallowed(:create_web_ide_terminal) }
  end

  context 'with anonymous' do
    let(:current_user) { nil }

    it { is_expected.to be_disallowed(:create_web_ide_terminal) }
  end
end
end end

View File

@ -5,10 +5,6 @@ require 'spec_helper'
describe API::Issues do describe API::Issues do
let_it_be(:user) { create(:user) } let_it_be(:user) { create(:user) }
let_it_be(:owner) { create(:owner) } let_it_be(:owner) { create(:owner) }
let_it_be(:project, reload: true) do
create(:project, :public, creator_id: owner.id, namespace: owner.namespace)
end
let(:user2) { create(:user) } let(:user2) { create(:user) }
let(:non_member) { create(:user) } let(:non_member) { create(:user) }
let_it_be(:guest) { create(:user) } let_it_be(:guest) { create(:user) }
@ -17,6 +13,11 @@ describe API::Issues do
let(:admin) { create(:user, :admin) } let(:admin) { create(:user, :admin) }
let(:issue_title) { 'foo' } let(:issue_title) { 'foo' }
let(:issue_description) { 'closed' } let(:issue_description) { 'closed' }
let_it_be(:project, reload: true) do
create(:project, :public, creator_id: owner.id, namespace: owner.namespace)
end
let!(:closed_issue) do let!(:closed_issue) do
create :closed_issue, create :closed_issue,
author: user, author: user,
@ -28,6 +29,7 @@ describe API::Issues do
updated_at: 3.hours.ago, updated_at: 3.hours.ago,
closed_at: 1.hour.ago closed_at: 1.hour.ago
end end
let!(:confidential_issue) do let!(:confidential_issue) do
create :issue, create :issue,
:confidential, :confidential,
@ -37,6 +39,7 @@ describe API::Issues do
created_at: generate(:past_time), created_at: generate(:past_time),
updated_at: 2.hours.ago updated_at: 2.hours.ago
end end
let!(:issue) do let!(:issue) do
create :issue, create :issue,
author: user, author: user,
@ -48,18 +51,24 @@ describe API::Issues do
title: issue_title, title: issue_title,
description: issue_description description: issue_description
end end
let_it_be(:label) do let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project) create(:label, title: 'label', color: '#FFAABB', project: project)
end end
let!(:label_link) { create(:label_link, label: label, target: issue) } let!(:label_link) { create(:label_link, label: label, target: issue) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) } let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
let_it_be(:empty_milestone) do let_it_be(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project) create(:milestone, title: '2.0.0', project: project)
end end
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
let(:no_milestone_title) { 'None' } let(:no_milestone_title) { 'None' }
let(:any_milestone_title) { 'Any' } let(:any_milestone_title) { 'Any' }
let(:updated_title) { 'updated title' }
let(:issue_path) { "/projects/#{project.id}/issues/#{issue.iid}" }
let(:api_for_user) { api(issue_path, user) }
before_all do before_all do
project.add_reporter(user) project.add_reporter(user)
@ -72,108 +81,97 @@ describe API::Issues do
describe 'PUT /projects/:id/issues/:issue_iid to update only title' do describe 'PUT /projects/:id/issues/:issue_iid to update only title' do
it 'updates a project issue' do it 'updates a project issue' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title') expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq(updated_title)
end end
it 'returns 404 error if issue iid not found' do it 'returns 404 error if issue iid not found' do
put api("/projects/#{project.id}/issues/44444", user), put api("/projects/#{project.id}/issues/44444", user), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:not_found)
end end
it 'returns 404 error if issue id is used instead of the iid' do it 'returns 404 error if issue id is used instead of the iid' do
put api("/projects/#{project.id}/issues/#{issue.id}", user), put api("/projects/#{project.id}/issues/#{issue.id}", user), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:not_found) expect(response).to have_gitlab_http_status(:not_found)
end end
it 'allows special label names' do it 'allows special label names' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user,
params: { params: {
title: 'updated title', title: updated_title,
labels: 'label, label?, label&foo, ?, &' labels: 'label, label?, label&foo, ?, &'
} }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
expect(json_response['labels']).to include '?'
expect(json_response['labels']).to include '&'
end end
it 'allows special label names with labels param as array' do it 'allows special label names with labels param as array' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user,
params: { params: {
title: 'updated title', title: updated_title,
labels: ['label', 'label?', 'label&foo, ?, &'] labels: ['label', 'label?', 'label&foo, ?, &']
} }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label' expect(json_response['labels']).to contain_exactly('label', 'label?', 'label&foo', '?', '&')
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
expect(json_response['labels']).to include '?'
expect(json_response['labels']).to include '&'
end end
context 'confidential issues' do context 'confidential issues' do
let(:confidential_issue_path) { "/projects/#{project.id}/issues/#{confidential_issue.iid}" }
it 'returns 403 for non project members' do it 'returns 403 for non project members' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", non_member), put api(confidential_issue_path, non_member), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
end end
it 'returns 403 for project members with guest role' do it 'returns 403 for project members with guest role' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", guest), put api(confidential_issue_path, guest), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
end end
it 'updates a confidential issue for project members' do it 'updates a confidential issue for project members' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user), put api(confidential_issue_path, user), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title') expect(json_response['title']).to eq(updated_title)
end end
it 'updates a confidential issue for author' do it 'updates a confidential issue for author' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", author), put api(confidential_issue_path, author), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title') expect(json_response['title']).to eq(updated_title)
end end
it 'updates a confidential issue for admin' do it 'updates a confidential issue for admin' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", admin), put api(confidential_issue_path, admin), params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to eq('updated title') expect(json_response['title']).to eq(updated_title)
end end
it 'sets an issue to confidential' do it 'sets an issue to confidential' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { confidential: true }
params: { confidential: true }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['confidential']).to be_truthy expect(json_response['confidential']).to be_truthy
end end
it 'makes a confidential issue public' do it 'makes a confidential issue public' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user), put api(confidential_issue_path, user), params: { confidential: false }
params: { confidential: false }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['confidential']).to be_falsy expect(json_response['confidential']).to be_falsy
end end
it 'does not update a confidential issue with wrong confidential flag' do it 'does not update a confidential issue with wrong confidential flag' do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user), put api(confidential_issue_path, user), params: { confidential: 'foo' }
params: { confidential: 'foo' }
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('confidential is invalid') expect(json_response['error']).to eq('confidential is invalid')
@ -185,12 +183,12 @@ describe API::Issues do
include_context 'includes Spam constants' include_context 'includes Spam constants'
def update_issue def update_issue
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: params put api_for_user, params: params
end end
let(:params) do let(:params) do
{ {
title: 'updated title', title: updated_title,
description: 'content here', description: 'content here',
labels: 'label, label2' labels: 'label, label2'
} }
@ -224,7 +222,7 @@ describe API::Issues do
it 'creates a new spam log entry' do it 'creates a new spam log entry' do
expect { update_issue } expect { update_issue }
.to log_spam(title: 'updated title', description: 'content here', user_id: user.id, noteable_type: 'Issue') .to log_spam(title: updated_title, description: 'content here', user_id: user.id, noteable_type: 'Issue')
end end
end end
@ -241,7 +239,7 @@ describe API::Issues do
it 'creates a new spam log entry' do it 'creates a new spam log entry' do
expect { update_issue } expect { update_issue }
.to log_spam(title: 'updated title', description: 'content here', user_id: user.id, noteable_type: 'Issue') .to log_spam(title: updated_title, description: 'content here', user_id: user.id, noteable_type: 'Issue')
end end
end end
end end
@ -249,49 +247,39 @@ describe API::Issues do
describe 'PUT /projects/:id/issues/:issue_iid to update assignee' do describe 'PUT /projects/:id/issues/:issue_iid to update assignee' do
context 'support for deprecated assignee_id' do context 'support for deprecated assignee_id' do
it 'removes assignee' do it 'removes assignee' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { assignee_id: 0 }
params: { assignee_id: 0 }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignee']).to be_nil expect(json_response['assignee']).to be_nil
end end
it 'updates an issue with new assignee' do it 'updates an issue with new assignee' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { assignee_id: user2.id }
params: { assignee_id: user2.id }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignee']['name']).to eq(user2.name) expect(json_response['assignee']['name']).to eq(user2.name)
end end
end end
it 'removes assignee' do it 'removes assignee' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { assignee_ids: [0] }
params: { assignee_ids: [0] }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees']).to be_empty expect(json_response['assignees']).to be_empty
end end
it 'updates an issue with new assignee' do it 'updates an issue with new assignee' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { assignee_ids: [user2.id] }
params: { assignee_ids: [user2.id] }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees'].first['name']).to eq(user2.name) expect(json_response['assignees'].first['name']).to eq(user2.name)
end end
context 'single assignee restrictions' do context 'single assignee restrictions' do
it 'updates an issue with several assignees but only one has been applied' do it 'updates an issue with several assignees but only one has been applied' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { assignee_ids: [user2.id, guest.id] }
params: { assignee_ids: [user2.id, guest.id] }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['assignees'].size).to eq(1) expect(json_response['assignees'].size).to eq(1)
end end
end end
@ -302,8 +290,7 @@ describe API::Issues do
let!(:label_link) { create(:label_link, label: label, target: issue) } let!(:label_link) { create(:label_link, label: label, target: issue) }
it 'adds relevant labels' do it 'adds relevant labels' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { add_labels: '1, 2' }
params: { add_labels: '1, 2' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to contain_exactly(label.title, '1', '2') expect(json_response['labels']).to contain_exactly(label.title, '1', '2')
@ -314,16 +301,14 @@ describe API::Issues do
let!(:label_link2) { create(:label_link, label: label2, target: issue) } let!(:label_link2) { create(:label_link, label: label2, target: issue) }
it 'removes relevant labels' do it 'removes relevant labels' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { remove_labels: label2.title }
params: { remove_labels: label2.title }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([label.title]) expect(json_response['labels']).to eq([label.title])
end end
it 'removes all labels' do it 'removes all labels' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { remove_labels: "#{label.title}, #{label2.title}" }
params: { remove_labels: "#{label.title}, #{label2.title}" }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to be_empty expect(json_response['labels']).to be_empty
@ -331,15 +316,15 @@ describe API::Issues do
end end
it 'does not update labels if not present' do it 'does not update labels if not present' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { title: updated_title }
params: { title: 'updated title' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to eq([label.title]) expect(json_response['labels']).to eq([label.title])
end end
it 'removes all labels and touches the record' do it 'removes all labels and touches the record' do
Timecop.travel(1.minute.from_now) do Timecop.travel(1.minute.from_now) do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { labels: '' } put api_for_user, params: { labels: '' }
end end
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
@ -349,7 +334,7 @@ describe API::Issues do
it 'removes all labels and touches the record with labels param as array' do it 'removes all labels and touches the record with labels param as array' do
Timecop.travel(1.minute.from_now) do Timecop.travel(1.minute.from_now) do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { labels: [''] } put api_for_user, params: { labels: [''] }
end end
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
@ -359,20 +344,19 @@ describe API::Issues do
it 'updates labels and touches the record' do it 'updates labels and touches the record' do
Timecop.travel(1.minute.from_now) do Timecop.travel(1.minute.from_now) do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { labels: 'foo,bar' }
params: { labels: 'foo,bar' }
end end
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo' expect(json_response['labels']).to contain_exactly('foo', 'bar')
expect(json_response['labels']).to include 'bar'
expect(json_response['updated_at']).to be > Time.now expect(json_response['updated_at']).to be > Time.now
end end
it 'updates labels and touches the record with labels param as array' do it 'updates labels and touches the record with labels param as array' do
Timecop.travel(1.minute.from_now) do Timecop.travel(1.minute.from_now) do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { labels: %w(foo bar) }
params: { labels: %w(foo bar) }
end end
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'foo' expect(json_response['labels']).to include 'foo'
expect(json_response['labels']).to include 'bar' expect(json_response['labels']).to include 'bar'
@ -380,36 +364,22 @@ describe API::Issues do
end end
it 'allows special label names' do it 'allows special label names' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { labels: 'label:foo, label-bar,label_bar,label/bar,label?bar,label&bar,?,&' }
params: { labels: 'label:foo, label-bar,label_bar,label/bar,label?bar,label&bar,?,&' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo' expect(json_response['labels']).to contain_exactly('label:foo', 'label-bar', 'label_bar', 'label/bar', 'label?bar', 'label&bar', '?', '&')
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
expect(json_response['labels']).to include 'label/bar'
expect(json_response['labels']).to include 'label?bar'
expect(json_response['labels']).to include 'label&bar'
expect(json_response['labels']).to include '?'
expect(json_response['labels']).to include '&'
end end
it 'allows special label names with labels param as array' do it 'allows special label names with labels param as array' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { labels: ['label:foo', 'label-bar', 'label_bar', 'label/bar,label?bar,label&bar,?,&'] }
params: { labels: ['label:foo', 'label-bar', 'label_bar', 'label/bar,label?bar,label&bar,?,&'] }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo' expect(json_response['labels']).to contain_exactly('label:foo', 'label-bar', 'label_bar', 'label/bar', 'label?bar', 'label&bar', '?', '&')
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
expect(json_response['labels']).to include 'label/bar'
expect(json_response['labels']).to include 'label?bar'
expect(json_response['labels']).to include 'label&bar'
expect(json_response['labels']).to include '?'
expect(json_response['labels']).to include '&'
end end
it 'returns 400 if title is too long' do it 'returns 400 if title is too long' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { title: 'g' * 256 }
params: { title: 'g' * 256 }
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']['title']).to eq([ expect(json_response['message']['title']).to eq([
'is too long (maximum is 255 characters)' 'is too long (maximum is 255 characters)'
@ -419,16 +389,15 @@ describe API::Issues do
describe 'PUT /projects/:id/issues/:issue_iid to update state and label' do describe 'PUT /projects/:id/issues/:issue_iid to update state and label' do
it 'updates a project issue' do it 'updates a project issue' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { labels: 'label2', state_event: 'close' }
params: { labels: 'label2', state_event: 'close' }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label2' expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to contain_exactly('label2')
expect(json_response['state']).to eq 'closed' expect(json_response['state']).to eq 'closed'
end end
it 'reopens a project isssue' do it 'reopens a project isssue' do
put api("/projects/#{project.id}/issues/#{closed_issue.iid}", user), params: { state_event: 'reopen' } put api(issue_path, user), params: { state_event: 'reopen' }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq 'opened' expect(json_response['state']).to eq 'opened'
@ -440,42 +409,41 @@ describe API::Issues do
it 'accepts the update date to be set' do it 'accepts the update date to be set' do
update_time = 2.weeks.ago update_time = 2.weeks.ago
put api("/projects/#{project.id}/issues/#{issue.iid}", user), put api_for_user, params: { title: 'some new title', updated_at: update_time }
params: { title: 'some new title', updated_at: update_time }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to include 'some new title' expect(json_response['title']).to eq('some new title')
expect(Time.parse(json_response['updated_at'])).not_to be_like_time(update_time) expect(Time.parse(json_response['updated_at'])).not_to be_like_time(update_time)
end end
end end
context 'when admin or owner makes the request' do context 'when admin or owner makes the request' do
let(:api_for_owner) { api(issue_path, owner) }
it 'not allow to set null for updated_at' do it 'not allow to set null for updated_at' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: nil } put api_for_owner, params: { updated_at: nil }
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
end end
it 'not allow to set blank for updated_at' do it 'not allow to set blank for updated_at' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: '' } put api_for_owner, params: { updated_at: '' }
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
end end
it 'not allow to set invalid format for updated_at' do it 'not allow to set invalid format for updated_at' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: 'invalid-format' } put api_for_owner, params: { updated_at: 'invalid-format' }
expect(response).to have_gitlab_http_status(:bad_request) expect(response).to have_gitlab_http_status(:bad_request)
end end
it 'accepts the update date to be set' do it 'accepts the update date to be set' do
update_time = 2.weeks.ago update_time = 2.weeks.ago
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), put api_for_owner, params: { title: 'some new title', updated_at: update_time }
params: { title: 'some new title', updated_at: update_time }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to include 'some new title' expect(json_response['title']).to eq('some new title')
expect(Time.parse(json_response['updated_at'])).to be_like_time(update_time) expect(Time.parse(json_response['updated_at'])).to be_like_time(update_time)
end end
end end
@ -485,7 +453,7 @@ describe API::Issues do
it 'creates a new project issue' do it 'creates a new project issue' do
due_date = 2.weeks.from_now.strftime('%Y-%m-%d') due_date = 2.weeks.from_now.strftime('%Y-%m-%d')
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { due_date: due_date } put api_for_user, params: { due_date: due_date }
expect(response).to have_gitlab_http_status(:ok) expect(response).to have_gitlab_http_status(:ok)
expect(json_response['due_date']).to eq(due_date) expect(json_response['due_date']).to eq(due_date)

View File

@ -1055,6 +1055,65 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
post api('/jobs/request'), params: new_params, headers: { 'User-Agent' => user_agent } post api('/jobs/request'), params: new_params, headers: { 'User-Agent' => user_agent }
end end
end end
# Exercises POST /jobs/request for a build created by the Web IDE terminal
# feature: a terminal pipeline is generated via Ci::CreateWebIdeTerminalService
# and a project-scoped runner attempts to pick the resulting job.
context 'for web-ide job' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
# Runner registered to this project only, so it can only receive this project's jobs.
let(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:service) { Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
let(:pipeline) { service[:pipeline] }
let(:build) { pipeline.builds.first }
let(:job) { {} }
# Terminal configuration served in place of '.gitlab/.gitlab-webide.yml'
# (see stub_webide_config_file below). Includes a tag so tag matching can be tested.
let(:config_content) do
'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
end
before do
stub_webide_config_file(config_content)
project.add_maintainer(user)
# Evaluate the lazy `pipeline` let so the terminal build exists before the request.
pipeline
end
context 'when runner has matching tag' do
before do
runner.update!(tag_list: ['tag-1'])
end
it 'successfully picks job' do
request_job
build.reload
expect(build).to be_running
expect(build.runner).to eq(runner)
expect(response).to have_gitlab_http_status(:created)
# The payload must expose the terminal's image, services, variables and job info.
expect(json_response).to include(
"id" => build.id,
"variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
"image" => a_hash_including("name" => 'ruby'),
"services" => all(a_hash_including("name" => 'mysql')),
"job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
end
end
context 'when runner does not have matching tags' do
it 'does not pick a job' do
request_job
build.reload
expect(build).to be_pending
expect(response).to have_gitlab_http_status(:no_content)
end
end
# Helper: request a job as the given runner, merging any extra request params.
def request_job(token = runner.token, **params)
post api('/jobs/request'), params: params.merge(token: token)
end
end
end end
describe 'PUT /api/v4/jobs/:id' do describe 'PUT /api/v4/jobs/:id' do

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies the JSON shape produced by WebIdeTerminalEntity for a CI build
# wrapped in a WebIdeTerminal presenter.
describe WebIdeTerminalEntity do
let(:build) { create(:ci_build) }
let(:entity) { described_class.new(WebIdeTerminal.new(build)) }
subject { entity.as_json }
it { is_expected.to have_key(:id) }
it { is_expected.to have_key(:status) }
it { is_expected.to have_key(:show_path) }
it { is_expected.to have_key(:cancel_path) }
it { is_expected.to have_key(:retry_path) }
it { is_expected.to have_key(:terminal_path) }
it { is_expected.to have_key(:services) }
it { is_expected.to have_key(:proxy_websocket_path) }
# :proxy_websocket_path is gated behind the build_service_proxy feature flag.
context 'when feature flag build_service_proxy is disabled' do
before do
stub_feature_flags(build_service_proxy: false)
end
it { is_expected.not_to have_key(:proxy_websocket_path) }
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies that WebIdeTerminalSerializer uses WebIdeTerminalEntity and that it
# wraps bare CI builds in a WebIdeTerminal presenter before serializing.
describe WebIdeTerminalSerializer do
let(:build) { create(:ci_build) }
subject { described_class.new.represent(WebIdeTerminal.new(build)) }
it 'represents WebIdeTerminalEntity entities' do
expect(described_class.entity_class).to eq(WebIdeTerminalEntity)
end
it 'accepts WebIdeTerminal as a resource' do
expect(subject[:id]).to eq build.id
end
# A plain Ci::Build should be transparently wrapped in WebIdeTerminal.
context 'when resource is a build' do
subject { described_class.new.represent(build) }
it 'transforms it into a WebIdeTerminal resource' do
expect(WebIdeTerminal).to receive(:new)
subject
end
end
end

View File

@ -0,0 +1,143 @@
# frozen_string_literal: true
require 'spec_helper'
# Covers Ci::CreateWebIdeTerminalService#execute: creating a single-stage,
# single-build terminal pipeline for the Web IDE, and every error path
# (permissions, ref validation, duplicate terminals, config problems).
describe Ci::CreateWebIdeTerminalService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:ref) { 'master' }
describe '#execute' do
subject { described_class.new(project, user, ref: ref).execute }
context 'for maintainer' do
# A successful run returns a persisted pipeline with exactly one stage
# holding exactly one (terminal) build.
shared_examples 'be successful' do
it 'returns a success with pipeline object' do
is_expected.to include(status: :success)
expect(subject[:pipeline]).to be_a(Ci::Pipeline)
expect(subject[:pipeline]).to be_persisted
expect(subject[:pipeline].stages.count).to eq(1)
expect(subject[:pipeline].builds.count).to eq(1)
end
end
before do
project.add_maintainer(user)
end
context 'when web-ide has valid configuration' do
before do
stub_webide_config_file(config_content)
end
context 'for empty configuration' do
let(:config_content) do
'terminal: {}'
end
it_behaves_like 'be successful'
end
context 'for configuration with container image' do
let(:config_content) do
'terminal: { image: ruby }'
end
it_behaves_like 'be successful'
end
context 'for configuration with ports' do
let(:config_content) do
<<-EOS
terminal:
image:
name: ruby:2.7
ports:
- 80
script: rspec
services:
- name: test
alias: test
ports:
- 8080
EOS
end
it_behaves_like 'be successful'
end
end
end
context 'error handling' do
shared_examples 'having an error' do |message|
it 'returns an error' do
is_expected.to eq(
status: :error,
message: message
)
end
end
shared_examples 'having insufficient permissions' do
it_behaves_like 'having an error', 'Insufficient permissions to create a terminal'
end
# Developers may not create terminals; maintainer access is required.
context 'when user is developer' do
before do
project.add_developer(user)
end
it_behaves_like 'having insufficient permissions'
end
context 'when user is maintainer' do
before do
project.add_maintainer(user)
end
# Only one running terminal per user/project is allowed.
context 'when terminal is already running' do
let!(:webide_pipeline) { create(:ci_pipeline, :webide, :running, project: project, user: user) }
it_behaves_like 'having an error', 'There is already a terminal running'
end
context 'when ref is non-existing' do
let(:ref) { 'non-existing-ref' }
it_behaves_like 'having an error', 'Ref does not exist'
end
context 'when ref is a tag' do
let(:ref) { 'v1.0.0' }
it_behaves_like 'having an error', 'Ref needs to be a branch'
end
# NOTE(review): this context reuses the tag ref and the tag error message,
# so the missing-config path is never actually exercised here — presumably
# it was meant to use a branch ref and expect a config-related error; confirm.
context 'when terminal config is missing' do
let(:ref) { 'v1.0.0' }
it_behaves_like 'having an error', 'Ref needs to be a branch'
end
context 'when webide config is present' do
before do
stub_webide_config_file(config_content)
end
context 'config has invalid content' do
let(:config_content) { 'invalid' }
it_behaves_like 'having an error', 'Invalid configuration format'
end
context 'config is valid, but does not have terminal' do
let(:config_content) { '{}' }
it_behaves_like 'having an error', 'Terminal is not configured'
end
end
end
end
end
end

View File

@ -0,0 +1,91 @@
# frozen_string_literal: true
require 'spec_helper'
# Covers Ci::WebIdeConfigService#execute: loading and validating the
# '.gitlab/.gitlab-webide.yml' file at a given sha, including permission
# checks, missing/invalid files, and extraction of the terminal job options.
describe Ci::WebIdeConfigService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { 'sha' }
describe '#execute' do
subject { described_class.new(project, user, sha: sha).execute }
# Without at least developer access the config must not be readable.
context 'when insufficient permission' do
it 'returns an error' do
is_expected.to include(
status: :error,
message: 'Insufficient permissions to read configuration')
end
end
context 'for developer' do
before do
project.add_developer(user)
end
context 'when file is missing' do
it 'returns an error' do
is_expected.to include(
status: :error,
message: "Failed to load Web IDE config file '.gitlab/.gitlab-webide.yml' for sha")
end
end
context 'when file is present' do
before do
# Serve config_content as the blob at 'sha' regardless of path.
allow(project.repository).to receive(:blob_data_at).with('sha', anything) do
config_content
end
end
context 'content is not valid' do
let(:config_content) { 'invalid content' }
it 'returns an error' do
is_expected.to include(
status: :error,
message: "Invalid configuration format")
end
end
# Valid YAML without a terminal entry is success with a nil terminal.
context 'content is valid, but terminal not defined' do
let(:config_content) { '{}' }
it 'returns success' do
is_expected.to include(
status: :success,
terminal: nil)
end
end
# An empty terminal entry yields the default 'sleep 60' keep-alive script.
context 'content is valid, with enabled terminal' do
let(:config_content) { 'terminal: {}' }
it 'returns success' do
is_expected.to include(
status: :success,
terminal: {
tag_list: [],
yaml_variables: [],
options: { script: ["sleep 60"] }
})
end
end
context 'content is valid, with custom terminal' do
let(:config_content) { 'terminal: { before_script: [ls] }' }
it 'returns success' do
is_expected.to include(
status: :success,
terminal: {
tag_list: [],
yaml_variables: [],
options: { before_script: ["ls"], script: ["sleep 60"] }
})
end
end
end
end
end
end

View File

@ -85,9 +85,21 @@ describe Clusters::ParseClusterApplicationsArtifactService do
end end
end end
context 'job has no deployment cluster' do context 'job has no deployment' do
let(:job) { build(:ci_build) } let(:job) { build(:ci_build) }
it 'returns an error' do
result = described_class.new(job, user).execute(artifact)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('No deployment found for this job')
end
end
context 'job has no deployment cluster' do
let(:deployment) { create(:deployment) }
let(:job) { deployment.deployable }
it 'returns an error' do it 'returns an error' do
result = described_class.new(job, user).execute(artifact) result = described_class.new(job, user).execute(artifact)

View File

@ -103,12 +103,14 @@ describe Groups::ImportExport::ExportService do
end end
it 'logs the error' do it 'logs the error' do
expect(shared.logger).to receive(:error).with( expect_next_instance_of(Gitlab::Export::Logger) do |logger|
group_id: group.id, expect(logger).to receive(:error).with(
group_name: group.name, group_id: group.id,
error: expected_message, group_name: group.name,
message: 'Group Import/Export: Export failed' errors: expected_message,
) message: 'Group Export failed'
)
end
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error) expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end end
@ -162,7 +164,8 @@ describe Groups::ImportExport::ExportService do
it 'notifies logger' do it 'notifies logger' do
allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false) allow(service).to receive_message_chain(:tree_exporter, :save).and_return(false)
expect(shared.logger).to receive(:error)
expect(service.instance_variable_get(:@logger)).to receive(:error)
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error) expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end end

View File

@ -119,9 +119,7 @@ describe Projects::ImportExport::ExportService do
end end
it 'notifies logger' do it 'notifies logger' do
allow(Gitlab::AppLogger).to receive(:error) expect(service.instance_variable_get(:@logger)).to receive(:error)
expect(Gitlab::AppLogger).to receive(:error)
end end
end end
end end
@ -149,7 +147,7 @@ describe Projects::ImportExport::ExportService do
end end
it 'notifies logger' do it 'notifies logger' do
expect(Gitlab::AppLogger).to receive(:error) expect(service.instance_variable_get(:@logger)).to receive(:error)
end end
it 'does not call the export strategy' do it 'does not call the export strategy' do

View File

@ -141,6 +141,12 @@ module StubGitlabCalls
.to_return(status: 200, body: "", headers: {}) .to_return(status: 200, body: "", headers: {})
end end
# Stubs any Repository so that reading the Web IDE config file
# ('.gitlab/.gitlab-webide.yml') at +sha+ returns +content+.
# +sha+ defaults to `anything`, matching reads at any revision.
def stub_webide_config_file(content, sha: anything)
allow_any_instance_of(Repository)
.to receive(:blob_data_at).with(sha, '.gitlab/.gitlab-webide.yml')
.and_return(content)
end
def project_hash_array def project_hash_array
f = File.read(Rails.root.join('spec/support/gitlab_stubs/projects.json')) f = File.read(Rails.root.join('spec/support/gitlab_stubs/projects.json'))
Gitlab::Json.parse(f) Gitlab::Json.parse(f)

View File

@ -782,15 +782,15 @@
eslint-plugin-vue "^6.2.1" eslint-plugin-vue "^6.2.1"
vue-eslint-parser "^7.0.0" vue-eslint-parser "^7.0.0"
"@gitlab/svgs@1.128.0": "@gitlab/svgs@1.130.0":
version "1.128.0" version "1.130.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.128.0.tgz#c510050d5646d73b52e684248a186dbd1f55cbb0" resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.130.0.tgz#0c2f3cdc0a4b0f54c47b2861c8fa31b2a58c570a"
integrity sha512-RqgF6k2xPptbz58RB1nNgeo6gy3l1u7+1rxXvALzIAsazmrAw708NYCT3PALg2RoyH0G/fpUa6yPQ0HbR+OtEg== integrity sha512-azJ1E9PBk6fGOaP6816BSr8oYrQu3m3BbYZwWOCUp8AfbZuf0ZOZVYmlR9i/eAOhoqqqmwF8hYCK2VjAklbpPA==
"@gitlab/ui@14.14.2": "@gitlab/ui@14.17.0":
version "14.14.2" version "14.17.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-14.14.2.tgz#7cc81d90d5b5394345d6781ff02e974e24b97387" resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-14.17.0.tgz#668f08318e9ef4d36a05c1ec13d531a8e46983b4"
integrity sha512-Fq7fGjhofnN64xckTuuuX4EE23ZXcndwCfFBFrCTCbDfrDSa0l0xkmkrvYCSrNNTp6CyL5Ec/LWgGcnGCPWaFw== integrity sha512-p75/lFQ0w5Mlg0DMdF2g/LW8DTJoSua4Xoh9BQO80o+Kw3ALQFOvvZAx17AW/MbgRD775I7Yv3V84H+s/xUqwg==
dependencies: dependencies:
"@babel/standalone" "^7.0.0" "@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0" "@gitlab/vue-toasted" "^1.3.0"