Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-07-22 15:09:28 +00:00
parent 2c171fdd72
commit d1cb802bac
59 changed files with 1092 additions and 367 deletions

View file

@ -58,7 +58,7 @@ code_quality:
SAST_ANALYZER_IMAGE_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
SAST_ANALYZER_IMAGE_TAG: 2
SAST_BRAKEMAN_LEVEL: 2 # GitLab-specific
SAST_EXCLUDED_PATHS: qa,spec,doc,ee/spec # GitLab-specific
SAST_EXCLUDED_PATHS: qa,spec,doc,ee/spec,config/gitlab.yml.example # GitLab-specific
SAST_DISABLE_BABEL: "true"
script:
- /analyzer run

View file

@ -1,5 +1,5 @@
<script>
import { GlDeprecatedButton, GlLoadingIcon } from '@gitlab/ui';
import { GlButton, GlLoadingIcon, GlModal, GlLink } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import PipelinesService from '~/pipelines/services/pipelines_service';
import PipelineStore from '~/pipelines/stores/pipelines_store';
@ -12,8 +12,10 @@ import CIPaginationMixin from '~/vue_shared/mixins/ci_pagination_api_mixin';
export default {
components: {
TablePagination,
GlDeprecatedButton,
GlButton,
GlLoadingIcon,
GlModal,
GlLink,
},
mixins: [pipelinesMixin, CIPaginationMixin],
props: {
@ -38,11 +40,21 @@ export default {
required: false,
default: 'child',
},
canRunPipeline: {
canCreatePipelineInTargetProject: {
type: Boolean,
required: false,
default: false,
},
sourceProjectFullPath: {
type: String,
required: false,
default: '',
},
targetProjectFullPath: {
type: String,
required: false,
default: '',
},
projectId: {
type: String,
required: false,
@ -63,6 +75,7 @@ export default {
state: store.state,
page: getParameterByName('page') || '1',
requestData: {},
modalId: 'create-pipeline-for-fork-merge-request-modal',
};
},
@ -75,13 +88,28 @@ export default {
},
/**
* The Run Pipeline button can only be rendered when:
* - In MR view - we use `canRunPipeline` for that purpose
* - In MR view - we use `canCreatePipelineInTargetProject` for that purpose
* - If the latest pipeline has the `detached_merge_request_pipeline` flag
*
* @returns {Boolean}
*/
canRenderPipelineButton() {
return this.canRunPipeline && this.latestPipelineDetachedFlag;
return this.latestPipelineDetachedFlag;
},
isForkMergeRequest() {
return this.sourceProjectFullPath !== this.targetProjectFullPath;
},
isLatestPipelineCreatedInTargetProject() {
const latest = this.state.pipelines[0];
return latest?.project?.full_path === `/${this.targetProjectFullPath}`;
},
shouldShowSecurityWarning() {
return (
this.canCreatePipelineInTargetProject &&
this.isForkMergeRequest &&
!this.isLatestPipelineCreatedInTargetProject
);
},
/**
* Checks if either `detached_merge_request_pipeline` or
@ -148,6 +176,13 @@ export default {
mergeRequestId: this.mergeRequestId,
});
},
tryRunPipeline() {
if (!this.shouldShowSecurityWarning) {
this.onClickRunPipeline();
} else {
this.$refs.modal.show();
}
},
},
};
</script>
@ -171,16 +206,53 @@ export default {
<div v-else-if="shouldRenderTable" class="table-holder">
<div v-if="canRenderPipelineButton" class="nav justify-content-end">
<gl-deprecated-button
v-if="canRenderPipelineButton"
<gl-button
variant="success"
class="js-run-mr-pipeline prepend-top-10 btn-wide-on-xs"
:disabled="state.isRunningMergeRequestPipeline"
@click="onClickRunPipeline"
@click="tryRunPipeline"
>
<gl-loading-icon v-if="state.isRunningMergeRequestPipeline" inline />
{{ s__('Pipelines|Run Pipeline') }}
</gl-deprecated-button>
</gl-button>
<gl-modal
:id="modalId"
ref="modal"
:modal-id="modalId"
:title="s__('Pipelines|Are you sure you want to run this pipeline?')"
:ok-title="s__('Pipelines|Run Pipeline')"
ok-variant="danger"
@ok="onClickRunPipeline"
>
<p>
{{
s__(
'Pipelines|This pipeline will run code originating from a forked project merge request. This means that the code can potentially have security considerations like exposing CI variables.',
)
}}
</p>
<p>
{{
s__(
"Pipelines|It is recommended the code is reviewed thoroughly before running this pipeline with the parent project's CI resource.",
)
}}
</p>
<p>
{{
s__(
'Pipelines|If you are unsure, please ask a project maintainer to review it for you.',
)
}}
</p>
<gl-link
href="/help/ci/merge_request_pipelines/index.html#create-pipelines-in-the-parent-project-for-merge-requests-from-a-forked-project"
target="_blank"
>
{{ s__('Pipelines|More Information') }}
</gl-link>
</gl-modal>
</div>
<pipelines-table-component

View file

@ -38,7 +38,7 @@ export default {
)
"
:disabled="isSaving"
variant="success"
variant="default"
size="small"
@click="openFileUpload"
>

View file

@ -748,9 +748,11 @@ export const setCurrentDiffFileIdFromNote = ({ commit, rootGetters }, noteId) =>
if (!note) return;
const fileHash = rootGetters.getDiscussion(note.discussion_id).diff_file.file_hash;
const fileHash = rootGetters.getDiscussion(note.discussion_id).diff_file?.file_hash;
commit(types.UPDATE_CURRENT_DIFF_FILE_ID, fileHash);
if (fileHash) {
commit(types.UPDATE_CURRENT_DIFF_FILE_ID, fileHash);
}
};
export const navigateToDiffFileIndex = ({ commit, state }, index) => {

View file

@ -358,7 +358,11 @@ export default class MergeRequestTabs {
emptyStateSvgPath: pipelineTableViewEl.dataset.emptyStateSvgPath,
errorStateSvgPath: pipelineTableViewEl.dataset.errorStateSvgPath,
autoDevopsHelpPath: pipelineTableViewEl.dataset.helpAutoDevopsPath,
canRunPipeline: true,
canCreatePipelineInTargetProject: Boolean(
mrWidgetData?.can_create_pipeline_in_target_project,
),
sourceProjectFullPath: mrWidgetData?.source_project_full_path || '',
targetProjectFullPath: mrWidgetData?.target_project_full_path || '',
projectId: pipelineTableViewEl.dataset.projectId,
mergeRequestId: mrWidgetData ? mrWidgetData.iid : null,
},

View file

@ -132,6 +132,8 @@ module SnippetsActions
end
def redirect_if_binary
return if Feature.enabled?(:snippets_binary_blob)
redirect_to gitlab_snippet_path(snippet) if blob&.binary?
end
end

View file

@ -75,18 +75,16 @@ module Ci
def append(new_data, offset)
raise ArgumentError, 'New data is missing' unless new_data
raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
raise ArgumentError, 'Offset is out of range' if offset < 0 || offset > size
raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
in_lock(*lock_params) do # Write operation is atomic
unsafe_set_data!(data.byteslice(0, offset) + new_data)
end
in_lock(*lock_params) { unsafe_append_data!(new_data, offset) }
schedule_to_persist if full?
end
def size
data&.bytesize.to_i
@size ||= current_store.size(self) || data&.bytesize
end
def start_offset
@ -118,7 +116,7 @@ module Ci
raise FailedToPersistDataError, 'Data is not fulfilled in a bucket'
end
old_store_class = self.class.get_store_class(data_store)
old_store_class = current_store
self.raw_data = nil
self.data_store = new_store
@ -128,16 +126,33 @@ module Ci
end
def get_data
self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
rescue Excon::Error::NotFound
# If the data store is :fog and the file does not exist in the object storage, this method returns nil.
current_store.data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
end
def unsafe_set_data!(value)
raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE
self.class.get_store_class(data_store).set_data(self, value)
current_store.set_data(self, value)
@data = value
@size = value.bytesize
save! if changed?
end
def unsafe_append_data!(value, offset)
new_size = value.bytesize + offset
if new_size > CHUNK_SIZE
raise ArgumentError, 'New data size exceeds chunk size'
end
current_store.append_data(self, value, offset).then do |stored|
raise ArgumentError, 'Trace appended incorrectly' if stored != new_size
end
@data = nil
@size = new_size
save! if changed?
end
@ -156,6 +171,10 @@ module Ci
size == CHUNK_SIZE
end
def current_store
self.class.get_store_class(data_store)
end
def lock_params
["trace_write:#{build_id}:chunks:#{chunk_index}",
{ ttl: WRITE_LOCK_TTL,
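A note on the refactor above, with a minimal usage sketch (the build object and log data are hypothetical; `append` and `size` are the public methods shown in this diff):

```ruby
# Appending to a trace chunk after this change:
chunk = Ci::BuildTraceChunk.find_by!(build_id: build.id, chunk_index: 0)

# `size` is now memoized and asks the current store directly (Redis STRLEN,
# a Fog HEAD request, or the raw data's bytesize) instead of loading data.
offset = chunk.size

# Validation runs first (offset within [0, size], result within CHUNK_SIZE);
# the store-specific `append_data` then executes under the distributed lock
# without reading the existing bytes back into Ruby.
chunk.append("new log output\n", offset)
```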

View file

@ -19,8 +19,22 @@ module Ci
model.raw_data
end
def set_data(model, data)
model.raw_data = data
def set_data(model, new_data)
model.raw_data = new_data
end
def append_data(model, new_data, offset)
if offset > 0
truncated_data = data(model).to_s.byteslice(0, offset)
new_data = truncated_data + new_data
end
model.raw_data = new_data
model.raw_data.to_s.bytesize
end
def size(model)
data(model).to_s.bytesize
end
def delete_data(model)
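The database store has no server-side append, so `append_data` emulates one with a truncate-and-concatenate in Ruby. A worked example of the `byteslice` semantics (values made up):

```ruby
existing = "abcdef"   # current raw_data
offset   = 3          # keep the first 3 bytes
new_data = "XYZ"

truncated = existing.byteslice(0, offset)  # => "abc"
combined  = truncated + new_data           # => "abcXYZ"
combined.bytesize                          # => 6, the value append_data returns
```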

View file

@ -9,10 +9,29 @@ module Ci
def data(model)
connection.get_object(bucket_name, key(model))[:body]
rescue Excon::Error::NotFound
# If the object does not exist in the object storage, this method returns nil.
end
def set_data(model, data)
connection.put_object(bucket_name, key(model), data)
def set_data(model, new_data)
connection.put_object(bucket_name, key(model), new_data)
end
def append_data(model, new_data, offset)
if offset > 0
truncated_data = data(model).to_s.byteslice(0, offset)
new_data = truncated_data + new_data
end
set_data(model, new_data)
new_data.bytesize
end
def size(model)
connection.head_object(bucket_name, key(model))
.get_header('Content-Length')
rescue Excon::Error::NotFound
0
end
def delete_data(model)

View file

@ -4,6 +4,32 @@ module Ci
module BuildTraceChunks
class Redis
CHUNK_REDIS_TTL = 1.week
LUA_APPEND_CHUNK = <<~EOS.freeze
local key, new_data, offset = KEYS[1], ARGV[1], ARGV[2]
local length = new_data:len()
local expire = #{CHUNK_REDIS_TTL.seconds}
local current_size = redis.call("strlen", key)
offset = tonumber(offset)
if offset == 0 then
-- overwrite everything
redis.call("set", key, new_data, "ex", expire)
return redis.call("strlen", key)
elseif offset > current_size then
-- offset range violation
return -1
elseif offset + length >= current_size then
-- efficiently append or overwrite and append
redis.call("expire", key, expire)
return redis.call("setrange", key, offset, new_data)
else
-- append and truncate
local current_data = redis.call("get", key)
new_data = current_data:sub(1, offset) .. new_data
redis.call("set", key, new_data, "ex", expire)
return redis.call("strlen", key)
end
EOS
def available?
true
@ -21,6 +47,18 @@ module Ci
end
end
def append_data(model, new_data, offset)
Gitlab::Redis::SharedState.with do |redis|
redis.eval(LUA_APPEND_CHUNK, keys: [key(model)], argv: [new_data, offset])
end
end
def size(model)
Gitlab::Redis::SharedState.with do |redis|
redis.strlen(key(model))
end
end
def delete_data(model)
delete_keys([[model.build_id, model.chunk_index]])
end
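For reference, how the three Lua branches behave when driven through `append_data` above (the key and payloads are made up; `redis.eval` is the redis-rb call used in this class):

```ruby
Gitlab::Redis::SharedState.with do |redis|
  key = 'hypothetical:trace:chunk:key'
  redis.set(key, 'hello')   # current chunk content, 5 bytes

  # offset == 0: full overwrite, returns the new length (3)
  redis.eval(LUA_APPEND_CHUNK, keys: [key], argv: ['abc', 0])

  # 0 < offset <= current size: SETRANGE overwrites/extends in place
  redis.eval(LUA_APPEND_CHUNK, keys: [key], argv: ['XYZ', 3])   # => 6

  # offset beyond current size: range violation, returns -1, which the
  # model turns into ArgumentError ('Trace appended incorrectly')
  redis.eval(LUA_APPEND_CHUNK, keys: [key], argv: ['x', 99])    # => -1
end
```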

View file

@ -14,6 +14,10 @@ class MergeRequestWidgetEntity < Grape::Entity
merge_request.project&.full_path
end
expose :can_create_pipeline_in_target_project do |merge_request|
can?(current_user, :create_pipeline, merge_request.target_project)
end
expose :email_patches_path do |merge_request|
project_merge_request_path(merge_request.project, merge_request, format: :patch)
end

View file

@ -39,17 +39,12 @@ module AlertManagement
def create_issue
label_result = find_or_create_incident_label
# Create an unlabelled issue if we couldn't create the label
# due to a race condition.
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/65042
extra_params = label_result.success? ? { label_ids: [label_result.payload[:label].id] } : {}
issue = Issues::CreateService.new(
project,
user,
title: alert_presenter.title,
description: alert_presenter.issue_description,
**extra_params
label_ids: [label_result.payload[:label].id]
).execute
return error(object_errors(issue), issue) unless issue.valid?

View file

@ -14,27 +14,9 @@ module IncidentManagement
def execute
label = Labels::FindOrCreateService
.new(current_user, project, **LABEL_PROPERTIES)
.execute
if label.invalid?
log_invalid_label_info(label)
return ServiceResponse.error(payload: { label: label }, message: full_error_message(label))
end
.execute(skip_authorization: true)
ServiceResponse.success(payload: { label: label })
end
private
def log_invalid_label_info(label)
log_info <<~TEXT.chomp
Cannot create incident label "#{label.title}" \
for "#{label.project.full_name}": #{full_error_message(label)}.
TEXT
end
def full_error_message(label)
label.errors.full_messages.to_sentence
end
end
end
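This is why the `create_issue` call sites in this commit can drop their race-condition fallback: with `skip_authorization: true`, the label find-or-create can no longer fail validation, so the service always returns a success payload. A sketch of the resulting caller contract (the service class name is an assumption based on the `IncidentManagement` module shown; `project` and `current_user` come from the calling context):

```ruby
label_result = IncidentManagement::CreateIncidentLabelService
  .new(project, current_user)
  .execute

label_result.success?                          # => true, unconditionally
label_ids = [label_result.payload[:label].id]  # safe to pass straight through
```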

View file

@ -23,17 +23,12 @@ module IncidentManagement
def create_issue
label_result = find_or_create_incident_label
# Create an unlabelled issue if we couldn't create the label
# due to a race condition.
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/65042
extra_params = label_result.success? ? { label_ids: [label_result.payload[:label].id] } : {}
Issues::CreateService.new(
project,
current_user,
title: issue_title,
description: issue_description,
**extra_params
label_ids: [label_result.payload[:label].id]
).execute
end

View file

@ -25,17 +25,12 @@ module IncidentManagement
def create_issue
label_result = find_or_create_incident_label
# Create an unlabelled issue if we couldn't create the label
# due to a race condition.
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/65042
extra_params = label_result.success? ? { label_ids: [label_result.payload[:label].id] } : {}
Issues::CreateService.new(
project,
current_user,
title: issue_title,
description: issue_description,
**extra_params
label_ids: [label_result.payload[:label].id]
).execute
end

View file

@ -35,21 +35,21 @@
= sprite_icon('search', size: 16)
- if header_link?(:issues)
= nav_link(path: 'dashboard#issues', html_options: { class: "user-counter" }) do
= link_to assigned_issues_dashboard_path, title: _('Issues'), class: 'dashboard-shortcuts-issues', aria: { label: _('Issues') }, data: {toggle: 'tooltip', placement: 'bottom', container: 'body'} do
= link_to assigned_issues_dashboard_path, title: _('Issues'), class: 'dashboard-shortcuts-issues', aria: { label: _('Issues') }, data: { qa_selector: 'issues_shortcut_button', toggle: 'tooltip', placement: 'bottom', container: 'body' } do
= sprite_icon('issues', size: 16)
- issues_count = assigned_issuables_count(:issues)
%span.badge.badge-pill.issues-count.green-badge{ class: ('hidden' if issues_count.zero?) }
= number_with_delimiter(issues_count)
- if header_link?(:merge_requests)
= nav_link(path: 'dashboard#merge_requests', html_options: { class: "user-counter" }) do
= link_to assigned_mrs_dashboard_path, title: _('Merge requests'), class: 'dashboard-shortcuts-merge_requests', aria: { label: _('Merge requests') }, data: {toggle: 'tooltip', placement: 'bottom', container: 'body'} do
= link_to assigned_mrs_dashboard_path, title: _('Merge requests'), class: 'dashboard-shortcuts-merge_requests', aria: { label: _('Merge requests') }, data: { qa_selector: 'merge_requests_shortcut_button', toggle: 'tooltip', placement: 'bottom', container: 'body' } do
= sprite_icon('git-merge', size: 16)
- merge_requests_count = assigned_issuables_count(:merge_requests)
%span.badge.badge-pill.merge-requests-count{ class: ('hidden' if merge_requests_count.zero?) }
= number_with_delimiter(merge_requests_count)
- if header_link?(:todos)
= nav_link(controller: 'dashboard/todos', html_options: { class: "user-counter" }) do
= link_to dashboard_todos_path, title: _('To-Do List'), aria: { label: _('To-Do List') }, class: 'shortcuts-todos', data: {toggle: 'tooltip', placement: 'bottom', container: 'body'} do
= link_to dashboard_todos_path, title: _('To-Do List'), aria: { label: _('To-Do List') }, class: 'shortcuts-todos', data: { qa_selector: 'todos_shortcut_button', toggle: 'tooltip', placement: 'bottom', container: 'body' } do
= sprite_icon('todo-done', size: 16)
%span.badge.badge-pill.todos-count{ class: ('hidden' if todos_pending_count.zero?) }
= todos_count_format(todos_pending_count)

View file

@ -26,7 +26,7 @@
= image_tag avatar_icon_for_user(project.creator, 48), class: "avatar s48", alt:''
- else
= project_icon(project, alt: '', class: 'avatar project-avatar s48', width: 48, height: 48)
.project-details.d-sm-flex.flex-sm-fill.align-items-center{ data: { qa_selector: 'project', qa_project_name: project.name } }
.project-details.d-sm-flex.flex-sm-fill.align-items-center{ data: { qa_selector: 'project_content', qa_project_name: project.name } }
.flex-wrapper
.d-flex.align-items-center.flex-wrap.project-title
%h2.d-flex.gl-mt-3
@ -51,7 +51,7 @@
-# haml-lint:disable UnnecessaryStringOutput
= ' ' # prevent haml from eating the space between elements
.metadata-info.gl-mt-3
%span.user-access-role.d-block= Gitlab::Access.human_access(access)
%span.user-access-role.d-block{ data: { qa_selector: 'user_role_content' } }= Gitlab::Access.human_access(access)
- if !explore_projects_tab?
.metadata-info.gl-mt-3

View file

@ -0,0 +1,5 @@
---
title: 'UX Polish: Update top right Upload button from Green to Grey'
merge_request: 37558
author:
type: fixed

View file

@ -0,0 +1,5 @@
---
title: Show Security Warning Modal for fork pipelines
merge_request: 36951
author:
type: added

View file

@ -0,0 +1,5 @@
---
title: Backfill personal snippets statistics
merge_request: 36801
author:
type: other

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
class SchedulePopulatePersonalSnippetStatistics < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 500
MIGRATION = 'PopulatePersonalSnippetStatistics'
disable_ddl_transaction!
def up
snippets = exec_query <<~SQL
SELECT id
FROM snippets
WHERE type = 'PersonalSnippet'
ORDER BY author_id ASC, id ASC
SQL
snippets.rows.flatten.in_groups_of(BATCH_SIZE, false).each_with_index do |snippet_ids, index|
migrate_in(index * DELAY_INTERVAL, MIGRATION, [snippet_ids])
end
end
def down
# no-op
end
end

View file

@ -23990,6 +23990,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200713071042
20200713141854
20200713152443
20200714075739
20200715124210
20200715135130
20200715202659

View file

@ -848,14 +848,14 @@ cluster.
## Distributed reads
> Introduced in GitLab 13.1 in [beta](https://about.gitlab.com/handbook/product/#alpha-beta-ga) with feature flag `gitaly_distributed_reads` set to disabled.
> Introduced in GitLab 13.1 in [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) with feature flag `gitaly_distributed_reads` set to disabled.
Praefect supports distribution of read operations across Gitaly nodes that are
configured for the virtual node.
To allow for [performance testing](https://gitlab.com/gitlab-org/quality/performance/-/issues/231),
distributed reads are currently in
[beta](https://about.gitlab.com/handbook/product/#alpha-beta-ga) and disabled by
[beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) and disabled by
default. To enable distributed reads, the `gitaly_distributed_reads`
[feature flag](../feature_flags.md) must be enabled in a Ruby console:
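The console snippet itself falls outside this hunk; for reference, toggling the flag from a Rails console looks like this (a sketch, not the full documented procedure):

```ruby
# In a GitLab Rails console:
Feature.enable(:gitaly_distributed_reads)

# To turn distributed reads back off:
Feature.disable(:gitaly_distributed_reads)
```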
@ -885,15 +885,15 @@ They reflect configuration defined for this instance of Praefect.
## Strong consistency
> - Introduced in GitLab 13.1 in [alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga), disabled by default.
> - Entered [beta](https://about.gitlab.com/handbook/product/#alpha-beta-ga) in GitLab 13.2, disabled by default.
> - Introduced in GitLab 13.1 in [alpha](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga), disabled by default.
> - Entered [beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) in GitLab 13.2, disabled by default.
Praefect guarantees eventual consistency by replicating all writes to secondary nodes
after the write to the primary Gitaly node has happened.
Praefect can instead provide strong consistency by creating a transaction and writing
changes to all Gitaly nodes at once. Strong consistency is currently in
[alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga) and not enabled by
[alpha](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga) and not enabled by
default. If enabled, transactions are only available for a subset of RPCs. For more
information, see the [strong consistency epic](https://gitlab.com/groups/gitlab-org/-/epics/1189).

View file

@ -316,50 +316,51 @@ Plan.default.actual_limits.update!(ci_instance_level_variables: 30)
### Maximum file size per type of artifact
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/216097) in GitLab 13.3.
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37226) in GitLab 13.3.
Artifacts that are uploaded by the Runner will be rejected if the file size exceeds the
maximum file size limit. The limit is determined by picking the smaller value between the project's
Job artifacts defined with [`artifacts:reports`](../ci/pipelines/job_artifacts.md#artifactsreports)
that are uploaded by the Runner are rejected if the file size exceeds the maximum
file size limit. The limit is determined by comparing the project's
[maximum artifact size setting](../user/admin_area/settings/continuous_integration.md#maximum-artifacts-size-core-only)
and the plan limit for the given artifact type.
with the instance limit for the given artifact type, and choosing the smaller value.
Values are interpreted as megabytes thus the smallest possible value that can be defined is `1 MB`.
Limits are set in megabytes, so the smallest possible value that can be defined is `1 MB`.
Each type of artifact has its corresponding maximum size limit. For now, only the `lsif` type's plan limit
is enabled and has a default value defined. The rest of the values and defaults are going to be determined and updated
in future releases.
Each type of artifact has a size limit that can be set. A default of `0` means there
is no limit for that specific artifact type, and the project's maximum artifact size
setting is used:
| Limit Name | Default Value |
| ----------------------------------------- | ------------- |
| ci_max_artifact_size_lsif | 20 |
| ci_max_artifact_size_archive | 0 |
| ci_max_artifact_size_metadata | 0 |
| ci_max_artifact_size_trace | 0 |
| ci_max_artifact_size_junit | 0 |
| ci_max_artifact_size_sast | 0 |
| ci_max_artifact_size_dependency_scanning | 0 |
| ci_max_artifact_size_container_scanning | 0 |
| ci_max_artifact_size_dast | 0 |
| ci_max_artifact_size_codequality | 0 |
| ci_max_artifact_size_license_management | 0 |
| ci_max_artifact_size_license_scanning | 0 |
| ci_max_artifact_size_performance | 0 |
| ci_max_artifact_size_metrics | 0 |
| ci_max_artifact_size_metrics_referee | 0 |
| ci_max_artifact_size_network_referee | 0 |
| ci_max_artifact_size_dotenv | 0 |
| ci_max_artifact_size_cobertura | 0 |
| ci_max_artifact_size_terraform | 0 |
| ci_max_artifact_size_accessibility | 0 |
| ci_max_artifact_size_cluster_applications | 0 |
| ci_max_artifact_size_secret_detection | 0 |
| ci_max_artifact_size_requirements | 0 |
| ci_max_artifact_size_coverage_fuzzing | 0 |
| ci_max_artifact_size_browser_performance | 0 |
| ci_max_artifact_size_load_performance | 0 |
| Artifact limit name | Default value |
|---------------------------------------------|---------------|
| `ci_max_artifact_size_accessibility` | 0 |
| `ci_max_artifact_size_archive` | 0 |
| `ci_max_artifact_size_browser_performance` | 0 |
| `ci_max_artifact_size_cluster_applications` | 0 |
| `ci_max_artifact_size_cobertura` | 0 |
| `ci_max_artifact_size_codequality` | 0 |
| `ci_max_artifact_size_container_scanning` | 0 |
| `ci_max_artifact_size_coverage_fuzzing` | 0 |
| `ci_max_artifact_size_dast` | 0 |
| `ci_max_artifact_size_dependency_scanning` | 0 |
| `ci_max_artifact_size_dotenv` | 0 |
| `ci_max_artifact_size_junit` | 0 |
| `ci_max_artifact_size_license_management` | 0 |
| `ci_max_artifact_size_license_scanning` | 0 |
| `ci_max_artifact_size_load_performance` | 0 |
| `ci_max_artifact_size_lsif` | 20 MB ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37226) in GitLab 13.3) |
| `ci_max_artifact_size_metadata` | 0 |
| `ci_max_artifact_size_metrics_referee` | 0 |
| `ci_max_artifact_size_metrics` | 0 |
| `ci_max_artifact_size_network_referee` | 0 |
| `ci_max_artifact_size_performance` | 0 |
| `ci_max_artifact_size_requirements` | 0 |
| `ci_max_artifact_size_sast` | 0 |
| `ci_max_artifact_size_secret_detection` | 0 |
| `ci_max_artifact_size_terraform` | 5 MB ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37018) in GitLab 13.3) |
| `ci_max_artifact_size_trace` | 0 |
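To make the "choose the smaller value" rule concrete, here is a hypothetical sketch of the resolution (the method names are illustrative, not the actual upload-path code):

```ruby
# A limit of 0 means "no limit for this artifact type", so only
# non-zero values take part in the comparison:
project_limit = project_max_artifacts_size_mb               # admin area setting
plan_limit    = plan_limit_for(:ci_max_artifact_size_junit) # instance limit

effective_mb  = [project_limit, plan_limit].reject(&:zero?).min
```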
To update the limit on a self-managed installation, run the following in the
[GitLab Rails console](troubleshooting/debug.md#starting-a-rails-console-session):
For example, to set the `ci_max_artifact_size_junit` limit to 10MB on a self-managed
installation, run the following in the [GitLab Rails console](troubleshooting/debug.md#starting-a-rails-console-session):
```ruby
Plan.default.actual_limits.update!(ci_max_artifact_size_junit: 10)

View file

@ -73,7 +73,7 @@ sudo gitlab-rails runner "RAILS_COMMAND"
# Example with a two-line Ruby script
sudo gitlab-rails runner "user = User.first; puts user.username"
# Example with a ruby script file
# Example with a ruby script file (make sure to use the full path)
sudo gitlab-rails runner /path/to/script.rb
```
@ -85,7 +85,7 @@ sudo -u git -H bundle exec rails runner -e production "RAILS_COMMAND"
# Example with a two-line Ruby script
sudo -u git -H bundle exec rails runner -e production "user = User.first; puts user.username"
# Example with a ruby script file
# Example with a ruby script file (make sure to use the full path)
sudo -u git -H bundle exec rails runner -e production /path/to/script.rb
```

View file

@ -166,31 +166,33 @@ Read the [documentation on Pipelines for Merged Results](pipelines_for_merged_re
Read the [documentation on Merge Trains](pipelines_for_merged_results/merge_trains/index.md).
## Important notes about merge requests from forked projects
## Create pipelines in the parent project for merge requests from a forked project
Note that the current behavior is subject to change. In the usual contribution
flow, external contributors follow the following steps:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/217451) in GitLab 13.3.
1. Fork a parent project.
1. Create a merge request from the forked project that targets the `master` branch
in the parent project.
1. A pipeline runs on the merge request.
1. A maintainer from the parent project checks the pipeline result, and merge
into a target branch if the latest pipeline has passed.
By default, external contributors working from forks can't create pipelines in the
parent project. When a pipeline for merge requests is triggered by a merge request
coming from a fork:
Currently, those pipelines are created in a **forked** project, not in the
parent project. This means you cannot completely trust the pipeline result,
because, technically, external contributors can disguise their pipeline results
by tweaking their GitLab Runner in the forked project.
- It's created and runs in the fork (source) project, not the parent (target) project.
- It uses the fork project's CI/CD configuration and resources.
There are multiple reasons why GitLab doesn't allow those pipelines to be
created in the parent project, but one of the biggest reasons is security concern.
External users could steal secret variables from the parent project by modifying
`.gitlab-ci.yml`, which could be some sort of credentials. This should not happen.
Sometimes parent project members want the pipeline to run in the parent
project. This could be to ensure that the post-merge pipeline passes in the parent project.
For example, a fork project could try to use a corrupted Runner that doesn't execute
test scripts properly, but reports a passed pipeline. Reviewers in the parent project
could mistakenly trust the merge request because it passed a faked pipeline.
We're discussing a secure solution of running pipelines for merge requests
that are submitted from forked projects,
see [the issue about the permission extension](https://gitlab.com/gitlab-org/gitlab/-/issues/11934).
Parent project members with at least [Developer permissions](../../user/permissions.md)
can create pipelines in the parent project for merge requests
from a forked project. In the merge request, go to the **Pipelines** tab and click
the **Run Pipeline** button.
CAUTION: **Caution:**
Fork merge requests could contain malicious code that tries to steal secrets in the
parent project when the pipeline runs, even before merge. Reviewers must carefully
check the changes in the merge request before triggering the pipeline. GitLab shows
a warning that must be accepted before the pipeline can be triggered.
## Additional predefined variables

View file

@ -45,8 +45,6 @@ To enable pipelines for merge results:
- You must have maintainer [permissions](../../../user/permissions.md).
- You must be using [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner) 11.9 or later.
- You must not be forking or using cross-repo workflows. To follow progress,
see [#11934](https://gitlab.com/gitlab-org/gitlab/-/issues/11934).
- You must not be using
[fast forward merges](../../../user/project/merge_requests/fast_forward_merge.md) yet.
To follow progress, see [#58226](https://gitlab.com/gitlab-org/gitlab/-/issues/26996).

View file

@ -66,7 +66,7 @@ to access it. This is where an SSH key pair comes in handy.
## Install ssh-agent if not already installed, it is required by Docker.
## (change apt-get to yum if you use an RPM-based image)
##
- 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )'
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
##
## Run ssh-agent (inside the build environment)

View file

@ -53,7 +53,6 @@ graph TB
GitLabShell --TCP 8080 -->Unicorn["Unicorn (GitLab Rails)"]
GitLabShell --> Praefect
GitLabShell --> Redis
Unicorn --> PgBouncer[PgBouncer]
Unicorn --> Redis
Unicorn --> Praefect

View file

@ -58,7 +58,7 @@ the feature flag check will default to `true`.**
This is relevant when developing the feature using
[several smaller merge requests](https://about.gitlab.com/handbook/values/#make-small-merge-requests), or when the feature is considered to be an
[alpha or beta](https://about.gitlab.com/handbook/product/#alpha-beta-ga), and
[alpha or beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga), and
should not be available by default.
As an example, if you were to ship the frontend half of a feature without the
@ -67,7 +67,7 @@ also ready to be shipped. To make sure this feature is disabled for both
GitLab.com and self-managed instances, you should use the
[`Namespace#alpha_feature_available?`](https://gitlab.com/gitlab-org/gitlab/blob/458749872f4a8f27abe8add930dbb958044cb926/ee/app/models/ee/namespace.rb#L113) or
[`Namespace#beta_feature_available?`](https://gitlab.com/gitlab-org/gitlab/blob/458749872f4a8f27abe8add930dbb958044cb926/ee/app/models/ee/namespace.rb#L100-112)
method, according to our [definitions](https://about.gitlab.com/handbook/product/#alpha-beta-ga). This ensures the feature is disabled unless the feature flag is
method, according to our [definitions](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha-beta-ga). This ensures the feature is disabled unless the feature flag is
_explicitly_ enabled.
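A hypothetical sketch of gating with these helpers (`:my_feature` and the wrapping method are made up; the helpers are the EE methods linked above):

```ruby
# Render the new UI only when the namespace plan includes the beta feature
# AND its feature flag has been explicitly enabled:
def show_my_feature?
  project.namespace.beta_feature_available?(:my_feature)
end
```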
## Feature groups

View file

@ -10,7 +10,7 @@ GitLab Issues are a powerful tool for discussing ideas and planning and tracking
However, many organizations have been using Jira for these purposes and have
extensive data and business processes built into it.
While you can always migrate content and process from Jira to GitLab Issues,
While you can always [migrate](../../../user/project/import/jira.md) issues and processes from Jira to GitLab Issues,
you can also opt to continue using Jira and use it together with GitLab through
our integration.
@ -21,19 +21,20 @@ detect and cross-reference activity between the GitLab project and any of your p
in Jira. This includes the ability to close or transition Jira issues when the work
is completed in GitLab.
Here's how the integration responds when you take the following actions in GitLab:
Features include:
- **Mention a Jira issue ID** in a commit message or MR (merge request).
- GitLab hyperlinks to the Jira issue.
- The Jira issue adds an issue link to the commit/MR in GitLab.
- The Jira issue adds a comment reflecting the comment made in GitLab, the comment author, and a link to the commit/MR in GitLab, unless this commenting to Jira is [disabled](#disabling-comments-on-jira-issues).
- **Mention that a commit or MR 'closes', 'resolves', or 'fixes' a Jira issue ID**. When the commit is made on the project's default branch (usually master) or the change is merged to the default branch:
- GitLab's merge request page displays a note that it "Closed" the Jira issue, with a link to the issue. (Note: Before the merge, an MR will display that it "Closes" the Jira issue.)
- The Jira issue shows the activity and the Jira issue is closed, or otherwise transitioned.
- **Mention a Jira issue ID** in a commit message or MR (merge request) and
- GitLab links to the Jira issue.
- The Jira issue adds a comment with details and a link back to the activity in GitLab.
- **Mention that a commit or MR resolves or closes a specific Jira issue** and when it's merged to the default branch:
- GitLab's MR displays a note that it closed the Jira issue. Prior to the merge, MRs indicate which issue they will close.
- The Jira issue shows the activity and is closed or otherwise transitioned as specified in your GitLab settings.
- **View Jira issues directly in GitLab** **(PREMIUM)**
You can also use [Jira's Smart Commits](https://confluence.atlassian.com/fisheye/using-smart-commits-960155400.html)
directly from GitLab, as covered in the article
[How and why to integrate GitLab with Jira](https://www.programmableweb.com/news/how-and-why-to-integrate-gitlab-jira/how-to/2017/04/25).
For additional features, you can install the [Jira Development Panel integration](../../../integration/jira_development_panel.md). This enables you to:
- In a Jira issue, display relevant GitLab information in the [development panel](https://support.atlassian.com/jira-software-cloud/docs/view-development-information-for-an-issue/), including related branches, commits, and merge requests.
- Use Jira [Smart Commits](https://confluence.atlassian.com/fisheye/using-smart-commits-960155400.html) in GitLab to add Jira comments, log time spent on the issue, or apply any issue transition.
## Configuration

View file

@ -69,7 +69,7 @@ multiple issue boards within the same project.
## Use cases
There are many ways to use GitLab issue boards tailored to your own preferred workflow.
You can tailor GitLab issue boards to your own preferred workflow.
Here are some common use cases for issue boards.
### Use cases for a single issue board

View file

@ -0,0 +1,49 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class creates/updates those personal snippet statistics
# that haven't been created or initialized.
# It also updates the related root storage namespace stats.
class PopulatePersonalSnippetStatistics
def perform(snippet_ids)
personal_snippets(snippet_ids).group_by(&:author).each do |author, author_snippets|
upsert_snippet_statistics(author_snippets)
update_namespace_statistics(author.namespace)
end
end
private
def personal_snippets(snippet_ids)
PersonalSnippet
.where(id: snippet_ids)
.includes(author: :namespace)
.includes(:statistics)
.includes(snippet_repository: :shard)
end
def upsert_snippet_statistics(snippets)
snippets.each do |snippet|
response = Snippets::UpdateStatisticsService.new(snippet).execute
error_message("#{response.message} snippet: #{snippet.id}") if response.error?
end
end
def update_namespace_statistics(namespace)
Namespaces::StatisticsRefresherService.new.execute(namespace)
rescue => e
error_message("Error updating statistics for namespace #{namespace.id}: #{e.message}")
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
def error_message(message)
logger.error(message: "Snippet Statistics Migration: #{message}")
end
end
end
end

View file

@ -170,6 +170,7 @@ module Gitlab
%r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity,
%r{\A(ee/)?scripts/} => :engineering_productivity,
%r{\Atooling/} => :engineering_productivity,
%r{(CODEOWNERS)} => :engineering_productivity,
%r{\A(ee/)?app/(?!assets|views)[^/]+} => :backend,
%r{\A(ee/)?(bin|config|generator_templates|lib|rubocop)/} => :backend,

View file

@ -17158,6 +17158,9 @@ msgstr ""
msgid "Pipelines|API"
msgstr ""
msgid "Pipelines|Are you sure you want to run this pipeline?"
msgstr ""
msgid "Pipelines|Build with confidence"
msgstr ""
@ -17182,9 +17185,18 @@ msgstr ""
msgid "Pipelines|Group %{namespace_name} has exceeded its pipeline minutes quota. Unless you buy additional pipeline minutes, no new jobs or pipelines in its projects will run."
msgstr ""
msgid "Pipelines|If you are unsure, please ask a project maintainer to review it for you."
msgstr ""
msgid "Pipelines|It is recommended the code is reviewed thoroughly before running this pipeline with the parent project's CI resource."
msgstr ""
msgid "Pipelines|Loading Pipelines"
msgstr ""
msgid "Pipelines|More Information"
msgstr ""
msgid "Pipelines|Project cache successfully reset."
msgstr ""
@ -17206,6 +17218,9 @@ msgstr ""
msgid "Pipelines|This is a child pipeline within the parent pipeline"
msgstr ""
msgid "Pipelines|This pipeline will run code originating from a forked project merge request. This means that the code can potentially have security considerations like exposing CI variables."
msgstr ""
msgid "Pipelines|This project is not currently set up to run pipelines."
msgstr ""
@ -28472,6 +28487,9 @@ msgstr ""
msgid "mrWidget|Fork merge requests do not create merge request pipelines which validate a post merge result"
msgstr ""
msgid "mrWidget|Fork project merge requests do not create merge request pipelines that validate a post merge result unless invoked by a project member."
msgstr ""
msgid "mrWidget|If the %{branch} branch exists in your local repository, you can merge this merge request manually using the"
msgstr ""

View file

@ -50,11 +50,11 @@
"@sourcegraph/code-host-integration": "0.0.48",
"@toast-ui/editor": "^2.2.0",
"@toast-ui/vue-editor": "^2.3.0",
"apollo-cache-inmemory": "^1.6.3",
"apollo-client": "^2.6.4",
"apollo-link": "^1.2.11",
"apollo-link-batch-http": "^1.2.11",
"apollo-upload-client": "^10.0.0",
"apollo-cache-inmemory": "^1.6.6",
"apollo-client": "^2.6.10",
"apollo-link": "^1.2.14",
"apollo-link-batch-http": "^1.2.14",
"apollo-upload-client": "^13.0.0",
"autosize": "^4.0.2",
"aws-sdk": "^2.637.0",
"axios": "^0.19.0",
@ -88,9 +88,8 @@
"file-loader": "^5.1.0",
"fuzzaldrin-plus": "^0.6.0",
"glob": "^7.1.6",
"graphql": "^14.0.2",
"graphql": "^14.7.0",
"graphql-tag": "^2.10.1",
"immer": "^5.2.1",
"imports-loader": "^0.8.0",
"ipaddr.js": "^1.9.1",
"jed": "^1.1.1",

View file

@ -208,6 +208,7 @@ module QA
autoload :New, 'qa/page/group/new'
autoload :Show, 'qa/page/group/show'
autoload :Menu, 'qa/page/group/menu'
autoload :Members, 'qa/page/group/members'
module Milestone
autoload :Index, 'qa/page/group/milestone/index'
@ -216,7 +217,6 @@ module QA
module SubMenus
autoload :Common, 'qa/page/group/sub_menus/common'
autoload :Members, 'qa/page/group/sub_menus/members'
end
module Settings

View file

@ -8,6 +8,17 @@ module QA
element :project_filter_form, required: true
end
view 'app/views/shared/projects/_project.html.haml' do
element :project_content
element :user_role_content
end
def has_project_with_access_role?(project_name, access_role)
within_element(:project_content, text: project_name) do
has_element?(:user_role_content, text: access_role)
end
end
def go_to_project(name)
filter_by_name(name)

View file

@ -0,0 +1,81 @@
# frozen_string_literal: true
module QA
module Page
module Group
class Members < Page::Base
include QA::Page::Component::Select2
include Page::Component::UsersSelect
view 'app/assets/javascripts/vue_shared/components/remove_member_modal.vue' do
element :remove_member_modal_content
end
view 'app/views/shared/members/_invite_member.html.haml' do
element :member_select_field
element :invite_member_button
end
view 'app/views/shared/members/_member.html.haml' do
element :member_row
element :access_level_dropdown
element :delete_member_button
element :developer_access_level_link, 'qa_selector: "#{role.downcase}_access_level_link"' # rubocop:disable QA/ElementWithPattern, Lint/InterpolationCheck
end
view 'app/views/groups/group_members/index.html.haml' do
element :invite_group_tab
element :groups_list
end
view 'app/views/shared/members/_invite_group.html.haml' do
element :group_select_field
element :invite_group_button
end
view 'app/views/shared/members/_group.html.haml' do
element :group_row
end
def select_group(group_name)
click_element :group_select_field
search_and_select(group_name)
end
def invite_group(group_name)
click_element :invite_group_tab
select_group(group_name)
click_element :invite_group_button
end
def add_member(username)
select_user :member_select_field, username
click_element :invite_member_button
end
def update_access_level(username, access_level)
within_element(:member_row, text: username) do
click_element :access_level_dropdown
click_element "#{access_level.downcase}_access_level_link"
end
end
def remove_member(username)
within_element(:member_row, text: username) do
click_element :delete_member_button
end
within_element(:remove_member_modal_content) do
click_button("Remove member")
end
end
def has_existing_group_share?(group_name)
within_element(:groups_list) do
has_element?(:group_row, text: group_name)
end
end
end
end
end
end

View file

@ -1,51 +0,0 @@
# frozen_string_literal: true
module QA
module Page
module Group
module SubMenus
class Members < Page::Base
include Page::Component::UsersSelect
view 'app/assets/javascripts/vue_shared/components/remove_member_modal.vue' do
element :remove_member_modal_content
end
view 'app/views/shared/members/_invite_member.html.haml' do
element :member_select_field
element :invite_member_button
end
view 'app/views/shared/members/_member.html.haml' do
element :member_row
element :access_level_dropdown
element :delete_member_button
element :developer_access_level_link, 'qa_selector: "#{role.downcase}_access_level_link"' # rubocop:disable QA/ElementWithPattern, Lint/InterpolationCheck
end
def add_member(username)
select_user :member_select_field, username
click_element :invite_member_button
end
def update_access_level(username, access_level)
within_element(:member_row, text: username) do
click_element :access_level_dropdown
click_element "#{access_level.downcase}_access_level_link"
end
end
def remove_member(username)
within_element(:member_row, text: username) do
click_element :delete_member_button
end
within_element(:remove_member_modal_content) do
click_button("Remove member")
end
end
end
end
end
end
end

View file

@ -14,6 +14,9 @@ module QA
element :user_avatar, required: true
element :user_menu, required: true
element :stop_impersonation_link
element :issues_shortcut_button, required: true
element :merge_requests_shortcut_button, required: true
element :todos_shortcut_button, required: true
end
view 'app/views/layouts/nav/_dashboard.html.haml' do
@ -63,6 +66,18 @@ module QA
end
end
# Goes to one of the popular pages using the provided shortcut buttons within the top menu
# @param [Symbol] the name of the element (e.g. `:issues_shortcut_button`)
# @example:
# Menu.perform do |menu|
# menu.go_to_page_by_shortcut(:issues_shortcut_button) #=> Go to Issues page using shortcut button
# end
def go_to_page_by_shortcut(button)
within_top_menu do
click_element(button)
end
end
def go_to_admin_area
click_admin_area

View file

@ -16,7 +16,7 @@ module QA
end
view 'app/views/shared/projects/_project.html.haml' do
element :project
element :project_content
end
def switch_to_code
@ -40,7 +40,7 @@ module QA
end
def has_project?(project_name)
has_element?(:project, project_name: project_name)
has_element?(:project_content, project_name: project_name)
end
private

View file

@ -123,14 +123,24 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
context 'when actor is a developer in parent project' do
let(:actor) { developer_in_parent }
it 'creates a pipeline in the parent project' do
it 'creates a pipeline in the parent project when user proceeds with the warning' do
visit project_merge_request_path(parent_project, merge_request)
create_merge_request_pipeline
act_on_security_warning(action: 'Run Pipeline')
check_pipeline(expected_project: parent_project)
check_head_pipeline(expected_project: parent_project)
end
it 'does not create a pipeline in the parent project when user cancels the action' do
visit project_merge_request_path(parent_project, merge_request)
create_merge_request_pipeline
act_on_security_warning(action: 'Cancel')
check_no_pipelines
end
end
context 'when actor is a developer in fork project' do
@ -187,6 +197,19 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
expect(page.find('.pipeline-id')[:href]).to include(expected_project.full_path)
end
end
def act_on_security_warning(action:)
page.within('#create-pipeline-for-fork-merge-request-modal') do
expect(page).to have_content('Are you sure you want to run this pipeline?')
click_button(action)
end
end
def check_no_pipelines
page.within('.ci-table') do
expect(page).to have_selector('.commit', count: 1)
end
end
end
describe 'race condition' do

View file

@ -121,14 +121,14 @@ describe('Pipelines table in Commits and Merge requests', () => {
pipelineCopy = { ...pipeline };
});
describe('when latest pipeline has detached flag and canRunPipeline is true', () => {
describe('when latest pipeline has detached flag', () => {
it('renders the run pipeline button', done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
pipelineCopy.flags.merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: true });
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).not.toBeNull();
@ -137,46 +137,14 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
describe('when latest pipeline has detached flag and canRunPipeline is false', () => {
it('does not render the run pipeline button', done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
pipelineCopy.flags.merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: false });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
done();
});
});
});
describe('when latest pipeline does not have detached flag and canRunPipeline is true', () => {
describe('when latest pipeline does not have detached flag', () => {
it('does not render the run pipeline button', done => {
pipelineCopy.flags.detached_merge_request_pipeline = false;
pipelineCopy.flags.merge_request_pipeline = false;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: true });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
done();
});
});
});
describe('when latest pipeline does not have detached flag and merge_request_pipeline is true', () => {
it('does not render the run pipeline button', done => {
pipelineCopy.flags.detached_merge_request_pipeline = false;
pipelineCopy.flags.merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: false });
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
@ -186,6 +154,9 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
describe('on click', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
beforeEach(() => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
@ -206,6 +177,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm.$el.querySelector('.js-run-mr-pipeline').click();
vm.$nextTick(() => {
expect(findModal()).toBeNull();
expect(vm.state.isRunningMergeRequestPipeline).toBe(true);
setImmediate(() => {
@ -217,6 +189,39 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
});
describe('on click for fork merge request', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
beforeEach(() => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
vm = mountComponent(PipelinesTable, {
...props,
projectId: '5',
mergeRequestId: 3,
canCreatePipelineInTargetProject: true,
sourceProjectFullPath: 'test/parent-project',
targetProjectFullPath: 'test/fork-project',
});
});
it('shows a security warning modal', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
vm.$el.querySelector('.js-run-mr-pipeline').click();
vm.$nextTick(() => {
expect(findModal()).not.toBeNull();
done();
});
});
});
});
});
describe('unsuccessfull request', () => {

View file

@ -9,7 +9,7 @@ exports[`Design management upload button component renders inverted upload desig
icon=""
size="small"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
variant="default"
>
Upload designs
@ -35,7 +35,7 @@ exports[`Design management upload button component renders loading icon 1`] = `
icon=""
size="small"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
variant="default"
>
Upload designs
@ -66,7 +66,7 @@ exports[`Design management upload button component renders upload design button
icon=""
size="small"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
variant="default"
>
Upload designs

View file

@ -1603,6 +1603,18 @@ describe('DiffsStoreActions', () => {
expect(commit).toHaveBeenCalledWith(types.UPDATE_CURRENT_DIFF_FILE_ID, '123');
});
it('does not commit UPDATE_CURRENT_DIFF_FILE_ID when discussion has no diff_file', () => {
const commit = jest.fn();
const rootGetters = {
getDiscussion: () => ({ id: '1' }),
notesById: { '1': { discussion_id: '2' } },
};
setCurrentDiffFileIdFromNote({ commit, rootGetters }, '1');
expect(commit).not.toHaveBeenCalled();
});
});
describe('navigateToDiffFileIndex', () => {

View file

@ -0,0 +1,141 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do
let(:file_name) { 'file_name.rb' }
let(:content) { 'content' }
let(:snippets) { table(:snippets) }
let(:snippet_repositories) { table(:snippet_repositories) }
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:snippet_statistics) { table(:snippet_statistics) }
let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
let(:routes) { table(:routes) }
let(:repo_size) { 123456 }
let(:expected_repo_size) { repo_size.megabytes }
let(:user1) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test1') }
let(:user2) { users.create!(id: 2, email: 'test2@example.com', projects_limit: 100, username: 'test2') }
let!(:user1_namespace) { namespaces.create!(id: 1, name: 'user1', path: 'user1', owner_id: user1.id) }
let!(:user2_namespace) { namespaces.create!(id: 2, name: 'user2', path: 'user2', owner_id: user2.id) }
let(:user1_namespace_statistics) { namespace_statistics.find_by(namespace_id: user1_namespace.id) }
let(:user2_namespace_statistics) { namespace_statistics.find_by(namespace_id: user2_namespace.id) }
let(:ids) { snippets.pluck(:id) }
let(:migration) { described_class.new }
subject do
migration.perform(ids)
end
before do
allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
end
after do
snippets.all.each { |s| raw_repository(s).remove }
end
context 'with existing personal snippets' do
let!(:snippet1) { create_snippet(1, user1) }
let!(:snippet2) { create_snippet(2, user1) }
let!(:snippet3) { create_snippet(3, user2) }
let!(:snippet4) { create_snippet(4, user2) }
before do
create_snippet_statistics(2, 0)
create_snippet_statistics(4, 123)
end
it 'creates/updates all snippet_statistics' do
expect { subject }.to change { snippet_statistics.count }.from(2).to(4)
expect(snippet_statistics.pluck(:repository_size)).to be_all(expected_repo_size)
end
it 'creates/updates the associated namespace statistics' do
expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
subject
stats = snippet_statistics.where(snippet_id: [snippet1, snippet2]).sum(:repository_size)
expect(user1_namespace_statistics.snippets_size).to eq stats
stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
expect(user2_namespace_statistics.snippets_size).to eq stats
end
context 'when an error is raised when updating a namespace statistics' do
it 'logs the error and continue execution' do
expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
expect(instance).to receive(:execute).with(Namespace.find(user1_namespace.id)).and_raise('Error')
end
expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
end
subject
expect(user1_namespace_statistics).to be_nil
stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
expect(user2_namespace_statistics.snippets_size).to eq stats
end
end
end
context 'when a snippet repository is empty' do
let!(:snippet1) { create_snippet(1, user1, with_repo: false) }
let!(:snippet2) { create_snippet(2, user1) }
it 'logs error and continues execution' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
end
subject
expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
expect(user1_namespace_statistics.snippets_size).to eq expected_repo_size
end
end
def create_snippet(id, author, with_repo: true)
snippets.create!(id: id, type: 'PersonalSnippet', author_id: author.id, file_name: file_name, content: content).tap do |snippet|
if with_repo
allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
TestEnv.copy_repo(snippet,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
raw_repository(snippet).create_repository
end
end
end
def create_snippet_statistics(snippet_id, repository_size = 0)
snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
end
def raw_repository(snippet)
Gitlab::Git::Repository.new('default',
"#{disk_path(snippet)}.git",
Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
"@snippets/#{snippet.id}")
end
def hashed_repository(snippet)
Storage::Hashed.new(snippet, prefix: '@snippets')
end
def disk_path(snippet)
hashed_repository(snippet).disk_path
end
end

View file

@ -101,7 +101,7 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
describe '#append' do
shared_examples_for 'appends' do
it "truncates and append content" do
it "truncates and appends content" do
stream.append(+"89", 4)
stream.seek(0)

View file

@ -242,6 +242,7 @@ RSpec.describe Gitlab::Danger::Helper do
'.editorconfig' | [:engineering_productivity]
'tooling/overcommit/foo' | [:engineering_productivity]
'.codeclimate.yml' | [:engineering_productivity]
'.gitlab/CODEOWNERS' | [:engineering_productivity]
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:backend]

View file

@ -0,0 +1,56 @@
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200714075739_schedule_populate_personal_snippet_statistics.rb')
RSpec.describe SchedulePopulatePersonalSnippetStatistics do
let(:users) { table(:users) }
let(:snippets) { table(:snippets) }
let(:projects) { table(:projects) }
let(:user1) { users.create!(id: 1, email: 'user1@example.com', projects_limit: 10, username: 'test1', name: 'Test1', state: 'active') }
let(:user2) { users.create!(id: 2, email: 'user2@example.com', projects_limit: 10, username: 'test2', name: 'Test2', state: 'active') }
let(:user3) { users.create!(id: 3, email: 'user3@example.com', projects_limit: 10, username: 'test3', name: 'Test3', state: 'active') }
def create_snippet(id, user_id, type = 'PersonalSnippet')
params = {
id: id,
type: type,
author_id: user_id,
file_name: 'foo',
content: 'bar'
}
snippets.create!(params)
end
it 'correctly schedules background migrations' do
# Creating the snippets in a mixed order across authors
create_snippet(1, user1.id)
create_snippet(2, user2.id)
create_snippet(3, user1.id)
create_snippet(4, user3.id)
create_snippet(5, user3.id)
create_snippet(6, user1.id)
# Creating a project snippet to ensure we don't pick it
create_snippet(7, user1.id, 'ProjectSnippet')
stub_const("#{described_class}::BATCH_SIZE", 4)
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
aggregate_failures do
expect(described_class::MIGRATION)
.to be_scheduled_migration([1, 3, 6, 2])
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, [4, 5])
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end
end
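
The scheduling assertions above (personal snippets only, IDs grouped so one author's snippets share a batch, batches spaced two minutes apart) follow the usual post-deploy migration pattern. A hedged sketch of an up method that would produce exactly those batches; migrate_in is the standard background-migration helper, while the query shape is an assumption:

# Sketch: order by author so each author's snippets land in the same
# batch, then schedule BATCH_SIZE ids per job with a growing delay.
class SchedulePopulatePersonalSnippetStatistics < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  MIGRATION = 'PopulatePersonalSnippetStatistics'
  BATCH_SIZE = 500
  DELAY_INTERVAL = 2.minutes

  disable_ddl_transaction!

  def up
    ids = exec_query(<<~SQL).rows.flatten
      SELECT id FROM snippets
      WHERE type = 'PersonalSnippet'
      ORDER BY author_id ASC, id ASC
    SQL

    ids.each_slice(BATCH_SIZE).with_index do |batch, index|
      migrate_in(index * DELAY_INTERVAL, MIGRATION, [batch])
    end
  end

  def down
    # no-op: statistics stay populated
  end
end

With BATCH_SIZE stubbed to 4, this ordering yields [1, 3, 6, 2] immediately and [4, 5] two minutes later, matching the expectations above.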

View file

@ -262,6 +262,12 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.data).to be_empty
end
it 'does not read data when appending' do
expect(build_trace_chunk).not_to receive(:data)
build_trace_chunk.append(new_data, offset)
end
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
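
The new example above locks in that append never round-trips the existing data through Ruby; the chunk hands the bytes and offset straight to the store. A sketch of that fast path, with names assumed from the surrounding specs rather than copied from the source:

# Illustrative: delegate to the store's append_data and treat a
# negative returned length as a failed append. `data_store`, `full?`
# and `schedule_to_persist` are assumptions.
def append(new_data, offset)
  raise ArgumentError, 'Offset is out of range' if offset.negative? || offset > size

  new_size = data_store.append_data(self, new_data, offset)
  raise FailedToPersistDataError, 'Failed to append data' if new_size.negative?

  schedule_to_persist if full?
end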
@ -486,7 +492,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.redis?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
@ -508,7 +514,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.redis?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
end
end
@ -535,7 +541,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
@ -557,7 +563,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
end
end

View file

@ -89,6 +89,24 @@ RSpec.describe Ci::BuildTraceChunks::Database do
end
end
describe '#size' do
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'üabcdef') }
it 'returns the data bytesize' do
expect(data_store.size(model)).to eq 8
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
it 'returns zero' do
expect(data_store.size(model)).to be_zero
end
end
end
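
For the database-backed store, the new #size examples are satisfied by taking the bytesize of whatever is stored, so multi-byte UTF-8 counts correctly and a missing value reads as zero. A minimal sketch, assuming the chunk keeps its bytes in a raw_data attribute:

# Sketch: bytesize, not character length, so 'üabcdef' is 8 as the
# spec expects; nil data degrades to zero via to_i.
def size(model)
  model.raw_data&.bytesize.to_i
end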
describe '#keys' do
subject { data_store.keys(relation) }

View file

@ -40,7 +40,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'returns nil' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
expect(data_store.data(model)).to be_nil
end
end
end
@ -66,7 +66,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'sets new data' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
expect(data_store.data(model)).to be_nil
subject
@ -86,7 +86,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
subject
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
expect(data_store.data(model)).to be_nil
end
end
@ -94,11 +94,29 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'does nothing' do
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
expect(data_store.data(model)).to be_nil
subject
expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
expect(data_store.data(model)).to be_nil
end
end
end
describe '#size' do
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'üabcd') }
it 'returns the data bytesize' do
expect(data_store.size(model)).to eq 6
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'returns zero' do
expect(data_store.size(model)).to be_zero
end
end
end
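
The repeated change from expect { ... }.to raise_error(Excon::Error::NotFound) to expect(...).to be_nil shows the Fog store now absorbing the missing-object error itself instead of leaking Excon's exception to every caller. A hedged sketch of that shape (the connection call is an assumption; the rescue is the point):

# Sketch: a 404 from object storage now means "no data", not an error.
def data(model)
  connection.get_object(bucket_name, key(model))[:body]
rescue Excon::Error::NotFound
  nil
end

# The #size examples then fall out of the same method:
def size(model)
  data(model)&.bytesize.to_i
end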

View file

@ -61,6 +61,86 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
end
end
describe '#append_data' do
context 'when valid offset is used with existing data' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abcd') }
it 'appends data' do
expect(data_store.data(model)).to eq('abcd')
length = data_store.append_data(model, '12345', 4)
expect(length).to eq 9
expect(data_store.data(model)).to eq('abcd12345')
end
end
context 'when data does not exist yet' do
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
it 'sets new data' do
expect(data_store.data(model)).to be_nil
length = data_store.append_data(model, 'abc', 0)
expect(length).to eq 3
expect(data_store.data(model)).to eq('abc')
end
end
context 'when data needs to be truncated' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: '12345678') }
it 'appends data and truncates stored value' do
expect(data_store.data(model)).to eq('12345678')
length = data_store.append_data(model, 'ab', 4)
expect(length).to eq 6
expect(data_store.data(model)).to eq('1234ab')
end
end
context 'when invalid offset is provided' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abc') }
it 'returns a negative length' do
length = data_store.append_data(model, '12345', 4)
expect(length).to be_negative
end
end
context 'when trace contains multi-byte UTF8 characters' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'aüc') }
it 'appends data' do
length = data_store.append_data(model, '1234', 4)
data_store.data(model).then do |new_data|
expect(new_data.bytesize).to eq 8
expect(new_data).to eq 'aüc1234'
end
expect(length).to eq 8
end
end
context 'when trace contains non-UTF8 characters' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: "a\255c") }
it 'appends data' do
length = data_store.append_data(model, '1234', 3)
data_store.data(model).then do |new_data|
expect(new_data.bytesize).to eq 7
end
expect(length).to eq 7
end
end
end
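
Taken together, these #append_data examples specify byte-offset semantics: append at the end, create when missing, keep only the first offset bytes when the offset falls inside the existing value, and report an out-of-range offset with a negative length instead of raising. A non-atomic Ruby sketch of the same rules (the real store would do this atomically, for example in a Lua script; key_for and the Redis wrapper are assumptions):

# Sketch of the byte-offset rules the specs pin down. Redis STRLEN,
# GETRANGE and APPEND all operate on bytes, which is what makes the
# multi-byte and non-UTF8 examples above work out.
def append_data(model, new_data, offset)
  Gitlab::Redis::SharedState.with do |redis|
    key = key_for(model)
    current_size = redis.strlen(key)

    break -1 if offset > current_size # invalid offset: negative length, no exception

    if offset < current_size
      kept = offset.zero? ? '' : redis.getrange(key, 0, offset - 1)
      redis.set(key, kept + new_data) # truncate, then append
    else
      redis.append(key, new_data)
    end

    redis.strlen(key)
  end
end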
describe '#delete_data' do
subject { data_store.delete_data(model) }
@ -89,6 +169,24 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
end
end
describe '#size' do
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'üabcd') }
it 'returns the data bytesize' do
expect(data_store.size(model)).to eq 6
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
it 'returns zero' do
expect(data_store.size(model)).to be_zero
end
end
end
describe '#keys' do
subject { data_store.keys(relation) }

View file

@ -31,6 +31,28 @@ RSpec.describe MergeRequestWidgetEntity do
end
end
describe 'can_create_pipeline_in_target_project' do
context 'when user has permission' do
before do
project.add_developer(user)
end
it 'includes the correct permission info' do
expect(subject[:can_create_pipeline_in_target_project]).to eq(true)
end
end
context 'when user does not have permission' do
before do
project.add_guest(user)
end
it 'includes the correct permission info' do
expect(subject[:can_create_pipeline_in_target_project]).to eq(false)
end
end
end
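
The widget entity presumably surfaces this flag by running a policy check against the merge request's target project; a minimal sketch of such an exposure (the :create_pipeline ability name is inferred from the field, not confirmed by the diff):

# Sketch of the exposure the spec exercises: developers on the target
# project get true, guests get false.
expose :can_create_pipeline_in_target_project do |merge_request|
  can?(current_user, :create_pipeline, merge_request.target_project)
end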
describe 'issues links' do
it 'includes issues links when requested' do
data = described_class.new(resource, request: request, issues_links: true).as_json

View file

@ -52,7 +52,15 @@ RSpec.describe IncidentManagement::CreateIncidentLabelService do
end
context 'without label' do
it_behaves_like 'new label'
context 'when user has permissions to create labels' do
it_behaves_like 'new label'
end
context 'when user has no permissions to create labels' do
let_it_be(:user) { create(:user) }
it_behaves_like 'new label'
end
end
end
end
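
Running the 'new label' examples for a user with no label permissions implies the service now creates the reserved incident label without authorizing the acting user. A speculative sketch of that change (skip_authorization is a real keyword on Labels::FindOrCreateService#execute, but its use here is an assumption):

# Sketch: the incident label is system-managed, so create it even when
# the acting user could not create labels themselves.
def execute
  label = Labels::FindOrCreateService
    .new(current_user, project, LABEL_PROPERTIES)
    .execute(skip_authorization: true)

  ServiceResponse.success(payload: { label: label })
end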

View file

@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'editing snippet checks blob is binary' do
let(:snippets_binary_blob_value) { true }
before do
sign_in(user)
@ -8,6 +10,8 @@ RSpec.shared_examples 'editing snippet checks blob is binary' do
allow(blob).to receive(:binary?).and_return(binary)
end
stub_feature_flags(snippets_binary_blob: snippets_binary_blob_value)
subject
end
@ -23,13 +27,24 @@ RSpec.shared_examples 'editing snippet checks blob is binary' do
context 'when blob is binary' do
let(:binary) { true }
it 'redirects away' do
expect(response).to redirect_to(gitlab_snippet_path(snippet))
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
end
context 'when feature flag :snippets_binary_blob is disabled' do
let(:snippets_binary_blob_value) { false }
it 'redirects away' do
expect(response).to redirect_to(gitlab_snippet_path(snippet))
end
end
end
end
RSpec.shared_examples 'updating snippet checks blob is binary' do
let(:snippets_binary_blob_value) { true }
before do
sign_in(user)
@ -37,6 +52,8 @@ RSpec.shared_examples 'updating snippet checks blob is binary' do
allow(blob).to receive(:binary?).and_return(binary)
end
stub_feature_flags(snippets_binary_blob: snippets_binary_blob_value)
subject
end
@ -52,9 +69,18 @@ RSpec.shared_examples 'updating snippet checks blob is binary' do
context 'when blob is binary' do
let(:binary) { true }
it 'redirects away without updating' do
it 'updates successfully' do
expect(snippet.reload.title).to eq title
expect(response).to redirect_to(gitlab_snippet_path(snippet))
expect(snippet.reload.title).not_to eq title
end
context 'when feature flag :snippets_binary_blob is disabled' do
let(:snippets_binary_blob_value) { false }
it 'redirects away without updating' do
expect(response).to redirect_to(gitlab_snippet_path(snippet))
expect(snippet.reload.title).not_to eq title
end
end
end
end

View file

@ -16,12 +16,4 @@ RSpec.shared_examples 'create alert issue sets issue labels' do
expect(issue.labels).to eq([label])
end
end
context 'when create incident label responds with error' do
let(:label_service_response) { ServiceResponse.error(payload: { label: label }, message: 'label error') }
it 'creates an issue without labels' do
expect(issue.labels).to be_empty
end
end
end

yarn.lock
View file

@ -764,10 +764,10 @@
core-js-pure "^3.0.0"
regenerator-runtime "^0.13.4"
"@babel/runtime@^7.10.2", "@babel/runtime@^7.8.4":
version "7.10.2"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.2.tgz#d103f21f2602497d38348a32e008637d506db839"
integrity sha512-6sF3uQw2ivImfVIl62RZ7MXhO2tap69WeWK57vAaimT6AZbE4FbqjdEJIN1UqoD6wI6B+1n9UiagafH1sxjOtg==
"@babel/runtime@^7.10.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
version "7.10.4"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.4.tgz#a6724f1a6b8d2f6ea5236dbfe58c7d7ea9c5eb99"
integrity sha512-UpTN5yUJr9b4EX2CnGNWIvER7Ab83ibv0pcvvHc4UOdrBI5jb8bj+32cCwPX6xu0mt2daFNjYhoi+X7beH0RSw==
dependencies:
regenerator-runtime "^0.13.4"
@ -1757,94 +1757,95 @@ anymatch@~3.1.1:
normalize-path "^3.0.0"
picomatch "^2.0.4"
apollo-cache-inmemory@^1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/apollo-cache-inmemory/-/apollo-cache-inmemory-1.6.3.tgz#826861d20baca4abc45f7ca7a874105905b8525d"
integrity sha512-S4B/zQNSuYc0M/1Wq8dJDTIO9yRgU0ZwDGnmlqxGGmFombOZb9mLjylewSfQKmjNpciZ7iUIBbJ0mHlPJTzdXg==
apollo-cache-inmemory@^1.6.6:
version "1.6.6"
resolved "https://registry.yarnpkg.com/apollo-cache-inmemory/-/apollo-cache-inmemory-1.6.6.tgz#56d1f2a463a6b9db32e9fa990af16d2a008206fd"
integrity sha512-L8pToTW/+Xru2FFAhkZ1OA9q4V4nuvfoPecBM34DecAugUZEBhI2Hmpgnzq2hTKZ60LAMrlqiASm0aqAY6F8/A==
dependencies:
apollo-cache "^1.3.2"
apollo-utilities "^1.3.2"
apollo-cache "^1.3.5"
apollo-utilities "^1.3.4"
optimism "^0.10.0"
ts-invariant "^0.4.0"
tslib "^1.9.3"
tslib "^1.10.0"
apollo-cache@1.3.2, apollo-cache@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/apollo-cache/-/apollo-cache-1.3.2.tgz#df4dce56240d6c95c613510d7e409f7214e6d26a"
integrity sha512-+KA685AV5ETEJfjZuviRTEImGA11uNBp/MJGnaCvkgr+BYRrGLruVKBv6WvyFod27WEB2sp7SsG8cNBKANhGLg==
apollo-cache@1.3.5, apollo-cache@^1.3.5:
version "1.3.5"
resolved "https://registry.yarnpkg.com/apollo-cache/-/apollo-cache-1.3.5.tgz#9dbebfc8dbe8fe7f97ba568a224bca2c5d81f461"
integrity sha512-1XoDy8kJnyWY/i/+gLTEbYLnoiVtS8y7ikBr/IfmML4Qb+CM7dEEbIUOjnY716WqmZ/UpXIxTfJsY7rMcqiCXA==
dependencies:
apollo-utilities "^1.3.2"
tslib "^1.9.3"
apollo-utilities "^1.3.4"
tslib "^1.10.0"
apollo-client@^2.6.4:
version "2.6.4"
resolved "https://registry.yarnpkg.com/apollo-client/-/apollo-client-2.6.4.tgz#872c32927263a0d34655c5ef8a8949fbb20b6140"
integrity sha512-oWOwEOxQ9neHHVZrQhHDbI6bIibp9SHgxaLRVPoGvOFy7OH5XUykZE7hBQAVxq99tQjBzgytaZffQkeWo1B4VQ==
apollo-client@^2.6.10:
version "2.6.10"
resolved "https://registry.yarnpkg.com/apollo-client/-/apollo-client-2.6.10.tgz#86637047b51d940c8eaa771a4ce1b02df16bea6a"
integrity sha512-jiPlMTN6/5CjZpJOkGeUV0mb4zxx33uXWdj/xQCfAMkuNAC3HN7CvYDyMHHEzmcQ5GV12LszWoQ/VlxET24CtA==
dependencies:
"@types/zen-observable" "^0.8.0"
apollo-cache "1.3.2"
apollo-cache "1.3.5"
apollo-link "^1.0.0"
apollo-utilities "1.3.2"
apollo-utilities "1.3.4"
symbol-observable "^1.0.2"
ts-invariant "^0.4.0"
tslib "^1.9.3"
tslib "^1.10.0"
zen-observable "^0.8.0"
apollo-link-batch-http@^1.2.11:
version "1.2.11"
resolved "https://registry.yarnpkg.com/apollo-link-batch-http/-/apollo-link-batch-http-1.2.11.tgz#ae42dbcc02820658e1e267d05bf2aae7ac208088"
integrity sha512-f+KEdbP51I3AeEaBDW2lKS3eaPK/1IXaTM9F2moj02s1hgC/TzeUORRuUeOExW8ggXveW1Jzp6aYMJ2SQkZJyA==
apollo-link-batch-http@^1.2.14:
version "1.2.14"
resolved "https://registry.yarnpkg.com/apollo-link-batch-http/-/apollo-link-batch-http-1.2.14.tgz#4502109d3f32a94d88eabd3a89274ae3a6e2f56f"
integrity sha512-LFUmfV3OXR3Er+zSgFxPY/qUe4Wyx0HS1euJZ36RCCaDvPegr24C9OQgKFScHy91VbjRTtFUyjXXVq1xFGPMvQ==
dependencies:
apollo-link "^1.2.11"
apollo-link-batch "^1.1.12"
apollo-link-http-common "^0.2.13"
apollo-link "^1.2.14"
apollo-link-batch "^1.1.15"
apollo-link-http-common "^0.2.16"
tslib "^1.9.3"
apollo-link-batch@^1.1.12:
version "1.1.12"
resolved "https://registry.yarnpkg.com/apollo-link-batch/-/apollo-link-batch-1.1.12.tgz#64eb231082f182b0395ef7ab903600627f6c7fe8"
integrity sha512-6NqLiB9tEGxRiyhtnX/7CPHkmFG0IXfEP7pC5kirhjV+4KxqBaWvJnJGKpGp7Owgdph7KJlV+9+niOKEkcwreg==
apollo-link-batch@^1.1.15:
version "1.1.15"
resolved "https://registry.yarnpkg.com/apollo-link-batch/-/apollo-link-batch-1.1.15.tgz#3a5b8c7d9cf1b7840ce630238249b95070e75e54"
integrity sha512-XbfQI/FNxJW9RSgJTfAl7RDFxxN77425yDtT7YgsImH4/2NQ+U4SWN6thWE3ZU1Wf7ktXd+XFa3KkenBRTybOQ==
dependencies:
apollo-link "^1.2.11"
apollo-link "^1.2.14"
tslib "^1.9.3"
apollo-link-http-common@^0.2.13, apollo-link-http-common@^0.2.8:
version "0.2.13"
resolved "https://registry.yarnpkg.com/apollo-link-http-common/-/apollo-link-http-common-0.2.13.tgz#c688f6baaffdc7b269b2db7ae89dae7c58b5b350"
integrity sha512-Uyg1ECQpTTA691Fwx5e6Rc/6CPSu4TB4pQRTGIpwZ4l5JDOQ+812Wvi/e3IInmzOZpwx5YrrOfXrtN8BrsDXoA==
apollo-link-http-common@^0.2.14, apollo-link-http-common@^0.2.16:
version "0.2.16"
resolved "https://registry.yarnpkg.com/apollo-link-http-common/-/apollo-link-http-common-0.2.16.tgz#756749dafc732792c8ca0923f9a40564b7c59ecc"
integrity sha512-2tIhOIrnaF4UbQHf7kjeQA/EmSorB7+HyJIIrUjJOKBgnXwuexi8aMecRlqTIDWcyVXCeqLhUnztMa6bOH/jTg==
dependencies:
apollo-link "^1.2.11"
ts-invariant "^0.3.2"
apollo-link "^1.2.14"
ts-invariant "^0.4.0"
tslib "^1.9.3"
apollo-link@^1.0.0, apollo-link@^1.2.11, apollo-link@^1.2.6:
version "1.2.11"
resolved "https://registry.yarnpkg.com/apollo-link/-/apollo-link-1.2.11.tgz#493293b747ad3237114ccd22e9f559e5e24a194d"
integrity sha512-PQvRCg13VduLy3X/0L79M6uOpTh5iHdxnxYuo8yL7sJlWybKRJwsv4IcRBJpMFbChOOaHY7Og9wgPo6DLKDKDA==
apollo-link@^1.0.0, apollo-link@^1.2.12, apollo-link@^1.2.14:
version "1.2.14"
resolved "https://registry.yarnpkg.com/apollo-link/-/apollo-link-1.2.14.tgz#3feda4b47f9ebba7f4160bef8b977ba725b684d9"
integrity sha512-p67CMEFP7kOG1JZ0ZkYZwRDa369w5PIjtMjvrQd/HnIV8FRsHRqLqK+oAZQnFa1DDdZtOtHTi+aMIW6EatC2jg==
dependencies:
apollo-utilities "^1.2.1"
ts-invariant "^0.3.2"
apollo-utilities "^1.3.0"
ts-invariant "^0.4.0"
tslib "^1.9.3"
zen-observable-ts "^0.8.18"
zen-observable-ts "^0.8.21"
apollo-upload-client@^10.0.0:
version "10.0.0"
resolved "https://registry.yarnpkg.com/apollo-upload-client/-/apollo-upload-client-10.0.0.tgz#6cc3d0ea2aef40bc237b655f5042809cacee1859"
integrity sha512-N0SENiEkZXoY4nl9xxrXFcj/cL0AVkSNQ4aYXSaruCBWE0aKpK6aCe4DBmiEHrK3FAsMxZPEJxBRIWNbsXT8dw==
apollo-upload-client@^13.0.0:
version "13.0.0"
resolved "https://registry.yarnpkg.com/apollo-upload-client/-/apollo-upload-client-13.0.0.tgz#146d1ddd85d711fcac8ca97a72d3ca6787f2b71b"
integrity sha512-lJ9/bk1BH1lD15WhWRha2J3+LrXrPIX5LP5EwiOUHv8PCORp4EUrcujrA3rI5hZeZygrTX8bshcuMdpqpSrvtA==
dependencies:
apollo-link "^1.2.6"
apollo-link-http-common "^0.2.8"
extract-files "^5.0.0"
"@babel/runtime" "^7.9.2"
apollo-link "^1.2.12"
apollo-link-http-common "^0.2.14"
extract-files "^8.0.0"
apollo-utilities@1.3.2, apollo-utilities@^1.2.1, apollo-utilities@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/apollo-utilities/-/apollo-utilities-1.3.2.tgz#8cbdcf8b012f664cd6cb5767f6130f5aed9115c9"
integrity sha512-JWNHj8XChz7S4OZghV6yc9FNnzEXj285QYp/nLNh943iObycI5GTDO3NGR9Dth12LRrSFMeDOConPfPln+WGfg==
apollo-utilities@1.3.4, apollo-utilities@^1.3.0, apollo-utilities@^1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/apollo-utilities/-/apollo-utilities-1.3.4.tgz#6129e438e8be201b6c55b0f13ce49d2c7175c9cf"
integrity sha512-pk2hiWrCXMAy2fRPwEyhvka+mqwzeP60Jr1tRYi5xru+3ko94HI9o6lK0CT33/w4RDlxWchmdhDCrvdr+pHCig==
dependencies:
"@wry/equality" "^0.1.2"
fast-json-stable-stringify "^2.0.0"
ts-invariant "^0.4.0"
tslib "^1.9.3"
tslib "^1.10.0"
append-transform@^1.0.0:
version "1.0.0"
@ -4890,10 +4891,10 @@ extglob@^2.0.4:
snapdragon "^0.8.1"
to-regex "^3.0.1"
extract-files@^5.0.0:
version "5.0.1"
resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-5.0.1.tgz#c9492a8410be643e260a376f0151361993d5f659"
integrity sha512-qRW6y9eKF0VbCyOoOEtFhzJ3uykAw8GKwQVXyAIqwocyEWW4m+v+evec34RwtUkkxxHh7NKBLJ6AnXM8W4dH5w==
extract-files@^8.0.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-8.1.0.tgz#46a0690d0fe77411a2e3804852adeaa65cd59288"
integrity sha512-PTGtfthZK79WUMk+avLmwx3NGdU8+iVFXC2NMGxKsn0MnihOG2lvumj+AZo8CTwTrwjXDgZ5tztbRlEdRjBonQ==
extract-from-css@^0.4.4:
version "0.4.4"
@ -5568,10 +5569,10 @@ graphql-tag@^2.10.1:
resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.10.3.tgz#ea1baba5eb8fc6339e4c4cf049dabe522b0edf03"
integrity sha512-4FOv3ZKfA4WdOKJeHdz6B3F/vxBLSgmBcGeAFPf4n1F64ltJUvOOerNj0rsJxONQGdhUMynQIvd6LzB+1J5oKA==
graphql@^14.0.2:
version "14.0.2"
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.0.2.tgz#7dded337a4c3fd2d075692323384034b357f5650"
integrity sha512-gUC4YYsaiSJT1h40krG3J+USGlwhzNTXSb4IOZljn9ag5Tj+RkoXrWp+Kh7WyE3t1NCfab5kzCuxBIvOMERMXw==
graphql@^14.7.0:
version "14.7.0"
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.7.0.tgz#7fa79a80a69be4a31c27dda824dc04dac2035a72"
integrity sha512-l0xWZpoPKpppFzMfvVyFmp9vLN7w/ZZJPefUicMCepfJeQ8sMcztloGYY9DfjVPo6tIUDzU5Hw3MUbIjj9AVVA==
dependencies:
iterall "^1.2.2"
@ -5955,11 +5956,6 @@ immediate@~3.0.5:
resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b"
integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=
immer@^5.2.1:
version "5.2.1"
resolved "https://registry.yarnpkg.com/immer/-/immer-5.2.1.tgz#7d4f74c242178e87151d595f48db1b5c51580485"
integrity sha512-9U1GEbJuH6nVoyuFRgTQDGMzcBuNBPfXM3M7Pp/sdmYKTKYOBUZGgeUb9H57GfLK/xC1DMLarWX2FrhMBfUJ8g==
import-fresh@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546"
@ -11580,13 +11576,6 @@ tryer@^1.0.0:
resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.0.tgz#027b69fa823225e551cace3ef03b11f6ab37c1d7"
integrity sha1-Antp+oIyJeVRys4+8DsR9qs3wdc=
ts-invariant@^0.3.2:
version "0.3.2"
resolved "https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.3.2.tgz#89a2ffeb70879b777258df1df1c59383c35209b0"
integrity sha512-QsY8BCaRnHiB5T6iE4DPlJMAKEG3gzMiUco9FEt1jUXQf0XP6zi0idT0i0rMTu8A326JqNSDsmlkA9dRSh1TRg==
dependencies:
tslib "^1.9.3"
ts-invariant@^0.4.0:
version "0.4.4"
resolved "https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.4.4.tgz#97a523518688f93aafad01b0e80eb803eb2abd86"
@ -11609,10 +11598,10 @@ ts-jest@24.0.0, ts-jest@^23.10.5:
semver "^5.5"
yargs-parser "10.x"
tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
version "1.11.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.11.1.tgz#eb15d128827fbee2841549e171f45ed338ac7e35"
integrity sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==
tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
version "1.13.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043"
integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==
tsutils@^3.17.1:
version "3.17.1"
@ -12757,10 +12746,10 @@ yeast@0.1.2:
resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419"
integrity sha1-AI4G2AlDIMNy28L47XagymyKxBk=
zen-observable-ts@^0.8.18:
version "0.8.18"
resolved "https://registry.yarnpkg.com/zen-observable-ts/-/zen-observable-ts-0.8.18.tgz#ade44b1060cc4a800627856ec10b9c67f5f639c8"
integrity sha512-q7d05s75Rn1j39U5Oapg3HI2wzriVwERVo4N7uFGpIYuHB9ff02P/E92P9B8T7QVC93jCMHpbXH7X0eVR5LA7A==
zen-observable-ts@^0.8.21:
version "0.8.21"
resolved "https://registry.yarnpkg.com/zen-observable-ts/-/zen-observable-ts-0.8.21.tgz#85d0031fbbde1eba3cd07d3ba90da241215f421d"
integrity sha512-Yj3yXweRc8LdRMrCC8nIc4kkjWecPAUVh0TI0OUrWXx6aX790vLcDlWca6I4vsyCGH3LpWxq0dJRcMOFoVqmeg==
dependencies:
tslib "^1.9.3"
zen-observable "^0.8.0"