Add latest changes from gitlab-org/gitlab@master
Commit: 52af12cfec (parent: 66b0c3d82e)
48 changed files with 839 additions and 117 deletions
@@ -534,6 +534,7 @@ rspec:undercoverage:
- if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA" ]; then
echo "Checking out \$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ($CI_MERGE_REQUEST_SOURCE_BRANCH_SHA) instead of \$CI_COMMIT_SHA (merge result commit $CI_COMMIT_SHA) so we can use $CI_MERGE_REQUEST_DIFF_BASE_SHA for undercoverage in this merged result pipeline";
git checkout -f ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA};
bundle_install_script;
else
echo "Using \$CI_COMMIT_SHA ($CI_COMMIT_SHA) for this non-merge result pipeline.";
fi;
@@ -16,3 +16,4 @@ GraphQL/FieldDefinitions:
- ee/app/graphql/types/group_release_stats_type.rb
- ee/app/graphql/types/iteration_type.rb
- ee/app/graphql/types/requirements_management/requirement_type.rb
- ee/app/graphql/types/vulnerability_type.rb
@@ -1 +1 @@
fe6bcc9ca347b59714c46adf65d100dd93abde52
034cc7332fc1ebf67599f7f9e98e1588bc6d1823
@@ -1,5 +1,6 @@
import { memoize } from 'lodash';
import { createNodeDict } from '../utils';
import { EXPLICIT_NEEDS_PROPERTY, NEEDS_PROPERTY } from '../constants';
import { createSankey } from './dag/drawing_utils';

/*
@@ -15,12 +16,14 @@ const deduplicate = (item, itemIndex, arr) => {
return foundIdx === itemIndex;
};

export const makeLinksFromNodes = (nodes, nodeDict) => {
export const makeLinksFromNodes = (nodes, nodeDict, { needsKey = NEEDS_PROPERTY } = {}) => {
const constantLinkValue = 10; // all links are the same weight
return nodes
.map(({ jobs, name: groupName }) =>
jobs.map(({ needs = [] }) =>
needs.reduce((acc, needed) => {
jobs.map((job) => {
const needs = job[needsKey] || [];

return needs.reduce((acc, needed) => {
// It's possible that we have an optional job, which
// is being needed by another job. In that scenario,
// the needed job doesn't exist, so we don't want to
@@ -34,8 +37,8 @@ export const makeLinksFromNodes = (nodes, nodeDict) => {
}

return acc;
}, []),
),
}, []);
}),
)
.flat(2);
};
@@ -76,9 +79,9 @@ export const filterByAncestors = (links, nodeDict) =>
return !allAncestors.includes(source);
});

export const parseData = (nodes) => {
const nodeDict = createNodeDict(nodes);
const allLinks = makeLinksFromNodes(nodes, nodeDict);
export const parseData = (nodes, { needsKey = NEEDS_PROPERTY } = {}) => {
const nodeDict = createNodeDict(nodes, { needsKey });
const allLinks = makeLinksFromNodes(nodes, nodeDict, { needsKey });
const filteredLinks = allLinks.filter(deduplicate);
const links = filterByAncestors(filteredLinks, nodeDict);
@@ -123,7 +126,8 @@ export const removeOrphanNodes = (sankeyfiedNodes) => {
export const listByLayers = ({ stages }) => {
const arrayOfJobs = stages.flatMap(({ groups }) => groups);
const parsedData = parseData(arrayOfJobs);
const dataWithLayers = createSankey()(parsedData);
const explicitParsedData = parseData(arrayOfJobs, { needsKey: EXPLICIT_NEEDS_PROPERTY });
const dataWithLayers = createSankey()(explicitParsedData);

const pipelineLayers = dataWithLayers.nodes.reduce((acc, { layer, name }) => {
/* sort groups by layer */
@@ -1,4 +1,5 @@
import { reportToSentry } from '../utils';
import { EXPLICIT_NEEDS_PROPERTY, NEEDS_PROPERTY } from '../constants';

const unwrapGroups = (stages) => {
return stages.map((stage, idx) => {
@@ -27,12 +28,16 @@ const unwrapNodesWithName = (jobArray, prop, field = 'name') => {
}

return jobArray.map((job) => {
if (job[prop]) {
return { ...job, [prop]: job[prop].nodes.map((item) => item[field] || '') };
}
return job;
});
};

const unwrapJobWithNeeds = (denodedJobArray) => {
return unwrapNodesWithName(denodedJobArray, 'needs');
const explicitNeedsUnwrapped = unwrapNodesWithName(denodedJobArray, EXPLICIT_NEEDS_PROPERTY);
return unwrapNodesWithName(explicitNeedsUnwrapped, NEEDS_PROPERTY);
};

const unwrapStagesWithNeedsAndLookup = (denodedStages) => {
@@ -7,6 +7,8 @@ export const ANY_TRIGGER_AUTHOR = 'Any';
export const SUPPORTED_FILTER_PARAMETERS = ['username', 'ref', 'status', 'source'];
export const FILTER_TAG_IDENTIFIER = 'tag';
export const SCHEDULE_ORIGIN = 'schedule';
export const NEEDS_PROPERTY = 'needs';
export const EXPLICIT_NEEDS_PROPERTY = 'previousStageJobsOrNeeds';

export const TestStatus = {
FAILED: 'failed',
@@ -1,6 +1,6 @@
import * as Sentry from '@sentry/browser';
import { pickBy } from 'lodash';
import { SUPPORTED_FILTER_PARAMETERS } from './constants';
import { SUPPORTED_FILTER_PARAMETERS, NEEDS_PROPERTY } from './constants';

/*
The following functions are the main engine in transforming the data as
@@ -35,11 +35,11 @@ import { SUPPORTED_FILTER_PARAMETERS } from './constants';
10 -> value (constant)
*/

export const createNodeDict = (nodes) => {
export const createNodeDict = (nodes, { needsKey = NEEDS_PROPERTY } = {}) => {
return nodes.reduce((acc, node) => {
const newNode = {
...node,
needs: node.jobs.map((job) => job.needs || []).flat(),
needs: node.jobs.map((job) => job[needsKey] || []).flat(),
};

if (node.size > 1) {
@@ -12,4 +12,7 @@ class NewProjectSastEnabledExperiment < ApplicationExperiment # rubocop:disable
def free_indicator_behavior
end

def unchecked_candidate_behavior
end
end
@@ -15,6 +15,7 @@ module Ci
def execute
search!
filter_by_active!
filter_by_status!
filter_by_runner_type!
filter_by_tag_list!
@@ -60,6 +61,10 @@ module Ci
end
end

def filter_by_active!
@runners = @runners.active(@params[:active]) if @params.include?(:active)
end

def filter_by_status!
filter_by!(:status_status, Ci::Runner::AVAILABLE_STATUSES)
end
@@ -91,6 +91,14 @@ query getPipelineDetails($projectPath: ID!, $iid: ID!) {
name
}
}
previousStageJobsOrNeeds {
__typename
nodes {
__typename
id
name
}
}
status: detailedStatus {
__typename
id
@@ -7,6 +7,10 @@ module Resolvers
type Types::Ci::RunnerType.connection_type, null: true

argument :active, ::GraphQL::Types::Boolean,
required: false,
description: 'Filter runners by active (true) or paused (false) status.'

argument :status, ::Types::Ci::RunnerStatusEnum,
required: false,
description: 'Filter runners by status.'
@@ -38,6 +42,7 @@ module Resolvers
def runners_finder_params(params)
{
active: params[:active],
status_status: params[:status]&.to_s,
type_type: params[:type],
tag_name: params[:tag_list],
@@ -60,8 +60,8 @@ module Ci
before_save :ensure_token

scope :active, -> { where(active: true) }
scope :paused, -> { where(active: false) }
scope :active, -> (value = true) { where(active: value) }
scope :paused, -> { active(false) }
scope :online, -> { where('contacted_at > ?', online_contact_time_deadline) }
scope :recent, -> { where('ci_runners.created_at >= :date OR ci_runners.contacted_at >= :date', date: stale_deadline) }
scope :stale, -> { where('ci_runners.created_at < :date AND (ci_runners.contacted_at IS NULL OR ci_runners.contacted_at < :date)', date: stale_deadline) }
@@ -15,8 +15,8 @@ module AfterCommitQueue
end

def run_after_commit_or_now(&block)
if ApplicationRecord.inside_transaction?
if ActiveRecord::Base.connection.current_transaction.records&.include?(self) # rubocop: disable Database/MultipleDatabases
if self.class.inside_transaction?
if connection.current_transaction.records&.include?(self)
run_after_commit(&block)
else
# If the current transaction does not include this record, we can run
@@ -197,6 +197,13 @@ class Snippet < ApplicationRecord
Snippet.find_by(id: id, project: project)
end

def find_by_project_title_trunc_created_at(project, title, created_at)
where(project: project, title: title)
.find_by(
"date_trunc('second', created_at at time zone :tz) at time zone :tz = :created_at",
tz: created_at.zone, created_at: created_at)
end

def max_file_limit
MAX_FILE_COUNT
end
@@ -37,7 +37,6 @@
.settings-content
= render partial: 'network_rate_limits', locals: { anchor: 'js-packages-limits-settings', setting_fragment: 'packages_api' }

- if Feature.enabled?(:files_api_throttling, default_enabled: :yaml)
%section.settings.as-files-limits.no-animate#js-files-limits-settings{ class: ('expanded' if expanded_by_default?) }
.settings-header
%h4
@@ -74,25 +74,25 @@
= s_('ProjectsNew|Allows you to immediately clone this project’s repository. Skip this if you plan to push up an existing repository.')

- experiment(:new_project_sast_enabled, user: current_user) do |e|
- e.try do
.form-group
.form-check.gl-mb-3
- e.try(:candidate) do
= check_box_tag 'project[initialize_with_sast]', '1', true, class: 'form-check-input', data: { qa_selector: 'initialize_with_sast_checkbox', track_experiment: e.name, track_label: track_label, track_action: 'activate_form_input', track_property: 'init_with_sast' }
= label_tag 'project[initialize_with_sast]', class: 'form-check-label' do
= s_('ProjectsNew|Enable Static Application Security Testing (SAST)')
.form-text.text-muted
= s_('ProjectsNew|Analyze your source code for known security vulnerabilities.')
= link_to _('Learn more.'), help_page_path('user/application_security/sast/index'), target: '_blank', rel: 'noopener noreferrer', data: { track_action: 'followed', track_experiment: e.name }
- e.try(:free_indicator) do
.form-group
.form-check.gl-mb-3
= check_box_tag 'project[initialize_with_sast]', '1', true, class: 'form-check-input', data: { qa_selector: 'initialize_with_sast_checkbox', track_experiment: e.name, track_label: track_label, track_action: 'activate_form_input', track_property: 'init_with_sast' }
= label_tag 'project[initialize_with_sast]', class: 'form-check-label' do
= s_('ProjectsNew|Enable Static Application Security Testing (SAST)')
%span.badge.badge-info.badge-pill.gl-badge.sm= _('Free')
- e.try(:unchecked_candidate) do
= check_box_tag 'project[initialize_with_sast]', '1', false, class: 'form-check-input', data: { qa_selector: 'initialize_with_sast_checkbox', track_experiment: e.name, track_label: track_label, track_action: 'activate_form_input', track_property: 'init_with_sast' }
= label_tag 'project[initialize_with_sast]', class: 'form-check-label' do
= s_('ProjectsNew|Enable Static Application Security Testing (SAST)')
.form-text.text-muted
= s_('ProjectsNew|Analyze your source code for known security vulnerabilities.')
= link_to _('Learn more.'), help_page_path('user/application_security/sast/index'), target: '_blank', rel: 'noopener noreferrer', data: { track_action: 'followed', track_experiment: e.name }

= f.submit _('Create project'), class: "btn gl-button btn-confirm", data: { track_label: "#{track_label}", track_action: "click_button", track_property: "create_project", track_value: "" }
= link_to _('Cancel'), dashboard_projects_path, class: 'btn gl-button btn-default btn-cancel', data: { track_label: "#{track_label}", track_action: "click_button", track_property: "cancel", track_value: "" }
@@ -13,8 +13,7 @@
= sprite_icon('tag')
= @tag.name
- if protected_tag?(@project, @tag)
%span.badge.badge-success
= s_('TagsPage|protected')
= gl_badge_tag s_('TagsPage|protected'), variant: :success

- if user
= link_to user_path(user) do
@@ -28,8 +28,7 @@
%td= s_('Runners|Tags')
%td
- runner.tag_list.sort.each do |tag|
%span.badge.badge-primary
= tag
= gl_badge_tag tag, variant: :info
%tr
%td= s_('Runners|Name')
%td= runner.name
@@ -1,8 +0,0 @@
---
name: files_api_throttling
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68560
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/338903
milestone: '14.3'
type: development
group: group::source code
default_enabled: false
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341423
milestone: '14.4'
type: development
group: group::container security
default_enabled: false
default_enabled: true
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddCiRunnersIndexOnCreatedAtWhereActiveIsFalse < Gitlab::Database::Migration[1.0]
INDEX_NAME = 'index_ci_runners_on_created_at_and_id_where_inactive'

disable_ddl_transaction!

def up
add_concurrent_index :ci_runners, [:created_at, :id], where: 'active = FALSE', order: { created_at: :desc, id: :desc }, name: INDEX_NAME
end

def down
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
end
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AddCiRunnersIndexOnContactedAtWhereActiveIsFalse < Gitlab::Database::Migration[1.0]
INDEX_NAME = 'index_ci_runners_on_contacted_at_and_id_where_inactive'

disable_ddl_transaction!

def up
add_concurrent_index :ci_runners, [:contacted_at, :id], where: 'active = FALSE', order: { contacted_at: :desc, id: :desc }, name: INDEX_NAME
end

def down
remove_concurrent_index_by_name :ci_runners, INDEX_NAME
end
end
db/schema_migrations/20211207154413 (new file)
@@ -0,0 +1 @@
98098b41864158fc4de3b8fe42603b2c0c5c2fbc664397c431712311bdaa3621

db/schema_migrations/20211207154414 (new file)
@@ -0,0 +1 @@
278907a15d04b455aa852eb9d17000c6b353be6ef78a8dcc2e71a9772a6e43ea
@@ -25610,10 +25610,14 @@ CREATE INDEX index_ci_runner_projects_on_runner_id ON ci_runner_projects USING b
CREATE INDEX index_ci_runners_on_contacted_at_and_id_desc ON ci_runners USING btree (contacted_at, id DESC);

CREATE INDEX index_ci_runners_on_contacted_at_and_id_where_inactive ON ci_runners USING btree (contacted_at DESC, id DESC) WHERE (active = false);

CREATE INDEX index_ci_runners_on_contacted_at_desc_and_id_desc ON ci_runners USING btree (contacted_at DESC, id DESC);

CREATE INDEX index_ci_runners_on_created_at_and_id_desc ON ci_runners USING btree (created_at, id DESC);

CREATE INDEX index_ci_runners_on_created_at_and_id_where_inactive ON ci_runners USING btree (created_at DESC, id DESC) WHERE (active = false);

CREATE INDEX index_ci_runners_on_created_at_desc_and_id_desc ON ci_runners USING btree (created_at DESC, id DESC);

CREATE INDEX index_ci_runners_on_description_trigram ON ci_runners USING gin (description gin_trgm_ops);
@@ -88,12 +88,8 @@ requests per user. For more information, read
### Files API

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68561) in GitLab 14.3.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
ask an administrator to [enable the `files_api_throttling` flag](../administration/feature_flags.md). On GitLab.com, this feature is available but can be configured by GitLab.com administrators only.
The feature is not ready for production use.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68561) in GitLab 14.3 [with a flag](../administration/feature_flags.md) named `files_api_throttling`. Disabled by default.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75918) in GitLab 14.6. [Feature flag `files_api_throttling`](https://gitlab.com/gitlab-org/gitlab/-/issues/338903) removed.

This setting limits the request rate on the Packages API per user or IP address. For more information, read
[Files API rate limits](../user/admin_area/settings/files_api_rate_limits.md).
@@ -370,6 +370,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryrunnersactive"></a>`active` | [`Boolean`](#boolean) | Filter runners by active (true) or paused (false) status. |
| <a id="queryrunnerssearch"></a>`search` | [`String`](#string) | Filter by full token or partial text in description field. |
| <a id="queryrunnerssort"></a>`sort` | [`CiRunnerSort`](#cirunnersort) | Sort order of results. |
| <a id="queryrunnersstatus"></a>`status` | [`CiRunnerStatus`](#cirunnerstatus) | Filter runners by status. |
@@ -10917,6 +10918,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="grouprunnersactive"></a>`active` | [`Boolean`](#boolean) | Filter runners by active (true) or paused (false) status. |
| <a id="grouprunnersmembership"></a>`membership` | [`RunnerMembershipFilter`](#runnermembershipfilter) | Control which runners to include in the results. |
| <a id="grouprunnerssearch"></a>`search` | [`String`](#string) | Filter by full token or partial text in description field. |
| <a id="grouprunnerssort"></a>`sort` | [`CiRunnerSort`](#cirunnersort) | Sort order of results. |
@@ -15312,6 +15314,7 @@ Represents a vulnerability.
| <a id="vulnerabilityconfirmedat"></a>`confirmedAt` | [`Time`](#time) | Timestamp of when the vulnerability state was changed to confirmed. |
| <a id="vulnerabilityconfirmedby"></a>`confirmedBy` | [`UserCore`](#usercore) | User that confirmed the vulnerability. |
| <a id="vulnerabilitydescription"></a>`description` | [`String`](#string) | Description of the vulnerability. |
| <a id="vulnerabilitydescriptionhtml"></a>`descriptionHtml` | [`String`](#string) | The GitLab Flavored Markdown rendering of `description`. |
| <a id="vulnerabilitydetails"></a>`details` | [`[VulnerabilityDetail!]!`](#vulnerabilitydetail) | Details of the vulnerability. |
| <a id="vulnerabilitydetectedat"></a>`detectedAt` | [`Time!`](#time) | Timestamp of when the vulnerability was first detected. |
| <a id="vulnerabilitydiscussions"></a>`discussions` | [`DiscussionConnection!`](#discussionconnection) | All discussions on this noteable. (see [Connections](#connections)) |
@@ -7,13 +7,8 @@ type: reference
# Files API rate limits **(FREE SELF)**

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68561) in GitLab 14.3.

FLAG:
On self-managed GitLab, by default this feature is not available. To make it
available, ask an administrator to [enable the `files_api_throttling` flag](../../../administration/feature_flags.md).
On GitLab.com, this feature is available but can be configured by GitLab.com
administrators only. The feature is not ready for production use.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68561) in GitLab 14.3.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75918) in GitLab 14.6. [Feature flag files_api_throttling](https://gitlab.com/gitlab-org/gitlab/-/issues/338903) removed.

The [Repository files API](../../../api/repository_files.md) enables you to
fetch, create, update, and delete files in your repository. To improve the security
@@ -29,10 +24,9 @@ the general user and IP rate limits for requests to the
and IP rate limits already in place, and increase or decrease the rate limits
for the Files API. No other new features are provided by this override.

Prerequisites:
Prerequisite:

- You must have the Administrator role for your instance.
- The `files_api_throttling` feature flag must be enabled.

To override the general user and IP rate limits for requests to the Repository files API:
@ -0,0 +1,48 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module BulkImports
|
||||
module Projects
|
||||
module Graphql
|
||||
module GetSnippetRepositoryQuery
|
||||
extend Queryable
|
||||
extend self
|
||||
|
||||
def to_s
|
||||
<<-'GRAPHQL'
|
||||
query($full_path: ID!) {
|
||||
project(fullPath: $full_path) {
|
||||
snippets {
|
||||
page_info: pageInfo {
|
||||
next_page: endCursor
|
||||
has_next_page: hasNextPage
|
||||
}
|
||||
nodes {
|
||||
title
|
||||
createdAt
|
||||
httpUrlToRepo
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
GRAPHQL
|
||||
end
|
||||
|
||||
def variables(context)
|
||||
{
|
||||
full_path: context.entity.source_full_path,
|
||||
cursor: context.tracker.next_page,
|
||||
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
|
||||
}
|
||||
end
|
||||
|
||||
def base_path
|
||||
%w[data project snippets]
|
||||
end
|
||||
|
||||
def data_path
|
||||
base_path << 'nodes'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,69 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module BulkImports
|
||||
module Projects
|
||||
module Pipelines
|
||||
class SnippetsRepositoryPipeline
|
||||
include Pipeline
|
||||
|
||||
extractor Common::Extractors::GraphqlExtractor, query: Graphql::GetSnippetRepositoryQuery
|
||||
|
||||
def transform(_context, data)
|
||||
data.tap do |d|
|
||||
d['createdAt'] = DateTime.parse(data['createdAt'])
|
||||
end
|
||||
end
|
||||
|
||||
def load(context, data)
|
||||
return unless data['httpUrlToRepo'].present?
|
||||
|
||||
oauth2_url = oauth2(data['httpUrlToRepo'])
|
||||
validate_url(oauth2_url)
|
||||
|
||||
matched_snippet = find_matched_snippet(data)
|
||||
# Skip snippets that we couldn't find a match. Probably because more snippets were
|
||||
# added after the migration had already started, namely after the SnippetsPipeline
|
||||
# has already run.
|
||||
return unless matched_snippet
|
||||
|
||||
matched_snippet.create_repository
|
||||
matched_snippet.repository.fetch_as_mirror(oauth2_url)
|
||||
response = Snippets::RepositoryValidationService.new(nil, matched_snippet).execute
|
||||
|
||||
# skips matched_snippet repository creation if repository is invalid
|
||||
return cleanup_snippet_repository(matched_snippet) if response.error?
|
||||
|
||||
Snippets::UpdateStatisticsService.new(matched_snippet).execute
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def find_matched_snippet(data)
|
||||
Snippet.find_by_project_title_trunc_created_at(
|
||||
context.portable, data['title'], data['createdAt'])
|
||||
end
|
||||
|
||||
def allow_local_requests?
|
||||
Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
|
||||
end
|
||||
|
||||
def oauth2(url)
|
||||
url.sub("://", "://oauth2:#{context.configuration.access_token}@")
|
||||
end
|
||||
|
||||
def validate_url(url)
|
||||
Gitlab::UrlBlocker.validate!(
|
||||
url,
|
||||
allow_local_network: allow_local_requests?,
|
||||
allow_localhost: allow_local_requests?)
|
||||
end
|
||||
|
||||
def cleanup_snippet_repository(snippet)
|
||||
snippet.repository.remove
|
||||
snippet.snippet_repository.delete
|
||||
snippet.repository.expire_exists_cache
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -35,6 +35,10 @@ module BulkImports
|
|||
pipeline: BulkImports::Projects::Pipelines::SnippetsPipeline,
|
||||
stage: 3
|
||||
},
|
||||
snippets_repository: {
|
||||
pipeline: BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline,
|
||||
stage: 4
|
||||
},
|
||||
boards: {
|
||||
pipeline: BulkImports::Common::Pipelines::BoardsPipeline,
|
||||
stage: 4
|
||||
|
|
|
@ -139,14 +139,12 @@ module Gitlab
|
|||
|
||||
def throttle_unauthenticated_files_api?
|
||||
files_api_path? &&
|
||||
Feature.enabled?(:files_api_throttling, default_enabled: :yaml) &&
|
||||
Gitlab::Throttle.settings.throttle_unauthenticated_files_api_enabled &&
|
||||
unauthenticated?
|
||||
end
|
||||
|
||||
def throttle_authenticated_files_api?
|
||||
files_api_path? &&
|
||||
Feature.enabled?(:files_api_throttling, default_enabled: :yaml) &&
|
||||
Gitlab::Throttle.settings.throttle_authenticated_files_api_enabled
|
||||
end
|
||||
|
||||
|
|
|
@ -120,18 +120,14 @@ module Gitlab
|
|||
Random.rand(Float::MAX.to_i).to_s(36)
|
||||
end
|
||||
|
||||
# See: http://stackoverflow.com/questions/2108727/which-in-ruby-checking-if-program-exists-in-path-from-ruby
|
||||
# Cross-platform way of finding an executable in the $PATH.
|
||||
# Behaves like `which` on Linux machines: given PATH, try to resolve the given
|
||||
# executable name to an absolute path, or return nil.
|
||||
#
|
||||
# which('ruby') #=> /usr/bin/ruby
|
||||
def which(cmd, env = ENV)
|
||||
exts = env['PATHEXT'] ? env['PATHEXT'].split(';') : ['']
|
||||
|
||||
env['PATH'].split(File::PATH_SEPARATOR).each do |path|
|
||||
exts.each do |ext|
|
||||
exe = File.join(path, "#{cmd}#{ext}")
|
||||
return exe if File.executable?(exe) && !File.directory?(exe)
|
||||
end
|
||||
def which(filename)
|
||||
ENV['PATH']&.split(File::PATH_SEPARATOR)&.each do |path|
|
||||
full_path = File.join(path, filename)
|
||||
return full_path if File.executable?(full_path)
|
||||
end
|
||||
|
||||
nil
|
||||
|
|
|
@ -4,7 +4,7 @@ require 'spec_helper'
|
|||
|
||||
RSpec.describe NewProjectSastEnabledExperiment do
|
||||
it "defines the expected behaviors and variants" do
|
||||
expect(subject.behaviors.keys).to match_array(%w[control candidate free_indicator])
|
||||
expect(subject.behaviors.keys).to match_array(%w[control candidate free_indicator unchecked_candidate])
|
||||
end
|
||||
|
||||
it "publishes to the database" do
|
||||
|
|
|
@ -56,6 +56,31 @@ RSpec.describe 'User creates a project', :js do
|
|||
expect(page).to have_content('README.md Initial commit')
|
||||
end
|
||||
|
||||
it 'allows creating a new project when the new_project_sast_enabled is assigned the unchecked candidate' do
|
||||
stub_experiments(new_project_sast_enabled: 'unchecked_candidate')
|
||||
|
||||
visit(new_project_path)
|
||||
|
||||
click_link 'Create blank project'
|
||||
fill_in(:project_name, with: 'With initial commits')
|
||||
|
||||
expect(page).to have_checked_field 'Initialize repository with a README'
|
||||
expect(page).to have_unchecked_field 'Enable Static Application Security Testing (SAST)'
|
||||
|
||||
check 'Enable Static Application Security Testing (SAST)'
|
||||
|
||||
page.within('#content-body') do
|
||||
click_button('Create project')
|
||||
end
|
||||
|
||||
project = Project.last
|
||||
|
||||
expect(current_path).to eq(project_path(project))
|
||||
expect(page).to have_content('With initial commits')
|
||||
expect(page).to have_content('Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist')
|
||||
expect(page).to have_content('README.md Initial commit')
|
||||
end
|
||||
|
||||
context 'in a subgroup they do not own' do
|
||||
let(:parent) { create(:group) }
|
||||
let!(:subgroup) { create(:group, parent: parent) }
|
||||
|
|
|
@ -59,6 +59,20 @@ RSpec.describe Ci::RunnersFinder do
|
|||
end
|
||||
end
|
||||
|
||||
context 'by active status' do
|
||||
it 'with active set as false calls the corresponding scope on Ci::Runner with false' do
|
||||
expect(Ci::Runner).to receive(:active).with(false).and_call_original
|
||||
|
||||
described_class.new(current_user: admin, params: { active: false }).execute
|
||||
end
|
||||
|
||||
it 'with active set as true calls the corresponding scope on Ci::Runner with true' do
|
||||
expect(Ci::Runner).to receive(:active).with(true).and_call_original
|
||||
|
||||
described_class.new(current_user: admin, params: { active: true }).execute
|
||||
end
|
||||
end
|
||||
|
||||
context 'by runner type' do
|
||||
it 'calls the corresponding scope on Ci::Runner' do
|
||||
expect(Ci::Runner).to receive(:project_type).and_call_original
|
||||
|
@ -263,7 +277,15 @@ RSpec.describe Ci::RunnersFinder do
|
|||
let(:extra_params) { { search: 'runner_project_search' } }
|
||||
|
||||
it 'returns correct runner' do
|
||||
expect(subject).to eq([runner_project_3])
|
||||
expect(subject).to match_array([runner_project_3])
|
||||
end
|
||||
end
|
||||
|
||||
context 'by active status' do
|
||||
let(:extra_params) { { active: false } }
|
||||
|
||||
it 'returns correct runner' do
|
||||
expect(subject).to match_array([runner_sub_group_1])
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -271,7 +293,7 @@ RSpec.describe Ci::RunnersFinder do
|
|||
let(:extra_params) { { status_status: 'paused' } }
|
||||
|
||||
it 'returns correct runner' do
|
||||
expect(subject).to eq([runner_sub_group_1])
|
||||
expect(subject).to match_array([runner_sub_group_1])
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -279,7 +301,7 @@ RSpec.describe Ci::RunnersFinder do
|
|||
let(:extra_params) { { tag_name: %w[runner_tag] } }
|
||||
|
||||
it 'returns correct runner' do
|
||||
expect(subject).to eq([runner_project_5])
|
||||
expect(subject).to match_array([runner_project_5])
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ Array [
|
|||
"id": "6",
|
||||
"name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -53,6 +54,7 @@ Array [
|
|||
"id": "11",
|
||||
"name": "build_b",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -93,6 +95,7 @@ Array [
|
|||
"id": "16",
|
||||
"name": "build_c",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -133,6 +136,7 @@ Array [
|
|||
"id": "21",
|
||||
"name": "build_d 1/3",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -157,6 +161,7 @@ Array [
|
|||
"id": "24",
|
||||
"name": "build_d 2/3",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -181,6 +186,7 @@ Array [
|
|||
"id": "27",
|
||||
"name": "build_d 3/3",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -221,6 +227,7 @@ Array [
|
|||
"id": "59",
|
||||
"name": "test_c",
|
||||
"needs": Array [],
|
||||
"previousStageJobsOrNeeds": Array [],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -267,6 +274,11 @@ Array [
|
|||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"previousStageJobsOrNeeds": Array [
|
||||
"build_c",
|
||||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -313,6 +325,13 @@ Array [
|
|||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"previousStageJobsOrNeeds": Array [
|
||||
"build_d 3/3",
|
||||
"build_d 2/3",
|
||||
"build_d 1/3",
|
||||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -343,6 +362,13 @@ Array [
|
|||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"previousStageJobsOrNeeds": Array [
|
||||
"build_d 3/3",
|
||||
"build_d 2/3",
|
||||
"build_d 1/3",
|
||||
"build_b",
|
||||
"build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
|
||||
],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
@ -385,6 +411,9 @@ Array [
|
|||
"needs": Array [
|
||||
"build_b",
|
||||
],
|
||||
"previousStageJobsOrNeeds": Array [
|
||||
"build_b",
|
||||
],
|
||||
"scheduledAt": null,
|
||||
"status": Object {
|
||||
"__typename": "DetailedStatus",
|
||||
|
|
|
@ -73,6 +73,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -118,6 +122,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -163,6 +171,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -208,6 +220,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
__typename: 'CiJob',
|
||||
|
@ -235,6 +251,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
__typename: 'CiJob',
|
||||
|
@ -262,6 +282,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -339,6 +363,27 @@ export const mockPipelineResponse = {
|
|||
},
|
||||
],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '37',
|
||||
name: 'build_c',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '38',
|
||||
name: 'build_b',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '39',
|
||||
name:
|
||||
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -411,6 +456,37 @@ export const mockPipelineResponse = {
|
|||
},
|
||||
],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '45',
|
||||
name: 'build_d 3/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '46',
|
||||
name: 'build_d 2/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '47',
|
||||
name: 'build_d 1/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '48',
|
||||
name: 'build_b',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '49',
|
||||
name:
|
||||
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
__typename: 'CiJob',
|
||||
|
@ -465,6 +541,37 @@ export const mockPipelineResponse = {
|
|||
},
|
||||
],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '52',
|
||||
name: 'build_d 3/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '53',
|
||||
name: 'build_d 2/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '54',
|
||||
name: 'build_d 1/3',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '55',
|
||||
name: 'build_b',
|
||||
},
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '56',
|
||||
name:
|
||||
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -503,6 +610,10 @@ export const mockPipelineResponse = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -547,6 +658,16 @@ export const mockPipelineResponse = {
|
|||
},
|
||||
],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [
|
||||
{
|
||||
__typename: 'CiBuildNeed',
|
||||
id: '65',
|
||||
name: 'build_b',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -720,6 +841,10 @@ export const wrappedPipelineReturn = {
|
|||
__typename: 'CiBuildNeedConnection',
|
||||
nodes: [],
|
||||
},
|
||||
previousStageJobsOrNeeds: {
|
||||
__typename: 'CiJobConnection',
|
||||
nodes: [],
|
||||
},
|
||||
status: {
|
||||
__typename: 'DetailedStatus',
|
||||
id: '84',
|
||||
|
|
|
@ -45,6 +45,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do
|
|||
let(:finder) { instance_double(::Ci::RunnersFinder) }
|
||||
let(:args) do
|
||||
{
|
||||
active: true,
|
||||
status: 'active',
|
||||
type: :instance_type,
|
||||
tag_list: ['active_runner'],
|
||||
|
@ -55,6 +56,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do
|
|||
|
||||
let(:expected_params) do
|
||||
{
|
||||
active: true,
|
||||
status_status: 'active',
|
||||
type_type: :instance_type,
|
||||
tag_name: ['active_runner'],
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do
|
||||
describe 'query repository based on full_path' do
|
||||
let_it_be(:entity) { create(:bulk_import_entity) }
|
||||
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
|
||||
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
|
||||
|
||||
it 'has a valid query' do
|
||||
query = GraphQL::Query.new(
|
||||
GitlabSchema,
|
||||
described_class.to_s,
|
||||
variables: described_class.variables(context)
|
||||
)
|
||||
result = GitlabSchema.static_validator.validate(query)
|
||||
|
||||
expect(result[:errors]).to be_empty
|
||||
end
|
||||
|
||||
it 'returns snippet httpUrlToRepo' do
|
||||
expect(described_class.to_s).to include('httpUrlToRepo')
|
||||
end
|
||||
|
||||
it 'returns snippet createdAt' do
|
||||
expect(described_class.to_s).to include('createdAt')
|
||||
end
|
||||
|
||||
it 'returns snippet title' do
|
||||
expect(described_class.to_s).to include('title')
|
||||
end
|
||||
|
||||
describe '.variables' do
|
||||
it 'queries project based on source_full_path and pagination' do
|
||||
expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 }
|
||||
|
||||
expect(described_class.variables(context)).to eq(expected)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.data_path' do
|
||||
it '.data_path returns data path' do
|
||||
expected = %w[data project snippets nodes]
|
||||
|
||||
expect(described_class.data_path).to eq(expected)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.page_info_path' do
|
||||
it '.page_info_path returns pagination information path' do
|
||||
expected = %w[data project snippets page_info]
|
||||
|
||||
expect(described_class.page_info_path).to eq(expected)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,168 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
|
||||
let(:user) { create(:user) }
|
||||
let(:project) { create(:project) }
|
||||
let(:bulk_import) { create(:bulk_import, user: user) }
|
||||
let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
|
||||
let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z")}
|
||||
let(:entity) do
|
||||
create(
|
||||
:bulk_import_entity,
|
||||
:project_entity,
|
||||
project: project,
|
||||
bulk_import: bulk_import_configuration.bulk_import,
|
||||
source_full_path: 'source/full/path',
|
||||
destination_name: 'My Destination Project',
|
||||
destination_namespace: project.full_path
|
||||
)
|
||||
end
|
||||
|
||||
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
|
||||
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
|
||||
|
||||
subject(:pipeline) { described_class.new(context) }
|
||||
|
||||
let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' }
|
||||
let(:data) do
|
||||
[
|
||||
{
|
||||
'title' => matched_snippet.title,
|
||||
'httpUrlToRepo' => http_url_to_repo,
|
||||
'createdAt' => matched_snippet.created_at.to_s
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
let(:page_info) do
|
||||
{
|
||||
'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ',
|
||||
'has_next_page' => false
|
||||
}
|
||||
end
|
||||
|
||||
let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }
|
||||
|
||||
describe 'extractor' do
|
||||
it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
|
||||
expect(described_class.get_extractor).to eq(
|
||||
klass: BulkImports::Common::Extractors::GraphqlExtractor,
|
||||
options: {
|
||||
query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
describe '#run' do
|
||||
let(:validation_response) { double(Hash, 'error?': false) }
|
||||
|
||||
before do
|
||||
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
|
||||
allow(extractor).to receive(:extract).and_return(extracted_data)
|
||||
end
|
||||
|
||||
allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation|
|
||||
allow(repository_validation).to receive(:execute).and_return(validation_response)
|
||||
end
|
||||
end
|
||||
|
||||
shared_examples 'skippable snippet' do
|
||||
it 'does not create snippet repo' do
|
||||
pipeline.run
|
||||
|
||||
expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false
|
||||
end
|
||||
end
|
||||
|
||||
context 'when a snippet is not matched' do
|
||||
let(:data) do
|
||||
[
|
||||
{
|
||||
'title' => 'unmatched title',
|
||||
'httpUrlToRepo' => http_url_to_repo,
|
||||
'createdAt' => matched_snippet.created_at.to_s
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
it_behaves_like 'skippable snippet'
|
||||
end
|
||||
|
||||
context 'when httpUrlToRepo is empty' do
|
||||
let(:data) do
|
||||
[
|
||||
{
|
||||
'title' => matched_snippet.title,
|
||||
'createdAt' => matched_snippet.created_at.to_s
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
it_behaves_like 'skippable snippet'
|
||||
end
|
||||
|
||||
context 'when a snippet matches' do
|
||||
context 'when snippet url is valid' do
|
||||
it 'creates snippet repo' do
|
||||
expect { pipeline.run }
|
||||
.to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true
|
||||
end
|
||||
|
||||
it 'updates snippets statistics' do
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:fetch_as_mirror)
|
||||
end
|
||||
|
||||
service = double(Snippets::UpdateStatisticsService)
|
||||
|
||||
expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service)
|
||||
expect(service).to receive(:execute)
|
||||
|
||||
pipeline.run
|
||||
end
|
||||
|
||||
it 'fetches snippet repo from url' do
|
||||
expect_next_instance_of(Repository) do |repository|
|
||||
expect(repository)
|
||||
.to receive(:fetch_as_mirror)
|
||||
.with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git")
|
||||
end
|
||||
|
||||
pipeline.run
|
||||
end
|
||||
end
|
||||
|
||||
context 'when url is invalid' do
|
||||
let(:http_url_to_repo) { 'http://0.0.0.0' }
|
||||
|
||||
it_behaves_like 'skippable snippet'
|
||||
end
|
||||
|
||||
context 'when snippet is invalid' do
|
||||
let(:validation_response) { double(Hash, 'error?': true) }
|
||||
|
||||
before do
|
||||
allow_next_instance_of(Repository) do |repository|
|
||||
allow(repository).to receive(:fetch_as_mirror)
|
||||
end
|
||||
end
|
||||
|
||||
it 'does not leave a hanging SnippetRepository behind' do
|
||||
pipeline.run
|
||||
|
||||
expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false
|
||||
end
|
||||
|
||||
it 'does not call UpdateStatisticsService' do
|
||||
expect(Snippets::UpdateStatisticsService).not_to receive(:new)
|
||||
|
||||
pipeline.run
|
||||
end
|
||||
|
||||
it_behaves_like 'skippable snippet'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -14,6 +14,7 @@ RSpec.describe BulkImports::Projects::Stage do
|
|||
[2, BulkImports::Common::Pipelines::BadgesPipeline],
|
||||
[3, BulkImports::Projects::Pipelines::IssuesPipeline],
|
||||
[3, BulkImports::Projects::Pipelines::SnippetsPipeline],
|
||||
[4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
|
||||
[4, BulkImports::Common::Pipelines::BoardsPipeline],
|
||||
[4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
|
||||
[4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
|
||||
|
|
|
@ -249,10 +249,16 @@ RSpec.describe Gitlab::Utils do
|
|||
end
|
||||
|
||||
describe '.which' do
|
||||
it 'finds the full path to an executable binary' do
|
||||
expect(File).to receive(:executable?).with('/bin/sh').and_return(true)
|
||||
before do
|
||||
stub_env('PATH', '/sbin:/usr/bin:/home/joe/bin')
|
||||
end
|
||||
|
||||
expect(which('sh', 'PATH' => '/bin')).to eq('/bin/sh')
|
||||
it 'finds the full path to an executable binary in order of appearance' do
|
||||
expect(File).to receive(:executable?).with('/sbin/tool').ordered.and_return(false)
|
||||
expect(File).to receive(:executable?).with('/usr/bin/tool').ordered.and_return(true)
|
||||
expect(File).not_to receive(:executable?).with('/home/joe/bin/tool')
|
||||
|
||||
expect(which('tool')).to eq('/usr/bin/tool')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -302,6 +302,44 @@ RSpec.describe Ci::Runner do
|
|||
it { is_expected.to eq([runner1, runner3, runner4])}
|
||||
end
|
||||
|
||||
describe '.active' do
|
||||
subject { described_class.active(active_value) }
|
||||
|
||||
let!(:runner1) { create(:ci_runner, :instance, active: false) }
|
||||
let!(:runner2) { create(:ci_runner, :instance) }
|
||||
|
||||
context 'with active_value set to false' do
|
||||
let(:active_value) { false }
|
||||
|
||||
it 'returns inactive runners' do
|
||||
is_expected.to match_array([runner1])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with active_value set to true' do
|
||||
let(:active_value) { true }
|
||||
|
||||
it 'returns active runners' do
|
||||
is_expected.to match_array([runner2])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '.paused' do
|
||||
before do
|
||||
expect(described_class).to receive(:active).with(false).and_call_original
|
||||
end
|
||||
|
||||
subject { described_class.paused }
|
||||
|
||||
let!(:runner1) { create(:ci_runner, :instance, active: false) }
|
||||
let!(:runner2) { create(:ci_runner, :instance) }
|
||||
|
||||
it 'returns inactive runners' do
|
||||
is_expected.to match_array([runner1])
|
||||
end
|
||||
end
|
||||
|
||||
describe '.stale' do
|
||||
subject { described_class.stale }
|
||||
|
||||
|
|
|
@ -69,5 +69,60 @@ RSpec.describe AfterCommitQueue do
|
|||
|
||||
expect(called).to be true
|
||||
end
|
||||
|
||||
context 'multiple databases - Ci::ApplicationRecord models' do
|
||||
before do
|
||||
skip_if_multiple_databases_not_setup
|
||||
|
||||
table_sql = <<~SQL
|
||||
CREATE TABLE _test_ci_after_commit_queue (
|
||||
id serial NOT NULL PRIMARY KEY);
|
||||
SQL
|
||||
|
||||
::Ci::ApplicationRecord.connection.execute(table_sql)
|
||||
end
|
||||
|
||||
let(:ci_klass) do
|
||||
Class.new(Ci::ApplicationRecord) do
|
||||
self.table_name = '_test_ci_after_commit_queue'
|
||||
|
||||
include AfterCommitQueue
|
||||
|
||||
def self.name
|
||||
'TestCiAfterCommitQueue'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
let(:ci_record) { ci_klass.new }
|
||||
|
||||
it 'runs immediately if not within a transaction' do
|
||||
called = false
|
||||
test_proc = proc { called = true }
|
||||
|
||||
ci_record.run_after_commit_or_now(&test_proc)
|
||||
|
||||
expect(called).to be true
|
||||
end
|
||||
|
||||
it 'runs after transaction has completed' do
|
||||
called = false
|
||||
test_proc = proc { called = true }
|
||||
|
||||
Ci::ApplicationRecord.transaction do
|
||||
# Add this record to the current transaction so that after commit hooks
|
||||
# are called
|
||||
Ci::ApplicationRecord.connection.add_transaction_record(ci_record)
|
||||
|
||||
ci_record.run_after_commit_or_now(&test_proc)
|
||||
|
||||
ci_record.save!
|
||||
|
||||
expect(called).to be false
|
||||
end
|
||||
|
||||
expect(called).to be true
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -403,6 +403,51 @@ RSpec.describe Snippet do
|
|||
end
|
||||
end
|
||||
|
||||
describe '.find_by_project_title_trunc_created_at' do
|
||||
let_it_be(:snippet) { create(:snippet) }
|
||||
let_it_be(:created_at_without_ms) { snippet.created_at.change(usec: 0) }
|
||||
|
||||
it 'returns a record if arguments match' do
|
||||
result = described_class.find_by_project_title_trunc_created_at(
|
||||
snippet.project,
|
||||
snippet.title,
|
||||
created_at_without_ms
|
||||
)
|
||||
|
||||
expect(result).to eq(snippet)
|
||||
end
|
||||
|
||||
it 'returns nil if project does not match' do
|
||||
result = described_class.find_by_project_title_trunc_created_at(
|
||||
'unmatched project',
|
||||
snippet.title,
|
||||
created_at_without_ms # to_s truncates ms of the argument
|
||||
)
|
||||
|
||||
expect(result).to be(nil)
|
||||
end
|
||||
|
||||
it 'returns nil if title does not match' do
|
||||
result = described_class.find_by_project_title_trunc_created_at(
|
||||
snippet.project,
|
||||
'unmatched title',
|
||||
created_at_without_ms # to_s truncates ms of the argument
|
||||
)
|
||||
|
||||
expect(result).to be(nil)
|
||||
end
|
||||
|
||||
it 'returns nil if created_at does not match' do
|
||||
result = described_class.find_by_project_title_trunc_created_at(
|
||||
snippet.project,
|
||||
snippet.title,
|
||||
snippet.created_at # fails match by milliseconds
|
||||
)
|
||||
|
||||
expect(result).to be(nil)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#participants' do
|
||||
let_it_be(:project) { create(:project, :public) }
|
||||
let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) }
|
||||
|
|
|
@ -720,19 +720,6 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
|
|||
expect_rejection { do_request }
|
||||
end
|
||||
|
||||
context 'when feature flag is off' do
|
||||
before do
|
||||
stub_feature_flags(files_api_throttling: false)
|
||||
end
|
||||
|
||||
it 'allows requests over the rate limit' do
|
||||
(1 + requests_per_period).times do
|
||||
do_request
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when unauthenticated api throttle is lower' do
|
||||
before do
|
||||
settings_to_set[:throttle_unauthenticated_api_requests_per_period] = 0
|
||||
|
@ -817,19 +804,6 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
|
|||
expect_rejection { do_request }
|
||||
end
|
||||
end
|
||||
|
||||
context 'when feature flag is off' do
|
||||
before do
|
||||
stub_feature_flags(files_api_throttling: false)
|
||||
end
|
||||
|
||||
it 'allows requests over the rate limit' do
|
||||
(1 + requests_per_period).times do
|
||||
do_request
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when authenticated files api throttle is disabled' do
|
||||
|
|
|
@ -113,14 +113,14 @@ RSpec.describe 'shared/runners/_runner_details.html.haml' do
|
|||
describe 'Tags value' do
|
||||
context 'when runner does not have tags' do
|
||||
it { is_expected.to have_content('Tags') }
|
||||
it { is_expected.not_to have_selector('span.badge.badge-primary')}
|
||||
it { is_expected.not_to have_selector('span.gl-badge.badge.badge-info')}
|
||||
end
|
||||
|
||||
context 'when runner have tags' do
|
||||
let(:runner) { create(:ci_runner, tag_list: %w(tag2 tag3 tag1)) }
|
||||
|
||||
it { is_expected.to have_content('Tags tag1 tag2 tag3') }
|
||||
it { is_expected.to have_selector('span.badge.badge-primary')}
|
||||
it { is_expected.to have_selector('span.gl-badge.badge.badge-info')}
|
||||
end
|
||||
end
|
||||
|
||||
|
|