Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-07-20 12:08:44 +00:00
parent 6078f74ce9
commit 96add3eb95
37 changed files with 684 additions and 293 deletions

View File

@ -1,6 +1,6 @@
<!-- This issue template is used by https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/ for tracking effort around Service Ping reporting for GitLab.com -->
<!-- This issue template is used by https://about.gitlab.com/handbook/engineering/development/analytics-section/product-intelligence/ for tracking effort around Service Ping reporting for GitLab.com -->
The [Product Intelligence group](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/) runs manual reporting of ServicePing for GitLab.com on a weekly basis. This issue captures:
The [Product Intelligence group](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/) runs manual reporting of ServicePing for GitLab.com on a weekly basis. This issue:
- Captures the work required to complete the reporting process.
- Captures the follow-up tasks that are focused on metrics performance verification.
@ -123,7 +123,7 @@ If you get mentioned, check the failing metric and open an optimization issue.
<!-- Do not edit below this line -->
/confidential
/label ~"group::product intelligence" ~"devops::growth" ~backend ~"section::growth" ~"Category:Service Ping"
/label ~"group::product intelligence" ~"devops::analytics" ~backend ~"section::analytics" ~"Category:Service Ping"
/epic https://gitlab.com/groups/gitlab-org/-/epics/6000
/weight 5
/title Monitor and Generate GitLab.com Service Ping

View File

@ -1 +1 @@
3fc66dc23581de48bdbbf1b5a5d5ca9faf5f925b
1c907781819bf8810e15578f3d4d2b25e3ca1053

View File

@ -39,13 +39,13 @@ import {
TOKEN_TITLE_TYPE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import {
IssuableListTabs,
IssuableStates,
IssuableTypes,
} from '~/vue_shared/issuable/list/constants';
import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants';
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
import {
CREATED_DESC,
defaultTypeTokenOptions,
defaultWorkItemTypes,
i18n,
ISSUE_REFERENCE,
MAX_LIST_SIZE,
@ -67,6 +67,7 @@ import {
TOKEN_TYPE_ORGANIZATION,
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
TYPE_TOKEN_TASK_OPTION,
UPDATED_DESC,
urlSortParams,
} from '../constants';
@ -107,7 +108,6 @@ const CrmOrganizationToken = () =>
export default {
i18n,
IssuableListTabs,
IssuableTypes: [IssuableTypes.Issue, IssuableTypes.Incident, IssuableTypes.TestCase],
components: {
CsvImportExportButtons,
GlButton,
@ -123,6 +123,7 @@ export default {
directives: {
GlTooltip: GlTooltipDirective,
},
mixins: [glFeatureFlagMixin()],
inject: [
'autocompleteAwardEmojisPath',
'calendarPath',
@ -180,9 +181,7 @@ export default {
issues: {
query: getIssuesQuery,
variables() {
const { types } = this.queryVariables;
return { ...this.queryVariables, types: types ? [types] : this.$options.IssuableTypes };
return this.queryVariables;
},
update(data) {
return data[this.namespace]?.issues.nodes ?? [];
@ -206,9 +205,7 @@ export default {
issuesCounts: {
query: getIssuesCountsQuery,
variables() {
const { types } = this.queryVariables;
return { ...this.queryVariables, types: types ? [types] : this.$options.IssuableTypes };
return this.queryVariables;
},
update(data) {
return data[this.namespace] ?? {};
@ -240,11 +237,22 @@ export default {
state: this.state,
...this.pageParams,
...this.apiFilterParams,
types: this.apiFilterParams.types || this.defaultWorkItemTypes,
};
},
namespace() {
return this.isProject ? ITEM_TYPE.PROJECT : ITEM_TYPE.GROUP;
},
defaultWorkItemTypes() {
return this.isWorkItemsEnabled
? defaultWorkItemTypes.concat(WORK_ITEM_TYPE_ENUM_TASK)
: defaultWorkItemTypes;
},
typeTokenOptions() {
return this.isWorkItemsEnabled
? defaultTypeTokenOptions.concat(TYPE_TOKEN_TASK_OPTION)
: defaultTypeTokenOptions;
},
hasSearch() {
return (
this.searchQuery ||
@ -262,6 +270,9 @@ export default {
isOpenTab() {
return this.state === IssuableStates.Opened;
},
isWorkItemsEnabled() {
return this.glFeatures.workItems;
},
showCsvButtons() {
return this.isProject && this.isSignedIn;
},
@ -340,11 +351,7 @@ export default {
title: TOKEN_TITLE_TYPE,
icon: 'issues',
token: GlFilteredSearchToken,
options: [
{ icon: 'issue-type-issue', title: 'issue', value: 'issue' },
{ icon: 'issue-type-incident', title: 'incident', value: 'incident' },
{ icon: 'issue-type-test-case', title: 'test_case', value: 'test_case' },
],
options: this.typeTokenOptions,
},
];

View File

@ -8,6 +8,11 @@ import {
OPERATOR_IS,
OPERATOR_IS_NOT,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
WORK_ITEM_TYPE_ENUM_INCIDENT,
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_ENUM_TEST_CASE,
} from '~/work_items/constants';
export const i18n = {
anonymousSearchingMessage: __('You must sign in to search for specific terms.'),
@ -147,6 +152,20 @@ export const TOKEN_TYPE_WEIGHT = 'weight';
export const TOKEN_TYPE_CONTACT = 'crm_contact';
export const TOKEN_TYPE_ORGANIZATION = 'crm_organization';
export const TYPE_TOKEN_TASK_OPTION = { icon: 'task-done', title: 'task', value: 'task' };
export const defaultWorkItemTypes = [
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_ENUM_INCIDENT,
WORK_ITEM_TYPE_ENUM_TEST_CASE,
];
export const defaultTypeTokenOptions = [
{ icon: 'issue-type-issue', title: 'issue', value: 'issue' },
{ icon: 'issue-type-incident', title: 'incident', value: 'incident' },
{ icon: 'issue-type-test-case', title: 'test_case', value: 'test_case' },
];
export const filters = {
[TOKEN_TYPE_AUTHOR]: {
[API_PARAM]: {

View File

@ -46,13 +46,6 @@ export const AvailableSortOptions = [
},
];
export const IssuableTypes = {
Issue: 'ISSUE',
Incident: 'INCIDENT',
TestCase: 'TEST_CASE',
Requirement: 'REQUIREMENT',
};
export const DEFAULT_PAGE_SIZE = 20;
export const DEFAULT_SKELETON_COUNT = 5;

View File

@ -8,11 +8,6 @@ export const STATE_EVENT_CLOSE = 'CLOSE';
export const TRACKING_CATEGORY_SHOW = 'workItems:show';
export const i18n = {
fetchError: s__('WorkItem|Something went wrong when fetching the work item. Please try again.'),
updateError: s__('WorkItem|Something went wrong while updating the work item. Please try again.'),
};
export const TASK_TYPE_NAME = 'Task';
export const WIDGET_TYPE_ASSIGNEES = 'ASSIGNEES';
@ -22,7 +17,15 @@ export const WIDGET_TYPE_WEIGHT = 'WEIGHT';
export const WIDGET_TYPE_HIERARCHY = 'HIERARCHY';
export const WORK_ITEM_VIEWED_STORAGE_KEY = 'gl-show-work-item-banner';
export const WIDGET_TYPE_TASK_ICON = 'task-done';
export const WORK_ITEM_TYPE_ENUM_INCIDENT = 'INCIDENT';
export const WORK_ITEM_TYPE_ENUM_ISSUE = 'ISSUE';
export const WORK_ITEM_TYPE_ENUM_TASK = 'TASK';
export const WORK_ITEM_TYPE_ENUM_TEST_CASE = 'TEST_CASE';
export const i18n = {
fetchError: s__('WorkItem|Something went wrong when fetching the work item. Please try again.'),
updateError: s__('WorkItem|Something went wrong while updating the work item. Please try again.'),
};
export const WIDGET_ICONS = {
TASK: 'task-done',

View File

@ -34,6 +34,10 @@ class GroupsController < Groups::ApplicationController
before_action :track_experiment_event, only: [:new]
before_action only: :issues do
push_force_frontend_feature_flag(:work_items, group.work_items_feature_flag_enabled?)
end
helper_method :captcha_required?
skip_cross_project_access_check :index, :new, :create, :edit, :update,

View File

@ -44,10 +44,13 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:incident_timeline, project)
end
before_action only: [:index, :show] do
push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
end
before_action only: :show do
push_frontend_feature_flag(:issue_assignees_widget, project)
push_frontend_feature_flag(:realtime_labels, project)
push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
push_frontend_feature_flag(:work_items_mvc_2)
push_frontend_feature_flag(:work_items_hierarchy, project)
end

View File

@ -1,9 +1,9 @@
= form_for [@project, @protected_branch], html: { class: 'new-protected-branch js-new-protected-branch' } do |f|
%input{ type: 'hidden', name: 'update_section', value: 'js-protected-branches-settings' }
.card
.card-header.gl-font-weight-bold
= render Pajamas::CardComponent.new(card_options: { class: "gl-mb-5" }) do |c|
- c.header do
= s_("ProtectedBranch|Protect a branch")
.card-body
- c.body do
= form_errors(@protected_branch, pajamas_alert: true)
.form-group.row
= f.label :name, s_('ProtectedBranch|Branch:'), class: 'col-sm-12'
@ -31,5 +31,5 @@
- force_push_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: force_push_docs_url }
= (s_("ProtectedBranch|Allow all users with push access to %{tag_start}force push%{tag_end}.") % { tag_start: force_push_link_start, tag_end: '</a>' }).html_safe
= render_if_exists 'projects/protected_branches/ee/code_owner_approval_form', f: f
.card-footer
- c.footer do
= f.submit s_('ProtectedBranch|Protect'), class: 'gl-button btn btn-confirm', disabled: true, data: { qa_selector: 'protect_button' }

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321948
milestone: '13.11'
type: development
group: group::workspace
default_enabled: false
default_enabled: true

View File

@ -55,7 +55,7 @@ end
OPTIONAL_REVIEW_TEMPLATE = '%{role} review is optional for %{category}'
NOT_AVAILABLE_TEMPLATES = {
default: 'No %{role} available',
product_intelligence: group_not_available_template('#g_product_intelligence', '@gitlab-org/growth/product-intelligence/engineers'),
product_intelligence: group_not_available_template('#g_product_intelligence', '@gitlab-org/analytics-section/product-intelligence/engineers'),
integrations_be: group_not_available_template('#g_ecosystem_integrations', '@gitlab-org/ecosystem-stage/integrations'),
integrations_fe: group_not_available_template('#g_ecosystem_integrations', '@gitlab-org/ecosystem-stage/integrations')
}.freeze

View File

@ -1158,6 +1158,23 @@ requests redirected from the secondary to the primary node do not properly send
Authorization header. This may result in either an infinite `Authorization <-> Redirect`
loop, or Authorization error messages.
### Error: Net::ReadTimeout when pushing through SSH on a Geo secondary
When you push large repositories through SSH on a Geo secondary site, you may encounter a timeout.
This is because Rails proxies the push to the primary and has a 60-second default timeout,
[as described in this Geo issue](https://gitlab.com/gitlab-org/gitlab/-/issues/7405).
Current workarounds are:
- Push through HTTP instead, where Workhorse proxies the request to the primary (or redirects to the primary if Geo proxying is not enabled).
- Push directly to the primary.
Example log (`gitlab-shell.log`):
```plaintext
Failed to contact primary https://primary.domain.com/namespace/push_test.git\\nError: Net::ReadTimeout\",\"result\":null}" code=500 method=POST pid=5483 url="http://127.0.0.1:3000/api/v4/geo/proxy_git_push_ssh/push"
```
## Recovering from a partial failover
The partial failover to a secondary Geo *site* may be the result of a temporary/transient issue. Therefore, first attempt to run the promote command again.

View File

@ -144,7 +144,7 @@ with [domain expertise](#domain-experts).
by a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors)**.
1. If your merge request only includes end-to-end changes (*4*) **or** if the MR author is a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors), it must be **approved by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa)**
1. If your merge request includes a new or updated [application limit](https://about.gitlab.com/handbook/product/product-processes/#introducing-application-limits), it must be **approved by a [product manager](https://about.gitlab.com/company/team/)**.
1. If your merge request includes Product Intelligence (telemetry or analytics) changes, it should be reviewed and approved by a [Product Intelligence engineer](https://gitlab.com/gitlab-org/growth/product-intelligence/engineers).
1. If your merge request includes Product Intelligence (telemetry or analytics) changes, it should be reviewed and approved by a [Product Intelligence engineer](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/engineers).
1. If your merge request includes an addition of, or changes to a [Feature spec](testing_guide/testing_levels.md#frontend-feature-tests), it must be **approved by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa) or [Quality reviewer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_reviewers_qa)**.
1. If your merge request introduces a new service to GitLab (Puma, Sidekiq, Gitaly are examples), it must be **approved by a [product manager](https://about.gitlab.com/company/team/)**. See the [process for adding a service component to GitLab](adding_service_component.md) for details.
1. If your merge request includes changes related to authentication or authorization, it must be **approved by a [Manage:Authentication and Authorization team member](https://about.gitlab.com/company/team/)**. Check the [code review section on the group page](https://about.gitlab.com/handbook/engineering/development/dev/manage/authentication-and-authorization/#additional-considerations) for more details. Patterns for files known to require review from the team are listed in the `Authentication and Authorization` section of the [`CODEOWNERS`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/CODEOWNERS) file, and the team will be listed in the approvers section of all merge requests that modify these files.

View File

@ -205,8 +205,8 @@ instance unique identifier.
key_path: uuid
description: GitLab instance unique identifier
product_category: collection
product_section: growth
product_stage: growth
product_section: analytics
product_stage: analytics
product_group: product_intelligence
value_type: string
status: active
@ -301,7 +301,7 @@ bundle exec rails generate gitlab:usage_metric_definition:redis_hll issues users
## Metrics Dictionary
[Metrics Dictionary is a separate application](https://gitlab.com/gitlab-org/growth/product-intelligence/metric-dictionary).
[Metrics Dictionary is a separate application](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/metric-dictionary).
All metrics available in Service Ping are in the [Metrics Dictionary](https://metrics.gitlab.com/).

View File

@ -29,7 +29,7 @@ A metric definition has the [`instrumentation_class`](metrics_dictionary.md) fie
The defined instrumentation class should inherit one of the existing metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric`, `NumbersMetric` or `GenericMetric`.
The current convention is that a single instrumentation class corresponds to a single metric. On a rare occasions, there are exceptions to that convention like [Redis metrics](#redis-metrics). To use a single instrumentation class for more than one metric, please reach out to one of the `@gitlab-org/growth/product-intelligence/engineers` members to consult about your case.
The current convention is that a single instrumentation class corresponds to a single metric. On rare occasions, there are exceptions to that convention like [Redis metrics](#redis-metrics). To use a single instrumentation class for more than one metric, please reach out to one of the `@gitlab-org/analytics-section/product-intelligence/engineers` members to consult about your case.
Using the instrumentation classes ensures that metrics can fail safe individually, without breaking the entire
process of Service Ping generation.
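
For illustration, a minimal sketch of such a class, assuming the `Gitlab::Usage::Metrics::Instrumentations` namespace and the `operation`/`relation` DSL that existing `DatabaseMetric` subclasses use:

```ruby
# Hypothetical example: one instrumentation class per metric.
# Inheriting DatabaseMetric lets this metric fail in isolation instead of
# aborting the whole Service Ping generation run.
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        class CountIssuesMetric < DatabaseMetric
          operation :count

          relation { Issue }
        end
      end
    end
  end
end
```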

View File

@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Service Ping review guidelines
This page includes introductory material for a
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/)
review, and is specific to Service Ping related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
@ -42,7 +42,7 @@ are regular backend changes.
- Assign both the `~backend` and `~product intelligence` reviews to another Product Intelligence team member.
- Assign the maintainer review to someone outside of the Product Intelligence group.
- Assign an
[engineer](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
[engineer](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
- Set the correct attributes in the metric's YAML definition:
- `product_section`, `product_stage`, `product_group`, `product_category`
- Provide a clear description of the metric.
@ -76,7 +76,7 @@ are regular backend changes.
[Danger bot](../dangerbot.md) adds the list of changed Product Intelligence files
and pings the
[`@gitlab-org/growth/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
[`@gitlab-org/analytics-section/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
that are not drafts.
Any of the Product Intelligence engineers can be assigned for the Product Intelligence review.

View File

@ -50,7 +50,7 @@ See [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-tec
### Pseudonymization
In contrast to a typical Snowplow pipeline, after enrichment, GitLab Snowplow events go through a [pseudonymization service](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization) in the form of an AWS Lambda service before they are stored in S3 storage.
In contrast to a typical Snowplow pipeline, after enrichment, GitLab Snowplow events go through a [pseudonymization service](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization) in the form of an AWS Lambda service before they are stored in S3 storage.
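
As a rough illustration of the idea (not the Lambda's actual implementation), pseudonymization replaces identifying fields with salted one-way hashes, so events stay joinable without exposing raw identifiers. The field names and salt below are hypothetical:

```ruby
require 'digest'

# Illustrative sketch only: the real service's schema and salting strategy may differ.
SALT = 'per-deployment-secret'
IDENTIFYING_FIELDS = %w[user_id ip_address].freeze

def pseudonymize(event)
  IDENTIFYING_FIELDS.each_with_object(event.dup) do |field, result|
    # The same input always maps to the same digest, so counts and joins still work.
    result[field] = Digest::SHA256.hexdigest("#{SALT}#{result[field]}") if result.key?(field)
  end
end
```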
#### Why events need to be pseudonymized
@ -85,7 +85,7 @@ There are several tools that monitor Snowplow events tracking in different stage
- The number of events that successfully reach Snowplow collectors.
- The number of events that failed to reach Snowplow collectors.
- The number of backend events that were sent.
- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events in a processing pipeline. The pipeline starts from Snowplow collectors, goes through to enrichers and pseudonymization, and then up to persistence in an S3 bucket. From S3, the events are imported into the Snowflake Data Warehouse. You must have AWS access rights to view this dashboard. For more information, see [monitoring](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization#monitoring) in the Snowplow Events pseudonymization service documentation.
- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events in a processing pipeline. The pipeline starts from Snowplow collectors, goes through to enrichers and pseudonymization, and then up to persistence in an S3 bucket. From S3, the events are imported into the Snowflake Data Warehouse. You must have AWS access rights to view this dashboard. For more information, see [monitoring](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization#monitoring) in the Snowplow Events pseudonymization service documentation.
- [Sisense dashboard](https://app.periscopedata.com/app/gitlab/417669/Snowplow-Summary-Dashboard) provides information about the number of good and bad events imported into the Data Warehouse, in addition to the total number of imported Snowplow events.
For more information, see this [video walk-through](https://www.youtube.com/watch?v=NxPS0aKa_oU).
@ -93,7 +93,7 @@ For more information, see this [video walk-through](https://www.youtube.com/watc
## Related topics
- [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-technology-101)
- [Snowplow pseudonymization AWS Lambda project](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization)
- [Snowplow pseudonymization AWS Lambda project](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization)
- [Product Intelligence Guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/)
- [Data Infrastructure](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/)
- [Snowplow architecture overview (internal)](https://www.youtube.com/watch?v=eVYJjzspsLU)

View File

@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Snowplow review guidelines
This page includes introductory material for a
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/)
review, and is specific to Snowplow related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).

View File

@ -36,7 +36,7 @@ To change your password:
1. In the **New password** and **Password confirmation** text box, enter your new password.
1. Select **Save password**.
If you don't know your current password, select the **I forgot my password** link.
If you don't know your current password, select the **I forgot my password** link. A password reset email is sent to the account's **primary** email address.
## Change your username

View File

@ -105,9 +105,11 @@ module Gitlab
.joins("INNER JOIN namespaces n2 ON namespaces.parent_id = n2.id")
.select("namespaces.id as project_namespace_id, n2.traversal_ids")
# some customers have the namespaces.id column typed as bigint, which causes array_append(integer[], bigint) to fail,
# so we explicitly cast both arguments to compatible types
ApplicationRecord.connection.execute <<~SQL
UPDATE namespaces
SET traversal_ids = array_append(project_namespaces.traversal_ids, project_namespaces.project_namespace_id)
SET traversal_ids = array_append(project_namespaces.traversal_ids::bigint[], project_namespaces.project_namespace_id::bigint)
FROM (#{namespaces.to_sql}) as project_namespaces(project_namespace_id, traversal_ids)
WHERE id = project_namespaces.project_namespace_id
SQL
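
A hypothetical reproduction of the comment above: on instances where `namespaces.id` is `bigint`, PostgreSQL cannot resolve `array_append(integer[], bigint)`, so the `UPDATE` casts both arguments. The standalone query below shows the working shape:

```ruby
# Sketch: array_append succeeds once both sides agree on bigint.
# (Without the casts, affected instances raise PG::UndefinedFunction.)
ApplicationRecord.connection.execute(<<~SQL)
  SELECT array_append(ARRAY[1, 2]::bigint[], 3::bigint) AS traversal_ids
SQL
```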

View File

@ -31,7 +31,7 @@ kics-iac-sast:
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
SAST_ANALYZER_IMAGE_TAG: 2
SAST_ANALYZER_IMAGE_TAG: 3
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX"
rules:
- if: $SAST_DISABLED

View File

@ -31,7 +31,7 @@ kics-iac-sast:
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
SAST_ANALYZER_IMAGE_TAG: 2
SAST_ANALYZER_IMAGE_TAG: 3
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX"
rules:
- if: $SAST_DISABLED

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Importer
module Events
class ChangedMilestone
attr_reader :project, :user_id
# GitHub API doesn't provide the historical state of an issue for
# de/milestoned issue events. So we'll assign the default state to
# those events that are imported from GitHub.
DEFAULT_STATE = Issue.available_states[:opened]
def initialize(project, user_id)
@project = project
@user_id = user_id
end
# issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`.
def execute(issue_event)
create_event(issue_event)
end
private
def create_event(issue_event)
ResourceMilestoneEvent.create!(
issue_id: issue_event.issue_db_id,
user_id: user_id,
created_at: issue_event.created_at,
milestone_id: project.milestones.find_by_title(issue_event.milestone_title)&.id,
action: action(issue_event.event),
state: DEFAULT_STATE
)
end
def action(event_type)
return ResourceMilestoneEvent.actions[:remove] if event_type == 'demilestoned'
ResourceMilestoneEvent.actions[:add]
end
end
end
end
end
end
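
A brief usage sketch, matching how the dispatcher in the next file of this commit invokes the class (`project` and `author_id` come from the import context):

```ruby
importer = Gitlab::GithubImport::Importer::Events::ChangedMilestone.new(project, author_id)
# For a 'milestoned' event this records action :add; for 'demilestoned', action :remove.
importer.execute(issue_event)
```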

View File

@ -30,6 +30,9 @@ module Gitlab
when 'renamed'
Gitlab::GithubImport::Importer::Events::Renamed.new(project, author_id)
.execute(issue_event)
when 'milestoned', 'demilestoned'
Gitlab::GithubImport::Importer::Events::ChangedMilestone.new(project, author_id)
.execute(issue_event)
when 'cross-referenced'
Gitlab::GithubImport::Importer::Events::CrossReferenced.new(project, author_id)
.execute(issue_event)

View File

@ -10,7 +10,7 @@ module Gitlab
attr_reader :attributes
expose_attribute :id, :actor, :event, :commit_id, :label_title, :old_title, :new_title,
:source, :created_at
:milestone_title, :source, :created_at
expose_attribute :issue_db_id # set in SingleEndpointIssueEventsImporter#each_associated
# Builds a event from a GitHub API response.
@ -27,6 +27,7 @@ module Gitlab
new_title: event.rename && event.rename[:to],
source: event.source,
issue_db_id: event.issue_db_id,
milestone_title: event.milestone && event.milestone[:title],
created_at: event.created_at
)
end
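
A short sketch of the round trip this change enables, mirroring the specs later in this commit (`octokit_event` stands in for the real GitHub API response object):

```ruby
event = Gitlab::GithubImport::Representation::IssueEvent.from_api_response(octokit_event)
event.milestone_title # => 'milestone title' when the event carries milestone data, otherwise nil
```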

View File

@ -62,6 +62,10 @@ module Gitlab
end
end
def to_json(*_args)
{ major: @major, minor: @minor, patch: @patch }.to_json
end
def suffix
@suffix ||= @suffix_s.strip.gsub('-', '.pre.').scan(/\d+|[a-z]+/i).map do |s|
/^\d+$/ =~ s ? s.to_i : s
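
Expected behavior of the new `to_json`, mirroring the spec added later in this commit (any suffix, such as `-rc2`, is intentionally dropped):

```ruby
Gitlab::VersionInfo.new(1, 0, 1).to_json
# => "{\"major\":1,\"minor\":0,\"patch\":1}"
```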

rubocop/cop_todo.rb Normal file
View File

@ -0,0 +1,49 @@
# frozen_string_literal: true
module RuboCop
class CopTodo
attr_accessor :previously_disabled
attr_reader :cop_name, :files, :offense_count
def initialize(cop_name)
@cop_name = cop_name
@files = Set.new
@offense_count = 0
@cop_class = self.class.find_cop_by_name(cop_name)
@previously_disabled = false
end
def record(file, offense_count)
@files << file
@offense_count += offense_count
end
def autocorrectable?
@cop_class&.support_autocorrect?
end
def to_yaml
yaml = []
yaml << '---'
yaml << '# Cop supports --auto-correct.' if autocorrectable?
yaml << "#{cop_name}:"
if previously_disabled
yaml << " # Offense count: #{offense_count}"
yaml << ' # Temporarily disabled due to too many offenses'
yaml << ' Enabled: false'
end
yaml << ' Exclude:'
yaml.concat files.sort.map { |file| " - '#{file}'" }
yaml << ''
yaml.join("\n")
end
def self.find_cop_by_name(cop_name)
RuboCop::Cop::Registry.global.find_by_cop_name(cop_name)
end
end
end
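
A usage sketch, based on how the refactored `TodoFormatter` below drives this class:

```ruby
todo = RuboCop::CopTodo.new('Cop/Rule') # a cop name not in the registry, so not autocorrectable
todo.record('b.rb', 2)
todo.record('a.rb', 1)
todo.previously_disabled = true
puts todo.to_yaml
# Emits the offense-count comment, `Enabled: false`, and a sorted Exclude list,
# as in the "previously disabled" example in the new spec.
```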

View File

@ -5,6 +5,7 @@ require 'rubocop'
require 'yaml'
require_relative '../todo_dir'
require_relative '../cop_todo'
module RuboCop
module Formatter
@ -14,26 +15,6 @@ module RuboCop
# For example, this formatter stores offenses for `RSpec/VariableName`
# in `.rubocop_todo/rspec/variable_name.yml`.
class TodoFormatter < BaseFormatter
class Todo
attr_reader :cop_name, :files, :offense_count
def initialize(cop_name)
@cop_name = cop_name
@files = Set.new
@offense_count = 0
@cop_class = RuboCop::Cop::Registry.global.find_by_cop_name(cop_name)
end
def record(file, offense_count)
@files << file
@offense_count += offense_count
end
def autocorrectable?
@cop_class&.support_autocorrect?
end
end
DEFAULT_BASE_DIRECTORY = File.expand_path('../../.rubocop_todo', __dir__)
class << self
@ -44,7 +25,7 @@ module RuboCop
def initialize(output, _options = {})
@directory = self.class.base_directory
@todos = Hash.new { |hash, cop_name| hash[cop_name] = Todo.new(cop_name) }
@todos = Hash.new { |hash, cop_name| hash[cop_name] = CopTodo.new(cop_name) }
@todo_dir = TodoDir.new(directory)
@config_inspect_todo_dir = load_config_inspect_todo_dir
@config_old_todo_yml = load_config_old_todo_yml
@ -65,8 +46,8 @@ module RuboCop
def finished(_inspected_files)
@todos.values.sort_by(&:cop_name).each do |todo|
yaml = to_yaml(todo)
path = @todo_dir.write(todo.cop_name, yaml)
todo.previously_disabled = previously_disabled?(todo)
path = @todo_dir.write(todo.cop_name, todo.to_yaml)
output.puts "Written to #{relative_path(path)}\n"
end
@ -90,27 +71,6 @@ module RuboCop
path.delete_prefix("#{parent}/")
end
def to_yaml(todo)
yaml = []
yaml << '---'
yaml << '# Cop supports --auto-correct.' if todo.autocorrectable?
yaml << "#{todo.cop_name}:"
if previously_disabled?(todo)
yaml << " # Offense count: #{todo.offense_count}"
yaml << ' # Temporarily disabled due to too many offenses'
yaml << ' Enabled: false'
end
yaml << ' Exclude:'
files = todo.files.sort.map { |file| " - '#{file}'" }
yaml.concat files
yaml << ''
yaml.join("\n")
end
def check_multiple_configurations!
cop_names = @config_inspect_todo_dir.keys & @config_old_todo_yml.keys
return if cop_names.empty?

View File

@ -52,6 +52,12 @@ import { getSortKey, getSortOptions } from '~/issues/list/utils';
import axios from '~/lib/utils/axios_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import {
WORK_ITEM_TYPE_ENUM_INCIDENT,
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_ENUM_TASK,
WORK_ITEM_TYPE_ENUM_TEST_CASE,
} from '~/work_items/constants';
jest.mock('@sentry/browser');
jest.mock('~/flash');
@ -123,6 +129,7 @@ describe('CE IssuesListApp component', () => {
const mountComponent = ({
provide = {},
data = {},
workItems = false,
issuesQueryResponse = mockIssuesQueryResponse,
issuesCountsQueryResponse = mockIssuesCountsQueryResponse,
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
@ -141,6 +148,9 @@ describe('CE IssuesListApp component', () => {
apolloProvider: createMockApollo(requestHandlers),
router,
provide: {
glFeatures: {
workItems,
},
...defaultProvide,
...provide,
},
@ -168,22 +178,6 @@ describe('CE IssuesListApp component', () => {
return waitForPromises();
});
it('queries list with types `ISSUE` and `INCIDENT', () => {
const expectedTypes = ['ISSUE', 'INCIDENT', 'TEST_CASE'];
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(
expect.objectContaining({
types: expectedTypes,
}),
);
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
expect.objectContaining({
types: expectedTypes,
}),
);
});
it('renders', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: defaultProvide.fullPath,
@ -1024,6 +1018,21 @@ describe('CE IssuesListApp component', () => {
});
});
});
describe('when "page-size-change" event is emitted by IssuableList', () => {
it('updates url params with new page size', async () => {
wrapper = mountComponent();
router.push = jest.fn();
findIssuableList().vm.$emit('page-size-change', 50);
await nextTick();
expect(router.push).toHaveBeenCalledTimes(1);
expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining({ first_page_size: 50 }),
});
});
});
});
describe('public visibility', () => {
@ -1045,17 +1054,45 @@ describe('CE IssuesListApp component', () => {
});
});
describe('when "page-size-change" event is emitted by IssuableList', () => {
it('updates url params with new page size', async () => {
wrapper = mountComponent();
router.push = jest.fn();
describe('fetching issues', () => {
describe('when work_items feature flag is disabled', () => {
beforeEach(() => {
wrapper = mountComponent({ workItems: false });
jest.runOnlyPendingTimers();
});
findIssuableList().vm.$emit('page-size-change', 50);
await nextTick();
it('fetches issue, incident, and test case types', () => {
const types = [
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_ENUM_INCIDENT,
WORK_ITEM_TYPE_ENUM_TEST_CASE,
];
expect(router.push).toHaveBeenCalledTimes(1);
expect(router.push).toHaveBeenCalledWith({
query: expect.objectContaining({ first_page_size: 50 }),
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
expect.objectContaining({ types }),
);
});
});
describe('when work_items feature flag is enabled', () => {
beforeEach(() => {
wrapper = mountComponent({ workItems: true });
jest.runOnlyPendingTimers();
});
it('fetches issue, incident, test case, and task types', () => {
const types = [
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_ENUM_INCIDENT,
WORK_ITEM_TYPE_ENUM_TEST_CASE,
WORK_ITEM_TYPE_ENUM_TASK,
];
expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
expect.objectContaining({ types }),
);
});
});
});

View File

@ -5,199 +5,211 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do
include MigrationsHelpers
context 'when migrating data', :aggregate_failures do
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
RSpec.shared_examples 'backfills project namespaces' do
context 'when migrating data', :aggregate_failures do
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
let(:child_nodes_count) { 2 }
let(:tree_depth) { 3 }
let(:child_nodes_count) { 2 }
let(:tree_depth) { 3 }
let(:backfilled_namespace) { nil }
let(:backfilled_namespace) { nil }
before do
BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
end
describe '#up' do
shared_examples 'back-fill project namespaces' do
it 'back-fills all project namespaces' do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
projects_count = ::Project.count
batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_count = ::Namespace.where(type: 'Project').count
migration = described_class.new
expect(projects_count).not_to eq(project_namespaces_count)
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
check_projects_in_sync_with(Namespace.where(type: 'Project'))
end
context 'when passing specific group as parameter' do
let(:backfilled_namespace) { parent_group1 }
it 'back-fills project namespaces for the specified group hierarchy' do
backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
start_id = backfilled_namespace_projects.minimum(:id)
end_id = backfilled_namespace_projects.maximum(:id)
group_projects_count = backfilled_namespace_projects.count
batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
before do
BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
end
describe '#up' do
shared_examples 'back-fill project namespaces' do
it 'back-fills all project namespaces' do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
projects_count = ::Project.count
batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_count = ::Namespace.where(type: 'Project').count
migration = described_class.new
expect(project_namespaces_in_hierarchy.count).to eq(0)
expect(projects_count).not_to eq(project_namespaces_count)
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
expect(group_projects_count).to eq(14)
expect(project_namespaces_in_hierarchy.count).to eq(0)
expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
check_projects_in_sync_with(Namespace.where(type: 'Project'))
end
expect(project_namespaces_in_hierarchy.count).to eq(14)
check_projects_in_sync_with(project_namespaces_in_hierarchy)
context 'when passing specific group as parameter' do
let(:backfilled_namespace) { parent_group1 }
it 'back-fills project namespaces for the specified group hierarchy' do
backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
start_id = backfilled_namespace_projects.minimum(:id)
end_id = backfilled_namespace_projects.maximum(:id)
group_projects_count = backfilled_namespace_projects.count
batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
migration = described_class.new
expect(project_namespaces_in_hierarchy.count).to eq(0)
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
expect(group_projects_count).to eq(14)
expect(project_namespaces_in_hierarchy.count).to eq(0)
migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
expect(project_namespaces_in_hierarchy.count).to eq(14)
check_projects_in_sync_with(project_namespaces_in_hierarchy)
end
end
context 'when projects already have project namespaces' do
before do
hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
start_id = hierarchy1_projects.minimum(:id)
end_id = hierarchy1_projects.maximum(:id)
described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
end
it 'does not duplicate project namespaces' do
# check there are already some project namespaces but not for all
projects_count = ::Project.count
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces = ::Namespace.where(type: 'Project')
migration = described_class.new
expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
expect(projects_count).not_to eq(project_namespaces.count)
# run migration again to test we do not generate extra project namespaces
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
expect(projects_count).to eq(project_namespaces.count)
end
end
end
context 'when projects already have project namespaces' do
before do
hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
start_id = hierarchy1_projects.minimum(:id)
end_id = hierarchy1_projects.maximum(:id)
it 'checks no project namespaces exist in the defined hierarchies' do
hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
expect(hierarchy1_project_namespaces).to be_empty
expect(hierarchy2_project_namespaces).to be_empty
expect(hierarchy1_projects_count).to eq(14)
expect(hierarchy2_projects_count).to eq(14)
end
context 'back-fill project namespaces in a single batch' do
it_behaves_like 'back-fill project namespaces'
end
context 'back-fill project namespaces in batches' do
before do
stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
end
it 'does not duplicate project namespaces' do
# check there are already some project namespaces but not for all
it_behaves_like 'back-fill project namespaces'
end
end
describe '#down' do
before do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
# back-fill first
described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
end
shared_examples 'cleanup project namespaces' do
it 'removes project namespaces' do
projects_count = ::Project.count
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
migration = described_class.new
batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces = ::Namespace.where(type: 'Project')
migration = described_class.new
expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
expect(projects_count).not_to eq(project_namespaces.count)
expect(projects_count).to be > 0
expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
# run migration again to test we do not generate extra project namespaces
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
expect(projects_count).to eq(project_namespaces.count)
end
end
end
it 'checks no project namespaces exist in the defined hierarchies' do
hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
expect(hierarchy1_project_namespaces).to be_empty
expect(hierarchy2_project_namespaces).to be_empty
expect(hierarchy1_projects_count).to eq(14)
expect(hierarchy2_projects_count).to eq(14)
end
context 'back-fill project namespaces in a single batch' do
it_behaves_like 'back-fill project namespaces'
end
context 'back-fill project namespaces in batches' do
before do
stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
end
it_behaves_like 'back-fill project namespaces'
end
end
describe '#down' do
before do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
# back-fill first
described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
end
shared_examples 'cleanup project namespaces' do
it 'removes project namespaces' do
projects_count = ::Project.count
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
migration = described_class.new
batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
expect(projects_count).to be > 0
expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
expect(::Project.count).to be > 0
expect(::Namespace.where(type: 'Project').count).to eq(0)
end
context 'when passing specific group as parameter' do
let(:backfilled_namespace) { parent_group1 }
it 'removes project namespaces only for the specific group hierarchy' do
backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
start_id = backfilled_namespace_projects.minimum(:id)
end_id = backfilled_namespace_projects.maximum(:id)
group_projects_count = backfilled_namespace_projects.count
batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
migration = described_class.new
expect(project_namespaces_in_hierarchy.count).to eq(14)
expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
expect(::Namespace.where(type: 'Project').count).to be > 0
expect(project_namespaces_in_hierarchy.count).to eq(0)
expect(::Project.count).to be > 0
expect(::Namespace.where(type: 'Project').count).to eq(0)
end
context 'when passing specific group as parameter' do
let(:backfilled_namespace) { parent_group1 }
it 'removes project namespaces only for the specific group hierarchy' do
backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
start_id = backfilled_namespace_projects.minimum(:id)
end_id = backfilled_namespace_projects.maximum(:id)
group_projects_count = backfilled_namespace_projects.count
batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
migration = described_class.new
expect(project_namespaces_in_hierarchy.count).to eq(14)
expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
expect(::Namespace.where(type: 'Project').count).to be > 0
expect(project_namespaces_in_hierarchy.count).to eq(0)
end
end
end
end
context 'cleanup project namespaces in a single batch' do
it_behaves_like 'cleanup project namespaces'
end
context 'cleanup project namespaces in batches' do
before do
stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
context 'cleanup project namespaces in a single batch' do
it_behaves_like 'cleanup project namespaces'
end
it_behaves_like 'cleanup project namespaces'
context 'cleanup project namespaces in batches' do
before do
stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
end
it_behaves_like 'cleanup project namespaces'
end
end
end
end
it_behaves_like 'backfills project namespaces'
context 'when namespaces.id is bigint' do
before do
namespaces.connection.execute("ALTER TABLE namespaces ALTER COLUMN id TYPE bigint")
end
it_behaves_like 'backfills project namespaces'
end
def base_ancestor(ancestor)
::Namespace.where(id: ancestor.id)
end
@ -209,7 +221,7 @@ RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNa
def check_projects_in_sync_with(namespaces)
project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled)
corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first))
.order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
.order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
expect(project_namespaces_attrs).to eq(corresponding_projects_attrs)
end

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
subject(:importer) { described_class.new(project, user.id) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:issue) { create(:issue, project: project) }
let!(:milestone) { create(:milestone, project: project) }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
'id' => 6501124486,
'actor' => { 'id' => 4, 'login' => 'alice' },
'event' => event_type,
'commit_id' => nil,
'milestone_title' => milestone.title,
'issue_db_id' => issue.id,
'created_at' => '2022-04-26 18:30:53 UTC'
)
end
let(:event_attrs) do
{
user_id: user.id,
issue_id: issue.id,
milestone_id: milestone.id,
state: 'opened',
created_at: issue_event.created_at
}.stringify_keys
end
shared_examples 'new event' do
it 'creates a new milestone event' do
expect { importer.execute(issue_event) }.to change { issue.resource_milestone_events.count }
.from(0).to(1)
expect(issue.resource_milestone_events.last)
.to have_attributes(expected_event_attrs)
end
end
describe '#execute' do
before do
allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(milestone.id)
end
context 'when importing a milestoned event' do
let(:event_type) { 'milestoned' }
let(:expected_event_attrs) { event_attrs.merge(action: 'add') }
it_behaves_like 'new event'
end
context 'when importing demilestoned event' do
let(:event_type) { 'demilestoned' }
let(:expected_event_attrs) { event_attrs.merge(action: 'remove') }
it_behaves_like 'new event'
end
end
end

View File

@ -87,6 +87,20 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
Gitlab::GithubImport::Importer::Events::Renamed
end
context "when it's milestoned issue event" do
let(:event_name) { 'milestoned' }
it_behaves_like 'triggers specific event importer',
Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's demilestoned issue event" do
let(:event_name) { 'demilestoned' }
it_behaves_like 'triggers specific event importer',
Gitlab::GithubImport::Importer::Events::ChangedMilestone
end
context "when it's cross-referenced issue event" do
let(:event_name) { 'cross-referenced' }

View File

@ -77,6 +77,20 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
end
end
context 'when milestone data is present' do
it 'includes the milestone_title' do
expect(issue_event.milestone_title).to eq('milestone title')
end
end
context 'when milestone data is empty' do
let(:with_milestone) { false }
it 'does not return such info' do
expect(issue_event.milestone_title).to eq nil
end
end
it 'includes the created timestamp' do
expect(issue_event.created_at).to eq('2022-04-26 18:30:53 UTC')
end
@ -93,7 +107,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:response) do
event_resource = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label,
:rename, :issue_db_id, :created_at, :performed_via_github_app, :source,
:rename, :milestone, :source, :issue_db_id, :created_at, :performed_via_github_app,
keyword_init: true
)
user_resource = Struct.new(:id, :login, keyword_init: true)
@ -106,10 +120,11 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
commit_id: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
commit_url: 'https://api.github.com/repos/octocat/Hello-World/commits'\
'/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
label: with_label ? { name: 'label title' } : nil,
rename: with_rename ? { from: 'old title', to: 'new title' } : nil,
milestone: with_milestone ? { title: 'milestone title' } : nil,
source: { type: 'issue', id: 123456 },
issue_db_id: 100500,
label: with_label ? { name: 'label title' } : nil,
created_at: '2022-04-26 18:30:53 UTC',
performed_via_github_app: nil
)
@ -118,6 +133,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
let(:with_milestone) { true }
it_behaves_like 'an IssueEvent' do
let(:issue_event) { described_class.from_api_response(response) }
@ -139,6 +155,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
'label_title' => (with_label ? 'label title' : nil),
'old_title' => with_rename ? 'old title' : nil,
'new_title' => with_rename ? 'new title' : nil,
'milestone_title' => (with_milestone ? 'milestone title' : nil),
'source' => { 'type' => 'issue', 'id' => 123456 },
"issue_db_id" => 100500,
'created_at' => '2022-04-26 18:30:53 UTC',
@ -149,6 +166,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
let(:with_milestone) { true }
let(:issue_event) { described_class.from_json_hash(hash) }
end

View File

@ -133,6 +133,20 @@ RSpec.describe Gitlab::VersionInfo do
it { expect(@unknown.to_s).to eq("Unknown") }
end
describe '.to_json' do
let(:correct_version) do
"{\"major\":1,\"minor\":0,\"patch\":1}"
end
let(:unknown_version) do
"{\"major\":0,\"minor\":0,\"patch\":0}"
end
it { expect(@v1_0_1.to_json).to eq(correct_version) }
it { expect(@v1_0_1_rc2.to_json).to eq(correct_version) }
it { expect(@unknown.to_json).to eq(unknown_version) }
end
describe '.hash' do
it { expect(described_class.parse("1.0.0").hash).to eq(@v1_0_0.hash) }
it { expect(described_class.parse("1.0.0.1").hash).to eq(@v1_0_0.hash) }

View File

@ -0,0 +1,124 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require_relative '../../rubocop/cop_todo'
RSpec.describe RuboCop::CopTodo do
let(:cop_name) { 'Cop/Rule' }
subject(:cop_todo) { described_class.new(cop_name) }
describe '#initialize' do
it 'initializes a cop todo' do
expect(cop_todo).to have_attributes(
cop_name: cop_name,
files: be_empty,
offense_count: 0,
previously_disabled: false
)
end
end
describe '#record' do
it 'records offenses' do
cop_todo.record('a.rb', 1)
cop_todo.record('b.rb', 2)
expect(cop_todo).to have_attributes(
files: contain_exactly('a.rb', 'b.rb'),
offense_count: 3
)
end
end
describe '#autocorrectable?' do
subject { cop_todo.autocorrectable? }
context 'when found in rubocop registry' do
before do
fake_cop = double(:cop, support_autocorrect?: autocorrectable) # rubocop:disable RSpec/VerifiedDoubles
allow(described_class).to receive(:find_cop_by_name)
.with(cop_name).and_return(fake_cop)
end
context 'when autocorrectable' do
let(:autocorrectable) { true }
it { is_expected.to be_truthy }
end
context 'when not autocorrectable' do
let(:autocorrectable) { false }
it { is_expected.to be_falsey }
end
end
context 'when not found in rubocop registry' do
before do
allow(described_class).to receive(:find_cop_by_name)
.with(cop_name).and_return(nil).and_call_original
end
it { is_expected.to be_falsey }
end
end
describe '#to_yaml' do
subject(:yaml) { cop_todo.to_yaml }
context 'when autocorrectable' do
before do
allow(cop_todo).to receive(:autocorrectable?).and_return(true)
end
specify do
expect(yaml).to eq(<<~YAML)
---
# Cop supports --auto-correct.
#{cop_name}:
Exclude:
YAML
end
end
context 'when previously disabled' do
specify do
cop_todo.record('a.rb', 1)
cop_todo.record('b.rb', 2)
cop_todo.previously_disabled = true
expect(yaml).to eq(<<~YAML)
---
#{cop_name}:
# Offense count: 3
# Temporarily disabled due to too many offenses
Enabled: false
Exclude:
- 'a.rb'
- 'b.rb'
YAML
end
end
context 'with multiple files' do
before do
cop_todo.record('a.rb', 0)
cop_todo.record('c.rb', 0)
cop_todo.record('b.rb', 0)
end
it 'sorts excludes alphabetically' do
expect(yaml).to eq(<<~YAML)
---
#{cop_name}:
Exclude:
- 'a.rb'
- 'b.rb'
- 'c.rb'
YAML
end
end
end
end

View File

@ -261,16 +261,12 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
double(:offense, cop_name: cop_name)
end
def stub_rubocop_registry(**cops)
rubocop_registry = double(:rubocop_registry)
allow(RuboCop::Cop::Registry).to receive(:global).and_return(rubocop_registry)
allow(rubocop_registry).to receive(:find_by_cop_name)
.with(String).and_return(nil)
def stub_rubocop_registry(cops)
allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
.with(String).and_return(nil).and_call_original
cops.each do |cop_name, attributes|
allow(rubocop_registry).to receive(:find_by_cop_name)
allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
.with(cop_name).and_return(fake_cop(**attributes))
end
end

View File

@ -7,7 +7,7 @@ module Tooling
APPROVED_LABEL = 'product intelligence::approved'
REVIEW_LABEL = 'product intelligence::review pending'
CHANGED_FILES_MESSAGE = <<~MSG
For the following files, a review from the [Data team and Product Intelligence team](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) is recommended
For the following files, a review from the [Data team and Product Intelligence team](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) is recommended
Please check the ~"product intelligence" [Service Ping guide](https://docs.gitlab.com/ee/development/service_ping/) or the [Snowplow guide](https://docs.gitlab.com/ee/development/snowplow/).
For MR review guidelines, see the [Service Ping review guidelines](https://docs.gitlab.com/ee/development/service_ping/review_guidelines.html) or the [Snowplow review guidelines](https://docs.gitlab.com/ee/development/snowplow/review_guidelines.html).