Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-06-10 18:10:05 +00:00
parent be1b7b709e
commit d715acda3b
45 changed files with 650 additions and 289 deletions

View File

@ -1,28 +0,0 @@
<!-- Title suggestion: [Feature flag] Remove FEATURE_FLAG_NAME -->
## Feature
The `:feature_name` feature flag was previously [enabled by default](URL) and should be removed.
## Owners
- Group: ~"group::GROUP_NAME"
- Slack channel: `#g_GROUP_NAME`
- DRI: USERNAME
- PM: USERNAME
**Removal**
This is an __important__ phase that should be done either in the next milestone or as soon as possible. For the cleanup phase, follow our documentation on how to [clean up the feature flag](https://docs.gitlab.com/ee/development/feature_flags/controls.html#cleaning-up).
- [ ] Remove `:feature_name` feature flag
- [ ] Remove all references to the feature flag from the codebase
- [ ] Remove the YAML definitions for the feature from the repository
- [ ] Create a Changelog Entry
- [ ] Clean up the feature flag from all environments by running this chatops command in `#production` channel `/chatops run feature delete some_feature`.
- [ ] Close this issue after the feature flag is removed from the codebase.
/label ~"feature flag" ~"technical debt"
/assign DRI

View File

@ -110,6 +110,12 @@ To do so, follow these steps:
the feature can be officially announced in a release blog post.
- [ ] `/chatops run auto_deploy status <merge-commit>`
- [ ] Close [the feature issue](ISSUE LINK) to indicate the feature will be released in the current milestone.
- [ ] Set the next milestone on this rollout issue to schedule [the flag removal](#release-the-feature).
- [ ] (Optional) You can create a separate issue for scheduling the steps below to [Release the feature](#release-the-feature).
- [ ] Set the title to "[Feature flag] Cleanup `<feature-flag-name>`".
- [ ] Execute the `/copy_metadata <this-rollout-issue-link>` quick action to copy the labels from this rollout issue.
- [ ] Link this rollout issue as a related issue.
- [ ] Close this rollout issue.
**WARNING:** This approach has the downside that it makes it difficult for us to
[clean up](https://docs.gitlab.com/ee/development/feature_flags/controls.html#cleaning-up) the flag.

View File

@ -1,20 +1,22 @@
<script>
import { GlCard, GlLink, GlSprintf } from '@gitlab/ui';
import { s__ } from '~/locale';
import PipelineVisualReference from '../ui/pipeline_visual_reference.vue';
export default {
i18n: {
title: s__('PipelineEditorTutorial|🚀 Run your first pipeline'),
firstParagraph: s__(
'PipelineEditorTutorial|A typical GitLab pipeline consists of three stages: build, test and deploy. Each stage can have one or more jobs.',
),
secondParagraph: s__(
'PipelineEditorTutorial|In the example below, %{codeStart}build%{codeEnd} and %{codeStart}deploy%{codeEnd} each contain one job, and %{codeStart}test%{codeEnd} contains two jobs. Your scripts run in jobs like these.',
),
thirdParagraph: s__(
'PipelineEditorTutorial|You can use %{linkStart}CI/CD examples and templates%{linkEnd} to get your first %{codeStart}.gitlab-ci.yml%{codeEnd} configuration file started. Your first pipeline runs when you commit the changes.',
'PipelineEditorTutorial|This template creates a simple test pipeline. To use it:',
),
listItems: [
s__(
'PipelineEditorTutorial|Commit the file to your repository. The pipeline then runs automatically.',
),
s__('PipelineEditorTutorial|The pipeline status is at the top of the page.'),
s__(
'PipelineEditorTutorial|Select the pipeline ID to view the full details about your first pipeline run.',
),
],
note: s__(
'PipelineEditorTutorial|If you’re using a self-managed GitLab instance, %{linkStart}make sure your instance has runners available.%{linkEnd}',
),
@ -23,9 +25,8 @@ export default {
GlCard,
GlLink,
GlSprintf,
PipelineVisualReference,
},
inject: ['ciExamplesHelpPagePath', 'runnerHelpPagePath'],
inject: ['runnerHelpPagePath'],
};
</script>
<template>
@ -33,26 +34,9 @@ export default {
<template #default>
<h4 class="gl-font-lg gl-mt-0">{{ $options.i18n.title }}</h4>
<p class="gl-mb-3">{{ $options.i18n.firstParagraph }}</p>
<p class="gl-mb-3">
<gl-sprintf :message="$options.i18n.secondParagraph">
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</p>
<pipeline-visual-reference />
<p class="gl-my-3">
<gl-sprintf :message="$options.i18n.thirdParagraph">
<template #link="{ content }">
<gl-link :href="ciExamplesHelpPagePath" target="_blank">
{{ content }}
</gl-link>
</template>
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</p>
<ol class="gl-mb-3">
<li v-for="(item, i) in $options.i18n.listItems" :key="`li-${i}`">{{ item }}</li>
</ol>
<p class="gl-mb-0">
<gl-sprintf :message="$options.i18n.note">
<template #link="{ content }">

View File

@ -1,43 +0,0 @@
<script>
import { s__ } from '~/locale';
import DemoJobPill from './demo_job_pill.vue';
export default {
i18n: {
stageNames: {
build: s__('StageName|Build'),
test: s__('StageName|Test'),
deploy: s__('StageName|Deploy'),
},
jobNames: {
build: s__('JobName|build-job'),
test_1: s__('JobName|unit-test'),
test_2: s__('JobName|lint-test'),
deploy: s__('JobName|deploy-app'),
},
},
stageClasses:
'gl-bg-blue-50 gl-display-flex gl-flex-direction-column gl-align-items-center gl-p-4 gl-rounded-base',
titleClasses: 'gl-text-blue-600 gl-mb-4',
components: {
DemoJobPill,
},
};
</script>
<template>
<div class="gl-display-flex gl-justify-content-center">
<div :class="$options.stageClasses" class="gl-mr-5">
<div :class="$options.titleClasses">{{ $options.i18n.stageNames.build }}</div>
<demo-job-pill :job-name="$options.i18n.jobNames.build" />
</div>
<div :class="$options.stageClasses" class="gl-mr-5">
<div :class="$options.titleClasses">{{ $options.i18n.stageNames.test }}</div>
<demo-job-pill class="gl-mb-3" :job-name="$options.i18n.jobNames.test_1" />
<demo-job-pill :job-name="$options.i18n.jobNames.test_2" />
</div>
<div :class="$options.stageClasses">
<div :class="$options.titleClasses">{{ $options.i18n.stageNames.deploy }}</div>
<demo-job-pill :job-name="$options.i18n.jobNames.deploy" />
</div>
</div>
</template>

View File

@ -31,3 +31,5 @@ export const DRAWER_EXPANDED_KEY = 'pipeline_editor_drawer_expanded';
export const BRANCH_PAGINATION_LIMIT = 20;
export const BRANCH_SEARCH_DEBOUNCE = '500';
export const STARTER_TEMPLATE_NAME = 'Getting-Started';

View File

@ -0,0 +1,7 @@
query getTemplate($projectPath: ID!, $templateName: String!) {
project(fullPath: $projectPath) {
ciTemplate(name: $templateName) {
content
}
}
}

View File

@ -14,12 +14,14 @@ import {
EDITOR_APP_STATUS_ERROR,
EDITOR_APP_STATUS_LOADING,
LOAD_FAILURE_UNKNOWN,
STARTER_TEMPLATE_NAME,
} from './constants';
import getBlobContent from './graphql/queries/blob_content.graphql';
import getCiConfigData from './graphql/queries/ci_config.graphql';
import getAppStatus from './graphql/queries/client/app_status.graphql';
import getCurrentBranch from './graphql/queries/client/current_branch.graphql';
import getIsNewCiConfigFile from './graphql/queries/client/is_new_ci_config_file.graphql';
import getTemplate from './graphql/queries/get_starter_template.query.graphql';
import PipelineEditorHome from './pipeline_editor_home.vue';
export default {
@ -51,12 +53,13 @@ export default {
showStartScreen: false,
showSuccess: false,
showFailure: false,
starterTemplate: '',
};
},
apollo: {
initialCiFileContent: {
fetchPolicy: fetchPolicies.NETWORK,
fetchPolicy: fetchPolicies.NETWORK_ONLY,
query: getBlobContent,
// If it's a brand new file, we don't want to fetch the content.
// Then when the user commits the first time, the query would run
@ -135,6 +138,24 @@ export default {
isNewCiConfigFile: {
query: getIsNewCiConfigFile,
},
starterTemplate: {
query: getTemplate,
variables() {
return {
projectPath: this.projectFullPath,
templateName: STARTER_TEMPLATE_NAME,
};
},
skip({ isNewCiConfigFile }) {
return !isNewCiConfigFile;
},
update(data) {
return data.project?.ciTemplate?.content || '';
},
error() {
this.reportFailure(LOAD_FAILURE_UNKNOWN);
},
},
},
computed: {
hasUnsavedChanges() {
@ -149,6 +170,9 @@ export default {
isEmpty() {
return this.currentCiFileContent === '';
},
templateOrCurrentContent() {
return this.isNewCiConfigFile ? this.starterTemplate : this.currentCiFileContent;
},
},
i18n: {
tabEdit: s__('Pipelines|Edit'),
@ -256,7 +280,7 @@ export default {
/>
<pipeline-editor-home
:ci-config-data="ciConfigData"
:ci-file-content="currentCiFileContent"
:ci-file-content="templateOrCurrentContent"
:is-new-ci-config-file="isNewCiConfigFile"
@commit="updateOnCommit"
@resetContent="resetContent"

View File

@ -2,19 +2,29 @@
module FormHelper
def form_errors(model, type: 'form', truncate: [])
return unless model.errors.any?
errors = model.errors
return unless errors.any?
headline = n_(
'The %{type} contains the following error:',
'The %{type} contains the following errors:',
errors.count
) % { type: type }
headline = n_('The %{type} contains the following error:', 'The %{type} contains the following errors:', model.errors.count) % { type: type }
truncate = Array.wrap(truncate)
content_tag(:div, class: 'alert alert-danger', id: 'error_explanation') do
content_tag(:h4, headline) <<
content_tag(:ul) do
messages = model.errors.map do |attribute, message|
message = html_escape_once(model.errors.full_message(attribute, message)).html_safe
message = content_tag(:span, message, class: 'str-truncated-100') if truncate.include?(attribute)
tag.div(class: 'alert alert-danger', id: 'error_explanation') do
tag.h4(headline) <<
tag.ul do
messages = errors.map do |error|
attribute = error.attribute
message = error.message
content_tag(:li, message)
message = html_escape_once(errors.full_message(attribute, message)).html_safe
message = tag.span(message, class: 'str-truncated-100') if truncate.include?(attribute)
tag.li(message)
end
messages.join.html_safe

View File

@ -15,8 +15,6 @@ class Commit
include ActsAsPaginatedDiff
include CacheMarkdownField
attr_mentionable :safe_message, pipeline: :single_line
participant :author
participant :committer
participant :notes_with_associations
@ -39,6 +37,10 @@ class Commit
cache_markdown_field :full_title, pipeline: :single_line, limit: 1.kilobyte
cache_markdown_field :description, pipeline: :commit_description, limit: 1.megabyte
# Share the cache used by the markdown fields
attr_mentionable :full_title, pipeline: :single_line, limit: 1.kilobyte
attr_mentionable :description, pipeline: :commit_description, limit: 1.megabyte
class << self
def decorate(commits, container)
commits.map do |commit|

View File

@ -157,6 +157,9 @@ module CacheMarkdownField
end
def store_mentions!
# We can only store mentions if the mentionable is a database object
return unless self.is_a?(ApplicationRecord)
refs = all_references(self.author)
references = {}

app/services/github.rb Normal file
View File

@ -0,0 +1,4 @@
# frozen_string_literal: true
module Github
end

View File

@ -1373,8 +1373,7 @@
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags:
- :exclude_from_kubernetes
:tags: []
- :name: pipeline_background:ci_pipeline_artifacts_create_quality_report
:worker_name: Ci::PipelineArtifacts::CreateQualityReportWorker
:feature_category: :code_testing

View File

@ -9,7 +9,6 @@ module Ci
include PipelineBackgroundQueue
feature_category :code_testing
tags :exclude_from_kubernetes
idempotent!

View File

@ -1,8 +1,8 @@
---
name: redirect_to_latest_template_jobs_deploy
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63144
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/332660
name: github_review_importer_query_only_unimported_merge_requests
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62036
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/332982
milestone: '14.0'
type: development
group: group::pipeline authoring
default_enabled: false
group: group::import
default_enabled: true

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class CleanUpPendingBuildsTable < ActiveRecord::Migration[6.0]
include ::Gitlab::Database::DynamicModelHelpers
BATCH_SIZE = 1000
disable_ddl_transaction!
@ -8,7 +10,7 @@ class CleanUpPendingBuildsTable < ActiveRecord::Migration[6.0]
def up
return unless Gitlab.dev_or_test_env? || Gitlab.com?
each_batch('ci_pending_builds', of: BATCH_SIZE) do |min, max|
each_batch_range('ci_pending_builds', of: BATCH_SIZE) do |min, max|
execute <<~SQL
DELETE FROM ci_pending_builds
USING ci_builds
@ -23,19 +25,4 @@ class CleanUpPendingBuildsTable < ActiveRecord::Migration[6.0]
def down
# noop
end
private
def each_batch(table_name, scope: ->(table) { table.all }, of: 1000)
table = Class.new(ActiveRecord::Base) do
include EachBatch
self.table_name = table_name
self.inheritance_column = :_type_disabled
end
scope.call(table).each_batch(of: of) do |batch|
yield batch.pluck('MIN(id), MAX(id)').first
end
end
end

View File

@ -17,9 +17,9 @@ described, it is possible to adapt these instructions to your needs.
_[diagram source - GitLab employees only](https://docs.google.com/drawings/d/1z0VlizKiLNXVVVaERFwgsIOuEgjcUqDTWPdQYsE7Z4c/edit)_
The topology above assumes that the **primary** and **secondary** Geo clusters
The topology above assumes the **primary** and **secondary** Geo clusters
are located in two separate locations, on their own virtual network
with private IP addresses. The network is configured such that all machines within
with private IP addresses. The network is configured such that all machines in
one geographic location can communicate with each other using their private IP addresses.
The IP addresses given are examples and may be different depending on the
network topology of your deployment.
@ -44,9 +44,10 @@ Support for PostgreSQL on **secondary** nodes in multi-node configuration
Because of the additional complexity involved in setting up this configuration
for PostgreSQL and Redis, it is not covered by this Geo multi-node documentation.
For more information about setting up a multi-node PostgreSQL cluster and Redis cluster using the omnibus package see the multi-node documentation for
[PostgreSQL](../../postgresql/replication_and_failover.md) and
[Redis](../../redis/replication_and_failover.md), respectively.
For more information on setting up a multi-node PostgreSQL cluster and Redis cluster using the Omnibus GitLab package, see:
- [PostgreSQL multi-node documentation](../../postgresql/replication_and_failover.md)
- [Redis multi-node documentation](../../redis/replication_and_failover.md)
NOTE:
It is possible to use cloud hosted services for PostgreSQL and Redis, but this is beyond the scope of this document.
@ -60,8 +61,8 @@ you already have a working GitLab instance that is in-use, it can be used as a
The second cluster serves as the **secondary** node. Again, use the
[GitLab multi-node documentation](../../reference_architectures/index.md) to set this up.
It's a good idea to log in and test it, however, note that its data is
wiped out as part of the process of replicating from the **primary**.
It's a good idea to log in and test it. However, be aware that its data is
wiped out as part of the process of replicating from the **primary** node.
## Configure the GitLab cluster to be the **primary** node
@ -92,9 +93,9 @@ After making these changes, [reconfigure GitLab](../../restart_gitlab.md#omnibus
NOTE:
PostgreSQL and Redis should have already been disabled on the
application servers, and connections from the application servers to those
services on the backend servers configured, during normal GitLab multi-node set up. See
multi-node configuration documentation for
application servers during normal GitLab multi-node setup. Connections
from the application servers to services on the backend servers should
have also been configured. See multi-node configuration documentation for
[PostgreSQL](../../postgresql/replication_and_failover.md#configuring-the-application-nodes)
and [Redis](../../redis/replication_and_failover.md#example-configuration-for-the-gitlab-application).
@ -120,12 +121,12 @@ major differences:
called the "tracking database", which tracks the synchronization state of
various resources.
Therefore, we set up the multi-node components one-by-one, and include deviations
from the normal multi-node setup. However, we highly recommend first configuring a
brand-new cluster as if it were not part of a Geo setup so that it can be
tested and verified as a working cluster. And only then should it be modified
for use as a Geo **secondary**. This helps to separate problems that are related
and are not related to Geo setup.
Therefore, we set up the multi-node components one by one and include deviations
from the normal multi-node setup. However, we highly recommend configuring a
brand-new cluster first, as if it were not part of a Geo setup. This allows
verifying that it is a working cluster. And only then should it be modified
for use as a Geo **secondary**. This helps to separate Geo setup problems from
unrelated problems.
### Step 1: Configure the Redis and Gitaly services on the **secondary** node
@ -364,10 +365,10 @@ then make the following modifications:
```
NOTE:
If you had set up PostgreSQL cluster using the omnibus package and you had set
up `postgresql['sql_user_password'] = 'md5 digest of secret'` setting, keep in
If you had set up PostgreSQL cluster using the omnibus package and had set
`postgresql['sql_user_password'] = 'md5 digest of secret'`, keep in
mind that `gitlab_rails['db_password']` and `geo_secondary['db_password']`
mentioned above contains the plaintext passwords. This is used to let the Rails
contains the plaintext passwords. This is used to let the Rails
servers connect to the databases.
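For illustration only, the settings this note refers to live in `/etc/gitlab/gitlab.rb`; the values below are placeholders, not values from this document:
```ruby
# /etc/gitlab/gitlab.rb -- placeholder values for illustration only
postgresql['sql_user_password'] = 'md5-digest-of-the-database-secret' # md5 digest, not plaintext
gitlab_rails['db_password'] = 'plaintext-database-password'           # plaintext, used by the Rails servers
geo_secondary['db_password'] = 'plaintext-database-password'          # plaintext, used by the Rails servers
```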
NOTE:

View File

@ -35,7 +35,9 @@ metrics exposed by the [GitLab exporter](../prometheus/gitlab_metrics.md#metrics
1. Go to **Admin Area > Settings > Metrics and profiling** and expand the **Self monitoring** section.
1. Toggle the **Create Project** button on.
1. Once your GitLab instance creates the project, GitLab displays a link to the project in the text above the **Create Project** toggle. You can also find it under **Projects > Your projects**.
1. After your GitLab instance creates the project, GitLab displays a link to the
project in the text above the **Create Project** toggle. You can also find it
from the top bar by selecting **Menu > Project**, then selecting **Your projects**.
## Deleting the self monitoring project

View File

@ -0,0 +1,97 @@
---
stage: Plan
group: Project Management
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Real-Time Features
This guide contains instructions on how to safely roll out new real-time
features.
Real-time features are implemented using GraphQL Subscriptions.
[Developer documentation](api_graphql_styleguide.md#subscriptions) is available.
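As a rough, illustrative sketch (the event name and arguments are examples, not part of this commit), the server side of a real-time feature typically triggers a GraphQL subscription event from Ruby, and Action Cable fans it out to subscribed clients over WebSockets:
```ruby
# Illustrative only: notify clients subscribed to an example event
# after an update has been persisted.
GitlabSchema.subscriptions.trigger(
  'issuableAssigneesUpdated',          # example subscription field
  { issuable_id: issue.to_global_id }, # arguments clients subscribed with
  issue                                # object delivered to subscribers
)
```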
WebSockets are a relatively new technology at GitLab, and supporting them at
scale introduces some challenges. For that reason, new features should be rolled
out using the instructions below.
## Reuse an existing WebSocket connection
Features reusing an existing connection incur minimal risk. Feature flag rollout
is recommended to give more control to self-hosting customers. However,
it is not necessary to roll out in percentages, or to estimate new connections for
GitLab.com.
## Introduce a new WebSocket connection
Any change that introduces a WebSocket connection to part of the GitLab application
incurs some scalability risk, both to nodes responsible for maintaining open
connections and to downstream services, such as Redis and the primary database.
### Estimate peak connections
The first real-time feature to be fully enabled on GitLab.com was
[real-time assignees](https://gitlab.com/gitlab-org/gitlab/-/issues/17589). By comparing
peak throughput to the issue page against peak simultaneous WebSocket connections, it is
possible to crudely estimate that each request per second adds
approximately 4200 WebSocket connections.
To understand the impact a new feature might have, sum the peak throughput (RPS)
to the pages it originates from (`n`) and apply the formula:
```ruby
(n * 4200) / peak_active_connections
```
Current active connections are visible on
[this Grafana chart](https://dashboards.gitlab.net/d/websockets-main/websockets-overview?viewPanel=1357460996&orgId=1).
This calculation is crude, and should be revised as new features are
deployed. It yields a rough estimate of the capacity that must be
supported, as a proportion of existing capacity.
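As a purely hypothetical worked example (the numbers are invented, not measured):
```ruby
# Hypothetical numbers for illustration only.
n = 3                            # summed peak RPS of the pages that gain the feature
peak_active_connections = 50_000 # current peak simultaneous WebSocket connections

(n * 4200) / peak_active_connections.to_f
# => 0.252, that is, roughly a 25% increase over existing capacity
```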
### Graduated roll-out
New capacity may need to be provisioned to support your changes, depending on
current saturation and the proportion of new connections required. While
Kubernetes makes this relatively easy in most cases, there remains a risk to
downstream services.
To mitigate this, ensure that the code establishing the new WebSocket connection
is feature flagged and defaulted to `off`. A careful, percentage-based roll-out
of the feature flag ensures that effects can be observed on the [WebSocket
dashboard](https://dashboards.gitlab.net/d/websockets-main/websockets-overview?orgId=1).
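A minimal sketch of such a guard, using a made-up flag name:
```ruby
# Hypothetical flag name; the flag defaults to off until explicitly rolled out.
if Feature.enabled?(:my_realtime_widget, project)
  # establish the new WebSocket-backed behaviour
else
  # keep the existing behaviour, for example polling
end
```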
1. Create a
[feature flag roll-out](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20Flag%20Roll%20Out.md)
issue.
1. Add the estimated new connections required under the **What are we expecting to happen** section.
1. Copy in a member of the Plan and Scalability teams to estimate a percentage-based
roll-out plan.
## Backward compatibility
For the duration of the feature flag roll-out and indefinitely thereafter,
real-time features must be backward-compatible, or at least degrade
gracefully. Not all customers have Action Cable enabled, and further work
needs to be done before Action Cable can be enabled by default.
Making real-time a requirement represents a breaking change, so the next
opportunity to do this is version 15.0.
## Enable Real-Time by default
Mounting the Action Cable library adds a minimal memory footprint. However,
serving WebSocket requests introduces additional memory requirements. For this
reason, enabling Action Cable by default requires additional work: perhaps
reducing overall memory usage (including a known issue with Workhorse), but at
least revising the Reference Architectures.
## Real-time infrastructure on GitLab.com
On GitLab.com, WebSocket connections are served from dedicated infrastructure,
entirely separate from the regular Web fleet and deployed with Kubernetes. This
limits risk to nodes handling requests but not to shared services. For more
information on the WebSockets Kubernetes deployment, see
[this epic](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/355).

View File

@ -91,8 +91,8 @@ instance, ensure you're purchasing enough seats to
If you are an administrator, you can view the status of your subscription:
1. Go to **Admin Area**.
1. From the left-hand menu, select **License**.
1. On the top bar, select **Menu >** **{admin}** **Admin**.
1. On the left sidebar, select **License**.
The **License** page includes the following details:
@ -264,8 +264,9 @@ Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=',
You can view the exact JSON payload in the administration panel. To view the payload:
1. Navigate to **Admin Area > Settings > Metrics and profiling** and expand **Seat Link**.
1. Click **Preview payload**.
1. On the top bar, select **Menu >** **{admin}** **Admin**.
1. On the left sidebar, select **Settings > Metrics and profiling** and expand **Seat Link**.
1. Select **Preview payload**.
#### Disable Seat Link
@ -273,7 +274,12 @@ You can view the exact JSON payload in the administration panel. To view the pay
Seat Link is enabled by default.
To disable this feature, go to **Admin Area > Settings > Metrics and profiling**, uncheck the **Enable Seat Link** checkbox > **Save changes**.
To disable this feature:
1. On the top bar, select **Menu >** **{admin}** **Admin**.
1. On the left sidebar, select **Settings > Metrics and profiling** and expand **Seat Link**.
1. Clear the **Enable Seat Link** checkbox.
1. Select **Save changes**.
To disable Seat Link in an Omnibus GitLab installation, and prevent it from
being configured in the future through the administration panel, set the following in

View File

@ -174,6 +174,8 @@ include:
- remote: https://gitlab.com/gitlab-org/gitlab/-/raw/v13.3.0-ee/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
```
Alternatively, you can use the [v13.12 Auto DevOps templates archive](https://gitlab.com/hfyngvason/auto-devops-v13-12).
### Ignore warnings and continue deploying
If you are certain that the new chart version is safe to be deployed, you can add

View File

@ -23,7 +23,8 @@ For larger organizations, you can also create [subgroups](subgroups/index.md).
To view groups:
1. In the top menu, select **Groups > Your Groups**. All groups you are a member of are displayed.
1. On the top bar, select **Menu > Groups**.
1. Select **Your Groups**. All groups you are a member of are displayed.
1. To view a list of public groups, select **Explore public groups**.
You can also view groups by namespace.
@ -48,8 +49,8 @@ For example, consider a user named Alex:
To create a group:
1. From the top menu, either:
- Select **Groups > Your Groups**, and on the right, select the **New group** button.
1. On the top bar, either:
- Select **Menu > Groups**, and on the right, select **Create group**.
- To the left of the search box, select the plus sign and then **New group**.
1. Select **Create group**.
1. For the **Group name**, use only:
@ -75,7 +76,8 @@ For details about groups, watch [GitLab Namespaces (users, groups and subgroups)
You can give a user access to all projects in a group.
1. From the top menu, select **Groups > Your Groups**.
1. On the top bar, select **Menu > Groups**.
1. Select **Your Groups**.
1. Find your group and select it.
1. From the left sidebar, select **Members**.
1. Fill in the fields.
@ -86,7 +88,8 @@ You can give a user access to all projects in a group.
As a user, you can request to be a member of a group, if an administrator allows it.
1. From the top menu, select **Groups > Your Groups**.
1. On the top bar, select **Menu > Groups**.
1. Select **Your Groups**.
1. Find the group and select it.
1. Under the group name, select **Request Access**.
@ -101,7 +104,8 @@ If you change your mind before your request is approved, select
As a group owner, you can prevent non-members from requesting access to
your group.
1. From the top menu, select **Groups > Your Groups**.
1. On the top bar, select **Menu > Groups**.
1. Select **Your Groups**.
1. Find the group and select it.
1. From the left menu, select **Settings > General**.
1. Expand the **Permissions, LFS, 2FA** section.
@ -246,9 +250,10 @@ These Group Activity Analytics can be enabled with the `group_activity_analytics
You can view the most recent actions taken in a group.
1. From the top menu, select **Groups > Your Groups**.
1. On the top bar, select **Menu > Groups**.
1. Select **Your Groups**.
1. Find the group and select it.
1. From the left menu, select **Group overview > Activity**.
1. On the left sidebar, select **Group information > Activity**.
To view the activity feed in Atom format, select the
**RSS** (**{rss}**) icon.

View File

@ -134,3 +134,42 @@ To disable it:
```ruby
Feature.disable(:local_file_reviews)
```
## Show merge request conflicts in diff
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/232484) in GitLab 13.5.
> - [Deployed behind a feature flag](../../feature_flags.md), disabled by default.
> - Disabled on GitLab.com.
> - Not recommended for production use.
> - To use in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-merge-request-conflicts-in-diff). **(FREE SELF)**
This in-development feature might not be available for your use. There can be
[risks when enabling features still in development](../../feature_flags.md#risks-when-enabling-features-still-in-development).
Refer to this feature's version history for more details.
To avoid displaying the changes that are already on the target branch in the diff,
we compare the merge request's source branch with HEAD of the target branch.
When there are conflicts between the source and target branch, we show the
conflicts on the merge request diff as well:
![Example of a conflict shown in a merge request diff](img/conflict_ui_v14_0.png)
### Enable or disable merge request conflicts in diff **(FREE SELF)**
Merge request conflicts in diff is under development and not ready for production use. It is
deployed behind a feature flag that is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can enable it.
To enable it:
```ruby
Feature.enable(:display_merge_conflicts_in_diff)
```
To disable it:
```ruby
Feature.disable(:display_merge_conflicts_in_diff)
```

View File

@ -168,7 +168,8 @@ Click on **Compare branches and continue** to go to the
After forking a project and applying your local changes, complete the following steps to
create a merge request from your fork to contribute back to the main project:
1. Go to **Projects > Your Projects** and select your fork of the repository.
1. On the top bar, select **Menu > Project**.
1. Select **Your Projects**, then select your fork of the repository.
1. In the left menu, go to **Merge requests**, and click **New merge request**.
1. In the **Source branch** drop-down list box, select your branch in your forked repository as the source branch.
1. In the **Target branch** drop-down list box, select the branch from the upstream repository as the target branch.

Binary file not shown (new image, 8.2 KiB).

View File

@ -79,7 +79,7 @@ Example `.compliance-gitlab-ci.yml`
```yaml
# Allows compliance team to control the ordering and interweaving of stages/jobs.
# Stages without jobs defined will remain hidden.
stages:
stages:
- pre-compliance
- build
- test
@ -112,7 +112,7 @@ sanity check:
- when: always
allow_failure: false
before_script:
- "# No before scripts."
- "# No before scripts."
script:
- echo "running $FOO"
after_script:
@ -298,10 +298,11 @@ To find an archived project:
1. If you:
- Have the project's URL, open the project's page in your browser.
- Don't have the project's URL:
1. Click **Projects > Explore projects**.
1. In the **Sort projects** dropdown box, select **Show archived projects**.
1. In the **Filter by name** field, provide the project's name.
1. Click the link to the project to open its **Details** page.
1. On the top bar, select **Menu > Project**.
1. Select **Explore projects**.
1. In the **Sort projects** dropdown box, select **Show archived projects**.
1. In the **Filter by name** field, provide the project's name.
1. Click the link to the project to open its **Details** page.
Next, to unarchive the project:

View File

@ -13,8 +13,8 @@ code are saved in projects, and most features are in the scope of projects.
You can explore other popular projects available on GitLab. To explore projects:
1. Click **Projects** in the navigation bar.
1. Click **Explore Projects**.
1. On the top bar, select **Menu > Project**.
1. Select **Explore Projects**.
GitLab displays a list of projects, sorted by last updated date. To view
projects with the most [stars](#star-a-project), click **Most stars**. To view
@ -197,8 +197,8 @@ To star a project:
To view your starred projects:
1. Click **Projects** in the navigation bar.
1. Click **Starred Projects**.
1. On the top bar, select **Menu > Project**.
1. Select **Starred Projects**.
1. GitLab displays information about your starred projects, including:
- Project description, including name, description, and icon

View File

@ -194,7 +194,7 @@ author, type, and action. Also, you can sort them by
## Projects
You can search through your projects from the left menu, by clicking the menu bar, then **Projects**.
You can search through your projects from the top bar, by selecting **Menu > Projects**.
On the field **Filter by name**, type the project or group name you want to find, and GitLab
filters them for you as you type.

View File

@ -113,6 +113,17 @@ module Gitlab
end
end
# Returns the values of the given set.
#
# raw_key - The key of the set to check.
def self.values_from_set(raw_key)
key = cache_key_for(raw_key)
Redis::Cache.with do |redis|
redis.smembers(key)
end
end
# Sets multiple keys to given values.
#
# mapping - A Hash mapping the cache keys to their values.

View File

@ -1,5 +1,5 @@
.dast-auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.7"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v2.6.0"
dast_environment_deploy:
extends: .dast-auto-deploy

View File

@ -1,5 +1,5 @@
.auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.7"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v2.6.0"
dependencies: []
review:
@ -91,7 +91,7 @@ canary:
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
- auto-deploy deploy canary
- auto-deploy deploy canary 50
environment:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
@ -114,7 +114,6 @@ canary:
- auto-deploy create_secret
- auto-deploy deploy
- auto-deploy delete canary
- auto-deploy delete rollout
- auto-deploy persist_environment_url
environment:
name: production
@ -163,9 +162,7 @@ production_manual:
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
- auto-deploy deploy rollout $ROLLOUT_PERCENTAGE
- auto-deploy scale stable $((100-ROLLOUT_PERCENTAGE))
- auto-deploy delete canary
- auto-deploy deploy canary $ROLLOUT_PERCENTAGE
- auto-deploy persist_environment_url
environment:
name: production

View File

@ -11,6 +11,25 @@ module Gitlab
self.inheritance_column = :_type_disabled
end
end
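# Iterates over the given table in ActiveRecord batches. Must not be called
# inside an open transaction; migrations typically call disable_ddl_transaction!.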
def each_batch(table_name, scope: ->(table) { table.all }, of: 1000)
if transaction_open?
raise <<~MSG.squish
each_batch should not run inside a transaction, you can disable
transactions by calling disable_ddl_transaction! in the body of
your migration class
MSG
end
scope.call(define_batchable_model(table_name))
.each_batch(of: of) { |batch| yield batch }
end
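# Like each_batch, but yields the [min_id, max_id] range of each batch,
# which is convenient for raw SQL statements in migrations.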
def each_batch_range(table_name, scope: ->(table) { table.all }, of: 1000)
each_batch(table_name, scope: scope, of: of) do |batch|
yield batch.pluck('MIN(id), MAX(id)').first
end
end
end
end
end

View File

@ -6,6 +6,13 @@ module Gitlab
class PullRequestsReviewsImporter
include ParallelScheduling
def initialize(...)
super
@merge_requests_already_imported_cache_key =
"github-importer/merge_request/already-imported/#{project.id}"
end
def importer_class
PullRequestReviewImporter
end
@ -22,11 +29,31 @@ module Gitlab
:pull_request_reviews
end
def id_for_already_imported_cache(merge_request)
merge_request.id
def id_for_already_imported_cache(review)
review.id
end
def each_object_to_import
def each_object_to_import(&block)
if use_github_review_importer_query_only_unimported_merge_requests?
each_merge_request_to_import(&block)
else
each_merge_request_skipping_imported(&block)
end
end
private
attr_reader :merge_requests_already_imported_cache_key
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62036#note_587181108
def use_github_review_importer_query_only_unimported_merge_requests?
Feature.enabled?(
:github_review_importer_query_only_unimported_merge_requests,
default_enabled: :yaml
)
end
def each_merge_request_skipping_imported
project.merge_requests.find_each do |merge_request|
next if already_imported?(merge_request)
@ -40,6 +67,67 @@ module Gitlab
mark_as_imported(merge_request)
end
end
# The worker can be interrupted, by rate limit for instance,
# in different situations. To avoid requesting already imported data,
# if the worker is interrupted:
# - before importing all reviews of a merge request
# The reviews page is cached with the `PageCounter`, by merge request.
# - before importing all merge requests reviews
# Merge requests that had all the reviews imported are cached with
# `mark_merge_request_reviews_imported`
def each_merge_request_to_import
each_review_page do |page, merge_request|
page.objects.each do |review|
next if already_imported?(review)
review.merge_request_id = merge_request.id
yield(review)
mark_as_imported(review)
end
end
end
def each_review_page
merge_requests_to_import.find_each do |merge_request|
# The page counter needs to be scoped by merge request to avoid skipping
# pages of reviews from already imported merge requests.
page_counter = PageCounter.new(project, page_counter_id(merge_request))
repo = project.import_source
options = collection_options.merge(page: page_counter.current)
client.each_page(collection_method, repo, merge_request.iid, options) do |page|
next unless page_counter.set(page.number)
yield(page, merge_request)
end
# Avoid unnecessary Redis cache keys after the work is done.
page_counter.expire!
mark_merge_request_reviews_imported(merge_request)
end
end
# Returns only the merge requests that still have reviews to be imported.
def merge_requests_to_import
project.merge_requests.where.not(id: already_imported_merge_requests) # rubocop: disable CodeReuse/ActiveRecord
end
def already_imported_merge_requests
Gitlab::Cache::Import::Caching.values_from_set(merge_requests_already_imported_cache_key)
end
def page_counter_id(merge_request)
"merge_request/#{merge_request.id}/#{collection_method}"
end
def mark_merge_request_reviews_imported(merge_request)
Gitlab::Cache::Import::Caching.set_add(
merge_requests_already_imported_cache_key,
merge_request.id
)
end
end
end
end

View File

@ -26,6 +26,10 @@ module Gitlab
def current
Gitlab::Cache::Import::Caching.read_integer(cache_key) || 1
end
def expire!
Gitlab::Cache::Import::Caching.expire(cache_key, 0)
end
end
end
end

View File

@ -6,7 +6,6 @@ module Gitlab
BASE_EXCLUDED_PATTERNS = [%r{\.latest\.}].freeze
TEMPLATES_WITH_LATEST_VERSION = {
'Jobs/Deploy' => true,
'Jobs/Browser-Performance-Testing' => true,
'Security/API-Fuzzing' => true,
'Security/DAST' => true,

View File

@ -18754,18 +18754,6 @@ msgstr ""
msgid "Job was retried"
msgstr ""
msgid "JobName|build-job"
msgstr ""
msgid "JobName|deploy-app"
msgstr ""
msgid "JobName|lint-test"
msgstr ""
msgid "JobName|unit-test"
msgstr ""
msgid "Jobs"
msgstr ""
@ -23984,10 +23972,10 @@ msgstr ""
msgid "PipelineCharts|Total:"
msgstr ""
msgid "PipelineEditorTutorial|A typical GitLab pipeline consists of three stages: build, test and deploy. Each stage can have one or more jobs."
msgid "PipelineEditorTutorial|Browse %{linkStart}CI/CD examples and templates%{linkEnd}"
msgstr ""
msgid "PipelineEditorTutorial|Browse %{linkStart}CI/CD examples and templates%{linkEnd}"
msgid "PipelineEditorTutorial|Commit the file to your repository. The pipeline then runs automatically."
msgstr ""
msgid "PipelineEditorTutorial|Get started with GitLab CI/CD"
@ -23999,9 +23987,6 @@ msgstr ""
msgid "PipelineEditorTutorial|If youre using a self-managed GitLab instance, %{linkStart}make sure your instance has runners available.%{linkEnd}"
msgstr ""
msgid "PipelineEditorTutorial|In the example below, %{codeStart}build%{codeEnd} and %{codeStart}deploy%{codeEnd} each contain one job, and %{codeStart}test%{codeEnd} contains two jobs. Your scripts run in jobs like these."
msgstr ""
msgid "PipelineEditorTutorial|Learn more about %{linkStart}GitLab CI/CD concepts%{linkEnd}"
msgstr ""
@ -24011,18 +23996,24 @@ msgstr ""
msgid "PipelineEditorTutorial|Resources to help with your CI/CD configuration:"
msgstr ""
msgid "PipelineEditorTutorial|Select the pipeline ID to view the full details about your first pipeline run."
msgstr ""
msgid "PipelineEditorTutorial|The pipeline stages and jobs are defined in a %{codeStart}.gitlab-ci.yml%{codeEnd} file. You can edit, visualize and validate the syntax in this file by using the Pipeline Editor."
msgstr ""
msgid "PipelineEditorTutorial|The pipeline status is at the top of the page."
msgstr ""
msgid "PipelineEditorTutorial|This template creates a simple test pipeline. To use it:"
msgstr ""
msgid "PipelineEditorTutorial|Use the Visualize and Lint tabs in the Pipeline Editor to visualize your pipeline and check for any errors or warnings before committing your changes."
msgstr ""
msgid "PipelineEditorTutorial|View %{linkStart}.gitlab-ci.yml syntax reference%{linkEnd}"
msgstr ""
msgid "PipelineEditorTutorial|You can use %{linkStart}CI/CD examples and templates%{linkEnd} to get your first %{codeStart}.gitlab-ci.yml%{codeEnd} configuration file started. Your first pipeline runs when you commit the changes."
msgstr ""
msgid "PipelineEditorTutorial|⚙️ Pipeline configuration reference"
msgstr ""
@ -30762,15 +30753,6 @@ msgstr ""
msgid "Stage"
msgstr ""
msgid "StageName|Build"
msgstr ""
msgid "StageName|Deploy"
msgstr ""
msgid "StageName|Test"
msgstr ""
msgid "Standard"
msgstr ""

View File

@ -1,13 +1,11 @@
import { getByRole } from '@testing-library/dom';
import { mount } from '@vue/test-utils';
import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
import PipelineVisualReference from '~/pipeline_editor/components/drawer/ui/pipeline_visual_reference.vue';
describe('First pipeline card', () => {
let wrapper;
const defaultProvide = {
ciExamplesHelpPagePath: '/pipelines/examples',
runnerHelpPagePath: '/help/runners',
};
@ -20,9 +18,9 @@ describe('First pipeline card', () => {
};
const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name }).href;
const findPipelinesLink = () => getLinkByName(/examples and templates/i);
const findRunnersLink = () => getLinkByName(/make sure your instance has runners available/i);
const findVisualReference = () => wrapper.findComponent(PipelineVisualReference);
const findInstructionsList = () => wrapper.find('ol');
const findAllInstructions = () => findInstructionsList().findAll('li');
beforeEach(() => {
createComponent();
@ -37,11 +35,11 @@ describe('First pipeline card', () => {
});
it('renders the content', () => {
expect(findVisualReference().exists()).toBe(true);
expect(findInstructionsList().exists()).toBe(true);
expect(findAllInstructions()).toHaveLength(3);
});
it('renders the links', () => {
it('renders the link', () => {
expect(findRunnersLink()).toContain(defaultProvide.runnerHelpPagePath);
expect(findPipelinesLink()).toContain(defaultProvide.ciExamplesHelpPagePath);
});
});

View File

@ -1,31 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import DemoJobPill from '~/pipeline_editor/components/drawer/ui/demo_job_pill.vue';
import PipelineVisualReference from '~/pipeline_editor/components/drawer/ui/pipeline_visual_reference.vue';
describe('Demo job pill', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMount(PipelineVisualReference);
};
const findAllDemoJobPills = () => wrapper.findAllComponents(DemoJobPill);
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
it('renders all stage names', () => {
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.build);
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.test);
expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.deploy);
});
it('renders all job pills', () => {
expect(findAllDemoJobPills()).toHaveLength(4);
});
});

View File

@ -88,6 +88,18 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
end
describe '.values_from_set' do
it 'returns empty list when the set is empty' do
expect(described_class.values_from_set('foo')).to eq([])
end
it 'returns the set list of values' do
described_class.set_add('foo', 10)
expect(described_class.values_from_set('foo')).to eq(['10'])
end
end
describe '.write_multiple' do
it 'sets multiple keys when key_prefix not set' do
mapping = { 'foo' => 10, 'bar' => 20 }

View File

@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::DynamicModelHelpers do
let(:including_class) { Class.new.include(described_class) }
let(:table_name) { 'projects' }
describe '#define_batchable_model' do
subject { including_class.new.define_batchable_model(table_name) }
let(:including_class) { Class.new.include(described_class) }
let(:table_name) { 'projects' }
it 'is an ActiveRecord model' do
expect(subject.ancestors).to include(ActiveRecord::Base)
end
@ -25,4 +25,86 @@ RSpec.describe Gitlab::Database::DynamicModelHelpers do
expect(subject.inheritance_column).to eq('_type_disabled')
end
end
describe '#each_batch' do
subject { including_class.new }
before do
create_list(:project, 2)
end
context 'when no transaction is open' do
before do
allow(subject).to receive(:transaction_open?).and_return(false)
end
it 'iterates table in batches' do
each_batch_size = ->(&block) do
subject.each_batch(table_name, of: 1) do |batch|
block.call(batch.size)
end
end
expect { |b| each_batch_size.call(&b) }
.to yield_successive_args(1, 1)
end
end
context 'when transaction is open' do
before do
allow(subject).to receive(:transaction_open?).and_return(true)
end
it 'raises an error' do
expect { subject.each_batch(table_name, of: 1) { |batch| batch.size } }
.to raise_error(RuntimeError, /each_batch should not run inside a transaction/)
end
end
end
describe '#each_batch_range' do
subject { including_class.new }
let(:first_project) { create(:project) }
let(:second_project) { create(:project) }
context 'when no transaction is open' do
before do
allow(subject).to receive(:transaction_open?).and_return(false)
end
it 'iterates table in batch ranges' do
expect { |b| subject.each_batch_range(table_name, of: 1, &b) }
.to yield_successive_args(
[first_project.id, first_project.id],
[second_project.id, second_project.id]
)
end
it 'yields only one batch if bigger than the table size' do
expect { |b| subject.each_batch_range(table_name, of: 2, &b) }
.to yield_successive_args([first_project.id, second_project.id])
end
it 'makes it possible to apply a scope' do
each_batch_limited = ->(&b) do
subject.each_batch_range(table_name, scope: ->(table) { table.limit(1) }, of: 1, &b)
end
expect { |b| each_batch_limited.call(&b) }
.to yield_successive_args([first_project.id, first_project.id])
end
end
context 'when transaction is open' do
before do
allow(subject).to receive(:transaction_open?).and_return(true)
end
it 'raises an error' do
expect { subject.each_batch_range(table_name, of: 1) { 1 } }
.to raise_error(RuntimeError, /each_batch should not run inside a transaction/)
end
end
end
end

View File

@ -27,30 +27,100 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetches the merged pull requests data' do
merge_request = create(
:merged_merge_request,
iid: 999,
source_project: project,
target_project: project
)
context 'when github_review_importer_query_only_unimported_merge_requests is enabled' do
before do
stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: true)
end
review = double
let(:merge_request) do
create(
:merged_merge_request,
iid: 999,
source_project: project,
target_project: project
)
end
expect(review)
.to receive(:merge_request_id=)
.with(merge_request.id)
let(:review) { double(id: 1) }
allow(client)
.to receive(:pull_request_reviews)
.exactly(:once) # ensure to be cached on the second call
.with('github/repo', merge_request.iid)
.and_return([review])
it 'fetches the pull requests reviews data' do
page = double(objects: [review], number: 1)
expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(review)
expect(review)
.to receive(:merge_request_id=)
.with(merge_request.id)
subject.each_object_to_import {}
expect(client)
.to receive(:each_page)
.exactly(:once) # ensure to be cached on the second call
.with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 1)
.and_yield(page)
expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(review)
subject.each_object_to_import {}
end
it 'skips cached pages' do
Gitlab::GithubImport::PageCounter
.new(project, "merge_request/#{merge_request.id}/pull_request_reviews")
.set(2)
expect(review).not_to receive(:merge_request_id=)
expect(client)
.to receive(:each_page)
.exactly(:once) # ensure to be cached on the second call
.with(:pull_request_reviews, 'github/repo', merge_request.iid, page: 2)
subject.each_object_to_import {}
end
it 'skips cached merge requests' do
Gitlab::Cache::Import::Caching.set_add(
"github-importer/merge_request/already-imported/#{project.id}",
merge_request.id
)
expect(review).not_to receive(:merge_request_id=)
expect(client).not_to receive(:each_page)
subject.each_object_to_import {}
end
end
context 'when github_review_importer_query_only_unimported_merge_requests is disabled' do
before do
stub_feature_flags(github_review_importer_query_only_unimported_merge_requests: false)
end
it 'fetches the merged pull requests data' do
merge_request = create(
:merged_merge_request,
iid: 999,
source_project: project,
target_project: project
)
review = double
expect(review)
.to receive(:merge_request_id=)
.with(merge_request.id)
allow(client)
.to receive(:pull_request_reviews)
.exactly(:once) # ensure to be cached on the second call
.with('github/repo', merge_request.iid)
.and_return([review])
expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(review)
subject.each_object_to_import {}
end
end
end
end

View File

@ -31,4 +31,15 @@ RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do
expect(counter.current).to eq(2)
end
end
describe '#expire!' do
it 'expires the current page counter' do
counter.set(2)
counter.expire!
expect(Gitlab::Cache::Import::Caching.read_integer(counter.cache_key)).to be_nil
expect(counter.current).to eq(1)
end
end
end

View File

@ -471,16 +471,25 @@ eos
end
it_behaves_like 'a mentionable' do
subject { create(:project, :repository).commit }
subject(:commit) { create(:project, :repository).commit }
let(:author) { create(:user, email: subject.author_email) }
let(:backref_text) { "commit #{subject.id}" }
let(:set_mentionable_text) do
->(txt) { allow(subject).to receive(:safe_message).and_return(txt) }
->(txt) { allow(commit).to receive(:safe_message).and_return(txt) }
end
# Include the subject in the repository stub.
let(:extra_commits) { [subject] }
let(:extra_commits) { [commit] }
it 'uses the CachedMarkdownField cache instead of the Mentionable cache', :use_clean_rails_redis_caching do
expect(commit.title_html).not_to be_present
commit.all_references(project.owner).all
expect(commit.title_html).to be_present
expect(Rails.cache.read("banzai/commit:#{commit.id}/safe_message/single_line")).to be_nil
end
end
describe '#hook_attrs' do

View File

@ -66,7 +66,7 @@ RSpec.shared_examples 'a mentionable' do
expect(subject.gfm_reference).to eq(backref_text)
end
it "extracts references from its reference property" do
it "extracts references from its reference property", :clean_gitlab_redis_cache do
# De-duplicate and omit itself
refs = subject.referenced_mentionables
expect(refs.size).to eq(6)
@ -98,7 +98,7 @@ RSpec.shared_examples 'a mentionable' do
end
end
it 'creates cross-reference notes' do
it 'creates cross-reference notes', :clean_gitlab_redis_cache do
mentioned_objects = [mentioned_issue, mentioned_mr, mentioned_commit,
ext_issue, ext_mr, ext_commit]

View File

@ -138,7 +138,7 @@ RSpec.describe ProcessCommitWorker do
end
end
describe '#update_issue_metrics' do
describe '#update_issue_metrics', :clean_gitlab_redis_cache do
context 'when commit has issue reference' do
subject(:update_metrics_and_reload) do
-> {