Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-05-31 00:09:17 +00:00
parent ec246c2be1
commit 68e6b85dfa
30 changed files with 194 additions and 299 deletions

View file

@ -132,16 +132,6 @@ export default {
required: false,
default: false,
},
alertsEndpoint: {
type: String,
required: false,
default: null,
},
prometheusAlertsAvailable: {
type: Boolean,
required: false,
default: false,
},
rearrangePanelsAvailable: {
type: Boolean,
required: false,
@ -461,9 +451,7 @@ export default {
:settings-path="settingsPath"
:clipboard-text="generatePanelUrl(expandedPanel.group, expandedPanel.panel)"
:graph-data="expandedPanel.panel"
:alerts-endpoint="alertsEndpoint"
:height="600"
:prometheus-alerts-available="prometheusAlertsAvailable"
@timerangezoom="onTimeRangeZoom"
>
<template #top-left>
@ -526,8 +514,6 @@ export default {
:settings-path="settingsPath"
:clipboard-text="generatePanelUrl(groupData.group, graphData)"
:graph-data="graphData"
:alerts-endpoint="alertsEndpoint"
:prometheus-alerts-available="prometheusAlertsAvailable"
@timerangezoom="onTimeRangeZoom"
@expand="onExpandPanel(groupData.group, graphData)"
/>

View file

@ -1,43 +0,0 @@
import axios from '~/lib/utils/axios_utils';
const mapAlert = ({ runbook_url, ...alert }) => {
return { runbookUrl: runbook_url, ...alert };
};
export default class AlertsService {
constructor({ alertsEndpoint }) {
this.alertsEndpoint = alertsEndpoint;
}
getAlerts() {
return axios.get(this.alertsEndpoint).then((resp) => mapAlert(resp.data));
}
createAlert({ prometheus_metric_id, operator, threshold, runbookUrl }) {
return axios
.post(this.alertsEndpoint, {
prometheus_metric_id,
operator,
threshold,
runbook_url: runbookUrl,
})
.then((resp) => mapAlert(resp.data));
}
// eslint-disable-next-line class-methods-use-this
readAlert(alertPath) {
return axios.get(alertPath).then((resp) => mapAlert(resp.data));
}
// eslint-disable-next-line class-methods-use-this
updateAlert(alertPath, { operator, threshold, runbookUrl }) {
return axios
.put(alertPath, { operator, threshold, runbook_url: runbookUrl })
.then((resp) => mapAlert(resp.data));
}
// eslint-disable-next-line class-methods-use-this
deleteAlert(alertPath) {
return axios.delete(alertPath).then((resp) => resp.data);
}
}

View file

@ -122,7 +122,14 @@ class Projects::CompareController < Projects::ApplicationController
end
def define_commits
@commits = compare.present? ? set_commits_for_rendering(@compare.commits) : []
strong_memoize(:commits) do
if compare.present?
commits = compare.commits.with_markdown_cache.with_latest_pipeline(head_ref)
set_commits_for_rendering(commits)
else
[]
end
end
end
def define_diffs
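
The new `define_commits` wraps the lookup in `strong_memoize(:commits)` and adds the `with_markdown_cache` / `with_latest_pipeline(head_ref)` scopes so the commit list is built once per request with its pipeline and Markdown-cache data preloaded. A simplified, non-authoritative sketch of what the memoization helper does (the real one is `Gitlab::Utils::StrongMemoize`; this stand-in only illustrates the caching behaviour):

```ruby
# Simplified stand-in for Gitlab::Utils::StrongMemoize, for illustration only.
module SimpleStrongMemoize
  # Caches the block's result in an instance variable named after `name`,
  # returning the cached value on later calls, even if it is nil, false, or [].
  def strong_memoize(name)
    ivar = :"@#{name}"
    if instance_variable_defined?(ivar)
      instance_variable_get(ivar)
    else
      instance_variable_set(ivar, yield)
    end
  end
end
```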

View file

@ -19,10 +19,6 @@ module Projects
feature_category :incident_management
urgency :low
def index
render json: serialize_as_json(alerts)
end
def show
render json: serialize_as_json(alert)
end

View file

@ -102,7 +102,6 @@ module EnvironmentsHelper
'metrics_endpoint' => additional_metrics_project_environment_path(project, environment, format: :json),
'dashboard_endpoint' => metrics_dashboard_project_environment_path(project, environment, format: :json),
'deployments_endpoint' => project_environment_deployments_path(project, environment, format: :json),
'alerts_endpoint' => project_prometheus_alerts_path(project, environment_id: environment.id, format: :json),
'operations_settings_path' => project_settings_operations_path(project),
'can_access_operations_settings' => can?(current_user, :admin_operations, project).to_s,
'panel_preview_endpoint' => project_metrics_dashboards_builder_path(project, format: :json)

View file

@ -40,7 +40,10 @@ class Compare
end
def commits
@commits ||= Commit.decorate(@compare.commits, project)
@commits ||= begin
decorated_commits = Commit.decorate(@compare.commits, project)
CommitCollection.new(project, decorated_commits)
end
end
def start_commit
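
Returning a `CommitCollection` instead of a plain array is what allows callers such as `Projects::CompareController#define_commits` above to chain batch helpers like `with_latest_pipeline` and `with_markdown_cache`. A hypothetical sketch of the batching idea behind such a wrapper; the class, the `latest_pipeline=` writer, and the query are illustrative assumptions, not GitLab's actual API:

```ruby
# Hypothetical commit-collection wrapper: fetch associated data with one query
# for the whole set of commits instead of one query per commit (no N+1).
class SimpleCommitCollection
  include Enumerable

  def initialize(project, commits)
    @project = project
    @commits = commits
  end

  def each(&block)
    @commits.each(&block)
  end

  def with_latest_pipeline(ref)
    # One lookup for every SHA in the collection, grouped in memory afterwards.
    pipelines = @project.ci_pipelines
                        .where(ref: ref, sha: @commits.map(&:sha))
                        .group_by(&:sha)
    # `latest_pipeline=` is an assumed writer on the commit objects.
    @commits.each { |commit| commit.latest_pipeline = pipelines[commit.sha]&.last }
    self
  end
end
```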

View file

@ -520,7 +520,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
namespace :prometheus do
resources :alerts, constraints: { id: /\d+/ }, only: [:index, :show] do # rubocop: disable Cop/PutProjectRoutesUnderScope
resources :alerts, constraints: { id: /\d+/ }, only: [:show] do # rubocop: disable Cop/PutProjectRoutesUnderScope
post :notify, on: :collection # rubocop:todo Cop/PutProjectRoutesUnderScope
member do
get :metrics_dashboard # rubocop:todo Cop/PutProjectRoutesUnderScope

View file

@ -0,0 +1,9 @@
---
table_name: issuable_resource_links
classes:
- IncidentManagement::IssuableResourceLink
feature_categories:
- incident_management
description: Persists resource links for an issuable, particularly incidents.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88417
milestone: '15.1'

View file

@ -0,0 +1,14 @@
# frozen_string_literal: true
class CreateIssuableResourceLinks < Gitlab::Database::Migration[2.0]
def change
create_table :issuable_resource_links do |t|
t.references :issue, null: false, foreign_key: { on_delete: :cascade }, index: true
t.text :link_text, null: true, limit: 255
t.text :link, null: false, limit: 2200
t.integer :link_type, null: false, limit: 2, default: 0 # general resource link
t.timestamps_with_timezone null: false
end
end
end
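
With GitLab's migration helpers, the `limit:` options on the `t.text` columns are enforced as PostgreSQL `CHECK` constraints, which is why the `structure.sql` hunk further down carries `char_length(link) <= 2200` and `char_length(link_text) <= 255`. For orientation, a hedged sketch of what the backing model might look like; only the class name (from the `db/docs` entry above) and the columns come from this commit, while the enum values and validations are assumptions:

```ruby
# Hypothetical model sketch for the new table; enum values and validations
# are assumptions made for illustration, not part of this commit.
module IncidentManagement
  class IssuableResourceLink < ApplicationRecord
    self.table_name = 'issuable_resource_links'

    belongs_to :issue

    # link_type is a smallint defaulting to 0 ("general resource link").
    enum link_type: { general: 0 }

    validates :link, presence: true, length: { maximum: 2200 }
    validates :link_text, length: { maximum: 255 }
  end
end
```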

View file

@ -0,0 +1,23 @@
# frozen_string_literal: true
class FinalizeBackfillNullNoteDiscussionIds < Gitlab::Database::Migration[2.0]
MIGRATION = 'BackfillNoteDiscussionId'
BATCH_SIZE = 10_000
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
Gitlab::BackgroundMigration.steal(MIGRATION)
define_batchable_model('notes').where(discussion_id: nil).each_batch(of: BATCH_SIZE) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::BackfillNoteDiscussionId.new.perform(*range)
end
end
def down
end
end
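
The `up` method first drains any queued `BackfillNoteDiscussionId` jobs with `Gitlab::BackgroundMigration.steal`, then re-runs the backfill synchronously, handing each batch's inclusive `[MIN(id), MAX(id)]` range to `perform`. A small self-contained sketch of that range arithmetic (plain Ruby, not GitLab's batching API), which matches the spec later in this commit where `BATCH_SIZE` is stubbed to 2 and three remaining records produce `perform(record1.id, record2.id)` followed by `perform(record3.id, record3.id)`:

```ruby
# Illustration only: fixed-size batches over pending ids, each reduced to an
# inclusive [min, max] range, mirroring each_batch + pluck('MIN(id)', 'MAX(id)').
pending_note_ids = [11, 12, 13] # example ids of notes still missing a discussion_id
batch_size = 2

pending_note_ids.each_slice(batch_size) do |batch|
  puts "perform(#{batch.min}, #{batch.max})"
end
# => perform(11, 12)
# => perform(13, 13)
```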

View file

@ -0,0 +1,15 @@
# frozen_string_literal: true
class RemoveNotesNullDiscussionIdTempIndex < Gitlab::Database::Migration[2.0]
INDEX_NAME = 'tmp_index_notes_on_id_where_discussion_id_is_null'
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :notes, INDEX_NAME
end
def down
add_concurrent_index :notes, :id, where: 'discussion_id IS NULL', name: INDEX_NAME
end
end

View file

@ -0,0 +1 @@
377a05674a9f0fb3a2e77ec25effe8b380381d6b827d987afd5a717f73d6d782

View file

@ -0,0 +1 @@
5defac462c6b6bf9e1af542d07bfe23af52ed01ddbb77f43bb206f08090fc098

View file

@ -0,0 +1 @@
265effde7f2abd222d5b7101c6d03a185ee3deeb6be772016f06e1eade84c1f3

View file

@ -16304,6 +16304,27 @@ CREATE SEQUENCE issuable_metric_images_id_seq
ALTER SEQUENCE issuable_metric_images_id_seq OWNED BY issuable_metric_images.id;
CREATE TABLE issuable_resource_links (
id bigint NOT NULL,
issue_id bigint NOT NULL,
link_text text,
link text NOT NULL,
link_type smallint DEFAULT 0 NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
CONSTRAINT check_67be6729db CHECK ((char_length(link) <= 2200)),
CONSTRAINT check_b137147e0b CHECK ((char_length(link_text) <= 255))
);
CREATE SEQUENCE issuable_resource_links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE issuable_resource_links_id_seq OWNED BY issuable_resource_links.id;
CREATE TABLE issuable_severities (
id bigint NOT NULL,
issue_id bigint NOT NULL,
@ -22971,6 +22992,8 @@ ALTER TABLE ONLY ip_restrictions ALTER COLUMN id SET DEFAULT nextval('ip_restric
ALTER TABLE ONLY issuable_metric_images ALTER COLUMN id SET DEFAULT nextval('issuable_metric_images_id_seq'::regclass);
ALTER TABLE ONLY issuable_resource_links ALTER COLUMN id SET DEFAULT nextval('issuable_resource_links_id_seq'::regclass);
ALTER TABLE ONLY issuable_severities ALTER COLUMN id SET DEFAULT nextval('issuable_severities_id_seq'::regclass);
ALTER TABLE ONLY issuable_slas ALTER COLUMN id SET DEFAULT nextval('issuable_slas_id_seq'::regclass);
@ -24887,6 +24910,9 @@ ALTER TABLE ONLY ip_restrictions
ALTER TABLE ONLY issuable_metric_images
ADD CONSTRAINT issuable_metric_images_pkey PRIMARY KEY (id);
ALTER TABLE ONLY issuable_resource_links
ADD CONSTRAINT issuable_resource_links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY issuable_severities
ADD CONSTRAINT issuable_severities_pkey PRIMARY KEY (id);
@ -28187,6 +28213,8 @@ CREATE INDEX index_ip_restrictions_on_group_id ON ip_restrictions USING btree (g
CREATE INDEX index_issuable_metric_images_on_issue_id ON issuable_metric_images USING btree (issue_id);
CREATE INDEX index_issuable_resource_links_on_issue_id ON issuable_resource_links USING btree (issue_id);
CREATE UNIQUE INDEX index_issuable_severities_on_issue_id ON issuable_severities USING btree (issue_id);
CREATE INDEX index_issuable_slas_on_due_at_id_label_applied_issuable_closed ON issuable_slas USING btree (due_at, id) WHERE ((label_applied = false) AND (issuable_closed = false));
@ -29951,8 +29979,6 @@ CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (st
CREATE INDEX tmp_index_merge_requests_draft_and_status ON merge_requests USING btree (id) WHERE ((draft = false) AND (state_id = 1) AND ((title)::text ~* '^(\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP)'::text));
CREATE INDEX tmp_index_notes_on_id_where_discussion_id_is_null ON notes USING btree (id) WHERE (discussion_id IS NULL);
CREATE UNIQUE INDEX tmp_index_on_tmp_project_id_on_namespaces ON namespaces USING btree (tmp_project_id);
CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2);
@ -32592,6 +32618,9 @@ ALTER TABLE ONLY epic_user_mentions
ALTER TABLE ONLY analytics_cycle_analytics_project_stages
ADD CONSTRAINT fk_rails_3ec9fd7912 FOREIGN KEY (end_event_label_id) REFERENCES labels(id) ON DELETE CASCADE;
ALTER TABLE ONLY issuable_resource_links
ADD CONSTRAINT fk_rails_3f0ec6b1cf FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE;
ALTER TABLE ONLY board_assignees
ADD CONSTRAINT fk_rails_3f6f926bd5 FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE;

View file

@ -13,8 +13,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Users can set an HTTP endpoint for a top-level group to receive all audit events about the group, its subgroups, and
projects as structured JSON. Event streaming is only available for top-level groups.
Top-level group owners can manage their audit logs in third-party systems such as Splunk, using the Splunk
[HTTP Event Collector](https://docs.splunk.com/Documentation/Splunk/8.2.2/Data/UsetheHTTPEventCollector). Any service that can receive
Top-level group owners can manage their audit logs in third-party systems. Any service that can receive
structured JSON data can be used as the endpoint.
NOTE:

View file

@ -26,27 +26,8 @@ but contributions are welcome.
### GitLab
Please see [our Docker test environment docs](../../install/digitaloceandocker.md#create-new-gitlab-container)
for how to run GitLab on Docker. When spinning this up with `docker-machine`, ensure
you change a few things:
1. Update the name of the `docker-machine` host. You can see a list of hosts
with `docker-machine ls`.
1. Expose the necessary ports using the `-p` flag. Docker normally doesn't
allow access to any ports it uses outside of the container, so they must be
explicitly exposed.
1. Add any necessary `gitlab.rb` configuration to the
`GITLAB_OMNIBUS_CONFIG` variable.
For example, when the `docker-machine` host we want to use is `do-docker`:
```shell
docker run --detach --name gitlab \
--env GITLAB_OMNIBUS_CONFIG="external_url 'http://$(docker-machine ip do-docker)'; gitlab_rails['gitlab_shell_ssh_port'] = 2222;" \
--hostname $(docker-machine ip do-docker) \
-p 80:80 -p 2222:22 \
gitlab/gitlab-ee:11.5.3-ee.0
```
Please see [our official Docker installation method](../../install/docker.md)
for how to run GitLab on Docker.
### SAML

View file

@ -464,7 +464,10 @@ Page titles are hardcoded as `GitLab` for the same reason.
#### Snowplow Inspector Chrome Extension
Snowplow Inspector Chrome Extension is a browser extension for testing frontend events. This works in production, staging, and local development environments.
Snowplow Inspector Chrome Extension is a browser extension for testing frontend events. This works in production, staging, and local development environments.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For a video tutorial, see the [Snowplow plugin walk through](https://www.youtube.com/watch?v=g4rqnIZ1Mb4).
1. Install [Snowplow Inspector](https://chrome.google.com/webstore/detail/snowplow-inspector/maplkdomeamdlngconidoefjpogkmljm?hl=en).
1. To open the extension, select the Snowplow Inspector icon beside the address bar.

View file

@ -1,150 +1,11 @@
---
stage: Systems
group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
redirect_to: 'docker.md'
remove_date: '2022-08-29'
---
# Digital Ocean and Docker Machine test environment **(FREE SELF)**
This document was moved to [another location](docker.md).
This guide is for quickly testing different versions of GitLab and not
recommended for ease of future upgrades or keeping the data you create.
## Initial setup
This guide configures a Digital Ocean droplet and sets up Docker
locally on either macOS or Linux.
### On macOS
#### Install Docker Desktop
- <https://www.docker.com/products/docker-desktop>
### On Linux
#### Install Docker Engine
- <https://docs.docker.com/engine/installation/linux/>
#### Install Docker Machine
- <https://docs.docker.com/machine/install-machine/>
NOTE:
The rest of the steps are identical for macOS and Linux.
## Create new Docker host
1. Login to Digital Ocean.
1. Generate a new API token at <https://cloud.digitalocean.com/settings/api/tokens>.
This command creates a new Digital Ocean droplet called `gitlab-test-env-do` that acts as a Docker host.
NOTE:
4GB is the minimum requirement for a Docker host that runs more than one GitLab instance.
- RAM: 4GB
- Name: `gitlab-test-env-do`
- Driver: `digitalocean`
1. Set the DO token:
```shell
export DOTOKEN=<your generated token>
```
1. Create the machine:
```shell
docker-machine create \
--driver digitalocean \
--digitalocean-access-token=$DOTOKEN \
--digitalocean-size "4gb" \
gitlab-test-env-do
```
Resource: <https://docs.docker.com/machine/drivers/digital-ocean/>.
## Creating GitLab test instance
### Connect your shell to the new machine
This example creates a GitLab EE 8.10.8 instance.
First connect the Docker client to the Docker host you created previously.
```shell
eval "$(docker-machine env gitlab-test-env-do)"
```
You can add this to your `~/.bash_profile` file to ensure the `docker` client uses the `gitlab-test-env-do` Docker host
### Create new GitLab container
- HTTP port: `8888`
- SSH port: `2222`
- Set `gitlab_shell_ssh_port` using `--env GITLAB_OMNIBUS_CONFIG`
- Hostname: IP of Docker host
- Container name: `gitlab-test-8.10`
- GitLab version: **EE** `8.10.8-ee.0`
#### Set up container settings
```shell
export SSH_PORT=2222
export HTTP_PORT=8888
export VERSION=8.10.8-ee.0
export NAME=gitlab-test-8.10
```
#### Create container
```shell
docker run --detach \
--env GITLAB_OMNIBUS_CONFIG="external_url 'http://$(docker-machine ip gitlab-test-env-do):$HTTP_PORT'; gitlab_rails['gitlab_shell_ssh_port'] = $SSH_PORT;" \
--hostname $(docker-machine ip gitlab-test-env-do) \
-p $HTTP_PORT:$HTTP_PORT -p $SSH_PORT:22 \
--name $NAME \
gitlab/gitlab-ee:$VERSION
```
### Connect to the GitLab container
#### Retrieve the Docker host IP
```shell
docker-machine ip gitlab-test-env-do
# example output: 192.168.151.134
```
Browse to: `http://192.168.151.134:8888/`.
#### Execute interactive shell/edit configuration
```shell
docker exec -it $NAME /bin/bash
```
```shell
# example commands
root@192:/# vi /etc/gitlab/gitlab.rb
root@192:/# gitlab-ctl reconfigure
```
### Resources
- <https://docs.gitlab.com/omnibus/docker/>.
- <https://docs.docker.com/machine/get-started/>.
- <https://docs.docker.com/machine/reference/ip/>.
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
Each scenario can be a third-level heading, e.g. `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->
<!-- This redirect file can be deleted after <2022-08-29>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View file

@ -45,7 +45,6 @@ methods, the majority which use the Linux packages:
| [AWS (HA)](aws/index.md) | Install GitLab on AWS using the community AMIs provided by GitLab. |
| [Google Cloud Platform (GCP)](google_cloud_platform/index.md) | Install GitLab on a VM in GCP. |
| [Azure](azure/index.md) | Install GitLab from Azure Marketplace. |
| [DigitalOcean](https://about.gitlab.com/blog/2016/04/27/getting-started-with-gitlab-and-digitalocean/) | Install GitLab on DigitalOcean. You can also [test GitLab on DigitalOcean using Docker Machine](digitaloceandocker.md). |
## Next steps

View file

@ -1,7 +1,7 @@
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Terraform.gitlab-ci.yml
include:
- template: Terraform/Base.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Terraform/Base.gitlab-ci.yml

View file

@ -267,6 +267,7 @@ integrations: :gitlab_main
internal_ids: :gitlab_main
ip_restrictions: :gitlab_main
issuable_metric_images: :gitlab_main
issuable_resource_links: :gitlab_main
issuable_severities: :gitlab_main
issuable_slas: :gitlab_main
issue_assignees: :gitlab_main

View file

@ -102,6 +102,23 @@ RSpec.describe Projects::CompareController do
end
end
context 'when refs have CI::Pipeline' do
let(:from_project_id) { nil }
let(:from_ref) { '08f22f25' }
let(:to_ref) { '59e29889' }
before do
create(:ci_pipeline, project: project)
end
it 'avoids N+1 queries' do
control = ActiveRecord::QueryRecorder.new { show_request }
# Only 1 query to ci/pipeline.rb is allowed
expect(control.find_query(/pipeline\.rb/, 1)).to be_empty
end
end
context 'when the refs exist in different projects that the user can see' do
let(:from_project_id) { public_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
@ -434,7 +451,7 @@ RSpec.describe Projects::CompareController do
expect(CompareService).to receive(:new).with(project, escaped_to_ref).and_return(compare_service)
expect(compare_service).to receive(:execute).with(project, escaped_from_ref).and_return(compare)
expect(compare).to receive(:commits).and_return([signature_commit, non_signature_commit])
expect(compare).to receive(:commits).and_return(CommitCollection.new(project, [signature_commit, non_signature_commit]))
expect(non_signature_commit).to receive(:has_signature?).and_return(false)
end

View file

@ -53,61 +53,6 @@ RSpec.describe Projects::Prometheus::AlertsController do
end
end
describe 'GET #index' do
def make_request(opts = {})
get :index, params: request_params(opts, environment_id: environment)
end
context 'when project has no prometheus alert' do
it 'returns an empty response' do
make_request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
context 'when project has prometheus alerts' do
let(:production) { create(:environment, project: project) }
let(:staging) { create(:environment, project: project) }
let(:json_alert_ids) { json_response.map { |alert| alert['id'] } }
let!(:production_alerts) do
create_list(:prometheus_alert, 2, project: project, environment: production)
end
let!(:staging_alerts) do
create_list(:prometheus_alert, 1, project: project, environment: staging)
end
it 'contains prometheus alerts only for the production environment' do
make_request(environment_id: production)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(2)
expect(json_alert_ids).to eq(production_alerts.map(&:id))
end
it 'contains prometheus alerts only for the staging environment' do
make_request(environment_id: staging)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(1)
expect(json_alert_ids).to eq(staging_alerts.map(&:id))
end
it 'does not return prometheus alerts without environment' do
make_request(environment_id: nil)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
end
end
it_behaves_like 'unprivileged'
it_behaves_like 'project non-specific environment', :ok
end
describe 'GET #show' do
let(:alert) do
create(:prometheus_alert,

View file

@ -14,13 +14,12 @@ const datasetState = stateAndPropsFromDataset(
convertObjectPropsToCamelCase(metricsDashboardResponse.metrics_data),
);
// new properties like addDashboardDocumentationPath prop and alertsEndpoint
// new properties like addDashboardDocumentationPath prop
// was recently added to dashboard.vue component this needs to be
// added to fixtures data
// https://gitlab.com/gitlab-org/gitlab/-/issues/229256
export const dashboardProps = {
...datasetState.dataProps,
alertsEndpoint: null,
};
export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);

View file

@ -39,7 +39,6 @@ RSpec.describe EnvironmentsHelper do
'custom_metrics_path' => project_prometheus_metrics_path(project),
'validate_query_path' => validate_query_project_prometheus_metrics_path(project),
'custom_metrics_available' => 'true',
'alerts_endpoint' => project_prometheus_alerts_path(project, environment_id: environment.id, format: :json),
'custom_dashboard_base_path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations_settings_path' => project_settings_operations_path(project),
'can_access_operations_settings' => 'true',

View file

@ -64,7 +64,7 @@ RSpec.describe Gitlab::Email::Message::RepositoryPush do
describe '#commits' do
subject { message.commits }
it { is_expected.to be_kind_of Array }
it { is_expected.to be_kind_of CommitCollection }
it { is_expected.to all(be_instance_of Commit) }
end

View file

@ -65,6 +65,7 @@ issues:
- customer_relations_contacts
- issue_customer_relations_contacts
- email
- issuable_resource_links
work_item_type:
- issues
events:

View file

@ -0,0 +1,34 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe FinalizeBackfillNullNoteDiscussionIds, :migration do
subject(:migration) { described_class.new }
let(:notes) { table(:notes) }
let(:bg_migration_class) { Gitlab::BackgroundMigration::BackfillNoteDiscussionId }
let(:bg_migration) { instance_double(bg_migration_class) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
end
it 'performs remaining background migrations', :aggregate_failures do
# Already migrated
notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note1'))
notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note2'))
# update required
record1 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
record2 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
record3 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_migration_class.name.demodulize)
expect(bg_migration_class).to receive(:new).twice.and_return(bg_migration)
expect(bg_migration).to receive(:perform).with(record1.id, record2.id)
expect(bg_migration).to receive(:perform).with(record3.id, record3.id)
migrate!
end
end

View file

@ -35,6 +35,21 @@ RSpec.describe Compare do
end
end
describe '#commits' do
subject { compare.commits }
it 'returns a CommitCollection' do
is_expected.to be_kind_of(CommitCollection)
end
it 'returns a list of commits' do
commit_ids = subject.map(&:id)
expect(commit_ids).to include(head_commit.id)
expect(commit_ids.length).to eq(6)
end
end
describe '#commit' do
it 'returns raw compare head commit' do
expect(subject.commit.id).to eq(head_commit.id)