Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-06-23 09:08:58 +00:00
parent 48720d9da0
commit 17f2e5035c
26 changed files with 254 additions and 672 deletions

View File

@ -1,6 +1,4 @@
<script>
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF } from '../../constants';
import LogLine from './line.vue';
import LogLineHeader from './line_header.vue';
@ -9,9 +7,7 @@ export default {
components: {
LogLine,
LogLineHeader,
CollapsibleLogSection: () => import('./collapsible_section.vue'),
},
mixins: [glFeatureFlagsMixin()],
props: {
section: {
type: Object,
@ -26,9 +22,6 @@ export default {
badgeDuration() {
return this.section.line && this.section.line.section_duration;
},
infinitelyCollapsibleSectionsFlag() {
return this.glFeatures?.[INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF];
},
},
methods: {
handleOnClickCollapsibleLine(section) {
@ -47,26 +40,12 @@ export default {
@toggleLine="handleOnClickCollapsibleLine(section)"
/>
<template v-if="!section.isClosed">
<template v-if="infinitelyCollapsibleSectionsFlag">
<template v-for="line in section.lines">
<collapsible-log-section
v-if="line.isHeader"
:key="line.line.offset"
:section="line"
:job-log-endpoint="jobLogEndpoint"
@onClickCollapsibleLine="handleOnClickCollapsibleLine"
/>
<log-line v-else :key="line.offset" :line="line" :path="jobLogEndpoint" />
</template>
</template>
<template v-else>
<log-line
v-for="line in section.lines"
:key="line.offset"
:line="line"
:path="jobLogEndpoint"
/>
</template>
<log-line
v-for="line in section.lines"
:key="line.offset"
:line="line"
:path="jobLogEndpoint"
/>
</template>
</div>
</template>

View File

@ -1,6 +1,4 @@
<script>
import { INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF } from '../../constants';
export default {
functional: true,
props: {
@ -16,9 +14,7 @@ export default {
render(h, { props }) {
const { lineNumber, path } = props;
const parsedLineNumber = gon.features?.[INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF]
? lineNumber
: lineNumber + 1;
const parsedLineNumber = lineNumber + 1;
const lineId = `L${parsedLineNumber}`;
const lineHref = `${path}#${lineId}`;

View File

@ -24,5 +24,3 @@ export const JOB_RETRY_FORWARD_DEPLOYMENT_MODAL = {
};
export const SUCCESS_STATUS = 'SUCCESS';
export const INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF = 'infinitelyCollapsibleSections';

View File

@ -1,7 +1,6 @@
import Vue from 'vue';
import { INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF } from '../constants';
import * as types from './mutation_types';
import { logLinesParser, logLinesParserLegacy, updateIncrementalJobLog } from './utils';
import { logLinesParser, updateIncrementalJobLog } from './utils';
export default {
[types.SET_JOB_ENDPOINT](state, endpoint) {
@ -21,26 +20,12 @@ export default {
},
[types.RECEIVE_JOB_LOG_SUCCESS](state, log = {}) {
const infinitelyCollapsibleSectionsFlag =
gon.features?.[INFINITELY_NESTED_COLLAPSIBLE_SECTIONS_FF];
if (log.state) {
state.jobLogState = log.state;
}
if (log.append) {
if (infinitelyCollapsibleSectionsFlag) {
if (log.lines) {
const parsedResult = logLinesParser(
log.lines,
state.auxiliaryPartialJobLogHelpers,
state.jobLog,
);
state.jobLog = parsedResult.parsedLines;
state.auxiliaryPartialJobLogHelpers = parsedResult.auxiliaryPartialJobLogHelpers;
}
} else {
state.jobLog = log.lines ? updateIncrementalJobLog(log.lines, state.jobLog) : state.jobLog;
}
state.jobLog = log.lines ? updateIncrementalJobLog(log.lines, state.jobLog) : state.jobLog;
state.jobLogSize += log.size;
} else {
@ -49,13 +34,7 @@ export default {
// html or size. We keep the old value otherwise these
// will be set to `null`
if (infinitelyCollapsibleSectionsFlag) {
const parsedResult = logLinesParser(log.lines);
state.jobLog = parsedResult.parsedLines;
state.auxiliaryPartialJobLogHelpers = parsedResult.auxiliaryPartialJobLogHelpers;
} else {
state.jobLog = log.lines ? logLinesParserLegacy(log.lines) : state.jobLog;
}
state.jobLog = log.lines ? logLinesParser(log.lines) : state.jobLog;
state.jobLogSize = log.size || state.jobLogSize;
}

View File

@ -30,7 +30,4 @@ export default () => ({
selectedStage: '',
stages: [],
jobs: [],
// to parse partial logs
auxiliaryPartialJobLogHelpers: {},
});

View File

@ -104,7 +104,7 @@ export const getIncrementalLineNumber = (acc) => {
* @param Array accumulator
* @returns Array parsed log lines
*/
export const logLinesParserLegacy = (lines = [], accumulator = []) =>
export const logLinesParser = (lines = [], accumulator = []) =>
lines.reduce(
(acc, line, index) => {
const lineNumber = accumulator.length > 0 ? getIncrementalLineNumber(acc) : index;
@ -131,82 +131,6 @@ export const logLinesParserLegacy = (lines = [], accumulator = []) =>
[...accumulator],
);
export const logLinesParser = (lines = [], previousJobLogState = {}, prevParsedLines = []) => {
let currentLineCount = previousJobLogState?.prevLineCount ?? 0;
let currentHeader = previousJobLogState?.currentHeader;
let isPreviousLineHeader = previousJobLogState?.isPreviousLineHeader ?? false;
const parsedLines = prevParsedLines.length > 0 ? prevParsedLines : [];
const sectionsQueue = previousJobLogState?.sectionsQueue ?? [];
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i];
// On the first run we can use the current index; on later runs we have to retrieve the previous line count
currentLineCount = previousJobLogState?.prevLineCount ? currentLineCount + 1 : i + 1;
if (line.section_header && !isPreviousLineHeader) {
// If there's no previous line header, that means we're at the root of the log
isPreviousLineHeader = true;
parsedLines.push(parseHeaderLine(line, currentLineCount));
currentHeader = { index: parsedLines.length - 1 };
} else if (line.section_header && isPreviousLineHeader) {
// If there's a current section, we can't push to the parsedLines array
sectionsQueue.push(currentHeader);
currentHeader = parseHeaderLine(line, currentLineCount); // Let's parse the incoming header line
} else if (line.section && !line.section_duration) {
// We're inside a collapsible section and want to parse a standard line
if (currentHeader?.index) {
// If the current section header is only an index, add the line as part of the lines
// array of the current collapsible section
parsedLines[currentHeader.index].lines.push(parseLine(line, currentLineCount));
} else {
// Otherwise add it to the innermost collapsible section lines array
currentHeader.lines.push(parseLine(line, currentLineCount));
}
} else if (line.section && line.section_duration) {
// NOTE: This marks the end of a section_header
const previousSection = sectionsQueue.pop();
// Add the duration to section header
// If at the root, just push the end to the current parsedLine,
// otherwise, push it to the previous sections queue
if (currentHeader?.index) {
parsedLines[currentHeader.index].line.section_duration = line.section_duration;
isPreviousLineHeader = false;
currentHeader = null;
} else if (currentHeader?.isHeader) {
currentHeader.line.section_duration = line.section_duration;
if (previousSection && previousSection?.index) {
// Is the previous section on root?
parsedLines[previousSection.index].lines.push(currentHeader);
} else if (previousSection && !previousSection?.index) {
previousSection.lines.push(currentHeader);
}
currentHeader = previousSection;
} else {
// On older job logs, there's no `section_header: true` response; it's just an object
// with the `section_duration` and `section` props, so we just parse it
// as a standard line
parsedLines.push(parseLine(line, currentLineCount));
}
} else {
parsedLines.push(parseLine(line, currentLineCount));
}
}
return {
parsedLines,
auxiliaryPartialJobLogHelpers: {
isPreviousLineHeader,
currentHeader,
sectionsQueue,
prevLineCount: currentLineCount,
},
};
};
/**
* Finds the repeated offset, removes the old one
*
@ -253,5 +177,5 @@ export const findOffsetAndRemove = (newLog = [], oldParsed = []) => {
export const updateIncrementalJobLog = (newLog = [], oldParsed = []) => {
const parsedLog = findOffsetAndRemove(newLog, oldParsed);
return logLinesParserLegacy(newLog, parsedLog);
return logLinesParser(newLog, parsedLog);
};

View File

@ -23,7 +23,6 @@ class Projects::JobsController < Projects::ApplicationController
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
before_action do
push_frontend_feature_flag(:infinitely_collapsible_sections, @project)
push_frontend_feature_flag(:trigger_job_retry_action, @project)
end

View File

@ -21,10 +21,9 @@ module Projects
def pagination
return unless pagination_enabled?
Kaminari.paginate_array([], total_count: blob_lines_count)
Kaminari.paginate_array([], total_count: blob_lines_count, limit: per_page)
.tap { |pagination| pagination.max_paginates_per(per_page) }
.page(page)
.per(per_page)
.limit(per_page)
end
private

View File

@ -125,17 +125,18 @@ module ContainerRegistry
def next_repository
strong_memoize(:next_repository) do
# Using .limit(2)[0] instead of take here. Using a LIMIT 1 caused the query planner to
# use an inefficient sequential scan instead of picking an index. LIMIT 2 works around
# Using .limit(25)[0] instead of take here. Using LIMIT 1 or LIMIT 2 caused the query planner to
# use an inefficient sequential scan instead of picking an index. LIMIT 25 works around
# this issue.
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87733 for details.
ContainerRepository.ready_for_import.limit(2)[0] # rubocop:disable CodeReuse/ActiveRecord
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87733 and
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90735 for details.
ContainerRepository.ready_for_import.limit(25)[0] # rubocop:disable CodeReuse/ActiveRecord
end
end
def next_aborted_repository
strong_memoize(:next_aborted_repository) do
ContainerRepository.with_migration_state('import_aborted').limit(2)[0] # rubocop:disable CodeReuse/ActiveRecord
ContainerRepository.with_migration_state('import_aborted').limit(25)[0] # rubocop:disable CodeReuse/ActiveRecord
end
end

View File

@ -1,8 +0,0 @@
---
name: infinitely_collapsible_sections
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65496
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/335297
milestone: '14.1'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class UpdateLastRunDateForIterationsCadences < Gitlab::Database::Migration[2.0]
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
execute <<~SQL
UPDATE iterations_cadences SET last_run_date=CURRENT_DATE WHERE automatic=true;
SQL
end
def down
# no op
# 'last_run_date' stores the date on which the cadence record should be
# updated using the `CreateIterationsInAdvance` service, which is idempotent,
# and the column is only useful for optimizing when to run the service.
# ('last_run_date' is a misnomer; 'next_run_date' would be a better name.)
end
end

View File

@ -0,0 +1 @@
cc7a1d81c9de121380b7b1a4bbbb1947313635c6ae16ce15184b586765333a8e

View File

@ -675,6 +675,45 @@ artifacts are restored after [caches](#cache).
[Read more about artifacts](../pipelines/job_artifacts.md).
#### `artifacts:paths`
Paths are relative to the project directory (`$CI_PROJECT_DIR`) and can't directly
link outside it.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default` section](#default).
**Possible inputs**:
- An array of file paths, relative to the project directory.
- You can use wildcards that use [glob](https://en.wikipedia.org/wiki/Glob_(programming))
patterns and:
- In [GitLab Runner 13.0 and later](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620),
[`doublestar.Glob`](https://pkg.go.dev/github.com/bmatcuk/doublestar@v1.2.2?tab=doc#Match).
- In GitLab Runner 12.10 and earlier, [`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
**Example of `artifacts:paths`**:
```yaml
job:
artifacts:
paths:
- binaries/
- .config
```
This example creates an artifact with `.config` and all the files in the `binaries` directory.
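For instance, wildcard paths might look like the following sketch (the patterns are illustrative; the `**` form assumes GitLab Runner 13.0 or later):

```yaml
job:
  artifacts:
    paths:
      # doublestar pattern (illustrative), supported in GitLab Runner 13.0 and later
      - binaries/**/*.so
      # plain glob pattern (illustrative), matched by filepath.Match in 12.10 and earlier
      - "*.log"
```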
**Additional details**:
- If not used with [`artifacts:name`](#artifactsname), the artifacts file
is named `artifacts`, which becomes `artifacts.zip` when downloaded.
**Related topics**:
- To restrict which jobs a specific job fetches artifacts from, see [`dependencies`](#dependencies).
- [Create job artifacts](../pipelines/job_artifacts.md#create-job-artifacts).
#### `artifacts:exclude`
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15122) in GitLab 13.1
@ -843,45 +882,6 @@ job:
- [Use CI/CD variables to define the artifacts name](../pipelines/job_artifacts.md#use-cicd-variables-to-define-the-artifacts-name).
#### `artifacts:paths`
Paths are relative to the project directory (`$CI_PROJECT_DIR`) and can't directly
link outside it.
**Keyword type**: Job keyword. You can use it only as part of a job or in the
[`default` section](#default).
**Possible inputs**:
- An array of file paths, relative to the project directory.
- You can use wildcards that use [glob](https://en.wikipedia.org/wiki/Glob_(programming))
patterns and:
- In [GitLab Runner 13.0 and later](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2620),
[`doublestar.Glob`](https://pkg.go.dev/github.com/bmatcuk/doublestar@v1.2.2?tab=doc#Match).
- In GitLab Runner 12.10 and earlier, [`filepath.Match`](https://pkg.go.dev/path/filepath#Match).
**Example of `artifacts:paths`**:
```yaml
job:
artifacts:
paths:
- binaries/
- .config
```
This example creates an artifact with `.config` and all the files in the `binaries` directory.
**Additional details**:
- If not used with [`artifacts:name`](#artifactsname) defined, the artifacts file
is named `artifacts`, which becomes `artifacts.zip` when downloaded.
**Related topics**:
- To restrict which jobs a specific job fetches artifacts from, see [`dependencies`](#dependencies).
- [Create job artifacts](../pipelines/job_artifacts.md#create-job-artifacts).
#### `artifacts:public`
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49775) in GitLab 13.8
@ -910,7 +910,7 @@ pipelines, set `artifacts:public` to `false`:
- `true` (default if not defined) or `false`.
**Example of `artifacts:paths`**:
**Example of `artifacts:public`**:
```yaml
job:
@ -2428,13 +2428,6 @@ You can use `only` and `except` to control when to add jobs to pipelines.
- Use `only` to define when a job runs.
- Use `except` to define when a job **does not** run.
Four keywords can be used with `only` and `except`:
- [`refs`](#onlyrefs--exceptrefs)
- [`variables`](#onlyvariables--exceptvariables)
- [`changes`](#onlychanges--exceptchanges)
- [`kubernetes`](#onlykubernetes--exceptkubernetes)
See [specify when jobs run with `only` and `except`](../jobs/job_control.md#specify-when-jobs-run-with-only-and-except)
for more details and examples.
@ -2443,6 +2436,10 @@ for more details and examples.
Use the `only:refs` and `except:refs` keywords to control when to add jobs to a
pipeline based on branch names or pipeline types.
`only:refs` and `except:refs` are not being actively developed. [`rules:if`](#rulesif)
is the preferred keyword when using refs, regular expressions, or variables to control
when to add jobs to pipelines.
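For instance, a job restricted to the default branch could be written with `rules:if` as in this sketch (the job name and script are illustrative):

```yaml
deploy-job:
  script: echo "Deploying"  # illustrative script
  rules:
    # run only on the default branch, the rules:if equivalent of only:refs
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
```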
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs**: An array including any number of:
@ -2528,6 +2525,10 @@ job2:
Use the `only:variables` or `except:variables` keywords to control when to add jobs
to a pipeline, based on the status of [CI/CD variables](../variables/index.md).
`only:variables` and `except:variables` are not being actively developed. [`rules:if`](#rulesif)
is the preferred keyword when using refs, regular expressions, or variables to control
when to add jobs to pipelines.
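For instance, a job gated on a CI/CD variable could be written with `rules:if` as in this sketch (the job name, script, and `$RELEASE` variable are illustrative):

```yaml
deploy-job:
  script: echo "Deploying"  # illustrative script
  rules:
    # run only when the illustrative $RELEASE variable equals "staging"
    - if: $RELEASE == "staging"
```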
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs**:
@ -2560,6 +2561,9 @@ Use `changes` in pipelines with the following refs:
- `external_pull_requests`
- `merge_requests` (see additional details about [using `only:changes` with merge request pipelines](../jobs/job_control.md#use-onlychanges-with-merge-request-pipelines))
`only:changes` and `except:changes` are not being actively developed. [`rules:changes`](#ruleschanges)
is the preferred keyword when using changed files to control when to add jobs to pipelines.
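For instance, a job that runs only when certain files change could be written with `rules:changes` as in this sketch (the job name and paths are illustrative):

```yaml
docker build:
  script: docker build -t my-image:$CI_COMMIT_REF_SLUG .  # illustrative script
  rules:
    # run only when these illustrative paths change
    - changes:
        - Dockerfile
        - docker/scripts/*
```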
**Keyword type**: Job keyword. You can use it only as part of a job.
**Possible inputs**: An array including any number of:
@ -2610,6 +2614,10 @@ docker build:
Use `only:kubernetes` or `except:kubernetes` to control if jobs are added to the pipeline
when the Kubernetes service is active in the project.
`only:kubernetes` and `except:kubernetes` are not being actively developed. Use [`rules:if`](#rulesif)
with the [`CI_KUBERNETES_ACTIVE`](../variables/predefined_variables.md) predefined CI/CD variable
to control if jobs are added to the pipeline when the Kubernetes service is active in the project.
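For instance, the equivalent check could be written with `rules:if` as in this sketch (the job name and script are illustrative):

```yaml
deploy:
  script: echo "Deploying"  # illustrative script
  rules:
    # run only when the Kubernetes service is active in the project
    - if: $CI_KUBERNETES_ACTIVE == "true"
```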
**Keyword type**: Job-specific. You can use it only as part of a job.
**Possible inputs**:

View File

@ -63,14 +63,14 @@ The table shows a list of related workflow items for the selected stage. Based o
- Merge requests
- Pipelines
## View metrics for each development stage
## View DORA metrics and key metrics for a group
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/210315) in GitLab 13.0.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/323982) in GitLab 13.12.
Value stream analytics shows the median time spent by issues or merge requests in each development stage.
The **Overview** dashboard in value stream analytics shows key metrics and DORA metrics of group performance. Based on the filter you select, the dashboard automatically aggregates DORA metrics and displays the current status of the value stream.
To view the median time spent in each stage by a group:
To view the DORA metrics and key metrics:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Analytics > Value stream**.
@ -81,88 +81,41 @@ To view the median time spent in each stage by a group:
1. To adjust the date range:
- In the **From** field, select a start date.
- In the **To** field, select an end date.
1. To view the metrics for each stage, above the **Filter results** text box, hover over a stage.
Key metrics and DORA metrics display below the **Filter results** text box.
## View the lead time and cycle time for issues
<iframe width="560" height="315" src="https://www.youtube.com/embed/wQU-mWvNSiI" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
Value stream analytics shows the lead time and cycle time for issues in your groups:
### Key metrics in the value stream
The **Overview** dashboard shows the following key metrics that measure team performance:
- Lead time: Median time from when the issue was created to when it was closed.
- Cycle time: Median time from first commit to issue closed. GitLab measures cycle time from the earliest
commit of a [linked issue's merge request](../../project/issues/crosslinking_issues.md#from-commit-messages)
to when that issue is closed. The cycle time approach underestimates the lead time because merge request creation
is always later than commit time.
- Cycle time: Median time from first commit to issue closed. GitLab measures cycle time from the earliest commit of a [linked issue's merge request](../../project/issues/crosslinking_issues.md#from-commit-messages) to when that issue is closed. The cycle time approach underestimates the lead time because merge request creation is always later than commit time.
- New issues: Number of new issues created.
- Deploys: Total number of deployments to production.
To view the lead time and cycle time for issues:
### DORA metrics **(ULTIMATE)**
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Analytics > Value stream**.
1. Optional. Filter the results:
1. Select the **Filter results** text box.
1. Select a parameter.
1. Select a value or enter text to refine the results.
1. To adjust the date range:
- In the **From** field, select a start date.
- In the **To** field, select an end date.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/340150) lead time for changes DORA metric in GitLab 14.5.
> - DORA API-based deployment metrics for value stream analytics for groups were [moved](https://gitlab.com/gitlab-org/gitlab/-/issues/337256) from GitLab Ultimate to GitLab Premium in GitLab 14.3.
> - DORA and key metrics were [separated into distinct rows in the UI](https://gitlab.com/gitlab-org/gitlab/-/issues/359060) in GitLab 15.0.
The **Lead Time** and **Cycle Time** metrics display below the **Filter results** text box.
The value stream analytics **Overview** dashboard displays the following [DORA](../../../user/analytics/index.md) metrics:
## View lead time for changes for merge requests **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/340150) in GitLab 14.5.
Lead time for changes is the median duration between when a merge request is merged and when it's deployed to production.
To view the lead time for changes for merge requests in your group:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Analytics > Value stream**.
1. Optional. Filter the results:
1. Select the **Filter results** text box.
1. Select a parameter.
1. Select a value or enter text to refine the results.
1. To adjust the date range:
- In the **From** field, select a start date.
- In the **To** field, select an end date.
The **Lead Time for Changes** metrics display below the **Filter results** text box.
## View number of successful deployments **(PREMIUM)**
> DORA API-based deployment metrics for value stream analytics for groups were [moved](https://gitlab.com/gitlab-org/gitlab/-/issues/337256) from GitLab Ultimate to GitLab Premium in 14.3.
These [four DORA metrics](../../../user/analytics/index.md) are: Deployment Frequency, Lead time for changes, Time to restore service, and Change failure rate.
DORA metrics are calculated based on data from the
[DORA API](../../../api/dora/metrics.md#devops-research-and-assessment-dora-key-metrics-api).
To view deployment metrics, you must have a
[production environment configured](../../../ci/environments/index.md#deployment-tier-of-environments).
Value stream analytics shows the following deployment metrics for your group:
- Deploys: The number of successful deployments in the date range.
- Deployment Frequency: The average number of successful deployments per day in the date range.
To view deployment metrics for your group:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Analytics > Value stream**.
1. Optional. Filter the results:
1. Select the **Filter results** text box.
1. Select a parameter.
1. Select a value or enter text to refine the results.
1. To adjust the date range:
- In the **From** field, select a start date.
- In the **To** field, select an end date.
NOTE:
The date range selector filters items by the event time. This is the time when the currently
selected stage finished for the given item.
The **Deploys** and **Deployment Frequency** metrics display below the **Filter results** text box.
Deployment metrics are calculated based on data from the
[DORA API](../../../api/dora/metrics.md#devops-research-and-assessment-dora-key-metrics-api).
NOTE:
In GitLab 13.9 and later, metrics are calculated based on when the deployment was finished.
In GitLab 13.8 and earlier, metrics are calculated based on when the deployment was created.
In GitLab 13.9 and later, deployment frequency metrics are calculated based on when the deployment was finished.
In GitLab 13.8 and earlier, deployment frequency metrics are calculated based on when the deployment was created.
### How value stream analytics aggregates data
@ -186,6 +139,26 @@ longer than 10 minutes in the following cases:
To view when the data was most recently updated, in the right corner next to **Edit**, hover over the **Last updated** badge.
## View metrics for each development stage
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/210315) in GitLab 13.0.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/323982) in GitLab 13.12.
Value stream analytics shows the median time spent by issues or merge requests in each development stage.
To view the median time spent in each stage by a group:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Analytics > Value stream**.
1. Optional. Filter the results:
1. Select the **Filter results** text box.
1. Select a parameter.
1. Select a value or enter text to refine the results.
1. To adjust the date range:
- In the **From** field, select a start date.
- In the **To** field, select an end date.
1. To view the metrics for each stage, above the **Filter results** text box, hover over a stage.
## How value stream analytics measures stages
Value stream analytics measures each stage from its start event to its end event.
@ -207,6 +180,8 @@ Each pre-defined stages of value stream analytics is further described in the ta
| Review | The median time taken to review a merge request that has a closing issue pattern, from its creation until it's merged. |
| Staging | The median time from merging a merge request that has a closing issue pattern until the very first deployment to a [production environment](#how-value-stream-analytics-identifies-the-production-environment). If there isn't a production environment, this is not tracked. |
For information about how value stream analytics calculates each stage, see the [Value stream analytics development guide](../../../development/value_stream_analytics.md).
### Example workflow
This example shows a workflow through all seven stages in one day.

View File

@ -29,6 +29,10 @@ module ErrorTracking
exception_entry = payload['exception']
return unless exception_entry
# Some SDKs send the exception payload as an Array (for example, the Go SDK).
# We need to convert it to the hash format we expect.
exception_entry = { 'values' => exception_entry } if exception_entry.is_a?(Array)
exception_values = exception_entry['values']
stack_trace_entry = exception_values&.detect { |h| h['stacktrace'].present? }
stack_trace_entry&.dig('stacktrace', 'frames')

View File

@ -70,4 +70,37 @@ RSpec.describe 'File blame', :js do
end
end
end
context 'when blob length is over global max page limit' do
before do
stub_const('Projects::BlameService::PER_PAGE', 200)
end
let(:path) { 'files/markdown/ruby-style-guide.md' }
it 'displays two hundred lines of the file with pagination' do
visit_blob_blame(path)
expect(page).to have_css('.blame-commit')
expect(page).to have_css('.gl-pagination')
expect(page).to have_css('#L1')
expect(page).not_to have_css('#L201')
expect(find('.page-link.active')).to have_text('1')
end
context 'when user clicks on the next button' do
before do
visit_blob_blame(path)
find('.js-next-button').click
end
it 'displays next two hundred lines of the file with pagination' do
expect(page).not_to have_css('#L1')
expect(page).to have_css('#L201')
expect(find('.page-link.active')).to have_text('2')
end
end
end
end

View File

@ -22,7 +22,6 @@ describe('Job App', () => {
let store;
let wrapper;
let mock;
let origGon;
const initSettings = {
endpoint: `${TEST_HOST}jobs/123.json`,
@ -80,17 +79,11 @@ describe('Job App', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: All of this passes with the feature flag
});
afterEach(() => {
wrapper.destroy();
mock.restore();
window.gon = origGon;
});
describe('while loading', () => {

View File

@ -5,7 +5,6 @@ import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data'
describe('Job Log Collapsible Section', () => {
let wrapper;
let origGon;
const jobLogEndpoint = 'jobs/335';
@ -20,16 +19,8 @@ describe('Job Log Collapsible Section', () => {
});
};
beforeEach(() => {
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: This also works with true
});
afterEach(() => {
wrapper.destroy();
window.gon = origGon;
});
describe('with closed section', () => {

View File

@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import Log from '~/jobs/components/log/log.vue';
import { logLinesParserLegacy, logLinesParser } from '~/jobs/store/utils';
import { logLinesParser } from '~/jobs/store/utils';
import { jobLog } from './mock_data';
describe('Job Log', () => {
@ -10,7 +10,6 @@ describe('Job Log', () => {
let actions;
let state;
let store;
let origGon;
Vue.use(Vuex);
@ -25,12 +24,8 @@ describe('Job Log', () => {
toggleCollapsibleLine: () => {},
};
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } };
state = {
jobLog: logLinesParserLegacy(jobLog),
jobLog: logLinesParser(jobLog),
jobLogEndpoint: 'jobs/id',
};
@ -44,88 +39,6 @@ describe('Job Log', () => {
afterEach(() => {
wrapper.destroy();
window.gon = origGon;
});
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
describe('line numbers', () => {
it('renders a line number for each open line', () => {
expect(wrapper.find('#L1').text()).toBe('1');
expect(wrapper.find('#L2').text()).toBe('2');
expect(wrapper.find('#L3').text()).toBe('3');
});
it('links to the provided path and correct line number', () => {
expect(wrapper.find('#L1').attributes('href')).toBe(`${state.jobLogEndpoint}#L1`);
});
});
describe('collapsible sections', () => {
it('renders a clickable header section', () => {
expect(findCollapsibleLine().attributes('role')).toBe('button');
});
it('renders an icon with the open state', () => {
expect(findCollapsibleLine().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(
true,
);
});
describe('on click header section', () => {
it('calls toggleCollapsibleLine', () => {
jest.spyOn(wrapper.vm, 'toggleCollapsibleLine');
findCollapsibleLine().trigger('click');
expect(wrapper.vm.toggleCollapsibleLine).toHaveBeenCalled();
});
});
});
});
describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
let wrapper;
let actions;
let state;
let store;
let origGon;
Vue.use(Vuex);
const createComponent = () => {
wrapper = mount(Log, {
store,
});
};
beforeEach(() => {
actions = {
toggleCollapsibleLine: () => {},
};
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: true } };
state = {
jobLog: logLinesParser(jobLog).parsedLines,
jobLogEndpoint: 'jobs/id',
};
store = new Vuex.Store({
actions,
state,
});
createComponent();
});
afterEach(() => {
wrapper.destroy();
window.gon = origGon;
});
const findCollapsibleLine = () => wrapper.find('.collapsible-line');

View File

@ -58,80 +58,6 @@ export const utilsMockData = [
},
];
export const multipleCollapsibleSectionsMockData = [
{
offset: 1001,
content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
},
{
offset: 1002,
content: [
{
text: 'Executing "step_script" stage of the job script',
},
],
section: 'step-script',
section_header: true,
},
{
offset: 1003,
content: [{ text: 'sleep 60' }],
section: 'step-script',
},
{
offset: 1004,
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
section: 'step-script',
},
{
offset: 1005,
content: [{ text: 'executing...' }],
section: 'step-script',
},
{
offset: 1006,
content: [{ text: '1st collapsible section' }],
section: 'collapsible-1',
section_header: true,
},
{
offset: 1007,
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
section: 'collapsible-1',
},
{
offset: 1008,
content: [],
section: 'collapsible-1',
section_duration: '01:00',
},
{
offset: 1009,
content: [],
section: 'step-script',
section_duration: '10:00',
},
];
export const backwardsCompatibilityTrace = [
{
offset: 2365,
content: [],
section: 'download-artifacts',
section_duration: '00:01',
},
];
export const originalTrace = [
{
offset: 1,

View File

@ -4,21 +4,12 @@ import state from '~/jobs/store/state';
describe('Jobs Store Mutations', () => {
let stateCopy;
let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } };
});
afterEach(() => {
window.gon = origGon;
});
describe('SET_JOB_ENDPOINT', () => {
@ -276,88 +267,3 @@ describe('Jobs Store Mutations', () => {
});
});
});
describe('Job Store mutations, feature flag ON', () => {
let stateCopy;
let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: true } };
});
afterEach(() => {
window.gon = origGon;
});
describe('RECEIVE_JOB_LOG_SUCCESS', () => {
describe('with new job log', () => {
describe('log.lines', () => {
describe('when append is true', () => {
it('sets the parsed log ', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
size: 511846,
complete: true,
lines: [
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
},
],
});
expect(stateCopy.jobLog).toEqual([
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
lineNumber: 1,
},
]);
});
});
describe('when lines are defined', () => {
it('sets the parsed log ', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: false,
size: 511846,
complete: true,
lines: [
{ offset: 0, content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }] },
],
});
expect(stateCopy.jobLog).toEqual([
{
offset: 0,
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
lineNumber: 1,
},
]);
});
});
describe('when lines are null', () => {
it('sets the default value', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
complete: false,
lines: null,
});
expect(stateCopy.jobLog).toEqual([]);
});
});
});
});
});
});

View File

@ -1,6 +1,5 @@
import {
logLinesParser,
logLinesParserLegacy,
updateIncrementalJobLog,
parseHeaderLine,
parseLine,
@ -18,8 +17,6 @@ import {
headerTraceIncremental,
collapsibleTrace,
collapsibleTraceIncremental,
multipleCollapsibleSectionsMockData,
backwardsCompatibilityTrace,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
@ -178,11 +175,11 @@ describe('Jobs Store Utils', () => {
expect(isCollapsibleSection()).toEqual(false);
});
});
describe('logLinesParserLegacy', () => {
describe('logLinesParser', () => {
let result;
beforeEach(() => {
result = logLinesParserLegacy(utilsMockData);
result = logLinesParser(utilsMockData);
});
describe('regular line', () => {
@ -219,102 +216,6 @@ describe('Jobs Store Utils', () => {
});
});
describe('logLinesParser', () => {
let result;
beforeEach(() => {
result = logLinesParser(utilsMockData);
});
describe('regular line', () => {
it('adds a lineNumber property with correct index', () => {
expect(result.parsedLines[0].lineNumber).toEqual(1);
expect(result.parsedLines[1].line.lineNumber).toEqual(2);
});
});
describe('collapsible section', () => {
it('adds a `isClosed` property', () => {
expect(result.parsedLines[1].isClosed).toEqual(false);
});
it('adds a `isHeader` property', () => {
expect(result.parsedLines[1].isHeader).toEqual(true);
});
it('creates a lines array property with the content of the collapsible section', () => {
expect(result.parsedLines[1].lines.length).toEqual(2);
expect(result.parsedLines[1].lines[0].content).toEqual(utilsMockData[2].content);
expect(result.parsedLines[1].lines[1].content).toEqual(utilsMockData[3].content);
});
});
describe('section duration', () => {
it('adds the section information to the header section', () => {
expect(result.parsedLines[1].line.section_duration).toEqual(
utilsMockData[4].section_duration,
);
});
it('does not add section duration as a line', () => {
expect(result.parsedLines[1].lines.includes(utilsMockData[4])).toEqual(false);
});
});
describe('multiple collapsible sections', () => {
beforeEach(() => {
result = logLinesParser(multipleCollapsibleSectionsMockData);
});
it('should contain a section inside another section', () => {
const innerSection = [
{
isClosed: false,
isHeader: true,
line: {
content: [{ text: '1st collapsible section' }],
lineNumber: 6,
offset: 1006,
section: 'collapsible-1',
section_duration: '01:00',
section_header: true,
},
lines: [
{
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
lineNumber: 7,
offset: 1007,
section: 'collapsible-1',
},
],
},
];
expect(result.parsedLines[1].lines).toEqual(expect.arrayContaining(innerSection));
});
});
describe('backwards compatibility', () => {
beforeEach(() => {
result = logLinesParser(backwardsCompatibilityTrace);
});
it('should return an object with a parsedLines prop', () => {
expect(result).toEqual(
expect.objectContaining({
parsedLines: expect.any(Array),
}),
);
expect(result.parsedLines).toHaveLength(1);
});
});
});
describe('findOffsetAndRemove', () => {
describe('when last item is header', () => {
const existingLog = [
@ -490,7 +391,7 @@ describe('Jobs Store Utils', () => {
describe('updateIncrementalJobLog', () => {
describe('without repeated section', () => {
it('concats and parses both arrays', () => {
const oldLog = logLinesParserLegacy(originalTrace);
const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalJobLog(regularIncremental, oldLog);
expect(result).toEqual([
@ -518,7 +419,7 @@ describe('Jobs Store Utils', () => {
describe('with regular line repeated offset', () => {
it('updates the last line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(originalTrace);
const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalJobLog(regularIncrementalRepeated, oldLog);
expect(result).toEqual([
@ -537,7 +438,7 @@ describe('Jobs Store Utils', () => {
describe('with header line repeated', () => {
it('updates the header line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(headerTrace);
const oldLog = logLinesParser(headerTrace);
const result = updateIncrementalJobLog(headerTraceIncremental, oldLog);
expect(result).toEqual([
@ -563,7 +464,7 @@ describe('Jobs Store Utils', () => {
describe('with collapsible line repeated', () => {
it('updates the collapsible line and formats with the incremental part', () => {
const oldLog = logLinesParserLegacy(collapsibleTrace);
const oldLog = logLinesParser(collapsibleTrace);
const result = updateIncrementalJobLog(collapsibleTraceIncremental, oldLog);
expect(result).toEqual([

View File

@ -56,6 +56,35 @@ RSpec.describe ErrorTracking::StacktraceBuilder do
end
end
context 'when exception payload is a list' do
let(:payload_file) { 'error_tracking/go_two_exception_event.json' }
it 'extracts a stacktrace' do
expected_entry = {
'lineNo' => 54,
'context' => [
[49, "\t// Set the timeout to the maximum duration the program can afford to wait."],
[50, "\tdefer sentry.Flush(2 * time.Second)"],
[51, ""],
[52, "\tresp, err := http.Get(os.Args[1])"],
[53, "\tif err != nil {"],
[54, "\t\tsentry.CaptureException(err)"],
[55, "\t\tlog.Printf(\"reported to Sentry: %s\", err)"],
[56, "\t\treturn"],
[57, "\t}"],
[58, "\tdefer resp.Body.Close()"],
[59, ""]
],
'filename' => nil,
'function' => 'main',
'colNo' => 0
}
expect(stacktrace).to be_kind_of(Array)
expect(stacktrace.first).to eq(expected_entry)
end
end
context 'with empty payload' do
let(:payload) { {} }

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe UpdateLastRunDateForIterationsCadences, :migration do
let(:current_date) { Date.parse(ApplicationRecord.connection.execute("SELECT CURRENT_DATE").first["current_date"]) }
let(:namespaces) { table(:namespaces) }
let(:iterations_cadences) { table(:iterations_cadences) }
let!(:group) { namespaces.create!(name: 'foo', path: 'foo') }
let!(:cadence_1) do
iterations_cadences.create!(group_id: group.id, title: "cadence 1", last_run_date: Date.today - 5.days)
end
let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil) }
let!(:cadence_3) do
iterations_cadences.create!(group_id: group.id, title: "cadence 2", last_run_date: nil, automatic: false)
end
it 'sets last_run_date to CURRENT_DATE for iterations cadences with automatic=true', :aggregate_failures do
migrate!
expect(cadence_1.reload.last_run_date).to eq(current_date)
expect(cadence_2.reload.last_run_date).to eq(current_date)
expect(cadence_3.reload.last_run_date).to eq(nil)
end
end

View File

@ -98,31 +98,21 @@ RSpec.describe Projects::BlameService, :aggregate_failures do
end
end
describe 'Current page' do
subject { service.pagination.current_page }
describe 'Pagination attributes' do
using RSpec::Parameterized::TableSyntax
context 'with page = 1' do
let(:page) { 1 }
it { is_expected.to eq(1) }
where(:page, :current_page, :total_pages) do
1 | 1 | 2
2 | 2 | 2
3 | 1 | 2 # Overlimit
0 | 1 | 2 # Incorrect
end
context 'with page = 2' do
let(:page) { 2 }
it { is_expected.to eq(2) }
end
context 'with page = 3 (overlimit)' do
let(:page) { 3 }
it { is_expected.to eq(1) }
end
context 'with page = 0 (incorrect)' do
let(:page) { 0 }
it { is_expected.to eq(1) }
with_them do
it 'returns the correct pagination attributes' do
expect(subject.current_page).to eq(current_page)
expect(subject.total_pages).to eq(total_pages)
end
end
end
end

View File

@ -158,7 +158,7 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
expect(worker).to receive(:handle_next_migration).exactly(3).times.and_call_original
expect { subject }.to make_queries_matching(/LIMIT 2/)
expect { subject }.to make_queries_matching(/LIMIT 25/)
expect(container_repository.reload).to be_pre_importing
expect(container_repository2.reload).to be_pre_importing