Add latest changes from gitlab-org/gitlab@master
parent 9c918ae5c6
commit 63eb68d335

40 changed files with 202 additions and 65 deletions
@@ -34,6 +34,6 @@ after the implementation is merged/deployed/released.
 - [ ] The solution improved the situation.
 - If yes, check this box and close the issue. Well done! :tada:
-- Otherwise, create a new "Productivity Improvement" issue. You can re-use the description from this issue, but obviously another solution should be chosen this time.
+- Otherwise, create a new "Productivity Improvement" issue. You can re-use the description from this issue, but another solution should be chosen this time.

 /label ~"Engineering Productivity" ~meta
@@ -99,8 +99,6 @@ Rails/SaveBang:
- 'ee/spec/models/visible_approvable_spec.rb'
- 'ee/spec/models/vulnerabilities/feedback_spec.rb'
- 'ee/spec/models/vulnerabilities/issue_link_spec.rb'
- 'ee/spec/presenters/audit_event_presenter_spec.rb'
- 'ee/spec/presenters/epic_presenter_spec.rb'
- 'ee/spec/requests/api/boards_spec.rb'
- 'ee/spec/requests/api/epic_issues_spec.rb'
- 'ee/spec/requests/api/epic_links_spec.rb'
@@ -1,5 +1,5 @@
 export const fieldTypes = {
-codeBock: 'codeBlock',
+codeBlock: 'codeBlock',
 link: 'link',
 seconds: 'seconds',
 text: 'text',
@@ -25,6 +25,14 @@ export default {
 required: true,
 },
 },
+computed: {
+filteredModalData() {
+// Filter out the properties that don't have a value
+return Object.fromEntries(
+Object.entries(this.modalData).filter((data) => Boolean(data[1].value)),
+);
+},
+},
 fieldTypes,
 };
 </script>
@@ -36,23 +44,18 @@ export default {
 :hide-footer="true"
 @hide="$emit('hide')"
 >
-<div
-v-for="(field, key, index) in modalData"
-v-if="field.value"
-:key="index"
-class="row gl-mt-3 gl-mb-3"
->
+<div v-for="(field, key, index) in filteredModalData" :key="index" class="row gl-mt-3 gl-mb-3">
 <strong class="col-sm-3 text-right"> {{ field.text }}: </strong>

 <div class="col-sm-9 text-secondary">
-<code-block v-if="field.type === $options.fieldTypes.codeBock" :code="field.value" />
+<code-block v-if="field.type === $options.fieldTypes.codeBlock" :code="field.value" />

 <gl-link
 v-else-if="field.type === $options.fieldTypes.link"
-:href="field.value"
+:href="field.value.path"
 target="_blank"
 >
-{{ field.value }}
+{{ field.value.text }}
 </gl-link>

 <gl-sprintf
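For reference, a minimal sketch (not part of the diff above) of the filtering the new `filteredModalData` computed property performs; the field names mirror the report modal's state shape, and the sample values are invented:

```javascript
// Illustrative only: mirrors the filtering done by the `filteredModalData` computed property.
// Field names follow the modal's state shape; the sample values are invented.
const modalData = {
  filename: { value: { text: './spec/foo_spec.js', path: '/blob/path/spec/foo_spec.js' }, text: 'Filename', type: 'link' },
  execution_time: { value: 0.009411, text: 'Execution time', type: 'seconds' },
  system_output: { value: null, text: 'System output', type: 'codeBlock' },
};

// Fields without a value are dropped, so the template no longer needs a `v-if` inside the `v-for`.
const filteredModalData = Object.fromEntries(
  Object.entries(modalData).filter((data) => Boolean(data[1].value)),
);

console.log(Object.keys(filteredModalData)); // ['filename', 'execution_time']
```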
@@ -39,6 +39,10 @@ export default {
 required: false,
 default: '',
 },
+headBlobPath: {
+type: String,
+required: true,
+},
 },
 componentNames,
 computed: {
@@ -73,12 +77,15 @@ export default {
 },
 },
 created() {
-this.setEndpoint(this.endpoint);
+this.setPaths({
+endpoint: this.endpoint,
+headBlobPath: this.headBlobPath,
+});

 this.fetchReports();
 },
 methods: {
-...mapActions(['setEndpoint', 'fetchReports', 'closeModal']),
+...mapActions(['setPaths', 'fetchReports', 'closeModal']),
 reportText(report) {
 const { name, summary } = report || {};
@@ -4,7 +4,7 @@ import httpStatusCodes from '../../../lib/utils/http_status';
 import Poll from '../../../lib/utils/poll';
 import * as types from './mutation_types';

-export const setEndpoint = ({ commit }, endpoint) => commit(types.SET_ENDPOINT, endpoint);
+export const setPaths = ({ commit }, paths) => commit(types.SET_PATHS, paths);

 export const requestReports = ({ commit }) => commit(types.REQUEST_REPORTS);
@@ -1,4 +1,4 @@
-export const SET_ENDPOINT = 'SET_ENDPOINT';
+export const SET_PATHS = 'SET_PATHS';

 export const REQUEST_REPORTS = 'REQUEST_REPORTS';
 export const RECEIVE_REPORTS_SUCCESS = 'RECEIVE_REPORTS_SUCCESS';
@@ -1,9 +1,10 @@
 import * as types from './mutation_types';
-import { countRecentlyFailedTests } from './utils';
+import { countRecentlyFailedTests, formatFilePath } from './utils';

 export default {
-[types.SET_ENDPOINT](state, endpoint) {
+[types.SET_PATHS](state, { endpoint, headBlobPath }) {
 state.endpoint = endpoint;
+state.headBlobPath = headBlobPath;
 },
 [types.REQUEST_REPORTS](state) {
 state.isLoading = true;
@@ -42,17 +43,25 @@ export default {
 state.status = null;
 },
 [types.SET_ISSUE_MODAL_DATA](state, payload) {
-state.modal.title = payload.issue.name;
+const { issue } = payload;
+state.modal.title = issue.name;

-Object.keys(payload.issue).forEach((key) => {
+Object.keys(issue).forEach((key) => {
 if (Object.prototype.hasOwnProperty.call(state.modal.data, key)) {
 state.modal.data[key] = {
 ...state.modal.data[key],
-value: payload.issue[key],
+value: issue[key],
 };
 }
 });

+if (issue.file) {
+state.modal.data.filename.value = {
+text: issue.file,
+path: `${state.headBlobPath}/${formatFilePath(issue.file)}`,
+};
+}
+
 state.modal.open = true;
 },
 [types.RESET_ISSUE_MODAL_DATA](state) {
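For reference, a minimal sketch (not part of the diff above) of how the new `headBlobPath` state and the `formatFilePath` helper combine to build the filename link stored by this mutation; the `headBlobPath` and `issue.file` values are invented sample inputs:

```javascript
// Helper copied from the utils change in this commit: strips a leading './' or '/'.
const formatFilePath = (file) => file.replace(/^\.?\/*/, '');

// Invented sample inputs.
const headBlobPath = '/group/project/-/blob/main';
const issue = { file: './spec/frontend/foo_spec.js' };

// Shape of the filename field the mutation stores for the modal's <gl-link>.
const filenameValue = {
  text: issue.file,
  path: `${headBlobPath}/${formatFilePath(issue.file)}`,
};

console.log(filenameValue.path); // '/group/project/-/blob/main/spec/frontend/foo_spec.js'
```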
@@ -41,16 +41,16 @@ export default () => ({
 open: false,

 data: {
-class: {
-value: null,
-text: s__('Reports|Class'),
-type: fieldTypes.link,
-},
 classname: {
 value: null,
 text: s__('Reports|Classname'),
 type: fieldTypes.text,
 },
+filename: {
+value: null,
+text: s__('Reports|Filename'),
+type: fieldTypes.link,
+},
 execution_time: {
 value: null,
 text: s__('Reports|Execution time'),
@@ -59,12 +59,12 @@ export default () => ({
 failure: {
 value: null,
 text: s__('Reports|Failure'),
-type: fieldTypes.codeBock,
+type: fieldTypes.codeBlock,
 },
 system_output: {
 value: null,
 text: s__('Reports|System output'),
-type: fieldTypes.codeBock,
+type: fieldTypes.codeBlock,
 },
 },
 },
@@ -100,3 +100,12 @@ export const statusIcon = (status) => {

 return ICON_NOTFOUND;
 };
+
+/**
+* Removes `./` from the beginning of a file path so it can be appended onto a blob path
+* @param {String} file
+* @returns {String} - formatted value
+*/
+export const formatFilePath = (file) => {
+return file.replace(/^\.?\/*/, '');
+};
@@ -480,6 +480,7 @@ export default {
 v-if="mr.testResultsPath"
 class="js-reports-container"
 :endpoint="mr.testResultsPath"
+:head-blob-path="mr.headBlobPath"
 :pipeline-path="mr.pipeline.path"
 />
@@ -284,7 +284,7 @@ MyObjectSerializer.new.represent(object.present)
 entity requires `request.user` attribute, but the second one wants
 `request.current_user`. When it happens that these two entities are used in
 the same serialization request, you might need to pass both parameters to
-the serializer, which is obviously not a perfect situation.
+the serializer, which is not a perfect situation.

 When in doubt, pass only `current_user` and `project` if these are required.
@@ -0,0 +1,5 @@
+---
+title: Add link to test case file in the test report for merge requests
+merge_request: 57911
+author:
+type: added
@@ -0,0 +1,5 @@
+---
+title: Fix force_random_password option when creating Users via API
+merge_request: 57751
+author:
+type: fixed
@@ -0,0 +1,5 @@
+---
+title: Add index to improve project deployments endpoint performance
+merge_request: 57554
+author:
+type: performance
@@ -0,0 +1,5 @@
+---
+title: Optimize database query for last deployment
+merge_request: 57979
+author:
+type: performance
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddIndexForProjectDeploymentsWithEnvironmentIdAndUpdatedAt < ActiveRecord::Migration[6.0]
+include Gitlab::Database::MigrationHelpers
+
+DOWNTIME = false
+
+disable_ddl_transaction!
+
+INDEX_NAME = 'index_deployments_on_project_and_environment_and_updated_at'
+
+def up
+add_concurrent_index :deployments, [:project_id, :environment_id, :updated_at], name: INDEX_NAME
+end
+
+def down
+remove_concurrent_index_by_name :deployments, INDEX_NAME
+end
+end
db/migrate/20210331145548_add_index_for_last_deployment.rb (new file)
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexForLastDeployment < ActiveRecord::Migration[6.0]
+include Gitlab::Database::MigrationHelpers
+
+DOWNTIME = false
+INDEX_NAME = 'index_deployments_on_environment_id_status_and_id'
+
+disable_ddl_transaction!
+
+def up
+add_concurrent_index :deployments, [:environment_id, :status, :id], name: INDEX_NAME
+end
+
+def down
+remove_concurrent_index_by_name :deployments, INDEX_NAME
+end
+end
db/schema_migrations/20210326035553 (new file)
@@ -0,0 +1 @@
+018381c15d859a777afb2b3402ca4425ce52ab35dcd4d1e930b3a9928b2a2019
db/schema_migrations/20210331145548 (new file)
@@ -0,0 +1 @@
+f27446d1950acaf45f623b2cec7733cd7ba4b82eefddfa2203acbbaf77d59e18
@@ -22520,12 +22520,16 @@ CREATE INDEX index_deployments_on_environment_id_and_iid_and_project_id ON deplo

 CREATE INDEX index_deployments_on_environment_id_status_and_finished_at ON deployments USING btree (environment_id, status, finished_at);

+CREATE INDEX index_deployments_on_environment_id_status_and_id ON deployments USING btree (environment_id, status, id);
+
 CREATE INDEX index_deployments_on_environment_status_sha ON deployments USING btree (environment_id, status, sha);

 CREATE INDEX index_deployments_on_id_and_status_and_created_at ON deployments USING btree (id, status, created_at);

 CREATE INDEX index_deployments_on_id_where_cluster_id_present ON deployments USING btree (id) WHERE (cluster_id IS NOT NULL);

+CREATE INDEX index_deployments_on_project_and_environment_and_updated_at ON deployments USING btree (project_id, environment_id, updated_at);
+
 CREATE INDEX index_deployments_on_project_and_finished ON deployments USING btree (project_id, finished_at) WHERE (status = 2);

 CREATE INDEX index_deployments_on_project_id_and_id ON deployments USING btree (project_id, id DESC);
@@ -15,7 +15,7 @@ This page is a development guide for application secrets.
 |`secret_key_base` | The base key to be used for generating a various secrets |
 | `otp_key_base` | The base key for One Time Passwords, described in [User management](../raketasks/user_management.md#rotate-two-factor-authentication-encryption-key) |
 |`db_key_base` | The base key to encrypt the data for `attr_encrypted` columns |
-|`openid_connect_signing_key` | The singing key for OpenID Connect |
+|`openid_connect_signing_key` | The signing key for OpenID Connect |
 | `encrypted_settings_key_base` | The base key to encrypt settings files with |

 ## Where the secrets are stored
@@ -30,9 +30,9 @@ A database review is required for:
 See the [Product Intelligence Guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/)
 for implementation details.

-A database reviewer is expected to look out for obviously complex
+A database reviewer is expected to look out for overly complex
 queries in the change and review those closer. If the author does not
-point out specific queries for review and there are no obviously
+point out specific queries for review and there are no overly
 complex queries, it is enough to concentrate on reviewing the
 migration only.
@@ -221,7 +221,7 @@ test its execution using `CREATE INDEX CONCURRENTLY` in the `#database-lab` Slac
 - Data migrations should be reversible too or come with a description of how to reverse, when possible.
 This applies to all types of migrations (regular, post-deploy, background).
 - Query performance
-- Check for any obviously complex queries and queries the author specifically
+- Check for any overly complex queries and queries the author specifically
 points out for review (if any)
 - If not present yet, ask the author to provide SQL queries and query plans
 (for example, by using [ChatOps](understanding_explain_plans.md#chatops) or direct
@@ -424,7 +424,7 @@ Feature.enabled?(:licensed_feature_feature_flag, project) &&
 ### Feature groups

 Feature groups must be defined statically in `lib/feature.rb` (in the
-`.register_feature_groups` method), but their implementation can obviously be
+`.register_feature_groups` method), but their implementation can be
 dynamic (querying the DB, for example).

 Once defined in `lib/feature.rb`, you can to activate a
@@ -86,7 +86,7 @@ a parent context. Examples of these are:
 - `:clean_gitlab_redis_cache` which provides a clean Redis cache to the examples.
 - `:request_store` which provides a request store to the examples.

-Obviously we should reduce test dependencies, and avoiding
+We should reduce test dependencies, and avoiding
 capabilities also reduces the amount of set-up needed.

 `:js` is particularly important to avoid. This must only be used if the feature
@@ -23,7 +23,7 @@ as described in ([Measuring DevOps Performance](https://devops.com/measuring-dev
 - MTTD (Mean Time to Detect): The average duration that a bug goes undetected in production. GitLab measures MTTD from deployment of bug to issue creation.
 - MTTM (Mean Time To Merge): The average lifespan of a merge request. GitLab measures MTTM from merge request creation to merge request merge (and closed/un-merged merge requests are excluded). For more information, see [Merge Request Analytics](merge_request_analytics.md).
 - MTTR (Mean Time to Recover/Repair/Resolution/Resolve/Restore): The average duration that a bug is not fixed in production. GitLab measures MTTR from deployment of bug to deployment of fix.
-- Lead time: The duration of the work itself. Often displayed in combination with "cycle time." GitLab measures from issue first merge request creation to issue close. Note: Obviously work started before the creation of the first merge request. We plan to start measuring from "issue first commit" as a better proxy, although still imperfect. GitLab displays lead time in [Value Stream Analytics](value_stream_analytics.md).
+- Lead time: The duration of the work itself. Often displayed in combination with "cycle time." GitLab measures from issue first merge request creation to issue close. Note: Work started before the creation of the first merge request. We plan to start measuring from "issue first commit" as a better proxy, although still imperfect. GitLab displays lead time in [Value Stream Analytics](value_stream_analytics.md).
 - Throughput: The number of issues closed or merge requests merged (not closed) in some period of time. Often measured per sprint. GitLab displays merge request throughput in [Merge Request Analytics](merge_request_analytics.md).
 - Value Stream: The entire work process that is followed to deliver value to customers. For example, the [DevOps lifecycle](https://about.gitlab.com/stages-devops-lifecycle/) is a value stream that starts with "plan" and ends with "monitor". GitLab helps you track your value stream using [Value Stream Analytics](value_stream_analytics.md).
 - Velocity: The total issue burden completed in some period of time. The burden is usually measured in points or weight, often per sprint. For example, your velocity may be "30 points per sprint". GitLab measures velocity as the total points/weight of issues closed in a given period of time.
@@ -58,10 +58,7 @@ GitLab provides the ability to filter analytics based on a date range. To filter
 The "Time" metrics near the top of the page are measured as follows:

 - **Lead time**: median time from issue created to issue closed.
-- **Cycle time**: median time from first commit to issue closed.
-
-NOTE:
-A commit is associated with an issue by [crosslinking](../project/issues/crosslinking_issues.md) in the commit message or by manually linking the merge request containing the commit.
+- **Cycle time**: median time from first commit to issue closed. (You can associate a commit with an issue by [crosslinking in the commit message](../project/issues/crosslinking_issues.md#from-commit-messages).)

 ## How the stages are measured
@@ -759,6 +759,29 @@ An approval is optional when a license report:

 ## Troubleshooting

+### ASDF_PYTHON_VERSION does not automatically install the version
+
+Defining a non-latest Python version in ASDF_PYTHON_VERSION [doesn't have it automatically installed](https://gitlab.com/gitlab-org/gitlab/-/issues/325604). If your project requires a non-latest version of Python:
+
+1. Define the required version by setting the `ASDF_PYTHON_VERSION` CI/CD variable.
+1. Pass a custom script to the `SETUP_CMD` CI/CD variable to install the required version and dependencies.
+
+For example:
+
+```yaml
+include:
+- template: Security/License-Scanning.gitlab-ci.yml
+
+license_scanning:
+SETUP_CMD: ./setup.sh
+ASDF_PYTHON_VERSION: "3.7.2"
+before_script:
+- echo "asdf install python 3.7.2 && pip install -r requirements.txt" > setup.sh
+- chmod +x setup.sh
+- apt-get -y update
+- apt-get -y install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
+```
+
 ### `ERROR -- : asdf: No preset version installed for command`

 This error occurs when the version of the tools used by your project
@@ -76,9 +76,7 @@ GitLab provides the ability to filter analytics based on a date range. To filter
 The "Time" metrics near the top of the page are measured as follows:

 - **Lead time**: median time from issue created to issue closed.
-- **Cycle time**: median time from first commit to issue closed.
-
-A commit is associated with an issue by [crosslinking](../../project/issues/crosslinking_issues.md) in the commit message or by manually linking the merge request containing the commit.
+- **Cycle time**: median time from first commit to issue closed. (You can associate a commit with an issue by [crosslinking in the commit message](../../project/issues/crosslinking_issues.md#from-commit-messages).)

 ![Value stream analytics time metrics](img/vsa_time_metrics_v13_0.png "Time metrics for value stream analytics")
@@ -23,7 +23,7 @@ git clone https://gitlab.com/esr/irker.git

 Once you have downloaded the code, you can run the Python script named `irkerd`.
 This script is the gateway script, it acts both as an IRC client, for sending
-messages to an IRC server obviously, and as a TCP server, for receiving messages
+messages to an IRC server, and as a TCP server, for receiving messages
 from the GitLab service.

 If the Irker server runs on the same machine, you are done. If not, you
@@ -231,7 +231,7 @@ module API
 optional :password, type: String, desc: 'The password of the new user'
 optional :reset_password, type: Boolean, desc: 'Flag indicating the user will be sent a password reset token'
 optional :skip_confirmation, type: Boolean, desc: 'Flag indicating the account is confirmed'
-at_least_one_of :password, :reset_password
+at_least_one_of :password, :reset_password, :force_random_password
 requires :name, type: String, desc: 'The name of the user'
 requires :username, type: String, desc: 'The username of the user'
 optional :force_random_password, type: Boolean, desc: 'Flag indicating a random password will be set'
@@ -25882,9 +25882,6 @@ msgstr ""
 msgid "Reports|Base report parsing error:"
 msgstr ""

-msgid "Reports|Class"
-msgstr ""
-
 msgid "Reports|Classname"
 msgstr ""
@@ -25904,6 +25901,9 @@ msgstr[1] ""
 msgid "Reports|Failure"
 msgstr ""

+msgid "Reports|Filename"
+msgstr ""
+
 msgid "Reports|Head report parsing error:"
 msgstr ""
@@ -2,7 +2,7 @@

 module QA
 RSpec.describe 'Create' do
-describe 'Merge request creation from fork', :smoke do
+describe 'Merge request creation from fork' do
 let!(:merge_request) do
 Resource::MergeRequestFromFork.fabricate_via_browser_ui! do |merge_request|
 merge_request.fork_branch = 'feature-branch'
@@ -19,6 +19,7 @@ Migration/UpdateLargeTable:
 - :merge_request_diffs
 - :merge_request_metrics
 - :merge_requests
+- :namespace_settings
 - :namespaces
 - :note_diff_files
 - :notes
@@ -15,7 +15,10 @@ describe('Grouped Test Reports Modal', () => {
 // populate data
 modalDataStructure.execution_time.value = 0.009411;
 modalDataStructure.system_output.value = 'Failure/Error: is_expected.to eq(3)\n\n';
-modalDataStructure.class.value = 'link';
+modalDataStructure.filename.value = {
+text: 'link',
+path: '/file/path',
+};

 let wrapper;
@@ -43,9 +46,9 @@ describe('Grouped Test Reports Modal', () => {
 it('renders link', () => {
 const link = wrapper.findComponent(GlLink);

-expect(link.attributes().href).toEqual(modalDataStructure.class.value);
+expect(link.attributes().href).toEqual(modalDataStructure.filename.value.path);

-expect(link.text()).toEqual(modalDataStructure.class.value);
+expect(link.text()).toEqual(modalDataStructure.filename.value.text);
 });

 it('renders seconds', () => {
@@ -17,6 +17,7 @@ localVue.use(Vuex);

 describe('Grouped test reports app', () => {
 const endpoint = 'endpoint.json';
+const headBlobPath = '/blob/path';
 const pipelinePath = '/path/to/pipeline';
 let wrapper;
 let mockStore;
@@ -27,6 +28,7 @@ describe('Grouped test reports app', () => {
 localVue,
 propsData: {
 endpoint,
+headBlobPath,
 pipelinePath,
 ...props,
 },
@@ -56,7 +58,7 @@ describe('Grouped test reports app', () => {
 ...getStoreConfig(),
 actions: {
 fetchReports: () => {},
-setEndpoint: () => {},
+setPaths: () => {},
 },
 });
 mountComponent();
@@ -3,7 +3,7 @@ import { TEST_HOST } from 'helpers/test_constants';
 import testAction from 'helpers/vuex_action_helper';
 import axios from '~/lib/utils/axios_utils';
 import {
-setEndpoint,
+setPaths,
 requestReports,
 fetchReports,
 stopPolling,
@@ -23,13 +23,18 @@ describe('Reports Store Actions', () => {
 mockedState = state();
 });

-describe('setEndpoint', () => {
-it('should commit SET_ENDPOINT mutation', (done) => {
+describe('setPaths', () => {
+it('should commit SET_PATHS mutation', (done) => {
 testAction(
-setEndpoint,
-'endpoint.json',
+setPaths,
+{ endpoint: 'endpoint.json', headBlobPath: '/blob/path' },
 mockedState,
-[{ type: types.SET_ENDPOINT, payload: 'endpoint.json' }],
+[
+{
+type: types.SET_PATHS,
+payload: { endpoint: 'endpoint.json', headBlobPath: '/blob/path' },
+},
+],
 [],
 done,
 );
@@ -10,11 +10,15 @@ describe('Reports Store Mutations', () => {
 stateCopy = state();
 });

-describe('SET_ENDPOINT', () => {
+describe('SET_PATHS', () => {
 it('should set endpoint', () => {
-mutations[types.SET_ENDPOINT](stateCopy, 'endpoint.json');
+mutations[types.SET_PATHS](stateCopy, {
+endpoint: 'endpoint.json',
+headBlobPath: '/blob/path',
+});

 expect(stateCopy.endpoint).toEqual('endpoint.json');
+expect(stateCopy.headBlobPath).toEqual('/blob/path');
 });
 });
@@ -238,4 +238,18 @@ describe('Reports store utils', () => {
 });
 });
 });
+
+describe('formatFilePath', () => {
+it.each`
+file | expected
+${'./test.js'} | ${'test.js'}
+${'/test.js'} | ${'test.js'}
+${'.//////////////test.js'} | ${'test.js'}
+${'test.js'} | ${'test.js'}
+${'mock/path./test.js'} | ${'mock/path./test.js'}
+${'./mock/path./test.js'} | ${'mock/path./test.js'}
+`('should format $file to be $expected', ({ file, expected }) => {
+expect(utils.formatFilePath(file)).toBe(expected);
+});
+});
 });
@@ -928,7 +928,8 @@ RSpec.describe API::Users do
 end

 it "creates user with random password" do
-params = attributes_for(:user, force_random_password: true, reset_password: true)
+params = attributes_for(:user, force_random_password: true)
+params.delete(:password)
 post api('/users', admin), params: params

 expect(response).to have_gitlab_http_status(:created)
@@ -936,8 +937,7 @@ RSpec.describe API::Users do
 user_id = json_response['id']
 new_user = User.find(user_id)

-expect(new_user.valid_password?(params[:password])).to eq(false)
-expect(new_user.recently_sent_password_reset?).to eq(true)
+expect(new_user.encrypted_password).to be_present
 end

 it "creates user with private profile" do