Add latest changes from gitlab-org/gitlab@master
This commit is contained in: parent 9e27f0d920, commit 05f4b2fb34
40 changed files with 748 additions and 115 deletions
@@ -8,6 +8,7 @@ stages:
- review
- qa
- post-test
- notification
- pages

variables:

@@ -27,11 +28,12 @@ after_script:
- date

include:
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/cng.gitlab-ci.yml
- local: .gitlab/ci/docs.gitlab-ci.yml
- local: .gitlab/ci/frontend.gitlab-ci.yml
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/memory.gitlab-ci.yml
- local: .gitlab/ci/notifications.gitlab-ci.yml
- local: .gitlab/ci/pages.gitlab-ci.yml
- local: .gitlab/ci/qa.gitlab-ci.yml
- local: .gitlab/ci/reports.gitlab-ci.yml
@@ -6,8 +6,8 @@
/doc/ @axil @marcia @eread @mikelewis

# Frontend maintainers should see everything in `app/assets/`
app/assets/ @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter
*.scss @annabeldunstone @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter
app/assets/ @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter @wortschi
*.scss @annabeldunstone @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter @wortschi

# Database maintainers should review changes in `db/`
db/ @gitlab-org/maintainers/database
@@ -110,6 +110,12 @@
- $CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org"
kubernetes: active

.only-canonical-schedules:
only:
refs:
- schedules@gitlab-org/gitlab
- schedules@gitlab-org/gitlab-foss

.use-pg9:
services:
- name: postgres:9.6
.gitlab/ci/notifications.gitlab-ci.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
.notify:
image: alpine
stage: notification
dependencies: []
cache: {}
before_script:
- apk update && apk add git curl bash

schedule:package-and-qa:notify-success:
extends:
- .only-canonical-schedules
- .notify
script:
- 'scripts/notify-slack qa-master ":tada: Scheduled QA against `master` passed! :tada: See $CI_PIPELINE_URL." ci_passing'
needs: ["schedule:package-and-qa"]
when: on_success

schedule:package-and-qa:notify-failure:
extends:
- .only-canonical-schedules
- .notify
script:
- 'scripts/notify-slack qa-master ":skull_and_crossbones: Scheduled QA against `master` failed! :skull_and_crossbones: See $CI_PIPELINE_URL." ci_failing'
needs: ["schedule:package-and-qa"]
when: on_failure
@@ -38,9 +38,5 @@ schedule:package-and-qa:
extends:
- .package-and-qa-base
- .only-code-qa-changes
only:
refs:
- schedules@gitlab-org/gitlab
- schedules@gitlab-org/gitlab-foss
- .only-canonical-schedules
needs: ["build-qa-image", "gitlab:assets:compile"]
allow_failure: true
@@ -26,8 +26,7 @@ export default {

if (log.append) {
if (isNewJobLogActive()) {
state.originalTrace = state.originalTrace.concat(log.trace);
state.trace = updateIncrementalTrace(state.originalTrace, state.trace, log.lines);
state.trace = updateIncrementalTrace(log.lines, state.trace);
} else {
state.trace += log.html;
}

@@ -38,7 +37,6 @@ export default {
// html or size. We keep the old value otherwise these
// will be set to `undefined`
if (isNewJobLogActive()) {
state.originalTrace = log.lines || state.trace;
state.trace = logLinesParser(log.lines) || state.trace;
} else {
state.trace = log.html || state.trace;
@@ -19,7 +19,6 @@ export default () => ({
isScrolledToBottomBeforeReceivingTrace: true,

trace: isNewJobLogActive() ? [] : '',
originalTrace: [],
isTraceComplete: false,
traceSize: 0,
isTraceSizeVisible: false,
@@ -63,6 +63,30 @@ export const isCollapsibleSection = (acc = [], last = {}, section = {}) =>
!section.section_duration &&
section.section === last.line.section;

/**
* Returns the lineNumber of the last line in
* a parsed log
*
* @param Array acc
* @returns Number
*/
export const getIncrementalLineNumber = acc => {
let lineNumberValue;
const lastIndex = acc.length - 1;
const lastElement = acc[lastIndex];
const nestedLines = lastElement.lines;

if (lastElement.isHeader && !nestedLines.length && lastElement.line) {
lineNumberValue = lastElement.line.lineNumber;
} else if (lastElement.isHeader && nestedLines.length) {
lineNumberValue = nestedLines[nestedLines.length - 1].lineNumber;
} else {
lineNumberValue = lastElement.lineNumber;
}

return lineNumberValue === 0 ? 1 : lineNumberValue + 1;
};

/**
* Parses the job log content into a structure usable by the template
*
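For reference, here is how the helper above behaves for the shapes it distinguishes. This is a quick illustrative sketch; the sample log objects mirror the ones used in the spec additions further down in this diff rather than real job output.

```javascript
import { getIncrementalLineNumber } from '~/jobs/store/utils';

// Plain last line: continue from its lineNumber.
getIncrementalLineNumber([
  { content: [], lineNumber: 10 },
  { content: [], lineNumber: 101 },
]); // => 102

// Header section with no nested lines yet: use the header's own line.
getIncrementalLineNumber([
  { isHeader: true, line: { lineNumber: 101, content: [] }, lines: [] },
]); // => 102

// Header with nested lines: use the last nested line.
getIncrementalLineNumber([
  { isHeader: true, line: { lineNumber: 101, content: [] }, lines: [{ lineNumber: 103, content: [] }] },
]); // => 104

// A log whose last lineNumber is 0 starts counting again at 1.
getIncrementalLineNumber([{ content: [], lineNumber: 0 }]); // => 1
```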
@@ -75,13 +99,14 @@ export const isCollapsibleSection = (acc = [], last = {}, section = {}) =>
* - adds the index as lineNumber
*
* @param Array lines
* @param Number lineNumberStart
* @param Array accumulator
* @returns Array parsed log lines
*/
export const logLinesParser = (lines = [], lineNumberStart, accumulator = []) =>
lines.reduce((acc, line, index) => {
const lineNumber = lineNumberStart ? lineNumberStart + index : index;
export const logLinesParser = (lines = [], accumulator = []) =>
lines.reduce(
(acc, line, index) => {
const lineNumber = accumulator.length > 0 ? getIncrementalLineNumber(acc) : index;

const last = acc[acc.length - 1];

// If the object is an header, we parse it into another structure

@@ -100,7 +125,9 @@ export const logLinesParser = (lines = [], lineNumberStart, accumulator = []) =>
}

return acc;
}, accumulator);
},
[...accumulator],
);

/**
* Finds the repeated offset, removes the old one
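The signature change above is the heart of this refactor: instead of passing a `lineNumberStart`, callers now pass the previously parsed log as an accumulator, and numbering continues via `getIncrementalLineNumber`. A minimal sketch of the intended call pattern; the sample line objects are illustrative, and it assumes the untouched `parseLine` helper keeps attaching `lineNumber` to each parsed line, as the existing specs suggest.

```javascript
import { logLinesParser } from '~/jobs/store/utils';

// First chunk: no accumulator, so lineNumber falls back to the array index.
const firstChunk = logLinesParser([
  { offset: 0, content: [{ text: 'Downloading' }] },
  { offset: 10, content: [{ text: 'Running' }] },
]);
// firstChunk[1].lineNumber === 1

// Incremental chunk: pass the previous result as the accumulator, so the new
// line is numbered getIncrementalLineNumber(firstChunk) === 2 instead of 0.
const merged = logLinesParser([{ offset: 20, content: [{ text: 'Done' }] }], firstChunk);
// merged.length === 3
```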
@@ -113,7 +140,7 @@ export const logLinesParser = (lines = [], lineNumberStart, accumulator = []) =>
* @returns Array
*
*/
export const findOffsetAndRemove = (newLog, oldParsed) => {
export const findOffsetAndRemove = (newLog = [], oldParsed = []) => {
const cloneOldLog = [...oldParsed];
const lastIndex = cloneOldLog.length - 1;
const last = cloneOldLog[lastIndex];

@@ -140,40 +167,13 @@ export const findOffsetAndRemove = (newLog, oldParsed) => {
* We need to check if that is the case by looking for the offset property
* before parsing the incremental part
*
* @param array originalTrace
* @param array oldLog
* @param array newLog
*/
export const updateIncrementalTrace = (originalTrace = [], oldLog = [], newLog = []) => {
const firstLine = newLog[0];
const firstLineOffset = firstLine.offset;
export const updateIncrementalTrace = (newLog, oldParsed = []) => {
const parsedLog = findOffsetAndRemove(newLog, oldParsed);

// We are going to return a new array,
// let's make a shallow copy to make sure we
// are not updating the state outside of a mutation first.
const cloneOldLog = [...oldLog];

const lastIndex = cloneOldLog.length - 1;
const lastLine = cloneOldLog[lastIndex];

// The last line may be inside a collpasible section
// If it is, we use the not parsed saved log, remove the last element
// and parse the first received part togheter with the incremental log
if (
lastLine.isHeader &&
(lastLine.line.offset === firstLineOffset ||
(lastLine.lines.length &&
lastLine.lines[lastLine.lines.length - 1].offset === firstLineOffset))
) {
const cloneOriginal = [...originalTrace];
cloneOriginal.splice(cloneOriginal.length - 1);
return logLinesParser(cloneOriginal.concat(newLog));
} else if (lastLine.offset === firstLineOffset) {
cloneOldLog.splice(lastIndex);
return cloneOldLog.concat(logLinesParser(newLog, cloneOldLog.length));
}
// there are no matches, let's parse the new log and return them together
return cloneOldLog.concat(logLinesParser(newLog, cloneOldLog.length));
return logLinesParser(newLog, parsedLog);
};

export const isNewJobLogActive = () => gon && gon.features && gon.features.jobLogJson;
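Put together, the new incremental path is much shorter than the one it replaces: trim the already-received tail with `findOffsetAndRemove`, then re-run `logLinesParser` with the trimmed old log as the accumulator. A hedged sketch of the resulting behaviour, assuming `findOffsetAndRemove` drops the repeated trailing line as its docstring describes; the sample objects are illustrative.

```javascript
import { logLinesParser, updateIncrementalTrace } from '~/jobs/store/utils';

// Parsed trace already held in the store.
const oldLog = logLinesParser([
  { offset: 0, content: [{ text: 'Downloading' }] },
  { offset: 100, content: [{ text: 'Running' }] },
]);

// The incremental chunk re-sends the line at offset 100, so the stale copy is
// removed before the chunk is parsed and appended with continuous lineNumbers.
const newChunk = [
  { offset: 100, content: [{ text: 'Running specs' }] },
  { offset: 200, content: [{ text: 'Done' }] },
];

const updated = updateIncrementalTrace(newChunk, oldLog);
// updated contains 'Downloading', 'Running specs' and 'Done', with no duplicated line.
```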
@@ -5,6 +5,7 @@ import tooltip from '~/vue_shared/directives/tooltip';
import Icon from '~/vue_shared/components/icon.vue';
import eventHub from '~/sidebar/event_hub';
import editForm from './edit_form.vue';
import recaptchaModalImplementor from '~/vue_shared/mixins/recaptcha_modal_implementor';

export default {
components: {

@@ -14,6 +15,7 @@ export default {
directives: {
tooltip,
},
mixins: [recaptchaModalImplementor],
props: {
isConfidential: {
required: true,

@@ -54,9 +56,14 @@ export default {
updateConfidentialAttribute(confidential) {
this.service
.update('issue', { confidential })
.then(({ data }) => this.checkForSpam(data))
.then(() => window.location.reload())
.catch(() => {
.catch(error => {
if (error.name === 'SpamError') {
this.openRecaptcha();
} else {
Flash(__('Something went wrong trying to change the confidentiality of this issue'));
}
});
},
},

@@ -112,5 +119,7 @@ export default {
{{ __('This issue is confidential') }}
</div>
</div>

<recaptcha-modal v-if="showRecaptcha" :html="recaptchaHTML" @close="closeRecaptcha" />
</div>
</template>
@@ -32,6 +32,10 @@ export default {

mounted() {
eventHub.$on('submit', this.submit);

if (this.html) {
this.appendRecaptchaScript();
}
},

beforeDestroy() {
app/serializers/evidences/author_entity.rb (new file, 9 lines)
@@ -0,0 +1,9 @@
# frozen_string_literal: true

module Evidences
class AuthorEntity < Grape::Entity
expose :id
expose :name
expose :email
end
end

app/serializers/evidences/issue_entity.rb (new file, 15 lines)
@@ -0,0 +1,15 @@
# frozen_string_literal: true

module Evidences
class IssueEntity < Grape::Entity
expose :id
expose :title
expose :description
expose :author, using: AuthorEntity
expose :state
expose :iid
expose :confidential
expose :created_at
expose :due_date
end
end

app/serializers/evidences/milestone_entity.rb (new file, 14 lines)
@@ -0,0 +1,14 @@
# frozen_string_literal: true

module Evidences
class MilestoneEntity < Grape::Entity
expose :id
expose :title
expose :description
expose :state
expose :iid
expose :created_at
expose :due_date
expose :issues, using: IssueEntity
end
end

app/serializers/evidences/project_entity.rb (new file, 10 lines)
@@ -0,0 +1,10 @@
# frozen_string_literal: true

module Evidences
class ProjectEntity < Grape::Entity
expose :id
expose :name
expose :description
expose :created_at
end
end

app/serializers/evidences/release_entity.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
# frozen_string_literal: true

module Evidences
class ReleaseEntity < Grape::Entity
expose :id
expose :tag, as: :tag_name
expose :name
expose :description
expose :created_at
expose :project, using: ProjectEntity
expose :milestones, using: MilestoneEntity
end
end

app/serializers/evidences/release_serializer.rb (new file, 7 lines)
@@ -0,0 +1,7 @@
# frozen_string_literal: true

module Evidences
class ReleaseSerializer < BaseSerializer
entity ReleaseEntity
end
end
@@ -0,0 +1,5 @@
---
title: Fix routing bugs in security dashboards
merge_request: 16738
author:
type: fixed

@@ -0,0 +1,5 @@
---
title: 'Geo: Invalidate cache after refreshing foreign tables'
merge_request: 17885
author:
type: fixed

@@ -0,0 +1,5 @@
---
title: Display reCAPTCHA modal when making issue public
merge_request: 17553
author:
type: fixed
@@ -850,3 +850,8 @@ To remove the proxy setting, run the following commands (depending on which vari
unset http_proxy
unset https_proxy
```

### Praefect

Praefect is an experimental daemon that allows for replication of the Git data.
It can be set up with Omnibus, [as explained here](./praefect.md).
doc/administration/gitaly/praefect.md (new file, 93 lines)
@@ -0,0 +1,93 @@
# Praefect

NOTE: **Note:** Praefect is an experimental service, and for testing purposes only at
this time.

## Omnibus

### Architecture

For this document, the following network topology is assumed:

```mermaid
graph TB
  GitLab --> Gitaly;
  GitLab --> Praefect;
  Praefect --> Praefect-Git-1;
  Praefect --> Praefect-Git-2;
  Praefect --> Praefect-Git-3;
```

Where `GitLab` is the collection of clients that can request Git operations.
`Gitaly` is a Gitaly server before using Praefect. The Praefect node has three
storage nodes attached. Praefect itself doesn't store data, but connects to
three Gitaly nodes, `Praefect-Git-1`, `Praefect-Git-2`, and `Praefect-Git-3`.
Nothing other than Praefect should know about the existence of the
`Praefect-Git-X` nodes.

### Enable the daemon

Praefect is expected to run on its own host; this means that no services other
than the support services run on this machine.

Praefect is disabled by default. To enable Praefect, uncomment the following line
and set it to `true`: `# praefect['enable'] = false`

```ruby
praefect['enable'] = true
```

By default, Praefect will listen on port `:2305`. It's recommended to enable
Prometheus to expose metrics. Uncomment the line so it looks like:

```ruby
praefect['prometheus_listen_addr'] = "localhost:9652"
```

Praefect needs at least one storage node to store the Git data on. This node should
run Gitaly and should not be listed as a storage for GitLab itself; that is, the
only way it receives traffic is through Praefect, and it's not listed in the
`git_data_dirs` on any `gitlab.rb` in your GitLab cluster.

To set up the nodes as depicted in the diagram above, the configuration should look
like:

```ruby
praefect['storage_nodes'] = [
  {
    'storage' => 'praefect-git-1',
    'address' => 'tcp://praefect-git-1.internal',
    'primary' => true
  },
  {
    'storage' => 'praefect-git-2',
    'address' => 'tcp://praefect-git-2.internal'
  },
  {
    'storage' => 'praefect-git-3',
    'address' => 'tcp://praefect-git-3.internal'
  }
]
```

Save the file and run `gitlab-ctl reconfigure`. To test if Praefect is running,
you can run `gitlab-ctl status`, which should list `praefect` as being up.

### Enable Praefect as a storage backend in GitLab

When Praefect is running, it should be exposed as a storage to GitLab. This
is done through setting the `git_data_dirs`. Assuming the default storage
configuration is used, there would be two storages available to GitLab:

```ruby
git_data_dirs({
  "default" => {
    "gitaly_address" => "tcp://gitaly.internal"
  },
  "praefect" => {
    "gitaly_address" => "tcp://praefect.internal:2305"
  }
})
```

Restart GitLab using `gitlab-ctl restart` on the GitLab node.
@@ -27,6 +27,7 @@ The current stages are:
- `review`: This stage includes jobs that deploy the GitLab and Docs Review Apps.
- `qa`: This stage includes jobs that perform QA tasks against the Review App
that is deployed in the previous stage.
- `notification`: This stage includes jobs that send notifications about pipeline status.
- `post-test`: This stage includes jobs that build reports or gather data from
the previous stages' jobs (e.g. coverage, Knapsack metadata etc.).
- `pages`: This stage includes a job that deploys the various reports as

@@ -191,6 +192,11 @@ subgraph "`qa` stage"
dast -.-> |depends on| G;
end

subgraph "`notification` stage"
NOTIFICATION1["schedule:package-and-qa:notify-success<br>(on_success)"] -.-> |needs| P;
NOTIFICATION2["schedule:package-and-qa:notify-failure<br>(on_failure)"] -.-> |needs| P;
end

subgraph "`post-test` stage"
M
end
@@ -37,7 +37,7 @@ module Gitlab
usage_data
end

# rubocop:disable Metrics/AbcSize
# rubocop: disable Metrics/AbcSize
# rubocop: disable CodeReuse/ActiveRecord
def system_usage_data
{

@@ -96,14 +96,16 @@ module Gitlab
todos: count(Todo),
uploads: count(Upload),
web_hooks: count(WebHook)
}.merge(services_usage)
.merge(approximate_counts)
.merge(usage_counters)
}.tap do |data|
data[:counts][:user_preferences] = user_preferences_usage
end
}.merge(
services_usage,
approximate_counts,
usage_counters,
user_preferences_usage
)
}
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: enable Metrics/AbcSize

def cycle_analytics_usage_data
Gitlab::CycleAnalytics::UsageData.new.to_json
scripts/notify-slack (new executable file, 14 lines)
@@ -0,0 +1,14 @@
#!/bin/bash
# Sends Slack notification MSG to CI_SLACK_WEBHOOK_URL (which needs to be set).
# ICON_EMOJI needs to be set to an icon emoji name (without the `:` around it).

CHANNEL=$1
MSG=$2
ICON_EMOJI=$3

if [ -z "$CHANNEL" ] || [ -z "$CI_SLACK_WEBHOOK_URL" ] || [ -z "$MSG" ] || [ -z "$ICON_EMOJI" ]; then
echo "Missing argument(s) - Use: $0 channel message icon_emoji"
echo "and set CI_SLACK_WEBHOOK_URL environment variable."
else
curl -X POST --data-urlencode 'payload={"channel": "#'"$CHANNEL"'", "username": "GitLab QA Bot", "text": "'"$MSG"'", "icon_emoji": "'":$ICON_EMOJI:"'"}' "$CI_SLACK_WEBHOOK_URL"
fi
spec/fixtures/api/schemas/evidences/author.json (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
{
"type": "object",
"required": [
"id",
"name",
"email"
],
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
"email": { "type": "string" }
},
"additionalProperties": false
}

spec/fixtures/api/schemas/evidences/issue.json (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
{
"type": "object",
"required": [
"id",
"title",
"description",
"author",
"state",
"iid",
"confidential",
"created_at",
"due_date"
],
"properties": {
"id": { "type": "integer" },
"title": { "type": "string" },
"description": { "type": "string" },
"author": { "$ref": "author.json" },
"state": { "type": "string" },
"iid": { "type": "integer" },
"confidential": { "type": "boolean" },
"created_at": { "type": "date" },
"due_date": { "type": "date" }
},
"additionalProperties": false
}

spec/fixtures/api/schemas/evidences/milestone.json (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
{
"type": "object",
"required": [
"id",
"title",
"description",
"state",
"iid",
"created_at",
"due_date",
"issues"
],
"properties": {
"id": { "type": "integer" },
"title": { "type": "string" },
"description": { "type": "string" },
"state": { "type": "string" },
"iid": { "type": "integer" },
"created_at": { "type": "date" },
"due_date": { "type": "date" },
"issues": {
"type": "array",
"items": { "$ref": "issue.json" }
}
},
"additionalProperties": false
}

spec/fixtures/api/schemas/evidences/project.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
{
"type": "object",
"required": [
"id",
"name",
"description",
"created_at"
],
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
"description": { "type": "string" },
"created_at": { "type": "date" }
},
"additionalProperties": false
}

spec/fixtures/api/schemas/evidences/release.json (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
{
"type": "object",
"required": [
"id",
"tag",
"name",
"description",
"created_at",
"project",
"milestones"
],
"properties": {
"id": { "type": "integer" },
"tag": { "type": "string" },
"name": { "type": "string" },
"description": { "type": "string" },
"created_at": { "type": "date" },
"project": { "$ref": "project.json" },
"milestones": {
"type": "array",
"items": { "$ref": "milestone.json" }
}
},
"additionalProperties": false
}
@@ -73,6 +73,7 @@ describe('Jobs Store Mutations', () => {
html,
size: 511846,
complete: true,
lines: [],
});

expect(stateCopy.trace).toEqual(html);
@@ -6,6 +6,7 @@ import {
addDurationToHeader,
isCollapsibleSection,
findOffsetAndRemove,
getIncrementalLineNumber,
} from '~/jobs/store/utils';
import {
utilsMockData,

@@ -292,11 +293,91 @@ describe('Jobs Store Utils', () => {
});
});

describe('getIncrementalLineNumber', () => {
describe('when last line is 0', () => {
it('returns 1', () => {
const log = [
{
content: [],
lineNumber: 0,
},
];

expect(getIncrementalLineNumber(log)).toEqual(1);
});
});

describe('with unnested line', () => {
it('returns the lineNumber of the last item in the array', () => {
const log = [
{
content: [],
lineNumber: 10,
},
{
content: [],
lineNumber: 101,
},
];

expect(getIncrementalLineNumber(log)).toEqual(102);
});
});

describe('when last line is the header section', () => {
it('returns the lineNumber of the last item in the array', () => {
const log = [
{
content: [],
lineNumber: 10,
},
{
isHeader: true,
line: {
lineNumber: 101,
content: [],
},
lines: [],
},
];

expect(getIncrementalLineNumber(log)).toEqual(102);
});
});

describe('when last line is a nested line', () => {
it('returns the lineNumber of the last item in the nested array', () => {
const log = [
{
content: [],
lineNumber: 10,
},
{
isHeader: true,
line: {
lineNumber: 101,
content: [],
},
lines: [
{
lineNumber: 102,
content: [],
},
{ lineNumber: 103, content: [] },
],
},
];

expect(getIncrementalLineNumber(log)).toEqual(104);
});
});
});

describe('updateIncrementalTrace', () => {
describe('without repeated section', () => {
it('concats and parses both arrays', () => {
const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalTrace(originalTrace, oldLog, regularIncremental);
const result = updateIncrementalTrace(regularIncremental, oldLog);

expect(result).toEqual([
{

@@ -324,7 +405,7 @@ describe('Jobs Store Utils', () => {
describe('with regular line repeated offset', () => {
it('updates the last line and formats with the incremental part', () => {
const oldLog = logLinesParser(originalTrace);
const result = updateIncrementalTrace(originalTrace, oldLog, regularIncrementalRepeated);
const result = updateIncrementalTrace(regularIncrementalRepeated, oldLog);

expect(result).toEqual([
{

@@ -343,7 +424,7 @@ describe('Jobs Store Utils', () => {
describe('with header line repeated', () => {
it('updates the header line and formats with the incremental part', () => {
const oldLog = logLinesParser(headerTrace);
const result = updateIncrementalTrace(headerTrace, oldLog, headerTraceIncremental);
const result = updateIncrementalTrace(headerTraceIncremental, oldLog);

expect(result).toEqual([
{

@@ -369,11 +450,7 @@ describe('Jobs Store Utils', () => {
describe('with collapsible line repeated', () => {
it('updates the collapsible line and formats with the incremental part', () => {
const oldLog = logLinesParser(collapsibleTrace);
const result = updateIncrementalTrace(
collapsibleTrace,
oldLog,
collapsibleTraceIncremental,
);
const result = updateIncrementalTrace(collapsibleTraceIncremental, oldLog);

expect(result).toEqual([
{
@@ -49,6 +49,8 @@ exports[`Confidential Issue Sidebar Block renders for isConfidential = false and

</div>
</div>

<!---->
</div>
`;

@@ -111,6 +113,8 @@ exports[`Confidential Issue Sidebar Block renders for isConfidential = false and

</div>
</div>

<!---->
</div>
`;

@@ -163,6 +167,8 @@ exports[`Confidential Issue Sidebar Block renders for isConfidential = true and

</div>
</div>

<!---->
</div>
`;

@@ -225,5 +231,7 @@ exports[`Confidential Issue Sidebar Block renders for isConfidential = true and

</div>
</div>

<!---->
</div>
`;
@@ -2,15 +2,36 @@ import { shallowMount } from '@vue/test-utils';
import ConfidentialIssueSidebar from '~/sidebar/components/confidential/confidential_issue_sidebar.vue';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
import EditForm from '~/sidebar/components/confidential/edit_form.vue';
import SidebarService from '~/sidebar/services/sidebar_service';
import createFlash from '~/flash';
import RecaptchaModal from '~/vue_shared/components/recaptcha_modal';

jest.mock('~/flash');
jest.mock('~/sidebar/services/sidebar_service');

describe('Confidential Issue Sidebar Block', () => {
let wrapper;

const createComponent = propsData => {
const service = {
update: () => Promise.resolve(true),
const findRecaptchaModal = () => wrapper.find(RecaptchaModal);

const triggerUpdateConfidentialAttribute = () => {
wrapper.setData({ edit: true });
return (
// wait for edit form to become visible
wrapper.vm
.$nextTick()
.then(() => {
const editForm = wrapper.find(EditForm);
const { updateConfidentialAttribute } = editForm.props();
updateConfidentialAttribute();
})
// wait for reCAPTCHA modal to render
.then(() => wrapper.vm.$nextTick())
);
};

const createComponent = propsData => {
const service = new SidebarService();
wrapper = shallowMount(ConfidentialIssueSidebar, {
propsData: {
service,

@@ -20,6 +41,15 @@ describe('Confidential Issue Sidebar Block', () => {
});
};

beforeEach(() => {
jest.clearAllMocks();
jest.spyOn(window.location, 'reload').mockImplementation();
});

afterEach(() => {
wrapper.destroy();
});

it.each`
isConfidential | isEditable
${false} | ${false}

@@ -38,10 +68,6 @@ describe('Confidential Issue Sidebar Block', () => {
},
);

afterEach(() => {
wrapper.destroy();
});

describe('if editable', () => {
beforeEach(() => {
createComponent({

@@ -81,5 +107,61 @@ describe('Confidential Issue Sidebar Block', () => {
property: 'confidentiality',
});
});

describe('for successful update', () => {
beforeEach(() => {
SidebarService.prototype.update.mockResolvedValue({ data: 'irrelevant' });
});

it('reloads the page', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(window.location.reload).toHaveBeenCalled();
}));

it('does not show an error message', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(createFlash).not.toHaveBeenCalled();
}));
});

describe('for update error', () => {
beforeEach(() => {
SidebarService.prototype.update.mockRejectedValue(new Error('updating failed!'));
});

it('does not reload the page', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(window.location.reload).not.toHaveBeenCalled();
}));

it('shows an error message', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(createFlash).toHaveBeenCalled();
}));
});

describe('for spam error', () => {
beforeEach(() => {
SidebarService.prototype.update.mockRejectedValue({ name: 'SpamError' });
});

it('does not reload the page', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(window.location.reload).not.toHaveBeenCalled();
}));

it('does not show an error message', () =>
triggerUpdateConfidentialAttribute().then(() => {
expect(createFlash).not.toHaveBeenCalled();
}));

it('shows a reCAPTCHA modal', () => {
expect(findRecaptchaModal().exists()).toBe(false);

return triggerUpdateConfidentialAttribute().then(() => {
expect(findRecaptchaModal().exists()).toBe(true);
});
});
});
});
});
@@ -38,7 +38,7 @@ describe Gitlab::UsageData do

subject { described_class.data }

it 'gathers usage data' do
it 'gathers usage data', :aggregate_failures do
expect(subject.keys).to include(*%i(
active_user_count
counts

@@ -151,7 +151,8 @@ describe Gitlab::UsageData do
todos
uploads
web_hooks
user_preferences
user_preferences_group_overview_details
user_preferences_group_overview_security_dashboard
).push(*smau_keys)

count_data = subject[:counts]

@@ -163,7 +164,7 @@ describe Gitlab::UsageData do
expect(expected_keys - count_data.keys).to be_empty
end

it 'gathers projects data correctly' do
it 'gathers projects data correctly', :aggregate_failures do
count_data = subject[:counts]

expect(count_data[:projects]).to eq(4)

@@ -209,11 +210,8 @@ describe Gitlab::UsageData do
describe 'the results of calling #totals on all objects in the array' do
subject { described_class.usage_data_counters.map(&:totals) }

it do
is_expected
.to all(be_a Hash)
.and all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer)))
end
it { is_expected.to all(be_a Hash) }
it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
end

it 'does not have any conflicts' do

@@ -226,7 +224,7 @@ describe Gitlab::UsageData do
describe '#features_usage_data_ce' do
subject { described_class.features_usage_data_ce }

it 'gathers feature usage data' do
it 'gathers feature usage data', :aggregate_failures do
expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)

@@ -242,7 +240,7 @@ describe Gitlab::UsageData do
describe '#components_usage_data' do
subject { described_class.components_usage_data }

it 'gathers components usage data' do
it 'gathers components usage data', :aggregate_failures do
expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
expect(subject[:git][:version]).to eq(Gitlab::Git.version)

@@ -258,7 +256,7 @@ describe Gitlab::UsageData do
describe '#license_usage_data' do
subject { described_class.license_usage_data }

it 'gathers license data' do
it 'gathers license data', :aggregate_failures do
expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
expect(subject[:version]).to eq(Gitlab::VERSION)
expect(subject[:installation_type]).to eq('gitlab-development-kit')

@@ -290,7 +288,7 @@ describe Gitlab::UsageData do
end

describe '#approximate_counts' do
it 'gets approximate counts for selected models' do
it 'gets approximate counts for selected models', :aggregate_failures do
create(:label)

expect(Gitlab::Database::Count).to receive(:approximate_counts)

@@ -302,14 +300,12 @@ describe Gitlab::UsageData do
expect(counts.any? { |count| count < 0 }).to be_falsey
end

it 'returns default values if counts can not be retrieved' do
it 'returns default values if counts can not be retrieved', :aggregate_failures do
described_class::APPROXIMATE_COUNT_MODELS.map do |model|
model.name.underscore.pluralize.to_sym
end

expect(Gitlab::Database::Count).to receive(:approximate_counts)
.and_return({})

expect(Gitlab::Database::Count).to receive(:approximate_counts).and_return({})
expect(described_class.approximate_counts.values.uniq).to eq([-1])
end
end
spec/serializers/evidences/author_entity_spec.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::AuthorEntity do
let(:entity) { described_class.new(build(:author)) }

subject { entity.as_json }

it 'exposes the expected fields' do
expect(subject.keys).to contain_exactly(:id, :name, :email)
end
end

spec/serializers/evidences/issue_entity_spec.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::IssueEntity do
let(:entity) { described_class.new(build(:issue)) }

subject { entity.as_json }

it 'exposes the expected fields' do
expect(subject.keys).to contain_exactly(:id, :title, :description, :author, :state, :iid, :confidential, :created_at, :due_date)
end
end

spec/serializers/evidences/milestone_entity_spec.rb (new file, 35 lines)
@@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::MilestoneEntity do
let(:milestone) { build(:milestone) }
let(:entity) { described_class.new(milestone) }

subject { entity.as_json }

it 'exposes the expected fields' do
expect(subject.keys).to contain_exactly(:id, :title, :description, :state, :iid, :created_at, :due_date, :issues)
end

context 'when there issues linked to this milestone' do
let(:issue_1) { build(:issue) }
let(:issue_2) { build(:issue) }
let(:milestone) { build(:milestone, issues: [issue_1, issue_2]) }

it 'exposes these issues' do
expect(subject[:issues]).to contain_exactly(
Evidences::IssueEntity.new(issue_1).as_json,
Evidences::IssueEntity.new(issue_2).as_json
)
end
end

context 'when the release has no milestone' do
let(:milestone) { build(:milestone, issues: []) }

it 'exposes an empty array for milestones' do
expect(subject[:issues]).to be_empty
end
end
end

spec/serializers/evidences/project_entity_spec.rb (new file, 13 lines)
@@ -0,0 +1,13 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::ProjectEntity do
let(:entity) { described_class.new(build(:project)) }

subject { entity.as_json }

it 'exposes the expected fields' do
expect(subject.keys).to contain_exactly(:id, :name, :description, :created_at)
end
end

spec/serializers/evidences/release_entity_spec.rb (new file, 36 lines)
@@ -0,0 +1,36 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::ReleaseEntity do
let(:release) { build(:release) }
let(:entity) { described_class.new(release) }

subject { entity.as_json }

it 'exposes the expected fields' do
expect(subject.keys).to contain_exactly(:id, :tag_name, :name, :description, :created_at, :project, :milestones)
end

context 'when the release has milestones' do
let(:project) { create(:project) }
let(:milestone_1) { build(:milestone, project: project) }
let(:milestone_2) { build(:milestone, project: project) }
let(:release) { build(:release, project: project, milestones: [milestone_1, milestone_2]) }

it 'exposes these milestones' do
expect(subject[:milestones]).to contain_exactly(
Evidences::MilestoneEntity.new(milestone_1).as_json,
Evidences::MilestoneEntity.new(milestone_2).as_json
)
end
end

context 'when the release has no milestone' do
let(:release) { build(:release, milestones: []) }

it 'exposes an empty array for milestones' do
expect(subject[:milestones]).to be_empty
end
end
end

spec/serializers/evidences/release_serializer_spec.rb (new file, 9 lines)
@@ -0,0 +1,9 @@
# frozen_string_literal: true

require 'spec_helper'

describe Evidences::ReleaseSerializer do
it 'represents an Evidence::ReleaseEntity entity' do
expect(described_class.entity_class).to eq(Evidences::ReleaseEntity)
end
end