2018-08-03 03:15:25 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2015-08-25 21:42:46 -04:00
|
|
|
module Ci
  # A CI/CD pipeline: the top-level unit of work created for a commit/ref,
  # owning stages, builds and the status bookkeeping around them.
  class Pipeline < ApplicationRecord
    extend Gitlab::Ci::Model
    include Ci::HasStatus
    include Importable
    include AfterCommitQueue
    include Presentable
    include Gitlab::OptimisticLocking
    include Gitlab::Utils::StrongMemoize
    include AtomicInternalId
    include EnumWithNil
    include Ci::HasRef
    include ShaAttribute
    include FromUnion
    include UpdatedAtFilterable

    # Eager-loading spec for a pipeline's project together with its routes
    # (presumably handed to `preload`/`includes` by callers — confirm at
    # call sites).
    PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
      project: [:project_feature, :route, { namespace: :route }]
    }.freeze

    # NOTE(review): error class used by the bridge-status propagation path,
    # judging by the name — confirm where it is raised.
    BridgeStatusError = Class.new(StandardError)

    # Store these SHA columns in compact binary form (ShaAttribute).
    sha_attribute :source_sha
    sha_attribute :target_sha
|
belongs_to :project, inverse_of: :all_pipelines
belongs_to :user
# The pipeline that auto-canceled this one, if any.
belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
belongs_to :merge_request, class_name: 'MergeRequest'
belongs_to :external_pull_request
belongs_to :ci_ref, class_name: 'Ci::Ref', foreign_key: :ci_ref_id, inverse_of: :pipelines

# Per-project sequential id (iid). Tracking is skipped while importing;
# the init block seeds the sequence from the project's existing pipelines.
has_internal_id :iid, scope: :project, presence: false, track_if: -> { !importing? }, ensure_if: -> { !importing? }, init: ->(s) do
  s&.project&.all_pipelines&.maximum(:iid) || s&.project&.all_pipelines&.count
end

has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
# All job/status records; the legacy foreign key on ci_builds is commit_id.
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline
has_many :bridges, class_name: 'Ci::Bridge', foreign_key: :commit_id, inverse_of: :pipeline
has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
has_many :job_artifacts, through: :builds
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
has_many :variables, class_name: 'Ci::PipelineVariable'
has_many :deployments, through: :builds
has_many :environments, -> { distinct }, through: :deployments
has_many :latest_builds, -> { latest }, foreign_key: :commit_id, inverse_of: :pipeline, class_name: 'Ci::Build'
has_many :downloadable_artifacts, -> { not_expired.downloadable }, through: :latest_builds, source: :job_artifacts

has_many :messages, class_name: 'Ci::PipelineMessage', inverse_of: :pipeline

# Merge requests for which the current pipeline is running against
# the merge request's latest commit.
has_many :merge_requests_as_head_pipeline, foreign_key: "head_pipeline_id", class_name: 'MergeRequest'

# Filtered build collections used by the status/retry/cancel logic below.
has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
has_many :failed_builds, -> { latest.failed }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline
has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline

has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
# Upstream/downstream pipeline links (multi-project and parent/child).
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_pipeline_id

has_one :source_pipeline, class_name: 'Ci::Sources::Pipeline', inverse_of: :pipeline

has_one :chat_data, class_name: 'Ci::PipelineChatData'

has_many :triggered_pipelines, through: :sourced_pipelines, source: :pipeline
# Child pipelines are downstream pipelines within the same project.
has_many :child_pipelines, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :sourced_pipelines, source: :pipeline
has_one :triggered_by_pipeline, through: :source_pipeline, source: :source_pipeline
has_one :parent_pipeline, -> { merge(Ci::Sources::Pipeline.same_project) }, through: :source_pipeline, source: :source_pipeline
has_one :source_job, through: :source_pipeline, source: :source_job
has_one :source_bridge, through: :source_pipeline, source: :source_bridge

has_one :pipeline_config, class_name: 'Ci::PipelineConfig', inverse_of: :pipeline

has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult', foreign_key: :last_pipeline_id
has_many :latest_builds_report_results, through: :latest_builds, source: :report_results
has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :pipeline, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent

# Only new (unpersisted) variables may be supplied through nested attributes.
accepts_nested_attributes_for :variables, reject_if: :persisted?
|
|
|
|
|
# Convenience delegations so callers can ask the pipeline directly for
# project identifiers (`project_id`, `project_full_path`).
delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true

# Core integrity checks; relaxed while importing, since imported records
# may be historical/partial.
validates :sha, presence: { unless: :importing? }
validates :ref, presence: { unless: :importing? }
# Merge request pipelines can never run for a tag.
validates :tag, inclusion: { in: [false], if: :merge_request? }

# External pull request pipelines must reference the PR (and only they
# may), and likewise can never run for a tag.
validates :external_pull_request, presence: { if: :external_pull_request_event? }
validates :external_pull_request, absence: { unless: :external_pull_request_event? }
validates :tag, inclusion: { in: [false], if: :external_pull_request_event? }

validates :status, presence: { unless: :importing? }
validate :valid_commit_sha, unless: :importing?
# New pipelines must have a concrete source; "unknown" is only tolerated
# on pre-existing rows.
validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create

# Pin the referenced commits so they are not garbage-collected while the
# pipeline still points at them.
after_create :keep_around_commits, unless: :importing?
|
2016-04-11 10:55:40 -04:00
|
|
|
|
# We use `Ci::PipelineEnums.sources` here so that EE can more easily extend
# this `Hash` with new values.
enum_with_nil source: ::Ci::PipelineEnums.sources

enum_with_nil config_source: ::Ci::PipelineEnums.config_sources

# We use `Ci::PipelineEnums.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
enum failure_reason: ::Ci::PipelineEnums.failure_reasons

# Whether this pipeline's artifacts are locked (kept) or free to expire.
enum locked: { unlocked: 0, artifacts_locked: 1 }
|
|
|
|
|
# Lifecycle of the pipeline's `status` column. Events normalize many
# current states into one target state; callbacks stamp timestamps and
# enqueue follow-up workers after commit.
state_machine :status, initial: :created do
  event :enqueue do
    transition [:created, :manual, :waiting_for_resource, :preparing, :skipped, :scheduled] => :pending
    transition [:success, :failed, :canceled] => :running

    # this is needed to ensure tests to be covered
    transition [:running] => :running
  end

  event :request_resource do
    transition any - [:waiting_for_resource] => :waiting_for_resource
  end

  event :prepare do
    transition any - [:preparing] => :preparing
  end

  event :run do
    transition any - [:running] => :running
  end

  event :skip do
    transition any - [:skipped] => :skipped
  end

  event :drop do
    transition any - [:failed] => :failed
  end

  event :succeed do
    transition any - [:success] => :success
  end

  event :cancel do
    transition any - [:canceled] => :canceled
  end

  event :block do
    transition any - [:manual] => :manual
  end

  event :delay do
    transition any - [:scheduled] => :scheduled
  end

  # IMPORTANT
  # Do not add any operations to this state_machine
  # Create a separate worker for each new operation

  before_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
    pipeline.started_at = Time.current
  end

  before_transition any => [:success, :failed, :canceled] do |pipeline|
    pipeline.finished_at = Time.current
    pipeline.update_duration
  end

  before_transition any => [:manual] do |pipeline|
    pipeline.update_duration
  end

  # Leaving `canceled` clears the record of who auto-canceled us.
  before_transition canceled: any - [:canceled] do |pipeline|
    pipeline.auto_canceled_by = nil
  end

  # `drop(reason)` passes the failure reason as the first event argument.
  before_transition any => :failed do |pipeline, transition|
    transition.args.first.try do |reason|
      pipeline.failure_reason = reason
    end
  end

  after_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
    pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
  end

  after_transition any => [:success] do |pipeline|
    pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
  end

  after_transition [:created, :waiting_for_resource, :preparing, :pending, :running] => :success do |pipeline|
    # We wait a little bit to ensure that all BuildFinishedWorkers finish first
    # because this is where some metrics like code coverage is parsed and stored
    # in CI build records which the daily build metrics worker relies on.
    pipeline.run_after_commit { Ci::DailyBuildGroupReportResultsWorker.perform_in(10.minutes, pipeline.id) }
  end

  # Fire webhooks and expire caches on every real (non-loopback) transition.
  after_transition do |pipeline, transition|
    next if transition.loopback?

    pipeline.run_after_commit do
      PipelineHooksWorker.perform_async(pipeline.id)
      ExpirePipelineCacheWorker.perform_async(pipeline.id) if pipeline.cacheable?
    end
  end

  after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
    pipeline.run_after_commit do
      pipeline.persistent_ref.delete

      pipeline.all_merge_requests.each do |merge_request|
        next unless merge_request.auto_merge_enabled?

        AutoMergeProcessWorker.perform_async(merge_request.id)
      end

      if pipeline.auto_devops_source?
        self.class.auto_devops_pipelines_completed_total.increment(status: pipeline.status)
      end
    end
  end

  # Propagate completion to an upstream bridge that is waiting on us.
  after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
    next unless pipeline.bridge_triggered?
    next unless pipeline.bridge_waiting?

    pipeline.run_after_commit do
      ::Ci::PipelineBridgeStatusWorker.perform_async(pipeline.id)
    end
  end

  after_transition any => [:success, :failed] do |pipeline|
    ref_status = pipeline.ci_ref&.update_status_by!(pipeline)

    pipeline.run_after_commit do
      PipelineNotificationWorker.perform_async(pipeline.id, ref_status: ref_status)
    end
  end

  # A failed Auto DevOps pipeline disables Auto DevOps for the project.
  after_transition any => [:failed] do |pipeline|
    next unless pipeline.auto_devops_source?

    pipeline.run_after_commit { AutoDevops::DisableWorker.perform_async(pipeline.id) }
  end
end
|
|
|
|
|
# Pipelines created by GitLab itself (i.e. not via the external API source).
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
# Pipelines whose config came from a CI config source (e.g. .gitlab-ci.yml).
scope :ci_sources, -> { where(config_source: ::Ci::PipelineEnums.ci_config_sources_values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
scope :for_ref, -> (ref) { where(ref: ref) }
scope :for_id, -> (id) { where(id: id) }
scope :for_iid, -> (iid) { where(iid: iid) }
scope :for_project, -> (project) { where(project: project) }
scope :created_after, -> (time) { where('ci_pipelines.created_at > ?', time) }
scope :created_before_id, -> (id) { where('ci_pipelines.id < ?', id) }
scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }

# Pipelines not in the given pipeline's parent/child family.
scope :outside_pipeline_family, ->(pipeline) do
  where.not(id: pipeline.same_family_pipeline_ids)
end

# Pipelines that have at least one latest build carrying the given kind
# of report artifact.
scope :with_reports, -> (reports_scope) do
  where('EXISTS (?)', ::Ci::Build.latest.with_reports(reports_scope).where('ci_pipelines.id=ci_builds.commit_id').select(1))
end

# Pipelines whose started/finished builds are all interruptible, i.e.
# safe candidates for auto-cancellation.
scope :with_only_interruptible_builds, -> do
  where('NOT EXISTS (?)',
    Ci::Build.where('ci_builds.commit_id = ci_pipelines.id')
      .with_status(:running, :success, :failed)
      .not_interruptible
  )
end

# Returns the pipelines that associated with the given merge request.
# In general, please use `Ci::PipelinesForMergeRequestFinder` instead,
# for checking permission of the actor.
scope :triggered_by_merge_request, -> (merge_request) do
  ci_sources.where(source: :merge_request_event,
                   merge_request: merge_request,
                   project: [merge_request.source_project, merge_request.target_project])
end
|
|
|
|
|
# Returns the pipelines in descending order (= newest first), optionally
# limited to a number of references.
#
# ref - The name (or names) of the branch(es)/tag(s) to limit the list of
#       pipelines to.
# sha - The commit SHA (or multiple SHAs) to limit the list of pipelines to.
# limit - This limits a backlog search, default to 100.
def self.newest_first(ref: nil, sha: nil, limit: 100)
  relation = order(id: :desc)
  relation = relation.where(ref: ref) if ref
  relation = relation.where(sha: sha) if sha

  if limit
    # Constrain via an id subquery so the returned relation stays
    # chainable with further conditions.
    ids = relation.limit(limit).select(:id)
    relation = relation.where(id: ids)
  end

  relation
end
|
2016-11-07 10:44:11 -05:00
|
|
|
|
# Status string of the newest pipeline (optionally scoped to +ref+),
# or nil when there is none.
def self.latest_status(ref = nil)
  newest_first(ref: ref).pluck(:status).first
end

# Newest successful pipeline for a single ref.
def self.latest_successful_for_ref(ref)
  newest_first(ref: ref).success.take
end

# Newest successful pipeline for a single commit SHA.
def self.latest_successful_for_sha(sha)
  newest_first(sha: sha).success.take
end

# Returns a Hash mapping each ref name to its newest successful
# pipeline. `||=` keeps the first pipeline seen per ref, which is the
# newest because the relation is ordered newest-first.
def self.latest_successful_for_refs(refs)
  relation = newest_first(ref: refs).success

  relation.each_with_object({}) do |pipeline, hash|
    hash[pipeline.ref] ||= pipeline
  end
end
|
|
|
|
|
# Returns a Hash containing the latest pipeline for every given
# commit.
#
# The keys of this Hash are the commit SHAs, the values the pipelines.
#
# commits - The list of commit SHAs to get the pipelines for.
# ref - The ref to scope the data to (e.g. "master"). If the ref is not
#       given we simply get the latest pipelines for the commits, regardless
#       of what refs the pipelines belong to.
def self.latest_pipeline_per_commit(commits, ref = nil)
  p1 = arel_table
  p2 = arel_table.alias

  # This LEFT JOIN will filter out all but the newest row for every
  # combination of (project_id, sha) or (project_id, sha, ref) if a ref is
  # given.
  cond = p1[:sha].eq(p2[:sha])
    .and(p1[:project_id].eq(p2[:project_id]))
    .and(p1[:id].lt(p2[:id]))

  cond = cond.and(p1[:ref].eq(p2[:ref])) if ref

  join = p1.join(p2, Arel::Nodes::OuterJoin).on(cond)

  # Rows where the self-join found no newer sibling are the latest ones.
  relation = where(sha: commits)
    .where(p2[:id].eq(nil))
    .joins(join.join_sources)

  relation = relation.where(ref: ref) if ref

  relation.each_with_object({}) do |pipeline, hash|
    hash[pipeline.sha] = pipeline
  end
end
|
|
|
|
|
# Select the newest successful pipeline id per project.
def self.latest_successful_ids_per_project
  success.group(:project_id).select('max(id) as id')
end

# Id-only record of the most recent finished, CI-sourced pipeline for
# the given Ci::Ref id, or nil.
def self.last_finished_for_ref_id(ci_ref_id)
  where(ci_ref_id: ci_ref_id).ci_sources.finished.order(id: :desc).select(:id).take
end
|
|
|
|
|
# Shortens a full commit SHA to the 8-character form used in the UI.
def self.truncate_sha(sha)
  sha.slice(0, 8)
end
|
|
|
|
|
# Sum of the recorded durations (seconds) across all pipelines; rows
# with a nil duration are excluded from the aggregate.
def self.total_duration
  where.not(duration: nil).sum(:duration)
end
|
|
|
|
|
# Enum values for every pipeline source except "external".
def self.internal_sources
  sources.each_with_object([]) do |(name, value), values|
    values << value unless name == "external"
  end
end
|
|
|
|
|
# Statuses a bridge job is allowed to mirror; statuses that have not
# started any work yet are excluded.
def self.bridgeable_statuses
  ::Ci::Pipeline::AVAILABLE_STATUSES - %w[created waiting_for_resource preparing pending]
end

# Memoized metrics counter for completed Auto DevOps pipelines; it is
# incremented from the completed-statuses transition callback above.
def self.auto_devops_pipelines_completed_total
  @auto_devops_pipelines_completed_total ||= Gitlab::Metrics.counter(:auto_devops_pipelines_completed_total, 'Number of completed auto devops pipelines')
end
|
|
|
|
|
# Number of distinct stages among this pipeline's statuses.
def stages_count
  statuses.select(:stage).distinct.count
end

# Total number of statuses (jobs) in the pipeline.
def total_size
  statuses.count(:id)
end

# Distinct stage names ordered by stage position (stage_idx).
def stages_names
  statuses.order(:stage_idx).distinct
    .pluck(:stage, :stage_idx).map(&:first)
end
|
|
|
|
|
# Builds an in-memory legacy stage object for +name+.
# Returns nil when that stage has no statuses in this pipeline.
def legacy_stage(name)
  stage = Ci::LegacyStage.new(self, name: name)
  return if stage.statuses_count == 0

  stage
end
|
|
|
|
|
# Whether the pipeline's ref still exists in the repository.
# Returns false (rather than raising) when the project has no
# repository at all.
def ref_exists?
  project.repository.ref_exists?(git_ref)
rescue Gitlab::Git::Repository::NoRepository
  false
end
|
|
|
|
|
# Builds in-memory legacy stage objects from the latest statuses,
# deriving each stage's overall status and warning flag with the
# composite status calculator.
def legacy_stages_using_composite_status
  stages = latest_statuses_ordered_by_stage.group_by(&:stage)

  stages.map do |stage_name, jobs|
    composite_status = Gitlab::Ci::Status::Composite
      .new(jobs)

    Ci::LegacyStage.new(self,
                        name: stage_name,
                        status: composite_status.status,
                        warnings: composite_status.warnings?)
  end
end
|
|
|
|
|
# Downstream pipelines with their source jobs preloaded, avoiding N+1
# queries when rendering pipeline graphs.
def triggered_pipelines_with_preloads
  triggered_pipelines.preload(:source_job)
end

# TODO: Remove usage of this method in templates
def legacy_stages
  legacy_stages_using_composite_status
end
|
|
|
|
|
# Custom validation: the blank (all-zero) SHA means the branch was
# removed, so the pipeline cannot point at a real commit.
def valid_commit_sha
  if self.sha == Gitlab::Git::BLANK_SHA
    self.errors.add(:sha, " cant be 00000000 (branch removal)")
  end
end
|
|
|
|
|
|
|
|
# Memoized readers for metadata of the pipeline's commit. Each uses
# `try` so a missing/unloadable commit yields nil instead of raising.
def git_author_name
  strong_memoize(:git_author_name) { commit.try(:author_name) }
end

def git_author_email
  strong_memoize(:git_author_email) { commit.try(:author_email) }
end

def git_commit_message
  strong_memoize(:git_commit_message) { commit.try(:message) }
end

def git_commit_title
  strong_memoize(:git_commit_title) { commit.try(:title) }
end

def git_commit_full_title
  strong_memoize(:git_commit_full_title) { commit.try(:full_title) }
end

def git_commit_description
  strong_memoize(:git_commit_description) { commit.try(:description) }
end
|
|
|
|
|
2019-11-20 13:06:04 -05:00
|
|
|
def before_sha
|
|
|
|
super || Gitlab::Git::BLANK_SHA
|
|
|
|
end
|
|
|
|
|
2015-08-25 21:42:46 -04:00
|
|
|
# Truncated form of the pipeline SHA (via the class-level helper).
def short_sha
  full_sha = sha
  Ci::Pipeline.truncate_sha(full_sha)
end
|
|
|
|
|
2017-12-05 08:15:30 -05:00
|
|
|
# NOTE: This is loaded lazily and will never be nil, even if the commit
|
|
|
|
# cannot be found.
|
|
|
|
#
|
|
|
|
# Use constructs like: `pipeline.commit.present?`
|
2016-06-21 08:43:37 -04:00
|
|
|
def commit
  # Memoized; Commit.lazy returns a batched proxy so many pipelines
  # can resolve their commits in a single lookup.
  @commit ||= Commit.lazy(project, sha)
end
|
|
|
|
|
2016-12-15 16:06:39 -05:00
|
|
|
# True when any pending build in this pipeline is itself stuck.
def stuck?
  pending_builds.any? { |build| build.stuck? }
end
|
|
|
|
|
2016-03-31 13:51:28 -04:00
|
|
|
# Whether the pipeline has any builds that can be retried.
def retryable?
  retryable_builds.any?
end
|
|
|
|
|
2016-05-31 09:26:00 -04:00
|
|
|
# Whether the pipeline still has statuses that can be canceled.
def cancelable?
  cancelable_statuses.any?
end
|
|
|
|
|
2017-04-05 15:10:52 -04:00
|
|
|
# True when the pipeline was canceled automatically by another
# pipeline (auto_canceled_by_id recorded) rather than by a user.
def auto_canceled?
  return false unless canceled?

  auto_canceled_by_id?
end
|
|
|
|
|
2019-09-02 04:44:52 -04:00
|
|
|
# Cancels every cancelable job, retrying under optimistic locking if
# another process updates the rows concurrently. Yields each job to
# the caller (when a block is given) before canceling it.
def cancel_running(retries: nil)
  retry_optimistic_lock(cancelable_statuses, retries) do |cancelable|
    cancelable.find_each do |job|
      yield(job) if block_given?
      job.cancel
    end
  end
end
|
|
|
|
|
2019-09-02 04:44:52 -04:00
|
|
|
# Cancels this pipeline's running jobs, recording +pipeline+ as the
# pipeline responsible for the cancellation on the pipeline itself
# and on every canceled job.
def auto_cancel_running(pipeline, retries: nil)
  update(auto_canceled_by: pipeline)

  cancel_running(retries: retries) do |build|
    build.auto_canceled_by = pipeline
  end
end
|
|
|
|
|
2018-08-27 11:31:01 -04:00
|
|
|
# rubocop: disable CodeReuse/ServiceClass
|
2017-02-14 06:20:02 -05:00
|
|
|
# Retries this pipeline's failed jobs on behalf of +current_user+.
def retry_failed(current_user)
  service = Ci::RetryPipelineService.new(project, current_user)
  service.execute(self)
end
|
2018-08-27 11:31:01 -04:00
|
|
|
# rubocop: enable CodeReuse/ServiceClass
|
2016-05-09 19:26:13 -04:00
|
|
|
|
2016-08-18 17:36:54 -04:00
|
|
|
# Re-enables skipped builds in stages after +stage_idx+ so they can
# be processed again, retrying each under optimistic locking.
def mark_as_processable_after_stage(stage_idx)
  skipped_builds = builds.skipped.after_stage(stage_idx)

  skipped_builds.find_each do |build|
    Gitlab::OptimisticLocking.retry_lock(build, &:process)
  end
end
|
|
|
|
|
2020-06-23 14:09:28 -04:00
|
|
|
# Batched lookup of the tip commit for this pipeline's ref: checking
# many pipelines resolves their refs in one repository call.
# Returns nil when the :pipeline_latest feature is disabled.
# Fix: pass the yielded commit to the loader directly — the old block
# re-indexed the hash with `commits[key]`, leaving its own `commit`
# param unused and shadowing the `commit` method.
def lazy_ref_commit
  return unless ::Gitlab::Ci::Features.pipeline_latest?

  BatchLoader.for(ref).batch do |refs, loader|
    next unless project.repository_exists?

    project.repository.list_commits_by_ref_name(refs).then do |commits|
      commits.each { |key, commit| loader.call(key, commit) }
    end
  end
end
|
|
|
|
|
2016-04-13 07:01:08 -04:00
|
|
|
# Whether this pipeline's commit is still the tip of its ref.
# Uses the batched lazy_ref_commit path when the :pipeline_latest
# feature is enabled, otherwise a direct commit lookup.
def latest?
  return false unless git_ref && commit.present?

  unless ::Gitlab::Ci::Features.pipeline_latest?
    # Legacy path: resolve the ref's commit directly and compare.
    return project.commit(git_ref) == commit
  end

  # No resolvable tip for this ref means we cannot be the latest.
  return false if lazy_ref_commit.nil?

  lazy_ref_commit.id == commit.id
end
|
|
|
|
|
2015-10-06 06:01:16 -04:00
|
|
|
# Statuses superseded by a retry: all statuses (newest first) minus
# the latest ones. Memoized per instance.
def retried
  @retried ||= (statuses.order(id: :desc) - statuses.latest)
end
|
|
|
|
|
|
|
|
# Average coverage across the latest statuses that report coverage,
# formatted with two decimals; nil when no status reports coverage.
def coverage
  covered = statuses.latest.map(&:coverage).compact
  return if covered.empty?

  format('%.2f', covered.reduce(:+) / covered.size)
end
|
|
|
|
|
2020-07-17 02:09:11 -04:00
|
|
|
# Most recent report artifact whose file type is associated with
# +file_type+, looked up from the batch-loaded report artifacts.
def batch_lookup_report_artifact_for_file_type(file_type)
  related_types = ::Ci::JobArtifact.associated_file_types_for(file_type.to_s)
  artifacts = latest_report_artifacts.values_at(*related_types)

  artifacts.flatten.compact.last
end
|
|
|
|
|
|
|
|
# This batch loads the latest reports for each CI job artifact
|
|
|
|
# type (e.g. sast, dast, etc.) in a single SQL query to eliminate
|
|
|
|
# the need to do N different `job_artifacts.where(file_type:
|
|
|
|
# X).last` calls.
|
|
|
|
#
|
|
|
|
# Return a hash of file type => array of 1 job artifact
|
|
|
|
# Latest report artifact per file type, as a hash of
# file type => [artifact], cached in the request store.
def latest_report_artifacts
  ::Gitlab::SafeRequestStore.fetch("pipeline:#{self.id}:latest_report_artifacts") do
    # Note we use read_attribute(:project_id) to read the project
    # ID instead of self.project_id. The latter appears to load
    # the Project model. This extra filter doesn't appear to
    # affect query plan but included to ensure we don't leak the
    # wrong information.
    ::Ci::JobArtifact.where(
      id: job_artifacts.with_reports
        .select('max(ci_job_artifacts.id) as id')
        .where(project_id: self.read_attribute(:project_id))
        .group(:file_type)
    )
      .preload(:job)
      .group_by(&:file_type)
  end
end
|
|
|
|
|
2017-09-01 06:58:22 -04:00
|
|
|
# Whether the project has an active deployment platform (Kubernetes).
def has_kubernetes_active?
  platform = project.deployment_platform
  platform && platform.active?
end
|
|
|
|
|
2020-06-25 20:09:13 -04:00
|
|
|
# Whether a deploy freeze window is currently in effect for the project.
def freeze_period?
  Ci::FreezePeriodStatus.new(project: project).execute
end
|
|
|
|
|
2016-07-14 10:58:05 -04:00
|
|
|
# True when any latest build failed but was allowed to fail.
def has_warnings?
  number_of_warnings.positive?
end
|
|
|
|
|
|
|
|
# Count of latest builds that failed but are allowed to fail,
# batch-loaded per pipeline to avoid N+1 queries; defaults to 0.
def number_of_warnings
  BatchLoader.for(id).batch(default_value: 0) do |pipeline_ids, loader|
    ::Ci::Build.where(commit_id: pipeline_ids)
      .latest
      .failed_but_allowed
      .group(:commit_id)
      .count
      .each { |id, amount| loader.call(id, amount) }
  end
end
|
|
|
|
|
2020-01-17 10:08:37 -05:00
|
|
|
# True when any latest status has not yet been marked processed.
def needs_processing?
  statuses.where(processed: [false, nil]).latest.exists?
end
|
|
|
|
|
2019-11-18 13:06:53 -05:00
|
|
|
# TODO: this logic is duplicate with Pipeline::Chain::Config::Content
|
|
|
|
# we should persist this is `ci_pipelines.config_path`
|
|
|
|
# Path of the CI configuration file, defaulting to '.gitlab-ci.yml';
# nil unless the config comes from the repository or is unknown.
def config_path
  return if !repository_source? && !unknown_source?

  custom_path = project.ci_config_path.presence
  custom_path || '.gitlab-ci.yml'
end
|
|
|
|
|
2016-12-21 08:26:47 -05:00
|
|
|
# Whether the pipeline recorded YAML errors when its config was parsed.
def has_yaml_errors?
  yaml_errors.present?
end
|
|
|
|
|
2020-06-29 08:09:20 -04:00
|
|
|
# Records an error-severity message on the pipeline (see #add_message).
def add_error_message(content)
  add_message(:error, content)
end
|
|
|
|
|
|
|
|
# Records a warning-severity message on the pipeline (see #add_message).
def add_warning_message(content)
  add_message(:warning, content)
end
|
|
|
|
|
|
|
|
# We can't use `messages.error` scope here because messages should also be
|
|
|
|
# read when the pipeline is not persisted. Using the scope will return no
|
|
|
|
# results as it would query persisted data.
|
|
|
|
# Error-severity messages, filtered in memory so unpersisted
# messages are included too.
def error_messages
  messages.select { |message| message.error? }
end
|
2020-06-29 08:09:20 -04:00
|
|
|
|
2020-07-13 08:09:18 -04:00
|
|
|
# Warning-severity messages, filtered in memory so unpersisted
# messages are included too.
def warning_messages
  messages.select { |message| message.warning? }
end
|
|
|
|
|
2016-06-23 06:29:11 -04:00
|
|
|
# Manually set the notes for a Ci::Pipeline
|
|
|
|
# There is no ActiveRecord relation between Ci::Pipeline and notes
|
|
|
|
# as they are related to a commit sha. This method helps importing
|
2020-02-26 13:09:24 -05:00
|
|
|
# them using the +Gitlab::ImportExport::Project::RelationFactory+ class.
|
2016-06-23 06:29:11 -04:00
|
|
|
def notes=(notes)
|
|
|
|
notes.each do |note|
|
|
|
|
note[:id] = nil
|
|
|
|
note[:commit_id] = sha
|
|
|
|
note[:noteable_id] = self['id']
|
|
|
|
note.save!
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-06-13 04:55:54 -04:00
|
|
|
# Notes attached to this pipeline's commit. There is no direct AR
# relation; notes are linked through the commit SHA.
def notes
  project.notes.for_commit_id(sha)
end
|
|
|
|
|
2020-01-17 10:08:37 -05:00
|
|
|
# Transitions the pipeline's state machine to +new_status+, retrying
# under optimistic locking when the row was updated concurrently.
# 'created' is a no-op; an unrecognized status raises
# Ci::HasStatus::UnknownStatusError.
def set_status(new_status)
  retry_optimistic_lock(self) do
    case new_status
    when 'created' then nil
    when 'waiting_for_resource' then request_resource
    when 'preparing' then prepare
    when 'pending' then enqueue
    when 'running' then run
    when 'success' then succeed
    when 'failed' then drop
    when 'canceled' then cancel
    when 'skipped' then skip
    when 'manual' then block
    when 'scheduled' then delay
    else
      raise Ci::HasStatus::UnknownStatusError,
            "Unknown status `#{new_status}`"
    end
  end
end
|
|
|
|
|
2018-03-21 04:33:14 -04:00
|
|
|
# Whether this pipeline's ref is protected in the project. Memoized.
def protected_ref?
  strong_memoize(:protected_ref) do
    project.protected_for?(git_ref)
  end
end
|
|
|
|
|
|
|
|
# First (legacy) trigger request associated with the pipeline. Memoized.
def legacy_trigger
  strong_memoize(:legacy_trigger) do
    trigger_requests.first
  end
end
|
|
|
|
|
2018-05-18 05:28:32 -04:00
|
|
|
# CI variables that only exist once the pipeline has been saved
# (they embed the database ID). Returns an empty collection for
# unsaved pipelines via the early `break` out of `tap`.
def persisted_variables
  Gitlab::Ci::Variables::Collection.new.tap do |variables|
    break variables unless persisted?

    variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
    variables.append(key: 'CI_PIPELINE_URL', value: Gitlab::Routing.url_helpers.project_pipeline_url(project, self))
  end
end
|
|
|
|
|
2016-07-20 07:17:21 -04:00
|
|
|
# Predefined CI/CD variables exposed to jobs of this pipeline
# (pipeline-, commit-, merge-request- and environment-related).
def predefined_variables
  Gitlab::Ci::Variables::Collection.new.tap do |variables|
    variables.append(key: 'CI_PIPELINE_IID', value: iid.to_s)
    variables.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)

    variables.append(key: 'CI_CONFIG_PATH', value: config_path)

    variables.concat(predefined_commit_variables)

    # Merge-request pipelines additionally expose MR metadata.
    if merge_request?
      variables.append(key: 'CI_MERGE_REQUEST_EVENT_TYPE', value: merge_request_event_type.to_s)
      variables.append(key: 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA', value: source_sha.to_s)
      variables.append(key: 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA', value: target_sha.to_s)
      variables.concat(merge_request.predefined_variables)
    end

    variables.append(key: 'CI_KUBERNETES_ACTIVE', value: 'true') if has_kubernetes_active?
    variables.append(key: 'CI_DEPLOY_FREEZE', value: 'true') if freeze_period?

    if external_pull_request_event? && external_pull_request
      variables.concat(external_pull_request.predefined_variables)
    end
  end
end
|
|
|
|
|
2019-11-20 13:06:04 -05:00
|
|
|
# Commit-related predefined CI/CD variables, including the
# deprecated CI_BUILD_* aliases kept for backward compatibility.
def predefined_commit_variables
  Gitlab::Ci::Variables::Collection.new.tap do |variables|
    variables.append(key: 'CI_COMMIT_SHA', value: sha)
    variables.append(key: 'CI_COMMIT_SHORT_SHA', value: short_sha)
    variables.append(key: 'CI_COMMIT_BEFORE_SHA', value: before_sha)
    variables.append(key: 'CI_COMMIT_REF_NAME', value: source_ref)
    variables.append(key: 'CI_COMMIT_REF_SLUG', value: source_ref_slug)
    variables.append(key: 'CI_COMMIT_BRANCH', value: ref) if branch?
    variables.append(key: 'CI_COMMIT_TAG', value: ref) if tag?
    variables.append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
    variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
    variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
    variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)

    # legacy variables
    variables.append(key: 'CI_BUILD_REF', value: sha)
    variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
    variables.append(key: 'CI_BUILD_REF_NAME', value: source_ref)
    variables.append(key: 'CI_BUILD_REF_SLUG', value: source_ref_slug)
    variables.append(key: 'CI_BUILD_TAG', value: ref) if tag?
  end
end
|
|
|
|
|
2016-09-05 06:49:27 -04:00
|
|
|
# Whole seconds the pipeline spent queued before starting;
# nil when not started yet or when it started immediately.
def queued_duration
  return unless started_at

  waited = (started_at - created_at).to_i
  waited.nonzero?
end
|
|
|
|
|
2016-08-11 14:54:02 -04:00
|
|
|
# Recalculates the cached duration from the pipeline's jobs;
# no-op until the pipeline has started.
def update_duration
  return unless started_at

  self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
end
|
|
|
|
|
|
|
|
# Fires project webhooks and integrations with this pipeline's payload.
def execute_hooks
  payload = pipeline_data
  project.execute_hooks(payload, :pipeline_hooks)
  project.execute_services(payload, :pipeline_hooks)
end
|
|
|
|
|
2017-04-18 15:26:56 -04:00
|
|
|
# All the merge requests for which the current pipeline runs/ran against
|
|
|
|
def all_merge_requests
  @all_merge_requests ||=
    if merge_request?
      # MR pipeline: exactly the merge request it belongs to.
      MergeRequest.where(id: merge_request_id)
    else
      # Branch pipeline: MRs from this source branch that contain our SHA.
      MergeRequest.where(source_project_id: project_id, source_branch: ref)
        .by_commit_sha(sha)
    end
end
|
|
|
|
|
2019-10-08 05:06:09 -04:00
|
|
|
# Same as #all_merge_requests, newest first.
def all_merge_requests_by_recency
  all_merge_requests.order(id: :desc)
end
|
|
|
|
|
2020-01-13 07:08:04 -05:00
|
|
|
# If pipeline is a child of another pipeline, include the parent
|
2020-07-14 08:09:14 -04:00
|
|
|
# and the siblings, otherwise return only itself and children.
|
2020-01-13 07:08:04 -05:00
|
|
|
def same_family_pipeline_ids
|
2020-07-14 20:09:23 -04:00
|
|
|
parent = parent_pipeline || self
|
|
|
|
[parent.id] + parent.child_pipelines.pluck(:id)
|
2020-01-13 07:08:04 -05:00
|
|
|
end
|
|
|
|
|
2020-02-08 01:08:50 -05:00
|
|
|
# True when this pipeline was created by a bridge job of another pipeline.
def bridge_triggered?
  source_bridge.present?
end
|
|
|
|
|
|
|
|
# True when the triggering bridge job depends on this pipeline's status.
def bridge_waiting?
  bridge = source_bridge
  bridge && bridge.dependent?
end
|
|
|
|
|
2020-01-13 07:08:04 -05:00
|
|
|
# True when this pipeline has a parent pipeline (child pipeline).
def child?
  parent_pipeline.present?
end
|
|
|
|
|
|
|
|
# True when this pipeline has triggered child pipelines.
def parent?
  child_pipelines.exists?
end
|
|
|
|
|
2020-03-03 13:08:16 -05:00
|
|
|
# True when the pipeline was persisted without a recorded failure reason.
def created_successfully?
  return false unless persisted?

  failure_reason.blank?
end
|
|
|
|
|
2016-12-08 11:52:24 -05:00
|
|
|
# Detailed (user-aware) status object fabricated for this pipeline.
def detailed_status(current_user)
  factory = Gitlab::Ci::Status::Pipeline::Factory.new(self, current_user)
  factory.fabricate!
end
|
|
|
|
|
2020-03-06 10:08:05 -05:00
|
|
|
# Latest build named +name+ that has downloadable artifacts, or nil.
def find_job_with_archive_artifacts(name)
  builds.latest.with_downloadable_artifacts.find_by_name(name)
end
|
|
|
|
|
2017-09-12 12:13:07 -04:00
|
|
|
# Latest builds that still have non-expired artifacts, memoized.
def latest_builds_with_artifacts
  # We purposely cast the builds to an Array here. Because we always use the
  # rows if there are more than 0 this prevents us from having to run two
  # queries: one to get the count and one to get the rows.
  @latest_builds_with_artifacts ||= builds.latest.with_artifacts_not_expired.to_a
end
|
|
|
|
|
2020-06-09 17:08:21 -04:00
|
|
|
# Latest builds that produced report artifacts; +reports_scope+
# narrows the artifact types considered (defaults to all report types).
def latest_report_builds(reports_scope = ::Ci::JobArtifact.with_reports)
  builds.latest.with_reports(reports_scope)
end
|
|
|
|
|
2019-04-16 07:06:52 -04:00
|
|
|
# True when the pipeline is complete and has builds with reports
# matching +reports_scope+.
def has_reports?(reports_scope)
  return false unless complete?

  latest_report_builds(reports_scope).exists?
end
|
|
|
|
|
2020-06-19 14:08:39 -04:00
|
|
|
# Summary of test reports built from the stored build report results.
def test_report_summary
  Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
end
|
|
|
|
|
2018-08-02 02:05:07 -04:00
|
|
|
# Aggregated test reports collected from the latest builds that
# produced test report artifacts.
def test_reports
  Gitlab::Ci::Reports::TestReports.new.tap do |reports|
    latest_report_builds(Ci::JobArtifact.test_reports).preload(:project).find_each do |build|
      build.collect_test_reports!(reports)
    end
  end
end
|
|
|
|
|
2020-05-07 05:09:51 -04:00
|
|
|
# Aggregated accessibility reports from the latest builds.
# Uses latest_report_builds for consistency with the other report
# aggregators (test_reports, coverage_reports, terraform_reports);
# it expands to the same `builds.latest.with_reports(...)` scope.
def accessibility_reports
  Gitlab::Ci::Reports::AccessibilityReports.new.tap do |accessibility_reports|
    latest_report_builds(Ci::JobArtifact.accessibility_reports).each do |build|
      build.collect_accessibility_reports!(accessibility_reports)
    end
  end
end
|
|
|
|
|
2020-03-17 14:09:44 -04:00
|
|
|
# Aggregated coverage reports from the latest builds that produced
# coverage report artifacts.
def coverage_reports
  Gitlab::Ci::Reports::CoverageReports.new.tap do |reports|
    latest_report_builds(Ci::JobArtifact.coverage_reports).each do |build|
      build.collect_coverage_reports!(reports)
    end
  end
end
|
|
|
|
|
2020-04-21 11:21:10 -04:00
|
|
|
# Aggregated Terraform reports from the latest builds that produced
# Terraform report artifacts.
def terraform_reports
  ::Gitlab::Ci::Reports::TerraformReports.new.tap do |reports|
    latest_report_builds(::Ci::JobArtifact.terraform_reports).each do |build|
      build.collect_terraform_reports!(reports)
    end
  end
end
|
|
|
|
|
2020-07-14 08:09:14 -04:00
|
|
|
# True when the pipeline is complete and has latest builds with an
# archive or metadata artifact.
def has_archive_artifacts?
  return false unless complete?

  builds.latest.with_existing_job_artifacts(Ci::JobArtifact.archive.or(Ci::JobArtifact.metadata)).exists?
end
|
|
|
|
|
2019-10-21 11:05:58 -04:00
|
|
|
# True when the pipeline is complete and has latest builds exposing artifacts.
def has_exposed_artifacts?
  return false unless complete?

  builds.latest.with_exposed_artifacts.exists?
end
|
|
|
|
|
2018-10-02 07:10:56 -04:00
|
|
|
# Whether the push that created this pipeline updated an existing branch.
def branch_updated?
  strong_memoize(:branch_updated) { push_details.branch_updated? }
end
|
|
|
|
|
2019-02-13 05:59:34 -05:00
|
|
|
# Returns the modified paths.
|
|
|
|
#
|
|
|
|
# The returned value is
|
|
|
|
# * Array: List of modified paths that should be evaluated
|
|
|
|
# * nil: Modified path can not be evaluated
|
2018-10-02 07:10:56 -04:00
|
|
|
def modified_paths
  strong_memoize(:modified_paths) do
    if merge_request?
      # MR pipeline: use the merge request's diff.
      merge_request.modified_paths
    elsif branch_updated?
      # Branch pipeline: derive paths from the push details.
      push_details.modified_paths
    end
    # Implicit nil when neither applies (paths cannot be evaluated).
  end
end
|
|
|
|
|
2019-09-24 11:06:34 -04:00
|
|
|
# Every file path in the repository tree at this pipeline's SHA. Memoized.
def all_worktree_paths
  strong_memoize(:all_worktree_paths) { project.repository.ls_files(sha) }
end
|
|
|
|
|
|
|
|
# Paths of the top-level blobs at this pipeline's SHA. Memoized.
def top_level_worktree_paths
  strong_memoize(:top_level_worktree_paths) do
    project.repository.tree(sha).blobs.map(&:path)
  end
end
|
|
|
|
|
2018-10-04 02:23:29 -04:00
|
|
|
# Whether this pipeline runs for the project's default branch.
def default_branch?
  project.default_branch == ref
end
|
|
|
|
|
2020-02-13 07:08:49 -05:00
|
|
|
# True when this pipeline was created for a merge request
# (merge_request_id is set).
def merge_request?
  merge_request_id.present?
end
|
|
|
|
|
|
|
|
# MR pipeline without a target SHA, i.e. running on the source
# branch alone (detached).
def detached_merge_request_pipeline?
  return false unless merge_request?

  target_sha.nil?
end
|
|
|
|
|
2019-03-21 09:08:32 -04:00
|
|
|
# Detached MR pipeline that does not run on a refs/merge-requests/* ref.
def legacy_detached_merge_request_pipeline?
  return false unless detached_merge_request_pipeline?

  !merge_request_ref?
end
|
|
|
|
|
2019-02-20 01:37:49 -05:00
|
|
|
# MR pipeline with a target SHA, i.e. running against the merged result.
def merge_request_pipeline?
  return false unless merge_request?

  target_sha.present?
end
|
|
|
|
|
2019-03-21 09:08:32 -04:00
|
|
|
# Whether this pipeline's ref is a merge request ref
# (refs/merge-requests/*), as decided by MergeRequest.
def merge_request_ref?
  MergeRequest.merge_request_ref?(ref)
end
|
|
|
|
|
2019-03-21 11:31:09 -04:00
|
|
|
# True when +sha+ equals either the pipeline SHA or its MR source SHA.
def matches_sha_or_source_sha?(sha)
  [self.sha, self.source_sha].include?(sha)
end
|
|
|
|
|
2019-04-09 10:53:44 -04:00
|
|
|
# Whether +current_user+ is the user this pipeline was created for.
def triggered_by?(current_user)
  user == current_user
end
|
|
|
|
|
2019-04-25 04:23:39 -04:00
|
|
|
# Human-facing ref name: the MR source branch for MR pipelines,
# otherwise the pipeline's own ref.
def source_ref
  merge_request? ? merge_request.source_branch : ref
end
|
|
|
|
|
|
|
|
# Slugified form of #source_ref (via Gitlab::Utils.slugify),
# used for the CI_COMMIT_REF_SLUG / CI_BUILD_REF_SLUG variables.
def source_ref_slug
  Gitlab::Utils.slugify(source_ref.to_s)
end
|
|
|
|
|
2019-05-02 14:27:35 -04:00
|
|
|
# Stage with the given name; raises ActiveRecord::RecordNotFound
# when no such stage exists (find_by!).
def find_stage_by_name!(name)
  stages.find_by!(name: name)
end
|
|
|
|
|
2020-06-29 08:09:20 -04:00
|
|
|
# All validation error messages joined into a sentence; empty
# string when there are no errors.
def full_error_messages
  return "" unless errors

  errors.full_messages.to_sentence
end
|
|
|
|
|
2019-08-28 03:51:26 -04:00
|
|
|
# :merged_result for merge-result pipelines, :detached for detached
# MR pipelines, nil for non-MR pipelines. Memoized.
def merge_request_event_type
  return unless merge_request?

  strong_memoize(:merge_request_event_type) do
    if merge_request_pipeline?
      :merged_result
    elsif detached_merge_request_pipeline?
      :detached
    end
  end
end
|
|
|
|
|
2019-09-27 14:06:20 -04:00
|
|
|
# Wrapper managing the pipeline's persistent ref. Memoized.
def persistent_ref
  @persistent_ref ||= PersistentRef.new(pipeline: self)
end
|
|
|
|
|
2020-02-13 13:09:00 -05:00
|
|
|
# Whether this pipeline's config came from a CI config source
# (as opposed to e.g. an auto-generated source), so its config
# content can be cached.
def cacheable?
  Ci::PipelineEnums.ci_config_sources.key?(config_source.to_sym)
end
|
|
|
|
|
2020-03-18 05:09:31 -04:00
|
|
|
# Fully-qualified ref path for the source ref (refs/heads/... for
# branches and MR pipelines, refs/tags/... for tags); nil otherwise.
def source_ref_path
  prefix =
    if branch? || merge_request?
      Gitlab::Git::BRANCH_REF_PREFIX
    elsif tag?
      Gitlab::Git::TAG_REF_PREFIX
    end

  prefix && (prefix + source_ref.to_s)
end
|
|
|
|
|
2020-05-15 11:08:04 -04:00
|
|
|
# Set scheduling type of processables if they were created before scheduling_type
|
|
|
|
# data was deployed (https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22246).
|
|
|
|
# Backfills scheduling_type on this pipeline's processables that
# were created before the column existed (see MR 22246).
def ensure_scheduling_type!
  processables.populate_scheduling_type!
end
|
|
|
|
|
2020-06-10 05:08:35 -04:00
|
|
|
# Finds or creates the Ci::Ref record for this pipeline and assigns it.
def ensure_ci_ref!
  self.ci_ref = Ci::Ref.ensure_for(self)
end
|
|
|
|
|
2016-06-02 11:17:23 -04:00
|
|
|
private
|
|
|
|
|
2020-07-13 08:09:18 -04:00
|
|
|
# Builds (does not persist) a pipeline message of the given severity;
# no-op when message storage is disabled for the project.
def add_message(severity, content)
  if Gitlab::Ci::Features.store_pipeline_messages?(project)
    messages.build(severity: severity, content: content)
  end
end
|
|
|
|
|
2016-08-04 12:43:16 -04:00
|
|
|
# Payload sent to pipeline webhooks and integrations.
def pipeline_data
  Gitlab::DataBuilder::Pipeline.build(self)
end
|
2016-07-03 19:58:58 -04:00
|
|
|
|
2018-10-02 07:10:56 -04:00
|
|
|
# Push object describing the change that created this pipeline. Memoized.
def push_details
  strong_memoize(:push_details) { Gitlab::Git::Push.new(project, before_sha, sha, git_ref) }
end
|
|
|
|
|
2018-10-22 05:37:52 -04:00
|
|
|
# Fully-qualified git ref used when fetching sources: MR pipelines
# resolve to the source branch ref, others use the inherited value.
# Memoized.
def git_ref
  strong_memoize(:git_ref) do
    if merge_request?
      ##
      # In the future, we're going to change this ref to
      # merge request's merged reference, such as "refs/merge-requests/:iid/merge".
      # In order to do that, we have to update GitLab-Runner's source pulling
      # logic.
      # See https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1092
      Gitlab::Git::BRANCH_REF_PREFIX + ref.to_s
    else
      super
    end
  end
end
|
|
|
|
|
2016-07-03 19:58:58 -04:00
|
|
|
# Protects this pipeline's SHAs from garbage collection by keeping
# them around in the repository; no-op without a project.
def keep_around_commits
  return if project.nil?

  project.repository.keep_around(sha, before_sha)
end
|
2015-08-25 21:42:46 -04:00
|
|
|
end
|
|
|
|
end
|
2019-09-13 09:26:31 -04:00
|
|
|
|
|
|
|
# Mixes in EE-specific pipeline behavior when the EE module is available.
Ci::Pipeline.prepend_if_ee('EE::Ci::Pipeline')
|