2015-08-25 21:42:46 -04:00
|
|
|
module Ci
|
2016-06-02 10:19:18 -04:00
|
|
|
class Pipeline < ActiveRecord::Base
|
2017-09-06 07:13:52 -04:00
|
|
|
extend Gitlab::Ci::Model
|
2016-08-24 22:55:32 -04:00
|
|
|
include HasStatus
|
2016-08-30 04:09:41 -04:00
|
|
|
include Importable
|
2016-10-12 06:07:58 -04:00
|
|
|
include AfterCommitQueue
|
2017-03-28 16:27:16 -04:00
|
|
|
include Presentable
|
2017-10-03 04:12:28 -04:00
|
|
|
include Gitlab::OptimisticLocking
|
2015-09-24 11:09:33 -04:00
|
|
|
|
2017-03-17 19:06:11 -04:00
|
|
|
belongs_to :project
|
2016-07-15 09:42:29 -04:00
|
|
|
belongs_to :user
|
2017-03-18 07:46:56 -04:00
|
|
|
belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
|
2017-05-07 18:35:56 -04:00
|
|
|
belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
|
2016-07-15 09:42:29 -04:00
|
|
|
|
2017-06-02 06:16:11 -04:00
|
|
|
has_many :stages
|
2016-06-03 07:58:35 -04:00
|
|
|
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id
|
2016-10-18 17:20:36 -04:00
|
|
|
has_many :builds, foreign_key: :commit_id
|
2017-06-08 11:16:27 -04:00
|
|
|
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
|
2017-07-26 05:31:09 -04:00
|
|
|
has_many :variables, class_name: 'Ci::PipelineVariable'
|
2017-03-28 16:04:14 -04:00
|
|
|
|
|
|
|
# Merge requests for which the current pipeline is running against
|
|
|
|
# the merge request's latest commit.
|
2017-03-22 11:20:44 -04:00
|
|
|
has_many :merge_requests, foreign_key: "head_pipeline_id"
|
2015-08-25 21:42:46 -04:00
|
|
|
|
2017-04-06 08:31:38 -04:00
|
|
|
has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build'
|
2017-07-26 05:44:49 -04:00
|
|
|
has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
|
2017-04-06 08:31:38 -04:00
|
|
|
has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus'
|
2017-06-02 18:57:40 -04:00
|
|
|
has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
|
|
|
|
has_many :artifacts, -> { latest.with_artifacts_not_expired.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build'
|
2017-04-06 08:31:38 -04:00
|
|
|
|
2017-06-02 06:16:11 -04:00
|
|
|
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
|
|
|
|
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
|
2017-04-06 08:31:38 -04:00
|
|
|
|
2017-02-22 17:35:08 -05:00
|
|
|
delegate :id, to: :project, prefix: true
|
2017-09-18 08:29:43 -04:00
|
|
|
delegate :full_path, to: :project, prefix: true
|
2017-02-22 17:35:08 -05:00
|
|
|
|
2017-05-24 09:13:51 -04:00
|
|
|
validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create
|
2017-02-21 19:40:04 -05:00
|
|
|
validates :sha, presence: { unless: :importing? }
|
|
|
|
validates :ref, presence: { unless: :importing? }
|
|
|
|
validates :status, presence: { unless: :importing? }
|
2016-08-30 04:09:41 -04:00
|
|
|
validate :valid_commit_sha, unless: :importing?
|
2015-08-25 21:42:46 -04:00
|
|
|
|
2016-10-19 08:03:31 -04:00
|
|
|
after_create :keep_around_commits, unless: :importing?
|
2016-04-11 10:55:40 -04:00
|
|
|
|
2017-05-24 09:13:51 -04:00
|
|
|
enum source: {
|
|
|
|
unknown: nil,
|
|
|
|
push: 1,
|
|
|
|
web: 2,
|
|
|
|
trigger: 3,
|
|
|
|
schedule: 4,
|
|
|
|
api: 5,
|
|
|
|
external: 6
|
|
|
|
}
|
|
|
|
|
2017-08-31 07:47:29 -04:00
|
|
|
enum config_source: {
|
2017-09-04 09:44:46 -04:00
|
|
|
unknown_source: nil,
|
|
|
|
repository_source: 1,
|
|
|
|
auto_devops_source: 2
|
2017-08-31 07:47:29 -04:00
|
|
|
}
|
|
|
|
|
2017-09-29 04:18:53 -04:00
|
|
|
enum failure_reason: {
|
2017-10-02 07:40:12 -04:00
|
|
|
unknown_failure: 0,
|
|
|
|
config_error: 1
|
2017-09-29 04:18:53 -04:00
|
|
|
}
|
|
|
|
|
2016-08-11 14:54:02 -04:00
|
|
|
state_machine :status, initial: :created do
|
2016-08-12 07:57:58 -04:00
|
|
|
event :enqueue do
|
2017-11-07 10:47:50 -05:00
|
|
|
transition [:created, :skipped] => :pending
|
|
|
|
transition [:success, :failed, :canceled] => :running
|
2016-08-12 05:36:51 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
event :run do
|
2016-10-20 03:33:44 -04:00
|
|
|
transition any - [:running] => :running
|
2016-08-12 05:36:51 -04:00
|
|
|
end
|
|
|
|
|
2016-08-11 14:54:02 -04:00
|
|
|
event :skip do
|
2016-10-20 03:33:44 -04:00
|
|
|
transition any - [:skipped] => :skipped
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
event :drop do
|
2016-10-20 03:33:44 -04:00
|
|
|
transition any - [:failed] => :failed
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
|
|
|
|
2016-08-12 05:36:51 -04:00
|
|
|
event :succeed do
|
2016-10-20 03:33:44 -04:00
|
|
|
transition any - [:success] => :success
|
2016-08-12 05:36:51 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
event :cancel do
|
2016-10-20 03:33:44 -04:00
|
|
|
transition any - [:canceled] => :canceled
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
|
|
|
|
2017-03-03 06:51:23 -05:00
|
|
|
event :block do
|
2017-03-03 08:35:19 -05:00
|
|
|
transition any - [:manual] => :manual
|
2017-03-03 06:51:23 -05:00
|
|
|
end
|
|
|
|
|
2016-10-14 13:08:48 -04:00
|
|
|
# IMPORTANT
|
|
|
|
# Do not add any operations to this state_machine
|
|
|
|
# Create a separate worker for each new operation
|
|
|
|
|
2016-08-12 05:53:27 -04:00
|
|
|
before_transition [:created, :pending] => :running do |pipeline|
|
2016-08-19 04:57:25 -04:00
|
|
|
pipeline.started_at = Time.now
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
|
|
|
|
2016-08-12 05:53:27 -04:00
|
|
|
before_transition any => [:success, :failed, :canceled] do |pipeline|
|
2016-08-19 04:57:25 -04:00
|
|
|
pipeline.finished_at = Time.now
|
2016-10-04 09:59:11 -04:00
|
|
|
pipeline.update_duration
|
|
|
|
end
|
|
|
|
|
2017-04-21 13:45:14 -04:00
|
|
|
before_transition any => [:manual] do |pipeline|
|
|
|
|
pipeline.update_duration
|
|
|
|
end
|
|
|
|
|
2017-04-06 11:47:37 -04:00
|
|
|
before_transition canceled: any - [:canceled] do |pipeline|
|
2017-04-06 10:04:45 -04:00
|
|
|
pipeline.auto_canceled_by = nil
|
|
|
|
end
|
|
|
|
|
2017-10-02 07:09:07 -04:00
|
|
|
before_transition any => :failed do |pipeline, transition|
|
|
|
|
transition.args.first.try do |reason|
|
|
|
|
pipeline.failure_reason = reason
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
Improve performance of the cycle analytics page.
1. These changes bring down page load time for 100 issues from more than
a minute to about 1.5 seconds.
2. This entire commit is composed of these types of performance
enhancements:
- Cache relevant data in `IssueMetrics` wherever possible.
- Cache relevant data in `MergeRequestMetrics` wherever possible.
- Preload metrics
3. Given these improvements, we now only need to make 4 SQL calls:
- Load all issues
- Load all merge requests
- Load all metrics for the issues
- Load all metrics for the merge requests
4. A list of all the data points that are now being pre-calculated:
a. The first time an issue is mentioned in a commit
- In `GitPushService`, find all issues mentioned by the given commit
using `ReferenceExtractor`. Set the `first_mentioned_in_commit_at`
flag for each of them.
- There seems to be a (pre-existing) bug here - files (and
therefore commits) created using the Web CI don't have
cross-references created, and issues are not closed even when
the commit title is "Fixes #xx".
b. The first time a merge request is deployed to production
When a `Deployment` is created, find all merge requests that
were merged in before the deployment, and set the
`first_deployed_to_production_at` flag for each of them.
c. The start / end time for a merge request pipeline
Hook into the `Pipeline` state machine. When the `status` moves to
`running`, find the merge requests whose tip commit matches the
pipeline, and record the `latest_build_started_at` time for each
of them. When the `status` moves to `success`, record the
`latest_build_finished_at` time.
d. The merge requests that close an issue
- This was a big cause of the performance problems we were having
with Cycle Analytics. We need to use `ReferenceExtractor` to make
this calculation, which is slow when we have to run it on a large
number of merge requests.
- When a merge request is created, updated, or refreshed, find the
issues it closes, and create an instance of
`MergeRequestsClosingIssues`, which acts as a join model between
merge requests and issues.
- If a `MergeRequestsClosingIssues` instance links a merge request
and an issue, that issue closes that merge request.
5. The `Queries` module was changed into a class, so we can cache the
results of `issues` and `merge_requests_closing_issues` across
various cycle analytics stages.
6. The code added in this commit is untested. Tests will be added in the
next commit.
2016-09-15 04:59:36 -04:00
|
|
|
after_transition [:created, :pending] => :running do |pipeline|
|
2017-04-20 02:44:01 -04:00
|
|
|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
|
Improve performance of the cycle analytics page.
1. These changes bring down page load time for 100 issues from more than
a minute to about 1.5 seconds.
2. This entire commit is composed of these types of performance
enhancements:
- Cache relevant data in `IssueMetrics` wherever possible.
- Cache relevant data in `MergeRequestMetrics` wherever possible.
- Preload metrics
3. Given these improvements, we now only need to make 4 SQL calls:
- Load all issues
- Load all merge requests
- Load all metrics for the issues
- Load all metrics for the merge requests
4. A list of all the data points that are now being pre-calculated:
a. The first time an issue is mentioned in a commit
- In `GitPushService`, find all issues mentioned by the given commit
using `ReferenceExtractor`. Set the `first_mentioned_in_commit_at`
flag for each of them.
- There seems to be a (pre-existing) bug here - files (and
therefore commits) created using the Web CI don't have
cross-references created, and issues are not closed even when
the commit title is "Fixes #xx".
b. The first time a merge request is deployed to production
When a `Deployment` is created, find all merge requests that
were merged in before the deployment, and set the
`first_deployed_to_production_at` flag for each of them.
c. The start / end time for a merge request pipeline
Hook into the `Pipeline` state machine. When the `status` moves to
`running`, find the merge requests whose tip commit matches the
pipeline, and record the `latest_build_started_at` time for each
of them. When the `status` moves to `success`, record the
`latest_build_finished_at` time.
d. The merge requests that close an issue
- This was a big cause of the performance problems we were having
with Cycle Analytics. We need to use `ReferenceExtractor` to make
this calculation, which is slow when we have to run it on a large
number of merge requests.
- When a merge request is created, updated, or refreshed, find the
issues it closes, and create an instance of
`MergeRequestsClosingIssues`, which acts as a join model between
merge requests and issues.
- If a `MergeRequestsClosingIssues` instance links a merge request
and an issue, that issue closes that merge request.
5. The `Queries` module was changed into a class, so we can cache the
results of `issues` and `merge_requests_closing_issues` across
various cycle analytics stages.
6. The code added in this commit is untested. Tests will be added in the
next commit.
2016-09-15 04:59:36 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
after_transition any => [:success] do |pipeline|
|
2017-04-20 02:44:01 -04:00
|
|
|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
|
Improve performance of the cycle analytics page.
1. These changes bring down page load time for 100 issues from more than
a minute to about 1.5 seconds.
2. This entire commit is composed of these types of performance
enhancements:
- Cache relevant data in `IssueMetrics` wherever possible.
- Cache relevant data in `MergeRequestMetrics` wherever possible.
- Preload metrics
3. Given these improvements, we now only need to make 4 SQL calls:
- Load all issues
- Load all merge requests
- Load all metrics for the issues
- Load all metrics for the merge requests
4. A list of all the data points that are now being pre-calculated:
a. The first time an issue is mentioned in a commit
- In `GitPushService`, find all issues mentioned by the given commit
using `ReferenceExtractor`. Set the `first_mentioned_in_commit_at`
flag for each of them.
- There seems to be a (pre-existing) bug here - files (and
therefore commits) created using the Web CI don't have
cross-references created, and issues are not closed even when
the commit title is "Fixes #xx".
b. The first time a merge request is deployed to production
When a `Deployment` is created, find all merge requests that
were merged in before the deployment, and set the
`first_deployed_to_production_at` flag for each of them.
c. The start / end time for a merge request pipeline
Hook into the `Pipeline` state machine. When the `status` moves to
`running`, find the merge requests whose tip commit matches the
pipeline, and record the `latest_build_started_at` time for each
of them. When the `status` moves to `success`, record the
`latest_build_finished_at` time.
d. The merge requests that close an issue
- This was a big cause of the performance problems we were having
with Cycle Analytics. We need to use `ReferenceExtractor` to make
this calculation, which is slow when we have to run it on a large
number of merge requests.
- When a merge request is created, updated, or refreshed, find the
issues it closes, and create an instance of
`MergeRequestsClosingIssues`, which acts as a join model between
merge requests and issues.
- If a `MergeRequestsClosingIssues` instance links a merge request
and an issue, that issue closes that merge request.
5. The `Queries` module was changed into a class, so we can cache the
results of `issues` and `merge_requests_closing_issues` across
various cycle analytics stages.
6. The code added in this commit is untested. Tests will be added in the
next commit.
2016-09-15 04:59:36 -04:00
|
|
|
end
|
|
|
|
|
2016-10-04 09:59:11 -04:00
|
|
|
after_transition [:created, :pending, :running] => :success do |pipeline|
|
2017-04-20 02:44:01 -04:00
|
|
|
pipeline.run_after_commit { PipelineSuccessWorker.perform_async(pipeline.id) }
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
2016-08-12 06:03:30 -04:00
|
|
|
|
|
|
|
after_transition do |pipeline, transition|
|
2016-10-12 07:44:33 -04:00
|
|
|
next if transition.loopback?
|
|
|
|
|
|
|
|
pipeline.run_after_commit do
|
2017-04-20 02:44:01 -04:00
|
|
|
PipelineHooksWorker.perform_async(pipeline.id)
|
2017-04-18 15:26:56 -04:00
|
|
|
ExpirePipelineCacheWorker.perform_async(pipeline.id)
|
2016-10-12 07:44:33 -04:00
|
|
|
end
|
2016-08-12 06:03:30 -04:00
|
|
|
end
|
2016-09-14 07:42:16 -04:00
|
|
|
|
2016-09-28 05:22:06 -04:00
|
|
|
after_transition any => [:success, :failed] do |pipeline|
|
2016-10-17 06:09:43 -04:00
|
|
|
pipeline.run_after_commit do
|
2016-10-17 06:12:07 -04:00
|
|
|
PipelineNotificationWorker.perform_async(pipeline.id)
|
2016-10-17 06:09:43 -04:00
|
|
|
end
|
2016-09-14 07:42:16 -04:00
|
|
|
end
|
2016-08-11 14:54:02 -04:00
|
|
|
end
|
|
|
|
|
2017-11-10 14:57:11 -05:00
|
|
|
scope :internal, -> { where(source: internal_sources) }
|
2016-11-21 09:19:16 -05:00
|
|
|
|
# Returns the pipelines in descending order (= newest first), optionally
# limited to a number of references.
#
# ref - The name (or names) of the branch(es)/tag(s) to limit the list of
# pipelines to.
def self.newest_first(ref = nil)
  scope = order(id: :desc)
  ref ? scope.where(ref: ref) : scope
end

# Status string of the most recent pipeline, optionally scoped to +ref+.
def self.latest_status(ref = nil)
  newest_first(ref).pluck(:status).first
end

# The most recent successful pipeline on +ref+.
def self.latest_successful_for(ref)
  newest_first(ref).success.take
end

# Maps each ref in +refs+ to its most recent successful pipeline.
def self.latest_successful_for_refs(refs)
  newest_first(refs).success.each_with_object({}) do |pipeline, latest_by_ref|
    latest_by_ref[pipeline.ref] ||= pipeline
  end
end
# Returns a Hash containing the latest pipeline status for every given
# commit.
#
# The keys of this Hash are the commit SHAs, the values the statuses.
#
# commits - The list of commit SHAs to get the status for.
# ref - The ref to scope the data to (e.g. "master"). If the ref is not
# given we simply get the latest status for the commits, regardless
# of what refs their pipelines belong to.
def self.latest_status_per_commit(commits, ref = nil)
  pipelines = arel_table
  newer = arel_table.alias

  # LEFT JOIN each row to any newer row for the same (project_id, sha)
  # — or (project_id, sha, ref) when a ref is given. Rows with no newer
  # counterpart (newer.id IS NULL) are the latest for that combination.
  join_condition = pipelines[:sha].eq(newer[:sha])
    .and(pipelines[:project_id].eq(newer[:project_id]))
    .and(pipelines[:id].lt(newer[:id]))

  join_condition = join_condition.and(pipelines[:ref].eq(newer[:ref])) if ref

  outer_join = pipelines.join(newer, Arel::Nodes::OuterJoin).on(join_condition)

  scope = select(:sha, :status)
    .where(sha: commits)
    .where(newer[:id].eq(nil))
    .joins(outer_join.join_sources)

  scope = scope.where(ref: ref) if ref

  scope.each_with_object({}) do |row, statuses|
    statuses[row[:sha]] = row[:status]
  end
end
# Shortens a commit SHA to its first eight characters.
def self.truncate_sha(sha)
  sha[0, 8]
end

# Sum of all recorded pipeline durations (in seconds).
def self.total_duration
  where.not(duration: nil).sum(:duration)
end

# Enum values of every pipeline source except "external".
def self.internal_sources
  sources.reject { |name| name == "external" }.values
end
# Number of distinct stages referenced by this pipeline's jobs.
def stages_count
  statuses.select(:stage).distinct.count
end

# Total number of jobs in this pipeline.
def total_size
  statuses.count(:id)
end

# Stage names in execution order (ordered by stage_idx).
def stages_names
  statuses.order(:stage_idx).distinct
    .pluck(:stage, :stage_idx).map(&:first)
end
# Builds an in-memory legacy stage named +name+, or returns nil when the
# stage contains no jobs in this pipeline.
def legacy_stage(name)
  stage = Ci::LegacyStage.new(self, name: name)
  stage unless stage.statuses_count.zero?
end

# Builds all legacy stage objects (name, composite status, warning count)
# in a single query per pipeline.
def legacy_stages
  # TODO, this needs refactoring, see gitlab-ce#26481.
  stages_query = statuses
    .group('stage').select(:stage).order('max(stage_idx)')

  # Composite status of the latest jobs within each stage (correlated
  # subquery against the 'sg' alias below).
  status_sql = statuses.latest.where('stage=sg.stage').status_sql

  # Count of failed-but-allowed jobs per stage, surfaced as warnings.
  warnings_sql = statuses.latest.select('COUNT(*)')
    .where('stage=sg.stage').failed_but_allowed.to_sql

  stages_with_statuses = CommitStatus.from(stages_query, :sg)
    .pluck('sg.stage', status_sql, "(#{warnings_sql})")

  stages_with_statuses.map do |stage|
    Ci::LegacyStage.new(self, Hash[%i[name status warnings].zip(stage)])
  end
end
# Validation: a pipeline must not point at the blank SHA, which is what
# Git reports when a branch has been removed.
def valid_commit_sha
  if self.sha == Gitlab::Git::BLANK_SHA
    self.errors.add(:sha, " cant be 00000000 (branch removal)")
  end
end
# Author name of the commit this pipeline runs for, if resolvable.
def git_author_name
  commit.try(:author_name)
end

# Author e-mail of the commit, if resolvable.
def git_author_email
  commit.try(:author_email)
end

# Full message of the commit, if resolvable.
def git_commit_message
  commit.try(:message)
end

# Title (first line of the message) of the commit, if resolvable.
def git_commit_title
  commit.try(:title)
end

# First eight characters of this pipeline's SHA.
def short_sha
  Ci::Pipeline.truncate_sha(sha)
end
# NOTE: This is loaded lazily and will never be nil, even if the commit
# cannot be found.
#
# Use constructs like: `pipeline.commit.present?`
def commit
  @commit ||= Commit.lazy(project, sha)
end

# A pipeline runs for either a branch or a tag, never both.
def branch?
  !tag?
end
# True when any pending build is considered stuck (see Ci::Build#stuck?).
def stuck?
  pending_builds.any?(&:stuck?)
end

# True when at least one job could be retried.
def retryable?
  retryable_builds.any?
end

# True when at least one job could still be canceled.
def cancelable?
  cancelable_statuses.any?
end

# True when this pipeline was canceled automatically by another pipeline.
def auto_canceled?
  canceled? && auto_canceled_by_id?
end
# Cancels every cancelable job, retrying on optimistic-lock conflicts.
# Yields each job before canceling so callers can annotate it.
def cancel_running
  retry_optimistic_lock(cancelable_statuses) do |cancelable|
    cancelable.find_each do |job|
      yield(job) if block_given?
      job.cancel
    end
  end
end

# Cancels this pipeline's running jobs on behalf of +pipeline+, recording
# it as the auto-canceling pipeline both here and on each canceled job.
def auto_cancel_running(pipeline)
  update(auto_canceled_by: pipeline)

  cancel_running do |job|
    job.auto_canceled_by = pipeline
  end
end
# Retries the failed/canceled jobs of this pipeline as +current_user+.
def retry_failed(current_user)
  Ci::RetryPipelineService.new(project, current_user)
    .execute(self)
end

# Marks skipped builds after the given stage index as processable again.
def mark_as_processable_after_stage(stage_idx)
  builds.skipped.after_stage(stage_idx).find_each(&:process)
end
# True when this pipeline ran for the current tip commit of its ref.
def latest?
  return false unless ref && commit.present?

  project.commit(ref) == commit
end

# Jobs that were superseded by a newer run within this pipeline.
def retried
  @retried ||= (statuses.order(id: :desc) - statuses.latest)
end
# Average coverage across the latest jobs that reported one, formatted
# with two decimals as a String; nil when no job reported coverage.
def coverage
  values = statuses.latest.map(&:coverage).compact
  return if values.empty?

  '%.2f' % (values.reduce(:+) / values.size)
end
# Stage seeds derived from the CI config; empty when the config is
# missing or invalid. Memoized.
def stage_seeds
  return [] unless config_processor

  @stage_seeds ||= config_processor.stage_seeds(self)
end

# Total number of jobs the stage seeds would create. Memoized.
def seeds_size
  @seeds_size ||= stage_seeds.sum(&:size)
end
# True when the project has an active deployment platform (Kubernetes).
def has_kubernetes_active?
  project.deployment_platform&.active?
end

# True when the CI config yields at least one stage seed.
def has_stage_seeds?
  stage_seeds.any?
end

# True when any of the latest jobs failed but was allowed to fail.
def has_warnings?
  builds.latest.failed_but_allowed.any?
end
# Records where the CI configuration came from: the repository's config
# file when present, otherwise the implied Auto DevOps template.
def set_config_source
  if ci_yaml_from_repo
    self.config_source = :repository_source
  elsif implied_ci_yaml_file
    self.config_source = :auto_devops_source
  end
end
# Parsed CI configuration for this pipeline. Returns nil — and records
# the problem in yaml_errors — when the config is missing or invalid.
# Memoized, including a nil (failed) result via defined?().
def config_processor
  return unless ci_yaml_file
  return @config_processor if defined?(@config_processor)

  @config_processor ||= begin
    Gitlab::Ci::YamlProcessor.new(ci_yaml_file)
  rescue Gitlab::Ci::YamlProcessor::ValidationError, Psych::SyntaxError => e
    self.yaml_errors = e.message
    nil
  rescue
    # Catch-all so a parser crash never takes the pipeline down with it.
    self.yaml_errors = 'Undefined error'
    nil
  end
end
# Path of the CI config file within the repository, honoring the
# project-level override when one is set; defaults to '.gitlab-ci.yml'.
def ci_yaml_file_path
  project.ci_config_path.presence || '.gitlab-ci.yml'
end
# Raw CI configuration content, memoized (including a failed lookup).
# Records yaml_errors and returns nil when no content could be loaded.
def ci_yaml_file
  return @ci_yaml_file if defined?(@ci_yaml_file)

  @ci_yaml_file =
    if auto_devops_source?
      implied_ci_yaml_file
    else
      ci_yaml_from_repo
    end

  return @ci_yaml_file if @ci_yaml_file

  self.yaml_errors = "Failed to load CI/CD config file for #{sha}"
  nil
end
# True when parsing the CI config produced errors.
def has_yaml_errors?
  yaml_errors.present?
end

# Unique environment names that successful builds deployed to.
def environments
  builds.where.not(environment: nil).success.pluck(:environment).uniq
end
# Manually set the notes for a Ci::Pipeline
# There is no ActiveRecord relation between Ci::Pipeline and notes
# as they are related to a commit sha. This method helps importing
# them using the +Gitlab::ImportExport::RelationFactory+ class.
def notes=(notes)
  notes.each do |note|
    note[:id] = nil
    note[:commit_id] = sha
    note[:noteable_id] = self['id']
    note.save!
  end
end

# Notes attached to the commit this pipeline belongs to.
def notes
  project.notes.for_commit_id(sha)
end
# Kicks off processing of this pipeline's jobs.
def process!
  Ci::ProcessPipelineService.new(project, user).execute(self)
end

# Re-derives the pipeline status from its jobs and fires the matching
# state-machine event, retrying on optimistic-lock conflicts.
def update_status
  retry_optimistic_lock(self) do
    case latest_builds_status
    when 'pending' then enqueue
    when 'running' then run
    when 'success' then succeed
    when 'failed' then drop
    when 'canceled' then cancel
    when 'skipped' then skip
    when 'manual' then block
    end
  end
end
# Pipeline-level CI variables exposed to every job.
def predefined_variables
  [
    { key: 'CI_PIPELINE_ID', value: id.to_s, public: true },
    { key: 'CI_CONFIG_PATH', value: ci_yaml_file_path, public: true },
    { key: 'CI_PIPELINE_SOURCE', value: source.to_s, public: true }
  ]
end
# Whole seconds spent queued before the pipeline started; nil when it
# has not started yet or when the wait rounded down to zero.
def queued_duration
  return unless started_at

  waited = (started_at - created_at).to_i
  waited unless waited.zero?
end

# Recomputes the cached duration from the jobs' timings.
def update_duration
  return unless started_at

  self.duration = Gitlab::Ci::Pipeline::Duration.from_pipeline(self)
end
# Fires project webhooks and services subscribed to pipeline events.
def execute_hooks
  data = pipeline_data
  project.execute_hooks(data, :pipeline_hooks)
  project.execute_services(data, :pipeline_hooks)
end
# All the merge requests for which the current pipeline runs/ran against
def all_merge_requests
  @all_merge_requests ||= project.merge_requests.where(source_branch: ref)
end

# User-facing detailed status object for this pipeline.
def detailed_status(current_user)
  Gitlab::Ci::Status::Pipeline::Factory
    .new(self, current_user)
    .fabricate!
end
# Latest builds that produced artifacts, memoized as an Array.
def latest_builds_with_artifacts
  # We purposely cast the builds to an Array here. Because we always use the
  # rows if there are more than 0 this prevents us from having to run two
  # queries: one to get the count and one to get the rows.
  @latest_builds_with_artifacts ||= builds.latest.with_artifacts.to_a
end

private
# Reads the CI config file from the repository at this pipeline's SHA.
# Returns nil when project/SHA is missing or the Git lookup fails.
def ci_yaml_from_repo
  return unless project
  return unless sha

  project.repository.gitlab_ci_yml_for(sha, ci_yaml_file_path)
rescue GRPC::NotFound, Rugged::ReferenceError, GRPC::Internal
  nil
end

# Auto DevOps template content, used when the repository has no CI
# config file and Auto DevOps is enabled for the project.
def implied_ci_yaml_file
  return unless project

  if project.auto_devops_enabled?
    Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps').content
  end
end
# Webhook payload describing this pipeline.
def pipeline_data
  Gitlab::DataBuilder::Pipeline.build(self)
end

# Composite status of the latest jobs; a broken config counts as
# 'failed' and a pipeline without jobs as 'skipped'.
def latest_builds_status
  return 'failed' unless yaml_errors.blank?

  statuses.latest.status || 'skipped'
end
def keep_around_commits
|
2016-07-15 18:33:30 -04:00
|
|
|
return unless project
|
2016-07-19 08:59:38 -04:00
|
|
|
|
2016-07-03 19:58:58 -04:00
|
|
|
project.repository.keep_around(self.sha)
|
|
|
|
project.repository.keep_around(self.before_sha)
|
|
|
|
end
|
2015-08-25 21:42:46 -04:00
|
|
|
end
|
|
|
|
end
|