Merge branch 'feature/cycle-analytics-events' into 'master'
Cycle Analytics: Events per stage Adds list of events to each stage: - Issue: list of issues created in the last XX days, that have been labeled or added to a milestone. - Plan: list of commits that reference for the fist time an issue from the last stage. - Code: list of MR created in this stage - Test: List of unique builds triggered by the commits. - Review: List of MR merged - Staging: List of deployed builds - Production: list of issues with the time from idea to production Fixes #23449 - [x] [CHANGELOG](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/CHANGELOG) entry added - [ ] [Documentation created/updated](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/development/doc_styleguide.md) - Tests - [x] Added for this feature/bug - [x] All builds are passing - [x] Conform by the [merge request performance guides](http://docs.gitlab.com/ce/development/merge_request_performance_guidelines.html) - [x] Conform by the [style guides](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/CONTRIBUTING.md#style-guides) - [x] Branch has no merge conflicts with `master` (if it does - rebase it please) - [ ] [Squashed related commits together](https://git-scm.com/book/en/Git-Tools-Rewriting-History#Squashing-Commits) See merge request !6859
This commit is contained in:
commit
2343b83098
56 changed files with 1679 additions and 86 deletions
7
app/controllers/concerns/cycle_analytics_params.rb
Normal file
7
app/controllers/concerns/cycle_analytics_params.rb
Normal file
|
@ -0,0 +1,7 @@
|
|||
module CycleAnalyticsParams
|
||||
extend ActiveSupport::Concern
|
||||
|
||||
def start_date(params)
|
||||
params[:start_date] == '30' ? 30.days.ago : 90.days.ago
|
||||
end
|
||||
end
|
|
@ -0,0 +1,65 @@
|
|||
module Projects
|
||||
module CycleAnalytics
|
||||
class EventsController < Projects::ApplicationController
|
||||
include CycleAnalyticsParams
|
||||
|
||||
before_action :authorize_read_cycle_analytics!
|
||||
before_action :authorize_read_build!, only: [:test, :staging]
|
||||
before_action :authorize_read_issue!, only: [:issue, :production]
|
||||
before_action :authorize_read_merge_request!, only: [:code, :review]
|
||||
|
||||
def issue
|
||||
render_events(events.issue_events)
|
||||
end
|
||||
|
||||
def plan
|
||||
render_events(events.plan_events)
|
||||
end
|
||||
|
||||
def code
|
||||
render_events(events.code_events)
|
||||
end
|
||||
|
||||
def test
|
||||
options[:branch] = events_params[:branch_name]
|
||||
|
||||
render_events(events.test_events)
|
||||
end
|
||||
|
||||
def review
|
||||
render_events(events.review_events)
|
||||
end
|
||||
|
||||
def staging
|
||||
render_events(events.staging_events)
|
||||
end
|
||||
|
||||
def production
|
||||
render_events(events.production_events)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def render_events(events_list)
|
||||
respond_to do |format|
|
||||
format.html
|
||||
format.json { render json: { events: events_list } }
|
||||
end
|
||||
end
|
||||
|
||||
def events
|
||||
@events ||= Gitlab::CycleAnalytics::Events.new(project: project, options: options)
|
||||
end
|
||||
|
||||
def options
|
||||
@options ||= { from: start_date(events_params), current_user: current_user }
|
||||
end
|
||||
|
||||
def events_params
|
||||
return {} unless params[:events].present?
|
||||
|
||||
params[:events].slice(:start_date, :branch_name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -1,11 +1,12 @@
|
|||
class Projects::CycleAnalyticsController < Projects::ApplicationController
|
||||
include ActionView::Helpers::DateHelper
|
||||
include ActionView::Helpers::TextHelper
|
||||
include CycleAnalyticsParams
|
||||
|
||||
before_action :authorize_read_cycle_analytics!
|
||||
|
||||
def show
|
||||
@cycle_analytics = CycleAnalytics.new(@project, from: parse_start_date)
|
||||
@cycle_analytics = ::CycleAnalytics.new(@project, from: start_date(cycle_analytics_params))
|
||||
|
||||
respond_to do |format|
|
||||
format.html
|
||||
|
@ -15,14 +16,6 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
|
|||
|
||||
private
|
||||
|
||||
def parse_start_date
|
||||
case cycle_analytics_params[:start_date]
|
||||
when '30' then 30.days.ago
|
||||
when '90' then 90.days.ago
|
||||
else 90.days.ago
|
||||
end
|
||||
end
|
||||
|
||||
def cycle_analytics_params
|
||||
return {} unless params[:cycle_analytics].present?
|
||||
|
||||
|
|
|
@ -1,12 +1,8 @@
|
|||
class CycleAnalytics
|
||||
include Gitlab::Database::Median
|
||||
include Gitlab::Database::DateTime
|
||||
|
||||
DEPLOYMENT_METRIC_STAGES = %i[production staging]
|
||||
|
||||
def initialize(project, from:)
|
||||
@project = project
|
||||
@from = from
|
||||
@fetcher = Gitlab::CycleAnalytics::MetricsFetcher.new(project: project, from: from, branch: nil)
|
||||
end
|
||||
|
||||
def summary
|
||||
|
@ -14,90 +10,46 @@ class CycleAnalytics
|
|||
end
|
||||
|
||||
def issue
|
||||
calculate_metric(:issue,
|
||||
@fetcher.calculate_metric(:issue,
|
||||
Issue.arel_table[:created_at],
|
||||
[Issue::Metrics.arel_table[:first_associated_with_milestone_at],
|
||||
Issue::Metrics.arel_table[:first_added_to_board_at]])
|
||||
end
|
||||
|
||||
def plan
|
||||
calculate_metric(:plan,
|
||||
@fetcher.calculate_metric(:plan,
|
||||
[Issue::Metrics.arel_table[:first_associated_with_milestone_at],
|
||||
Issue::Metrics.arel_table[:first_added_to_board_at]],
|
||||
Issue::Metrics.arel_table[:first_mentioned_in_commit_at])
|
||||
end
|
||||
|
||||
def code
|
||||
calculate_metric(:code,
|
||||
@fetcher.calculate_metric(:code,
|
||||
Issue::Metrics.arel_table[:first_mentioned_in_commit_at],
|
||||
MergeRequest.arel_table[:created_at])
|
||||
end
|
||||
|
||||
def test
|
||||
calculate_metric(:test,
|
||||
@fetcher.calculate_metric(:test,
|
||||
MergeRequest::Metrics.arel_table[:latest_build_started_at],
|
||||
MergeRequest::Metrics.arel_table[:latest_build_finished_at])
|
||||
end
|
||||
|
||||
def review
|
||||
calculate_metric(:review,
|
||||
@fetcher.calculate_metric(:review,
|
||||
MergeRequest.arel_table[:created_at],
|
||||
MergeRequest::Metrics.arel_table[:merged_at])
|
||||
end
|
||||
|
||||
def staging
|
||||
calculate_metric(:staging,
|
||||
@fetcher.calculate_metric(:staging,
|
||||
MergeRequest::Metrics.arel_table[:merged_at],
|
||||
MergeRequest::Metrics.arel_table[:first_deployed_to_production_at])
|
||||
end
|
||||
|
||||
def production
|
||||
calculate_metric(:production,
|
||||
@fetcher.calculate_metric(:production,
|
||||
Issue.arel_table[:created_at],
|
||||
MergeRequest::Metrics.arel_table[:first_deployed_to_production_at])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def calculate_metric(name, start_time_attrs, end_time_attrs)
|
||||
cte_table = Arel::Table.new("cte_table_for_#{name}")
|
||||
|
||||
# Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
|
||||
# Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
|
||||
# We compute the (end_time - start_time) interval, and give it an alias based on the current
|
||||
# cycle analytics stage.
|
||||
interval_query = Arel::Nodes::As.new(
|
||||
cte_table,
|
||||
subtract_datetimes(base_query_for(name), end_time_attrs, start_time_attrs, name.to_s))
|
||||
|
||||
median_datetime(cte_table, interval_query, name)
|
||||
end
|
||||
|
||||
# Join table with a row for every <issue,merge_request> pair (where the merge request
|
||||
# closes the given issue) with issue and merge request metrics included. The metrics
|
||||
# are loaded with an inner join, so issues / merge requests without metrics are
|
||||
# automatically excluded.
|
||||
def base_query_for(name)
|
||||
arel_table = MergeRequestsClosingIssues.arel_table
|
||||
|
||||
# Load issues
|
||||
query = arel_table.join(Issue.arel_table).on(Issue.arel_table[:id].eq(arel_table[:issue_id])).
|
||||
join(Issue::Metrics.arel_table).on(Issue.arel_table[:id].eq(Issue::Metrics.arel_table[:issue_id])).
|
||||
where(Issue.arel_table[:project_id].eq(@project.id)).
|
||||
where(Issue.arel_table[:deleted_at].eq(nil)).
|
||||
where(Issue.arel_table[:created_at].gteq(@from))
|
||||
|
||||
# Load merge_requests
|
||||
query = query.join(MergeRequest.arel_table, Arel::Nodes::OuterJoin).
|
||||
on(MergeRequest.arel_table[:id].eq(arel_table[:merge_request_id])).
|
||||
join(MergeRequest::Metrics.arel_table).
|
||||
on(MergeRequest.arel_table[:id].eq(MergeRequest::Metrics.arel_table[:merge_request_id]))
|
||||
|
||||
if DEPLOYMENT_METRIC_STAGES.include?(name)
|
||||
# Limit to merge requests that have been deployed to production after `@from`
|
||||
query.where(MergeRequest::Metrics.arel_table[:first_deployed_to_production_at].gteq(@from))
|
||||
end
|
||||
|
||||
query
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
class MergeRequest::Metrics < ActiveRecord::Base
|
||||
belongs_to :merge_request
|
||||
belongs_to :pipeline, class_name: 'Ci::Pipeline', foreign_key: :pipeline_id
|
||||
|
||||
def record!
|
||||
if merge_request.merged? && self.merged_at.blank?
|
||||
|
|
40
app/serializers/analytics_build_entity.rb
Normal file
40
app/serializers/analytics_build_entity.rb
Normal file
|
@ -0,0 +1,40 @@
|
|||
class AnalyticsBuildEntity < Grape::Entity
|
||||
include RequestAwareEntity
|
||||
include EntityDateHelper
|
||||
|
||||
expose :name
|
||||
expose :id
|
||||
expose :ref, as: :branch
|
||||
expose :short_sha
|
||||
expose :author, using: UserEntity
|
||||
|
||||
expose :started_at, as: :date do |build|
|
||||
interval_in_words(build[:started_at])
|
||||
end
|
||||
|
||||
expose :duration, as: :total_time do |build|
|
||||
distance_of_time_as_hash(build[:duration].to_f)
|
||||
end
|
||||
|
||||
expose :branch do
|
||||
expose :ref, as: :name
|
||||
|
||||
expose :url do |build|
|
||||
url_to(:namespace_project_tree, build, build.ref)
|
||||
end
|
||||
end
|
||||
|
||||
expose :url do |build|
|
||||
url_to(:namespace_project_build, build)
|
||||
end
|
||||
|
||||
expose :commit_url do |build|
|
||||
url_to(:namespace_project_commit, build, build.sha)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def url_to(route, build, id = nil)
|
||||
public_send("#{route}_url", build.project.namespace, build.project, id || build)
|
||||
end
|
||||
end
|
3
app/serializers/analytics_build_serializer.rb
Normal file
3
app/serializers/analytics_build_serializer.rb
Normal file
|
@ -0,0 +1,3 @@
|
|||
class AnalyticsBuildSerializer < BaseSerializer
|
||||
entity AnalyticsBuildEntity
|
||||
end
|
13
app/serializers/analytics_commit_entity.rb
Normal file
13
app/serializers/analytics_commit_entity.rb
Normal file
|
@ -0,0 +1,13 @@
|
|||
class AnalyticsCommitEntity < CommitEntity
|
||||
include EntityDateHelper
|
||||
|
||||
expose :short_id, as: :short_sha
|
||||
|
||||
expose :total_time do |commit|
|
||||
distance_of_time_as_hash(request.total_time.to_f)
|
||||
end
|
||||
|
||||
unexpose :author_name
|
||||
unexpose :author_email
|
||||
unexpose :message
|
||||
end
|
3
app/serializers/analytics_commit_serializer.rb
Normal file
3
app/serializers/analytics_commit_serializer.rb
Normal file
|
@ -0,0 +1,3 @@
|
|||
class AnalyticsCommitSerializer < BaseSerializer
|
||||
entity AnalyticsCommitEntity
|
||||
end
|
7
app/serializers/analytics_generic_serializer.rb
Normal file
7
app/serializers/analytics_generic_serializer.rb
Normal file
|
@ -0,0 +1,7 @@
|
|||
class AnalyticsGenericSerializer < BaseSerializer
|
||||
def represent(resource, opts = {})
|
||||
resource.symbolize_keys!
|
||||
|
||||
super(resource, opts)
|
||||
end
|
||||
end
|
29
app/serializers/analytics_issue_entity.rb
Normal file
29
app/serializers/analytics_issue_entity.rb
Normal file
|
@ -0,0 +1,29 @@
|
|||
class AnalyticsIssueEntity < Grape::Entity
|
||||
include RequestAwareEntity
|
||||
include EntityDateHelper
|
||||
|
||||
expose :title
|
||||
expose :author, using: UserEntity
|
||||
|
||||
expose :iid do |object|
|
||||
object[:iid].to_s
|
||||
end
|
||||
|
||||
expose :total_time do |object|
|
||||
distance_of_time_as_hash(object[:total_time].to_f)
|
||||
end
|
||||
|
||||
expose(:created_at) do |object|
|
||||
interval_in_words(object[:created_at])
|
||||
end
|
||||
|
||||
expose :url do |object|
|
||||
url_to(:namespace_project_issue, id: object[:iid].to_s)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def url_to(route, id)
|
||||
public_send("#{route}_url", request.project.namespace, request.project, id)
|
||||
end
|
||||
end
|
3
app/serializers/analytics_issue_serializer.rb
Normal file
3
app/serializers/analytics_issue_serializer.rb
Normal file
|
@ -0,0 +1,3 @@
|
|||
class AnalyticsIssueSerializer < AnalyticsGenericSerializer
|
||||
entity AnalyticsIssueEntity
|
||||
end
|
7
app/serializers/analytics_merge_request_entity.rb
Normal file
7
app/serializers/analytics_merge_request_entity.rb
Normal file
|
@ -0,0 +1,7 @@
|
|||
class AnalyticsMergeRequestEntity < AnalyticsIssueEntity
|
||||
expose :state
|
||||
|
||||
expose :url do |object|
|
||||
url_to(:namespace_project_merge_request, id: object[:iid].to_s)
|
||||
end
|
||||
end
|
3
app/serializers/analytics_merge_request_serializer.rb
Normal file
3
app/serializers/analytics_merge_request_serializer.rb
Normal file
|
@ -0,0 +1,3 @@
|
|||
class AnalyticsMergeRequestSerializer < AnalyticsGenericSerializer
|
||||
entity AnalyticsMergeRequestEntity
|
||||
end
|
35
app/serializers/entity_date_helper.rb
Normal file
35
app/serializers/entity_date_helper.rb
Normal file
|
@ -0,0 +1,35 @@
|
|||
module EntityDateHelper
|
||||
include ActionView::Helpers::DateHelper
|
||||
|
||||
def interval_in_words(diff)
|
||||
"#{distance_of_time_in_words(diff.to_f)} ago"
|
||||
end
|
||||
|
||||
# Converts seconds into a hash such as:
|
||||
# { days: 1, hours: 3, mins: 42, seconds: 40 }
|
||||
#
|
||||
# It returns 0 seconds for zero or negative numbers
|
||||
# It rounds to nearest time unit and does not return zero
|
||||
# i.e { min: 1 } instead of { mins: 1, seconds: 0 }
|
||||
def distance_of_time_as_hash(diff)
|
||||
diff = diff.abs.floor
|
||||
|
||||
return { seconds: 0 } if diff == 0
|
||||
|
||||
mins = (diff / 60).floor
|
||||
seconds = diff % 60
|
||||
hours = (mins / 60).floor
|
||||
mins = mins % 60
|
||||
days = (hours / 24).floor
|
||||
hours = hours % 24
|
||||
|
||||
duration_hash = {}
|
||||
|
||||
duration_hash[:days] = days if days > 0
|
||||
duration_hash[:hours] = hours if hours > 0
|
||||
duration_hash[:mins] = mins if mins > 0
|
||||
duration_hash[:seconds] = seconds if seconds > 0
|
||||
|
||||
duration_hash
|
||||
end
|
||||
end
|
|
@ -12,11 +12,11 @@ class PipelineMetricsWorker
|
|||
private
|
||||
|
||||
def update_metrics_for_active_pipeline(pipeline)
|
||||
metrics(pipeline).update_all(latest_build_started_at: pipeline.started_at, latest_build_finished_at: nil)
|
||||
metrics(pipeline).update_all(latest_build_started_at: pipeline.started_at, latest_build_finished_at: nil, pipeline_id: pipeline.id)
|
||||
end
|
||||
|
||||
def update_metrics_for_succeeded_pipeline(pipeline)
|
||||
metrics(pipeline).update_all(latest_build_started_at: pipeline.started_at, latest_build_finished_at: pipeline.finished_at)
|
||||
metrics(pipeline).update_all(latest_build_started_at: pipeline.started_at, latest_build_finished_at: pipeline.finished_at, pipeline_id: pipeline.id)
|
||||
end
|
||||
|
||||
def metrics(pipeline)
|
||||
|
|
4
changelogs/unreleased/feature-cycle-analytics-events.yml
Normal file
4
changelogs/unreleased/feature-cycle-analytics-events.yml
Normal file
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
title: Add events per stage to cycle analytics
|
||||
merge_request:
|
||||
author:
|
|
@ -153,6 +153,18 @@ resources :namespaces, path: '/', constraints: { id: /[a-zA-Z.0-9_\-]+/ }, only:
|
|||
|
||||
resource :cycle_analytics, only: [:show]
|
||||
|
||||
namespace :cycle_analytics do
|
||||
scope :events, controller: 'events' do
|
||||
get :issue
|
||||
get :plan
|
||||
get :code
|
||||
get :test
|
||||
get :review
|
||||
get :staging
|
||||
get :production
|
||||
end
|
||||
end
|
||||
|
||||
resources :builds, only: [:index, :show], constraints: { id: /\d+/ } do
|
||||
collection do
|
||||
post :cancel_all
|
||||
|
|
|
@ -203,6 +203,8 @@ class Gitlab::Seeder::CycleAnalytics
|
|||
pipeline.run!
|
||||
Timecop.travel rand(1..6).hours.from_now
|
||||
pipeline.succeed!
|
||||
|
||||
PipelineMetricsWorker.new.perform(pipeline.id)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
|
||||
# for more information on how to write migrations for GitLab.
|
||||
|
||||
class AddPipelineIdToMergeRequestMetrics < ActiveRecord::Migration
|
||||
include Gitlab::Database::MigrationHelpers
|
||||
|
||||
# Set this constant to true if this migration requires downtime.
|
||||
DOWNTIME = true
|
||||
|
||||
# When a migration requires downtime you **must** uncomment the following
|
||||
# constant and define a short and easy to understand explanation as to why the
|
||||
# migration requires downtime.
|
||||
DOWNTIME_REASON = 'Adding a foreign key'
|
||||
|
||||
# When using the methods "add_concurrent_index" or "add_column_with_default"
|
||||
# you must disable the use of transactions as these methods can not run in an
|
||||
# existing transaction. When using "add_concurrent_index" make sure that this
|
||||
# method is the _only_ method called in the migration, any other changes
|
||||
# should go in a separate migration. This ensures that upon failure _only_ the
|
||||
# index creation fails and can be retried or reverted easily.
|
||||
#
|
||||
# To disable transactions uncomment the following line and remove these
|
||||
# comments:
|
||||
# disable_ddl_transaction!
|
||||
|
||||
def change
|
||||
add_column :merge_request_metrics, :pipeline_id, :integer
|
||||
add_concurrent_index :merge_request_metrics, :pipeline_id
|
||||
add_foreign_key :merge_request_metrics, :ci_commits, column: :pipeline_id, on_delete: :cascade
|
||||
end
|
||||
end
|
|
@ -649,10 +649,12 @@ ActiveRecord::Schema.define(version: 20161117114805) do
|
|||
t.datetime "merged_at"
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.integer "pipeline_id"
|
||||
end
|
||||
|
||||
add_index "merge_request_metrics", ["first_deployed_to_production_at"], name: "index_merge_request_metrics_on_first_deployed_to_production_at", using: :btree
|
||||
add_index "merge_request_metrics", ["merge_request_id"], name: "index_merge_request_metrics", using: :btree
|
||||
add_index "merge_request_metrics", ["pipeline_id"], name: "index_merge_request_metrics_on_pipeline_id", using: :btree
|
||||
|
||||
create_table "merge_requests", force: :cascade do |t|
|
||||
t.string "target_branch", null: false
|
||||
|
@ -1260,6 +1262,7 @@ ActiveRecord::Schema.define(version: 20161117114805) do
|
|||
add_foreign_key "labels", "namespaces", column: "group_id", on_delete: :cascade
|
||||
add_foreign_key "lists", "boards"
|
||||
add_foreign_key "lists", "labels"
|
||||
add_foreign_key "merge_request_metrics", "ci_commits", column: "pipeline_id", on_delete: :cascade
|
||||
add_foreign_key "merge_request_metrics", "merge_requests", on_delete: :cascade
|
||||
add_foreign_key "merge_requests_closing_issues", "issues", on_delete: :cascade
|
||||
add_foreign_key "merge_requests_closing_issues", "merge_requests", on_delete: :cascade
|
||||
|
|
57
lib/gitlab/cycle_analytics/base_event.rb
Normal file
57
lib/gitlab/cycle_analytics/base_event.rb
Normal file
|
@ -0,0 +1,57 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class BaseEvent
|
||||
include MetricsTables
|
||||
|
||||
attr_reader :stage, :start_time_attrs, :end_time_attrs, :projections, :query
|
||||
|
||||
def initialize(project:, options:)
|
||||
@query = EventsQuery.new(project: project, options: options)
|
||||
@project = project
|
||||
@options = options
|
||||
end
|
||||
|
||||
def fetch
|
||||
update_author!
|
||||
|
||||
event_result.map do |event|
|
||||
serialize(event) if has_permission?(event['id'])
|
||||
end
|
||||
end
|
||||
|
||||
def custom_query(_base_query); end
|
||||
|
||||
def order
|
||||
@order || @start_time_attrs
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def update_author!
|
||||
return unless event_result.any? && event_result.first['author_id']
|
||||
|
||||
Updater.update!(event_result, from: 'author_id', to: 'author', klass: User)
|
||||
end
|
||||
|
||||
def event_result
|
||||
@event_result ||= @query.execute(self).to_a
|
||||
end
|
||||
|
||||
def serialize(_event)
|
||||
raise NotImplementedError.new("Expected #{self.name} to implement serialize(event)")
|
||||
end
|
||||
|
||||
def has_permission?(id)
|
||||
allowed_ids.nil? || allowed_ids.include?(id.to_i)
|
||||
end
|
||||
|
||||
def allowed_ids
|
||||
nil
|
||||
end
|
||||
|
||||
def event_result_ids
|
||||
event_result.map { |event| event['id'] }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
28
lib/gitlab/cycle_analytics/code_event.rb
Normal file
28
lib/gitlab/cycle_analytics/code_event.rb
Normal file
|
@ -0,0 +1,28 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class CodeEvent < BaseEvent
|
||||
include MergeRequestAllowed
|
||||
|
||||
def initialize(*args)
|
||||
@stage = :code
|
||||
@start_time_attrs = issue_metrics_table[:first_mentioned_in_commit_at]
|
||||
@end_time_attrs = mr_table[:created_at]
|
||||
@projections = [mr_table[:title],
|
||||
mr_table[:iid],
|
||||
mr_table[:id],
|
||||
mr_table[:created_at],
|
||||
mr_table[:state],
|
||||
mr_table[:author_id]]
|
||||
@order = mr_table[:created_at]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def serialize(event)
|
||||
AnalyticsMergeRequestSerializer.new(project: @project).represent(event).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
38
lib/gitlab/cycle_analytics/events.rb
Normal file
38
lib/gitlab/cycle_analytics/events.rb
Normal file
|
@ -0,0 +1,38 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class Events
|
||||
def initialize(project:, options:)
|
||||
@project = project
|
||||
@options = options
|
||||
end
|
||||
|
||||
def issue_events
|
||||
IssueEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def plan_events
|
||||
PlanEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def code_events
|
||||
CodeEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def test_events
|
||||
TestEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def review_events
|
||||
ReviewEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def staging_events
|
||||
StagingEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
|
||||
def production_events
|
||||
ProductionEvent.new(project: @project, options: @options).fetch
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
37
lib/gitlab/cycle_analytics/events_query.rb
Normal file
37
lib/gitlab/cycle_analytics/events_query.rb
Normal file
|
@ -0,0 +1,37 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class EventsQuery
|
||||
attr_reader :project
|
||||
|
||||
def initialize(project:, options: {})
|
||||
@project = project
|
||||
@from = options[:from]
|
||||
@branch = options[:branch]
|
||||
@fetcher = Gitlab::CycleAnalytics::MetricsFetcher.new(project: project, from: @from, branch: @branch)
|
||||
end
|
||||
|
||||
def execute(stage_class)
|
||||
@stage_class = stage_class
|
||||
|
||||
ActiveRecord::Base.connection.exec_query(query.to_sql)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def query
|
||||
base_query = @fetcher.base_query_for(@stage_class.stage)
|
||||
diff_fn = @fetcher.subtract_datetimes_diff(base_query, @stage_class.start_time_attrs, @stage_class.end_time_attrs)
|
||||
|
||||
@stage_class.custom_query(base_query)
|
||||
|
||||
base_query.project(extract_epoch(diff_fn).as('total_time'), *@stage_class.projections).order(@stage_class.order.desc)
|
||||
end
|
||||
|
||||
def extract_epoch(arel_attribute)
|
||||
return arel_attribute unless Gitlab::Database.postgresql?
|
||||
|
||||
Arel.sql(%Q{EXTRACT(EPOCH FROM (#{arel_attribute.to_sql}))})
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
9
lib/gitlab/cycle_analytics/issue_allowed.rb
Normal file
9
lib/gitlab/cycle_analytics/issue_allowed.rb
Normal file
|
@ -0,0 +1,9 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
module IssueAllowed
|
||||
def allowed_ids
|
||||
@allowed_ids ||= IssuesFinder.new(@options[:current_user], project_id: @project.id).execute.where(id: event_result_ids).pluck(:id)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
27
lib/gitlab/cycle_analytics/issue_event.rb
Normal file
27
lib/gitlab/cycle_analytics/issue_event.rb
Normal file
|
@ -0,0 +1,27 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class IssueEvent < BaseEvent
|
||||
include IssueAllowed
|
||||
|
||||
def initialize(*args)
|
||||
@stage = :issue
|
||||
@start_time_attrs = issue_table[:created_at]
|
||||
@end_time_attrs = [issue_metrics_table[:first_associated_with_milestone_at],
|
||||
issue_metrics_table[:first_added_to_board_at]]
|
||||
@projections = [issue_table[:title],
|
||||
issue_table[:iid],
|
||||
issue_table[:id],
|
||||
issue_table[:created_at],
|
||||
issue_table[:author_id]]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def serialize(event)
|
||||
AnalyticsIssueSerializer.new(project: @project).represent(event).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
9
lib/gitlab/cycle_analytics/merge_request_allowed.rb
Normal file
9
lib/gitlab/cycle_analytics/merge_request_allowed.rb
Normal file
|
@ -0,0 +1,9 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
module MergeRequestAllowed
|
||||
def allowed_ids
|
||||
@allowed_ids ||= MergeRequestsFinder.new(@options[:current_user], project_id: @project.id).execute.where(id: event_result_ids).pluck(:id)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
60
lib/gitlab/cycle_analytics/metrics_fetcher.rb
Normal file
60
lib/gitlab/cycle_analytics/metrics_fetcher.rb
Normal file
|
@ -0,0 +1,60 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class MetricsFetcher
|
||||
include Gitlab::Database::Median
|
||||
include Gitlab::Database::DateTime
|
||||
include MetricsTables
|
||||
|
||||
DEPLOYMENT_METRIC_STAGES = %i[production staging]
|
||||
|
||||
def initialize(project:, from:, branch:)
|
||||
@project = project
|
||||
@project = project
|
||||
@from = from
|
||||
@branch = branch
|
||||
end
|
||||
|
||||
def calculate_metric(name, start_time_attrs, end_time_attrs)
|
||||
cte_table = Arel::Table.new("cte_table_for_#{name}")
|
||||
|
||||
# Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
|
||||
# Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
|
||||
# We compute the (end_time - start_time) interval, and give it an alias based on the current
|
||||
# cycle analytics stage.
|
||||
interval_query = Arel::Nodes::As.new(
|
||||
cte_table,
|
||||
subtract_datetimes(base_query_for(name), start_time_attrs, end_time_attrs, name.to_s))
|
||||
|
||||
median_datetime(cte_table, interval_query, name)
|
||||
end
|
||||
|
||||
# Join table with a row for every <issue,merge_request> pair (where the merge request
|
||||
# closes the given issue) with issue and merge request metrics included. The metrics
|
||||
# are loaded with an inner join, so issues / merge requests without metrics are
|
||||
# automatically excluded.
|
||||
def base_query_for(name)
|
||||
# Load issues
|
||||
query = mr_closing_issues_table.join(issue_table).on(issue_table[:id].eq(mr_closing_issues_table[:issue_id])).
|
||||
join(issue_metrics_table).on(issue_table[:id].eq(issue_metrics_table[:issue_id])).
|
||||
where(issue_table[:project_id].eq(@project.id)).
|
||||
where(issue_table[:deleted_at].eq(nil)).
|
||||
where(issue_table[:created_at].gteq(@from))
|
||||
|
||||
query = query.where(build_table[:ref].eq(@branch)) if name == :test && @branch
|
||||
|
||||
# Load merge_requests
|
||||
query = query.join(mr_table, Arel::Nodes::OuterJoin).
|
||||
on(mr_table[:id].eq(mr_closing_issues_table[:merge_request_id])).
|
||||
join(mr_metrics_table).
|
||||
on(mr_table[:id].eq(mr_metrics_table[:merge_request_id]))
|
||||
|
||||
if DEPLOYMENT_METRIC_STAGES.include?(name)
|
||||
# Limit to merge requests that have been deployed to production after `@from`
|
||||
query.where(mr_metrics_table[:first_deployed_to_production_at].gteq(@from))
|
||||
end
|
||||
|
||||
query
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
37
lib/gitlab/cycle_analytics/metrics_tables.rb
Normal file
37
lib/gitlab/cycle_analytics/metrics_tables.rb
Normal file
|
@ -0,0 +1,37 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
module MetricsTables
|
||||
def mr_metrics_table
|
||||
MergeRequest::Metrics.arel_table
|
||||
end
|
||||
|
||||
def mr_table
|
||||
MergeRequest.arel_table
|
||||
end
|
||||
|
||||
def mr_diff_table
|
||||
MergeRequestDiff.arel_table
|
||||
end
|
||||
|
||||
def mr_closing_issues_table
|
||||
MergeRequestsClosingIssues.arel_table
|
||||
end
|
||||
|
||||
def issue_table
|
||||
Issue.arel_table
|
||||
end
|
||||
|
||||
def issue_metrics_table
|
||||
Issue::Metrics.arel_table
|
||||
end
|
||||
|
||||
def user_table
|
||||
User.arel_table
|
||||
end
|
||||
|
||||
def build_table
|
||||
::CommitStatus.arel_table
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
44
lib/gitlab/cycle_analytics/plan_event.rb
Normal file
44
lib/gitlab/cycle_analytics/plan_event.rb
Normal file
|
@ -0,0 +1,44 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class PlanEvent < BaseEvent
|
||||
def initialize(*args)
|
||||
@stage = :plan
|
||||
@start_time_attrs = issue_metrics_table[:first_associated_with_milestone_at]
|
||||
@end_time_attrs = [issue_metrics_table[:first_added_to_board_at],
|
||||
issue_metrics_table[:first_mentioned_in_commit_at]]
|
||||
@projections = [mr_diff_table[:st_commits].as('commits'),
|
||||
issue_metrics_table[:first_mentioned_in_commit_at]]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
def custom_query(base_query)
|
||||
base_query.join(mr_diff_table).on(mr_diff_table[:merge_request_id].eq(mr_table[:id]))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def serialize(event)
|
||||
st_commit = first_time_reference_commit(event.delete('commits'), event)
|
||||
|
||||
return unless st_commit
|
||||
|
||||
serialize_commit(event, st_commit, query)
|
||||
end
|
||||
|
||||
def first_time_reference_commit(commits, event)
|
||||
YAML.load(commits).find do |commit|
|
||||
next unless commit[:committed_date] && event['first_mentioned_in_commit_at']
|
||||
|
||||
commit[:committed_date].to_i == DateTime.parse(event['first_mentioned_in_commit_at'].to_s).to_i
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_commit(event, st_commit, query)
|
||||
commit = Commit.new(Gitlab::Git::Commit.new(st_commit), @project)
|
||||
|
||||
AnalyticsCommitSerializer.new(project: @project, total_time: event['total_time']).represent(commit).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
26
lib/gitlab/cycle_analytics/production_event.rb
Normal file
26
lib/gitlab/cycle_analytics/production_event.rb
Normal file
|
@ -0,0 +1,26 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class ProductionEvent < BaseEvent
|
||||
include IssueAllowed
|
||||
|
||||
def initialize(*args)
|
||||
@stage = :production
|
||||
@start_time_attrs = issue_table[:created_at]
|
||||
@end_time_attrs = mr_metrics_table[:first_deployed_to_production_at]
|
||||
@projections = [issue_table[:title],
|
||||
issue_table[:iid],
|
||||
issue_table[:id],
|
||||
issue_table[:created_at],
|
||||
issue_table[:author_id]]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def serialize(event)
|
||||
AnalyticsIssueSerializer.new(project: @project).represent(event).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
25
lib/gitlab/cycle_analytics/review_event.rb
Normal file
25
lib/gitlab/cycle_analytics/review_event.rb
Normal file
|
@ -0,0 +1,25 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class ReviewEvent < BaseEvent
|
||||
include MergeRequestAllowed
|
||||
|
||||
def initialize(*args)
|
||||
@stage = :review
|
||||
@start_time_attrs = mr_table[:created_at]
|
||||
@end_time_attrs = mr_metrics_table[:merged_at]
|
||||
@projections = [mr_table[:title],
|
||||
mr_table[:iid],
|
||||
mr_table[:id],
|
||||
mr_table[:created_at],
|
||||
mr_table[:state],
|
||||
mr_table[:author_id]]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
def serialize(event)
|
||||
AnalyticsMergeRequestSerializer.new(project: @project).represent(event).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
31
lib/gitlab/cycle_analytics/staging_event.rb
Normal file
31
lib/gitlab/cycle_analytics/staging_event.rb
Normal file
|
@ -0,0 +1,31 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class StagingEvent < BaseEvent
|
||||
def initialize(*args)
|
||||
@stage = :staging
|
||||
@start_time_attrs = mr_metrics_table[:merged_at]
|
||||
@end_time_attrs = mr_metrics_table[:first_deployed_to_production_at]
|
||||
@projections = [build_table[:id]]
|
||||
@order = build_table[:created_at]
|
||||
|
||||
super(*args)
|
||||
end
|
||||
|
||||
def fetch
|
||||
Updater.update!(event_result, from: 'id', to: 'build', klass: ::Ci::Build)
|
||||
|
||||
super
|
||||
end
|
||||
|
||||
def custom_query(base_query)
|
||||
base_query.join(build_table).on(mr_metrics_table[:pipeline_id].eq(build_table[:commit_id]))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def serialize(event)
|
||||
AnalyticsBuildSerializer.new.represent(event['build']).as_json
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
13
lib/gitlab/cycle_analytics/test_event.rb
Normal file
13
lib/gitlab/cycle_analytics/test_event.rb
Normal file
|
@ -0,0 +1,13 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class TestEvent < StagingEvent
|
||||
def initialize(*args)
|
||||
super(*args)
|
||||
|
||||
@stage = :test
|
||||
@start_time_attrs = mr_metrics_table[:latest_build_started_at]
|
||||
@end_time_attrs = mr_metrics_table[:latest_build_finished_at]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
30
lib/gitlab/cycle_analytics/updater.rb
Normal file
30
lib/gitlab/cycle_analytics/updater.rb
Normal file
|
@ -0,0 +1,30 @@
|
|||
module Gitlab
|
||||
module CycleAnalytics
|
||||
class Updater
|
||||
def self.update!(*args)
|
||||
new(*args).update!
|
||||
end
|
||||
|
||||
def initialize(event_result, from:, to:, klass:)
|
||||
@event_result = event_result
|
||||
@klass = klass
|
||||
@from = from
|
||||
@to = to
|
||||
end
|
||||
|
||||
def update!
|
||||
@event_result.each do |event|
|
||||
event[@to] = items[event.delete(@from).to_i].first
|
||||
end
|
||||
end
|
||||
|
||||
def result_ids
|
||||
@event_result.map { |event| event[@from] }
|
||||
end
|
||||
|
||||
def items
|
||||
@items ||= @klass.find(result_ids).group_by { |item| item['id'] }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -7,21 +7,25 @@ module Gitlab
|
|||
#
|
||||
# Note: For MySQL, the interval is returned in seconds.
|
||||
# For PostgreSQL, the interval is returned as an INTERVAL type.
|
||||
def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
|
||||
diff_fn = if Gitlab::Database.postgresql?
|
||||
Arel::Nodes::Subtraction.new(
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
|
||||
elsif Gitlab::Database.mysql?
|
||||
Arel::Nodes::NamedFunction.new(
|
||||
"TIMESTAMPDIFF",
|
||||
[Arel.sql('second'),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
|
||||
end
|
||||
def subtract_datetimes(query_so_far, start_time_attrs, end_time_attrs, as)
|
||||
diff_fn = subtract_datetimes_diff(query_so_far, start_time_attrs, end_time_attrs)
|
||||
|
||||
query_so_far.project(diff_fn.as(as))
|
||||
end
|
||||
|
||||
def subtract_datetimes_diff(query_so_far, start_time_attrs, end_time_attrs)
|
||||
if Gitlab::Database.postgresql?
|
||||
Arel::Nodes::Subtraction.new(
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
|
||||
elsif Gitlab::Database.mysql?
|
||||
Arel::Nodes::NamedFunction.new(
|
||||
"TIMESTAMPDIFF",
|
||||
[Arel.sql('second'),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
|
||||
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
10
spec/lib/gitlab/cycle_analytics/code_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/code_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::CodeEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'does not have the default order' do
|
||||
expect(event.order).not_to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
326
spec/lib/gitlab/cycle_analytics/events_spec.rb
Normal file
326
spec/lib/gitlab/cycle_analytics/events_spec.rb
Normal file
|
@ -0,0 +1,326 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::CycleAnalytics::Events do
|
||||
let(:project) { create(:project) }
|
||||
let(:from_date) { 10.days.ago }
|
||||
let(:user) { create(:user, :admin) }
|
||||
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
|
||||
|
||||
subject { described_class.new(project: project, options: { from: from_date, current_user: user }) }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([context])
|
||||
|
||||
setup(context)
|
||||
end
|
||||
|
||||
describe '#issue_events' do
|
||||
it 'has the total time' do
|
||||
expect(subject.issue_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has a title' do
|
||||
expect(subject.issue_events.first[:title]).to eq(context.title)
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.issue_events.first[:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has an iid' do
|
||||
expect(subject.issue_events.first[:iid]).to eq(context.iid.to_s)
|
||||
end
|
||||
|
||||
it 'has a created_at timestamp' do
|
||||
expect(subject.issue_events.first[:created_at]).to end_with('ago')
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.issue_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.issue_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.issue_events.first[:author][:name]).to eq(context.author.name)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#plan_events' do
|
||||
it 'has a title' do
|
||||
expect(subject.plan_events.first[:title]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has a sha short ID' do
|
||||
expect(subject.plan_events.first[:short_sha]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.plan_events.first[:commit_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.plan_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.plan_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.plan_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.plan_events.first[:author][:name]).not_to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '#code_events' do
|
||||
before do
|
||||
create_commit_referencing_issue(context)
|
||||
end
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.code_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has a title' do
|
||||
expect(subject.code_events.first[:title]).to eq('Awesome merge_request')
|
||||
end
|
||||
|
||||
it 'has an iid' do
|
||||
expect(subject.code_events.first[:iid]).to eq(context.iid.to_s)
|
||||
end
|
||||
|
||||
it 'has a created_at timestamp' do
|
||||
expect(subject.code_events.first[:created_at]).to end_with('ago')
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.code_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.code_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.code_events.first[:author][:name]).to eq(MergeRequest.first.author.name)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#test_events' do
|
||||
let(:merge_request) { MergeRequest.first }
|
||||
let!(:pipeline) do
|
||||
create(:ci_pipeline,
|
||||
ref: merge_request.source_branch,
|
||||
sha: merge_request.diff_head_sha,
|
||||
project: context.project)
|
||||
end
|
||||
|
||||
before do
|
||||
create(:ci_build, pipeline: pipeline, status: :success, author: user)
|
||||
create(:ci_build, pipeline: pipeline, status: :success, author: user)
|
||||
|
||||
pipeline.run!
|
||||
pipeline.succeed!
|
||||
end
|
||||
|
||||
it 'has the name' do
|
||||
expect(subject.test_events.first[:name]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the ID' do
|
||||
expect(subject.test_events.first[:id]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.test_events.first[:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the branch name' do
|
||||
expect(subject.test_events.first[:branch]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the branch URL' do
|
||||
expect(subject.test_events.first[:branch][:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the short SHA' do
|
||||
expect(subject.test_events.first[:short_sha]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the commit URL' do
|
||||
expect(subject.test_events.first[:commit_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the date' do
|
||||
expect(subject.test_events.first[:date]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.test_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
end
|
||||
|
||||
describe '#review_events' do
|
||||
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.review_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has a title' do
|
||||
expect(subject.review_events.first[:title]).to eq('Awesome merge_request')
|
||||
end
|
||||
|
||||
it 'has an iid' do
|
||||
expect(subject.review_events.first[:iid]).to eq(context.iid.to_s)
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.review_events.first[:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has a state' do
|
||||
expect(subject.review_events.first[:state]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has a created_at timestamp' do
|
||||
expect(subject.review_events.first[:created_at]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.review_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.review_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.review_events.first[:author][:name]).to eq(MergeRequest.first.author.name)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#staging_events' do
|
||||
let(:merge_request) { MergeRequest.first }
|
||||
let!(:pipeline) do
|
||||
create(:ci_pipeline,
|
||||
ref: merge_request.source_branch,
|
||||
sha: merge_request.diff_head_sha,
|
||||
project: context.project)
|
||||
end
|
||||
|
||||
before do
|
||||
create(:ci_build, pipeline: pipeline, status: :success, author: user)
|
||||
create(:ci_build, pipeline: pipeline, status: :success, author: user)
|
||||
|
||||
pipeline.run!
|
||||
pipeline.succeed!
|
||||
|
||||
merge_merge_requests_closing_issue(context)
|
||||
deploy_master
|
||||
end
|
||||
|
||||
it 'has the name' do
|
||||
expect(subject.staging_events.first[:name]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the ID' do
|
||||
expect(subject.staging_events.first[:id]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.staging_events.first[:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the branch name' do
|
||||
expect(subject.staging_events.first[:branch]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the branch URL' do
|
||||
expect(subject.staging_events.first[:branch][:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the short SHA' do
|
||||
expect(subject.staging_events.first[:short_sha]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the commit URL' do
|
||||
expect(subject.staging_events.first[:commit_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the date' do
|
||||
expect(subject.staging_events.first[:date]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.staging_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.staging_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.staging_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.staging_events.first[:author][:name]).to eq(MergeRequest.first.author.name)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#production_events' do
|
||||
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
|
||||
|
||||
before do
|
||||
merge_merge_requests_closing_issue(context)
|
||||
deploy_master
|
||||
end
|
||||
|
||||
it 'has the total time' do
|
||||
expect(subject.production_events.first[:total_time]).not_to be_empty
|
||||
end
|
||||
|
||||
it 'has a title' do
|
||||
expect(subject.production_events.first[:title]).to eq(context.title)
|
||||
end
|
||||
|
||||
it 'has the URL' do
|
||||
expect(subject.production_events.first[:url]).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has an iid' do
|
||||
expect(subject.production_events.first[:iid]).to eq(context.iid.to_s)
|
||||
end
|
||||
|
||||
it 'has a created_at timestamp' do
|
||||
expect(subject.production_events.first[:created_at]).to end_with('ago')
|
||||
end
|
||||
|
||||
it "has the author's URL" do
|
||||
expect(subject.production_events.first[:author][:web_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's avatar URL" do
|
||||
expect(subject.production_events.first[:author][:avatar_url]).not_to be_nil
|
||||
end
|
||||
|
||||
it "has the author's name" do
|
||||
expect(subject.production_events.first[:author][:name]).to eq(context.author.name)
|
||||
end
|
||||
end
|
||||
|
||||
def setup(context)
|
||||
milestone = create(:milestone, project: project)
|
||||
context.update(milestone: milestone)
|
||||
mr = create_merge_request_closing_issue(context)
|
||||
|
||||
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.sha)
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/issue_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/issue_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::IssueEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'has the default order' do
|
||||
expect(event.order).to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/plan_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/plan_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::PlanEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'has the default order' do
|
||||
expect(event.order).to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/production_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/production_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::ProductionEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'has the default order' do
|
||||
expect(event.order).to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/review_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/review_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::ReviewEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'has the default order' do
|
||||
expect(event.order).to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
21
spec/lib/gitlab/cycle_analytics/shared_event_spec.rb
Normal file
21
spec/lib/gitlab/cycle_analytics/shared_event_spec.rb
Normal file
|
@ -0,0 +1,21 @@
|
|||
require 'spec_helper'
|
||||
|
||||
shared_examples 'default query config' do
|
||||
let(:event) { described_class.new(project: double, options: {}) }
|
||||
|
||||
it 'has the start attributes' do
|
||||
expect(event.start_time_attrs).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the stage attribute' do
|
||||
expect(event.stage).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the end attributes' do
|
||||
expect(event.end_time_attrs).not_to be_nil
|
||||
end
|
||||
|
||||
it 'has the projection attributes' do
|
||||
expect(event.projections).not_to be_nil
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/staging_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/staging_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::StagingEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'does not have the default order' do
|
||||
expect(event.order).not_to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
10
spec/lib/gitlab/cycle_analytics/test_event_spec.rb
Normal file
10
spec/lib/gitlab/cycle_analytics/test_event_spec.rb
Normal file
|
@ -0,0 +1,10 @@
|
|||
require 'spec_helper'
|
||||
require 'lib/gitlab/cycle_analytics/shared_event_spec'
|
||||
|
||||
describe Gitlab::CycleAnalytics::TestEvent do
|
||||
it_behaves_like 'default query config' do
|
||||
it 'does not have the default order' do
|
||||
expect(event.order).not_to eq(event.start_time_attrs)
|
||||
end
|
||||
end
|
||||
end
|
25
spec/lib/gitlab/cycle_analytics/updater_spec.rb
Normal file
25
spec/lib/gitlab/cycle_analytics/updater_spec.rb
Normal file
|
@ -0,0 +1,25 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::CycleAnalytics::Updater do
|
||||
describe 'updates authors' do
|
||||
let(:user) { create(:user) }
|
||||
let(:events) { [{ 'author_id' => user.id }] }
|
||||
|
||||
it 'maps the correct user' do
|
||||
described_class.update!(events, from: 'author_id', to: 'author', klass: User)
|
||||
|
||||
expect(events.first['author']).to eq(user)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'updates builds' do
|
||||
let(:build) { create(:ci_build) }
|
||||
let(:events) { [{ 'id' => build.id }] }
|
||||
|
||||
it 'maps the correct build' do
|
||||
described_class.update!(events, from: 'id', to: 'build', klass: ::Ci::Build)
|
||||
|
||||
expect(events.first['build']).to eq(build)
|
||||
end
|
||||
end
|
||||
end
|
119
spec/lib/light_url_builder_spec.rb
Normal file
119
spec/lib/light_url_builder_spec.rb
Normal file
|
@ -0,0 +1,119 @@
|
|||
require 'spec_helper'
|
||||
|
||||
describe Gitlab::UrlBuilder, lib: true do
|
||||
describe '.build' do
|
||||
context 'when passing a Commit' do
|
||||
it 'returns a proper URL' do
|
||||
commit = build_stubbed(:commit)
|
||||
|
||||
url = described_class.build(commit)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{commit.project.path_with_namespace}/commit/#{commit.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'when passing an Issue' do
|
||||
it 'returns a proper URL' do
|
||||
issue = build_stubbed(:issue, iid: 42)
|
||||
|
||||
url = described_class.build(issue)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.path_with_namespace}/issues/#{issue.iid}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'when passing a MergeRequest' do
|
||||
it 'returns a proper URL' do
|
||||
merge_request = build_stubbed(:merge_request, iid: 42)
|
||||
|
||||
url = described_class.build(merge_request)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'when passing a Note' do
|
||||
context 'on a Commit' do
|
||||
it 'returns a proper URL' do
|
||||
note = build_stubbed(:note_on_commit)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.path_with_namespace}/commit/#{note.commit_id}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on a Commit Diff' do
|
||||
it 'returns a proper URL' do
|
||||
note = build_stubbed(:diff_note_on_commit)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{note.project.path_with_namespace}/commit/#{note.commit_id}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on an Issue' do
|
||||
it 'returns a proper URL' do
|
||||
issue = create(:issue, iid: 42)
|
||||
note = build_stubbed(:note_on_issue, noteable: issue)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{issue.project.path_with_namespace}/issues/#{issue.iid}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on a MergeRequest' do
|
||||
it 'returns a proper URL' do
|
||||
merge_request = create(:merge_request, iid: 42)
|
||||
note = build_stubbed(:note_on_merge_request, noteable: merge_request)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on a MergeRequest Diff' do
|
||||
it 'returns a proper URL' do
|
||||
merge_request = create(:merge_request, iid: 42)
|
||||
note = build_stubbed(:diff_note_on_merge_request, noteable: merge_request)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{merge_request.project.path_with_namespace}/merge_requests/#{merge_request.iid}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on a ProjectSnippet' do
|
||||
it 'returns a proper URL' do
|
||||
project_snippet = create(:project_snippet)
|
||||
note = build_stubbed(:note_on_project_snippet, noteable: project_snippet)
|
||||
|
||||
url = described_class.build(note)
|
||||
|
||||
expect(url).to eq "#{Settings.gitlab['url']}/#{project_snippet.project.path_with_namespace}/snippets/#{note.noteable_id}#note_#{note.id}"
|
||||
end
|
||||
end
|
||||
|
||||
context 'on another object' do
|
||||
it 'returns a proper URL' do
|
||||
project = build_stubbed(:project)
|
||||
|
||||
expect { described_class.build(project) }.
|
||||
to raise_error(NotImplementedError, 'No URL builder defined for Project')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when passing a WikiPage' do
|
||||
it 'returns a proper URL' do
|
||||
wiki_page = build(:wiki_page)
|
||||
url = described_class.build(wiki_page)
|
||||
|
||||
expect(url).to eq "#{Gitlab.config.gitlab.url}#{wiki_page.wiki.wiki_base_path}/#{wiki_page.slug}"
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
140
spec/requests/projects/cycle_analytics_events_spec.rb
Normal file
140
spec/requests/projects/cycle_analytics_events_spec.rb
Normal file
|
@ -0,0 +1,140 @@
|
|||
require 'spec_helper'

describe 'cycle analytics events' do
  let(:user) { create(:user) }
  let(:project) { create(:project) }
  let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }

  describe 'GET /:namespace/:project/cycle_analytics/events/issues' do
    before do
      project.team << [user, :developer]

      allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])

      3.times { create_cycle }
      deploy_master

      login_as(user)
    end

    it 'lists the issue events' do
      get namespace_project_cycle_analytics_issue_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      first_issue_iid = Issue.order(created_at: :desc).pluck(:iid).first.to_s

      expect(json_response['events'].first['iid']).to eq(first_issue_iid)
    end

    it 'lists the plan events' do
      get namespace_project_cycle_analytics_plan_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      expect(json_response['events'].first['short_sha']).to eq(MergeRequest.last.commits.first.short_id)
    end

    it 'lists the code events' do
      get namespace_project_cycle_analytics_code_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      first_mr_iid = MergeRequest.order(created_at: :desc).pluck(:iid).first.to_s

      expect(json_response['events'].first['iid']).to eq(first_mr_iid)
    end

    it 'lists the test events' do
      get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      expect(json_response['events'].first['date']).not_to be_empty
    end

    it 'lists the review events' do
      get namespace_project_cycle_analytics_review_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      first_mr_iid = MergeRequest.order(created_at: :desc).pluck(:iid).first.to_s

      expect(json_response['events'].first['iid']).to eq(first_mr_iid)
    end

    it 'lists the staging events' do
      get namespace_project_cycle_analytics_staging_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      expect(json_response['events'].first['date']).not_to be_empty
    end

    it 'lists the production events' do
      get namespace_project_cycle_analytics_production_path(project.namespace, project, format: :json)

      expect(json_response['events']).not_to be_empty

      first_issue_iid = Issue.order(created_at: :desc).pluck(:iid).first.to_s

      expect(json_response['events'].first['iid']).to eq(first_issue_iid)
    end

    context 'specific branch' do
      it 'lists the test events' do
        branch = MergeRequest.first.source_branch

        get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json, branch: branch)

        expect(json_response['events']).not_to be_empty

        expect(json_response['events'].first['date']).not_to be_empty
      end
    end

    context 'with private project and builds' do
      before do
        ProjectMember.first.update(access_level: Gitlab::Access::GUEST)
      end

      it 'does not list the test events' do
        get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json)

        expect(response).to have_http_status(:not_found)
      end

      it 'does not list the staging events' do
        get namespace_project_cycle_analytics_staging_path(project.namespace, project, format: :json)

        expect(response).to have_http_status(:not_found)
      end

      it 'lists the issue events' do
        get namespace_project_cycle_analytics_issue_path(project.namespace, project, format: :json)

        expect(response).to have_http_status(:ok)
      end
    end
  end

  def json_response
    JSON.parse(response.body)
  end

  def create_cycle
    milestone = create(:milestone, project: project)
    issue.update(milestone: milestone)
    mr = create_merge_request_closing_issue(issue)

    pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha)
    pipeline.run

    create(:ci_build, pipeline: pipeline, status: :success, author: user)
    create(:ci_build, pipeline: pipeline, status: :success, author: user)

    merge_merge_requests_closing_issue(issue)

    ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.sha)
  end
end
27
spec/serializers/analytics_build_entity_spec.rb
Normal file
@@ -0,0 +1,27 @@
require 'spec_helper'

describe AnalyticsBuildEntity do
  let(:entity) do
    described_class.new(build, request: double)
  end

  context 'build with an author' do
    let(:user) { create(:user) }
    let(:build) { create(:ci_build, author: user) }

    subject { entity.as_json }

    it 'contains the URL' do
      expect(subject).to include(:url)
    end

    it 'contains the author' do
      expect(subject).to include(:author)
    end

    it 'does not contain sensitive information' do
      expect(subject).not_to include(/token/)
      expect(subject).not_to include(/variables/)
    end
  end
end
22
spec/serializers/analytics_build_serializer_spec.rb
Normal file
@@ -0,0 +1,22 @@
require 'spec_helper'

describe AnalyticsBuildSerializer do
  let(:serializer) do
    described_class
      .new.represent(resource)
  end

  let(:json) { serializer.as_json }
  let(:resource) { create(:ci_build) }

  context 'when there is a single object provided' do
    it 'generates payload for single object' do
      expect(json).to be_an_instance_of Hash
    end

    it 'contains important elements of analyticsBuild' do
      expect(json)
        .to include(:name, :branch, :short_sha, :date, :total_time, :url, :author)
    end
  end
end
39
spec/serializers/analytics_generic_entity_spec.rb
Normal file
@@ -0,0 +1,39 @@
require 'spec_helper'

describe AnalyticsIssueEntity do
  let(:user) { create(:user) }
  let(:entity_hash) do
    {
      total_time: "172802.724419",
      title: "Eos voluptatem inventore in sed.",
      iid: "1",
      id: "1",
      created_at: "2016-11-12 15:04:02.948604",
      author: user,
    }
  end

  let(:project) { create(:empty_project) }
  let(:request) { EntityRequest.new(project: project, entity: :merge_request) }

  let(:entity) do
    described_class.new(entity_hash, request: request, project: project)
  end

  context 'generic entity' do
    subject { entity.as_json }

    it 'contains the entity URL' do
      expect(subject).to include(:url)
    end

    it 'contains the author' do
      expect(subject).to include(:author)
    end

    it 'does not contain sensitive information' do
      expect(subject).not_to include(/token/)
      expect(subject).not_to include(/variables/)
    end
  end
end
33
spec/serializers/analytics_issue_serializer_spec.rb
Normal file
@@ -0,0 +1,33 @@
require 'spec_helper'

describe AnalyticsIssueSerializer do
  let(:serializer) do
    described_class
      .new(project: project, entity: :merge_request)
      .represent(resource)
  end

  let(:user) { create(:user) }
  let(:json) { serializer.as_json }
  let(:project) { create(:project) }
  let(:resource) do
    {
      total_time: "172802.724419",
      title: "Eos voluptatem inventore in sed.",
      iid: "1",
      id: "1",
      created_at: "2016-11-12 15:04:02.948604",
      author: user,
    }
  end

  context 'when there is a single object provided' do
    it 'generates payload for single object' do
      expect(json).to be_an_instance_of Hash
    end

    it 'contains important elements of the issue' do
      expect(json).to include(:title, :iid, :created_at, :total_time, :url, :author)
    end
  end
end
34
spec/serializers/analytics_merge_request_serializer_spec.rb
Normal file
@@ -0,0 +1,34 @@
require 'spec_helper'

describe AnalyticsMergeRequestSerializer do
  let(:serializer) do
    described_class
      .new(project: project, entity: :merge_request)
      .represent(resource)
  end

  let(:user) { create(:user) }
  let(:json) { serializer.as_json }
  let(:project) { create(:project) }
  let(:resource) do
    {
      total_time: "172802.724419",
      title: "Eos voluptatem inventore in sed.",
      iid: "1",
      id: "1",
      state: 'open',
      created_at: "2016-11-12 15:04:02.948604",
      author: user
    }
  end

  context 'when there is a single object provided' do
    it 'generates payload for single object' do
      expect(json).to be_an_instance_of Hash
    end

    it 'contains important elements of the merge request' do
      expect(json).to include(:title, :iid, :created_at, :total_time, :url, :author, :state)
    end
  end
end
45
spec/serializers/entity_date_helper_spec.rb
Normal file
@@ -0,0 +1,45 @@
require 'spec_helper'

describe EntityDateHelper do
  let(:date_helper_class) { Class.new { include EntityDateHelper }.new }

  it 'converts 0 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(0)).to eq(seconds: 0)
  end

  it 'converts 40 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(40)).to eq(seconds: 40)
  end

  it 'converts 60 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(60)).to eq(mins: 1)
  end

  it 'converts 70 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(70)).to eq(mins: 1, seconds: 10)
  end

  it 'converts 3600 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(3600)).to eq(hours: 1)
  end

  it 'converts 3750 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(3750)).to eq(hours: 1, mins: 2, seconds: 30)
  end

  it 'converts 86400 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(86400)).to eq(days: 1)
  end

  it 'converts 86560 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(86560)).to eq(days: 1, mins: 2, seconds: 40)
  end

  it 'converts 99760 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(99760)).to eq(days: 1, hours: 3, mins: 42, seconds: 40)
  end

  it 'converts 986760 seconds' do
    expect(date_helper_class.distance_of_time_as_hash(986760)).to eq(days: 11, hours: 10, mins: 6)
  end
end
@@ -15,32 +15,36 @@ describe PipelineMetricsWorker do
   end
 
   describe '#perform' do
-    subject { described_class.new.perform(pipeline.id) }
+    before do
+      described_class.new.perform(pipeline.id)
+    end
 
     context 'when pipeline is running' do
       let(:status) { 'running' }
 
       it 'records the build start time' do
-        subject
-
         expect(merge_request.reload.metrics.latest_build_started_at).to be_like_time(pipeline.started_at)
       end
 
       it 'clears the build end time' do
-        subject
-
         expect(merge_request.reload.metrics.latest_build_finished_at).to be_nil
       end
+
+      it 'records the pipeline' do
+        expect(merge_request.reload.metrics.pipeline).to eq(pipeline)
+      end
     end
 
     context 'when pipeline succeeded' do
       let(:status) { 'success' }
 
       it 'records the build end time' do
-        subject
-
         expect(merge_request.reload.metrics.latest_build_finished_at).to be_like_time(pipeline.finished_at)
       end
+
+      it 'records the pipeline' do
+        expect(merge_request.reload.metrics.pipeline).to eq(pipeline)
+      end
     end
   end
 end