Implement (some) comments from @DouweM's review.
- Move things common to `Issue` and `MergeRequest` into `Issuable`
- Move more database-specific functions into `Gitlab::Database`
- Indentation changes and other minor refactorings.
parent 6f194e28e4
commit 71d4bf721b
10 changed files with 126 additions and 122 deletions
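A recurring theme in the hunks below is replacing direct checks of `ActiveRecord::Base.connection.adapter_name` with the `Gitlab::Database.postgresql?` / `Gitlab::Database.mysql?` helpers. As a rough sketch of what those predicates amount to (the bodies below are an assumption for illustration only; the real definitions live outside this diff, presumably in lib/gitlab/database.rb):

    # Hypothetical sketch, not taken from this commit. Assumes ActiveRecord is loaded.
    module Gitlab
      module Database
        def self.adapter_name
          ActiveRecord::Base.connection.adapter_name
        end

        def self.postgresql?
          adapter_name.casecmp('postgresql').zero?
        end

        def self.mysql?
          adapter_name.casecmp('mysql2').zero?
        end
      end
    end

Call sites can then branch with `if Gitlab::Database.postgresql? ... elsif Gitlab::Database.mysql?` instead of repeating the adapter-name string comparison, which is the change applied in `Gitlab::Database::Median#median_datetime` and the new `Gitlab::Database::DateTime#subtract_datetimes` below.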
@@ -33,6 +33,8 @@ module Issuable
     has_many :labels, through: :label_links
     has_many :todos, as: :target, dependent: :destroy
 
+    has_one :metrics
+
     validates :author, presence: true
     validates :title, presence: true, length: { within: 0..255 }
 
@@ -82,6 +84,7 @@ module Issuable
     acts_as_paranoid
 
     after_save :update_assignee_cache_counts, if: :assignee_id_changed?
+    after_save :record_metrics
 
     def update_assignee_cache_counts
       # make sure we flush the cache for both the old *and* new assignee
@@ -287,4 +290,9 @@ module Issuable
   def can_move?(*)
     false
   end
+
+  def record_metrics
+    metrics = self.metrics || create_metrics
+    metrics.record!
+  end
 end
@@ -1,5 +1,6 @@
 class CycleAnalytics
   include Gitlab::Database::Median
+  include Gitlab::Database::DateTime
 
   def initialize(project, from:)
     @project = project
@@ -12,58 +13,64 @@ class CycleAnalytics
   end
 
   def issue
-    calculate_metric!(:issue,
-                      TableReferences.issues[:created_at],
-                      [TableReferences.issue_metrics[:first_associated_with_milestone_at],
-                       TableReferences.issue_metrics[:first_added_to_board_at]])
+    calculate_metric(:issue,
+                     TableReferences.issues[:created_at],
+                     [TableReferences.issue_metrics[:first_associated_with_milestone_at],
+                      TableReferences.issue_metrics[:first_added_to_board_at]])
   end
 
   def plan
-    calculate_metric!(:plan,
-                      [TableReferences.issue_metrics[:first_associated_with_milestone_at],
-                       TableReferences.issue_metrics[:first_added_to_board_at]],
-                      TableReferences.issue_metrics[:first_mentioned_in_commit_at])
+    calculate_metric(:plan,
+                     [TableReferences.issue_metrics[:first_associated_with_milestone_at],
+                      TableReferences.issue_metrics[:first_added_to_board_at]],
+                     TableReferences.issue_metrics[:first_mentioned_in_commit_at])
   end
 
   def code
-    calculate_metric!(:code,
-                      TableReferences.issue_metrics[:first_mentioned_in_commit_at],
-                      TableReferences.merge_requests[:created_at])
+    calculate_metric(:code,
+                     TableReferences.issue_metrics[:first_mentioned_in_commit_at],
+                     TableReferences.merge_requests[:created_at])
   end
 
   def test
-    calculate_metric!(:test,
-                      TableReferences.merge_request_metrics[:latest_build_started_at],
-                      TableReferences.merge_request_metrics[:latest_build_finished_at])
+    calculate_metric(:test,
+                     TableReferences.merge_request_metrics[:latest_build_started_at],
+                     TableReferences.merge_request_metrics[:latest_build_finished_at])
   end
 
   def review
-    calculate_metric!(:review,
-                      TableReferences.merge_requests[:created_at],
-                      TableReferences.merge_request_metrics[:merged_at])
+    calculate_metric(:review,
+                     TableReferences.merge_requests[:created_at],
+                     TableReferences.merge_request_metrics[:merged_at])
   end
 
   def staging
-    calculate_metric!(:staging,
-                      TableReferences.merge_request_metrics[:merged_at],
-                      TableReferences.merge_request_metrics[:first_deployed_to_production_at])
+    calculate_metric(:staging,
+                     TableReferences.merge_request_metrics[:merged_at],
+                     TableReferences.merge_request_metrics[:first_deployed_to_production_at])
   end
 
   def production
-    calculate_metric!(:production,
-                      TableReferences.issues[:created_at],
-                      TableReferences.merge_request_metrics[:first_deployed_to_production_at])
+    calculate_metric(:production,
+                     TableReferences.issues[:created_at],
+                     TableReferences.merge_request_metrics[:first_deployed_to_production_at])
   end
 
   private
 
-  def calculate_metric!(name, start_time_attrs, end_time_attrs)
+  def calculate_metric(name, start_time_attrs, end_time_attrs)
     cte_table = Arel::Table.new("cte_table_for_#{name}")
 
-    # Add a `SELECT` for (end_time - start-time), and add an alias for it.
-    query = Arel::Nodes::As.new(cte_table, subtract_datetimes(base_query, end_time_attrs, start_time_attrs, name.to_s))
-    queries = Array.wrap(median_datetime(cte_table, query, name))
-    results = queries.map { |query| run_query(query) }
+    # Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
+    # Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
+    # We compute the (end_time - start_time) interval, and give it an alias based on the current
+    # cycle analytics stage.
+    interval_query = Arel::Nodes::As.new(
+      cte_table,
+      subtract_datetimes(base_query, end_time_attrs, start_time_attrs, name.to_s))
+
+    median_queries = Array.wrap(median_datetime(cte_table, interval_query, name))
+    results = median_queries.map { |query| run_query(query) }
     extract_median(results).presence
   end
 
@@ -82,51 +89,17 @@ class CycleAnalytics
       where(TableReferences.issues[:created_at].gteq(@from))
 
     # Load merge_requests
-    query = query.join(TableReferences.merge_requests, Arel::Nodes::OuterJoin).on(TableReferences.merge_requests[:id].eq(arel_table[:merge_request_id])).
-            join(TableReferences.merge_request_metrics).on(TableReferences.merge_requests[:id].eq(TableReferences.merge_request_metrics[:merge_request_id]))
+    query = query.join(TableReferences.merge_requests, Arel::Nodes::OuterJoin).
+                  on(TableReferences.merge_requests[:id].eq(arel_table[:merge_request_id])).
+                  join(TableReferences.merge_request_metrics).
+                  on(TableReferences.merge_requests[:id].eq(TableReferences.merge_request_metrics[:merge_request_id]))
 
     # Limit to merge requests that have been deployed to production after `@from`
     query.where(TableReferences.merge_request_metrics[:first_deployed_to_production_at].gteq(@from))
   end
 
-  # Note: We use COALESCE to pick up the first non-null column for end_time / start_time.
-  def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
-    diff_fn = case ActiveRecord::Base.connection.adapter_name
-              when 'PostgreSQL'
-                Arel::Nodes::Subtraction.new(
-                  Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
-                  Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
-              when 'Mysql2'
-                Arel::Nodes::NamedFunction.new(
-                  "TIMESTAMPDIFF",
-                  [Arel.sql('second'),
-                   Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
-                   Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
-              else
-                raise NotImplementedError, "Cycle analytics doesn't support your database type."
-              end
-
-    query_so_far.project(diff_fn.as(as))
-  end
-
   def run_query(query)
-    if query.is_a? String
-      ActiveRecord::Base.connection.execute query
-    else
-      ActiveRecord::Base.connection.execute query.to_sql
-    end
-  end
-
-  def extract_median(results)
-    result = results.compact.first
-
-    case ActiveRecord::Base.connection.adapter_name
-    when 'PostgreSQL'
-      result = result.first.presence
-      median = result['median'] if result
-      median.to_f if median
-    when 'Mysql2'
-      result.to_a.flatten.first
-    end
+    query = query.to_sql unless query.is_a?(String)
+    ActiveRecord::Base.connection.execute(query)
   end
 end
@@ -46,22 +46,19 @@ class Deployment < ActiveRecord::Base
 
   def update_merge_request_metrics
     if environment.update_merge_request_metrics?
-      query = project.merge_requests.
-              joins(:metrics).
-              where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil })
+      merge_requests = project.merge_requests.
+                       joins(:metrics).
+                       where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil }).
+                       where("merge_request_metrics.merged_at <= ?", self.created_at)
 
-      merge_requests =
-        if previous_deployment
-          query.where("merge_request_metrics.merged_at <= ? AND merge_request_metrics.merged_at >= ?",
-                      self.created_at,
-                      previous_deployment.created_at)
-        else
-          query.where("merge_request_metrics.merged_at <= ?", self.created_at)
-        end
+      if previous_deployment
+        merge_requests = merge_requests.where("merge_request_metrics.merged_at >= ?", previous_deployment.created_at)
+      end
 
       # Need to use `map` instead of `select` because MySQL doesn't allow `SELECT`ing from the same table
       # that we're updating.
-      MergeRequest::Metrics.where(merge_request_id: merge_requests.map(&:id), first_deployed_to_production_at: nil).
+      MergeRequest::Metrics.
+        where(merge_request_id: merge_requests.map(&:id), first_deployed_to_production_at: nil).
         update_all(first_deployed_to_production_at: self.created_at)
     end
   end
@@ -23,8 +23,6 @@ class Issue < ActiveRecord::Base
 
   has_many :events, as: :target, dependent: :destroy
 
-  has_one :metrics
-
   has_many :merge_requests_closing_issues, class_name: 'MergeRequestsClosingIssues'
   has_many :closed_by_merge_requests, through: :merge_requests_closing_issues, source: :merge_request
 
@@ -41,8 +39,6 @@ class Issue < ActiveRecord::Base
   scope :order_due_date_asc, -> { reorder('issues.due_date IS NULL, issues.due_date ASC') }
   scope :order_due_date_desc, -> { reorder('issues.due_date IS NULL, issues.due_date DESC') }
 
-  after_save :record_metrics
-
   attr_spammable :title, spam_title: true
   attr_spammable :description, spam_description: true
 
@@ -277,9 +273,4 @@ class Issue < ActiveRecord::Base
   def check_for_spam?
     project.public?
   end
-
-  def record_metrics
-    metrics = Metrics.find_or_create_by(issue_id: self.id)
-    metrics.record!
-  end
 end
@@ -10,7 +10,7 @@ class Issue::Metrics < ActiveRecord::Base
       self.first_added_to_board_at = Time.now
     end
 
-    self.save if self.changed?
+    self.save
   end
 
   private
@@ -13,7 +13,6 @@ class MergeRequest < ActiveRecord::Base
   has_many :merge_request_diffs, dependent: :destroy
   has_one :merge_request_diff,
     -> { order('merge_request_diffs.id DESC') }
-  has_one :metrics
 
   has_many :events, as: :target, dependent: :destroy
 
@@ -35,8 +34,6 @@ class MergeRequest < ActiveRecord::Base
   # when creating new merge request
   attr_accessor :can_be_created, :compare_commits, :compare
 
-  after_save :record_metrics
-
   state_machine :state, initial: :opened do
     event :close do
       transition [:reopened, :opened] => :closed
@@ -515,7 +512,7 @@ class MergeRequest < ActiveRecord::Base
     transaction do
       self.merge_requests_closing_issues.delete_all
       closes_issues(current_user).each do |issue|
-        MergeRequestsClosingIssues.create!(merge_request: self, issue: issue)
+        self.merge_requests_closing_issues.create!(issue: issue)
       end
     end
   end
@@ -845,9 +842,4 @@ class MergeRequest < ActiveRecord::Base
       @conflicts_can_be_resolved_in_ui = false
     end
   end
-
-  def record_metrics
-    metrics = Metrics.find_or_create_by(merge_request_id: self.id)
-    metrics.record!
-  end
 end
@@ -6,6 +6,6 @@ class MergeRequest::Metrics < ActiveRecord::Base
       self.merged_at = Time.now
     end
 
-    self.save if self.changed?
+    self.save
   end
 end
@@ -780,7 +780,7 @@ Rails.application.routes.draw do
 
         resources :environments
 
-        resource :cycle_analytics
+        resource :cycle_analytics, only: [:show]
 
         resources :builds, only: [:index, :show], constraints: { id: /\d+/ } do
           collection do
lib/gitlab/database/date_time.rb (new file, 27 additions)
@@ -0,0 +1,27 @@
+module Gitlab
+  module Database
+    module DateTime
+      # Find the first of the `end_time_attrs` that isn't `NULL`. Subtract from it
+      # the first of the `start_time_attrs` that isn't NULL. `SELECT` the resulting interval
+      # along with an alias specified by the `as` parameter.
+      #
+      # Note: For MySQL, the interval is returned in seconds.
+      #       For PostgreSQL, the interval is returned as an INTERVAL type.
+      def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
+        diff_fn = if Gitlab::Database.postgresql?
+                    Arel::Nodes::Subtraction.new(
+                      Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
+                      Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
+                  elsif Gitlab::Database.mysql?
+                    Arel::Nodes::NamedFunction.new(
+                      "TIMESTAMPDIFF",
+                      [Arel.sql('second'),
+                       Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
+                       Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
+                  end
+
+        query_so_far.project(diff_fn.as(as))
+      end
+    end
+  end
+end
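For illustration only (not part of the commit): the module above is meant to be mixed into a query object, as `CycleAnalytics` does in the earlier hunk. A hedged usage sketch, with `issues` and `issue_metrics` standing in for the Arel tables reached through `TableReferences`, mirroring the `:issue` stage arguments:

    # Hypothetical usage sketch of Gitlab::Database::DateTime#subtract_datetimes.
    class IntervalExample
      include Gitlab::Database::DateTime

      # Projects "end minus start" onto base_query under the alias 'issue':
      # COALESCE(end attrs) - COALESCE(start attrs) on PostgreSQL, or the
      # TIMESTAMPDIFF(second, ...) equivalent on MySQL.
      def issue_interval(base_query, issues, issue_metrics)
        subtract_datetimes(base_query,
                           [issue_metrics[:first_associated_with_milestone_at],
                            issue_metrics[:first_added_to_board_at]],
                           issues[:created_at],
                           'issue')
      end
    end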
@@ -3,13 +3,22 @@ module Gitlab
   module Database
     module Median
       def median_datetime(arel_table, query_so_far, column_sym)
-        case ActiveRecord::Base.connection.adapter_name
-        when 'PostgreSQL'
+        if Gitlab::Database.postgresql?
           pg_median_datetime(arel_table, query_so_far, column_sym)
-        when 'Mysql2'
+        elsif Gitlab::Database.mysql?
           mysql_median_datetime(arel_table, query_so_far, column_sym)
         else
           raise NotImplementedError, "We haven't implemented a database median strategy for your database type."
         end
       end
 
+      def extract_median(results)
+        result = results.compact.first
+
+        if Gitlab::Database.postgresql?
+          result = result.first.presence
+          median = result['median'] if result
+          median.to_f if median
+        elsif Gitlab::Database.mysql?
+          result.to_a.flatten.first
+        end
+      end
+
@@ -17,17 +26,23 @@ module Gitlab
         query = arel_table.
                 from(arel_table.project(Arel.sql('*')).order(arel_table[column_sym]).as(arel_table.table_name)).
                 project(average([arel_table[column_sym]], 'median')).
-                where(Arel::Nodes::Between.new(Arel.sql("(select @row_id := @row_id + 1)"),
-                                               Arel::Nodes::And.new([Arel.sql('@ct/2.0'),
-                                                                     Arel.sql('@ct/2.0 + 1')]))).
+                where(Arel::Nodes::Between.new(
+                  Arel.sql("(select @row_id := @row_id + 1)"),
+                  Arel::Nodes::And.new(
+                    [Arel.sql('@ct/2.0'),
+                     Arel.sql('@ct/2.0 + 1')]
+                  )
+                )).
                 # Disallow negative values
                 where(arel_table[column_sym].gteq(0))
 
-        [Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
-         Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
-         Arel.sql("set @row_id := 0;"),
-         query,
-         Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")]
+        [
+          Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
+          Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
+          Arel.sql("set @row_id := 0;"),
+          query,
+          Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")
+        ]
       end
 
       def pg_median_datetime(arel_table, query_so_far, column_sym)
@@ -42,14 +57,15 @@ module Gitlab
         #  9 | 2 | 3
         # 15 | 3 | 3
         cte_table = Arel::Table.new("ordered_records")
-        cte = Arel::Nodes::As.new(cte_table,
-                                  arel_table.
-                                    project(arel_table[column_sym].as(column_sym.to_s),
-                                            Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
-                                                                  Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
-                                            arel_table.project("COUNT(1)").as('ct')).
-                                    # Disallow negative values
-                                    where(arel_table[column_sym].gteq(zero_interval)))
+        cte = Arel::Nodes::As.new(
+          cte_table,
+          arel_table.
+            project(arel_table[column_sym].as(column_sym.to_s),
+                    Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
+                                          Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
+                    arel_table.project("COUNT(1)").as('ct')).
+            # Disallow negative values
+            where(arel_table[column_sym].gteq(zero_interval)))
 
         # From the CTE, select either the middle row or the middle two rows (this is accomplished
         # by 'where cte.row_id between cte.ct / 2.0 AND cte.ct / 2.0 + 1'). Find the average of the
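An aside on the comment above (not part of the diff): selecting the rows whose row_id lies between ct / 2.0 and ct / 2.0 + 1 and averaging them gives the usual order-statistic median of the n ordered intervals x_1 <= ... <= x_n:

    median = x_{(n+1)/2}                  if n is odd
    median = (x_{n/2} + x_{n/2+1}) / 2    if n is even

For odd n the window contains only the middle row; for even n it contains the two middle rows, whose average is taken.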