diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb
index 6ed9216d666..1650ac9fcbe 100644
--- a/app/models/concerns/issuable.rb
+++ b/app/models/concerns/issuable.rb
@@ -33,6 +33,8 @@ module Issuable
     has_many :labels, through: :label_links
     has_many :todos, as: :target, dependent: :destroy

+    has_one :metrics
+
     validates :author, presence: true
     validates :title, presence: true, length: { within: 0..255 }

@@ -82,6 +84,7 @@ module Issuable
     acts_as_paranoid

     after_save :update_assignee_cache_counts, if: :assignee_id_changed?
+    after_save :record_metrics

     def update_assignee_cache_counts
       # make sure we flush the cache for both the old *and* new assignee
@@ -287,4 +290,9 @@ module Issuable
   def can_move?(*)
     false
   end
+
+  def record_metrics
+    metrics = self.metrics || create_metrics
+    metrics.record!
+  end
 end
diff --git a/app/models/cycle_analytics.rb b/app/models/cycle_analytics.rb
index d5f1c754dad..3ca160252ff 100644
--- a/app/models/cycle_analytics.rb
+++ b/app/models/cycle_analytics.rb
@@ -1,5 +1,6 @@
 class CycleAnalytics
   include Gitlab::Database::Median
+  include Gitlab::Database::DateTime

   def initialize(project, from:)
     @project = project
@@ -12,58 +13,64 @@ class CycleAnalytics
   end

   def issue
-    calculate_metric!(:issue,
-                      TableReferences.issues[:created_at],
-                      [TableReferences.issue_metrics[:first_associated_with_milestone_at],
-                       TableReferences.issue_metrics[:first_added_to_board_at]])
+    calculate_metric(:issue,
+                     TableReferences.issues[:created_at],
+                     [TableReferences.issue_metrics[:first_associated_with_milestone_at],
+                      TableReferences.issue_metrics[:first_added_to_board_at]])
   end

   def plan
-    calculate_metric!(:plan,
-                      [TableReferences.issue_metrics[:first_associated_with_milestone_at],
-                       TableReferences.issue_metrics[:first_added_to_board_at]],
-                      TableReferences.issue_metrics[:first_mentioned_in_commit_at])
+    calculate_metric(:plan,
+                     [TableReferences.issue_metrics[:first_associated_with_milestone_at],
+                      TableReferences.issue_metrics[:first_added_to_board_at]],
+                     TableReferences.issue_metrics[:first_mentioned_in_commit_at])
   end

   def code
-    calculate_metric!(:code,
-                      TableReferences.issue_metrics[:first_mentioned_in_commit_at],
-                      TableReferences.merge_requests[:created_at])
+    calculate_metric(:code,
+                     TableReferences.issue_metrics[:first_mentioned_in_commit_at],
+                     TableReferences.merge_requests[:created_at])
   end

   def test
-    calculate_metric!(:test,
-                      TableReferences.merge_request_metrics[:latest_build_started_at],
-                      TableReferences.merge_request_metrics[:latest_build_finished_at])
+    calculate_metric(:test,
+                     TableReferences.merge_request_metrics[:latest_build_started_at],
+                     TableReferences.merge_request_metrics[:latest_build_finished_at])
   end

   def review
-    calculate_metric!(:review,
-                      TableReferences.merge_requests[:created_at],
-                      TableReferences.merge_request_metrics[:merged_at])
+    calculate_metric(:review,
+                     TableReferences.merge_requests[:created_at],
+                     TableReferences.merge_request_metrics[:merged_at])
   end

   def staging
-    calculate_metric!(:staging,
-                      TableReferences.merge_request_metrics[:merged_at],
-                      TableReferences.merge_request_metrics[:first_deployed_to_production_at])
+    calculate_metric(:staging,
+                     TableReferences.merge_request_metrics[:merged_at],
+                     TableReferences.merge_request_metrics[:first_deployed_to_production_at])
   end

   def production
-    calculate_metric!(:production,
-                      TableReferences.issues[:created_at],
-                      TableReferences.merge_request_metrics[:first_deployed_to_production_at])
+    calculate_metric(:production,
+                     TableReferences.issues[:created_at],
+                     TableReferences.merge_request_metrics[:first_deployed_to_production_at])
   end

   private

-  def calculate_metric!(name, start_time_attrs, end_time_attrs)
+  def calculate_metric(name, start_time_attrs, end_time_attrs)
     cte_table = Arel::Table.new("cte_table_for_#{name}")

-    # Add a `SELECT` for (end_time - start-time), and add an alias for it.
-    query = Arel::Nodes::As.new(cte_table, subtract_datetimes(base_query, end_time_attrs, start_time_attrs, name.to_s))
-    queries = Array.wrap(median_datetime(cte_table, query, name))
-    results = queries.map { |query| run_query(query) }
+    # Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
+    # Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
+    # We compute the (end_time - start_time) interval, and give it an alias based on the current
+    # cycle analytics stage.
+    interval_query = Arel::Nodes::As.new(
+      cte_table,
+      subtract_datetimes(base_query, end_time_attrs, start_time_attrs, name.to_s))
+
+    median_queries = Array.wrap(median_datetime(cte_table, interval_query, name))
+    results = median_queries.map { |query| run_query(query) }
     extract_median(results).presence
   end

@@ -82,51 +89,17 @@ class CycleAnalytics
             where(TableReferences.issues[:created_at].gteq(@from))

     # Load merge_requests
-    query = query.join(TableReferences.merge_requests, Arel::Nodes::OuterJoin).on(TableReferences.merge_requests[:id].eq(arel_table[:merge_request_id])).
-            join(TableReferences.merge_request_metrics).on(TableReferences.merge_requests[:id].eq(TableReferences.merge_request_metrics[:merge_request_id]))
+    query = query.join(TableReferences.merge_requests, Arel::Nodes::OuterJoin).
+                  on(TableReferences.merge_requests[:id].eq(arel_table[:merge_request_id])).
+                  join(TableReferences.merge_request_metrics).
+                  on(TableReferences.merge_requests[:id].eq(TableReferences.merge_request_metrics[:merge_request_id]))

     # Limit to merge requests that have been deployed to production after `@from`
     query.where(TableReferences.merge_request_metrics[:first_deployed_to_production_at].gteq(@from))
   end

-  # Note: We use COALESCE to pick up the first non-null column for end_time / start_time.
-  def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
-    diff_fn = case ActiveRecord::Base.connection.adapter_name
-              when 'PostgreSQL'
-                Arel::Nodes::Subtraction.new(
-                  Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
-                  Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
-              when 'Mysql2'
-                Arel::Nodes::NamedFunction.new(
-                  "TIMESTAMPDIFF",
-                  [Arel.sql('second'),
-                   Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
-                   Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
-              else
-                raise NotImplementedError, "Cycle analytics doesn't support your database type."
-              end
-
-    query_so_far.project(diff_fn.as(as))
-  end
-
   def run_query(query)
-    if query.is_a? String
-      ActiveRecord::Base.connection.execute query
-    else
-      ActiveRecord::Base.connection.execute query.to_sql
-    end
-  end
-
-  def extract_median(results)
-    result = results.compact.first
-
-    case ActiveRecord::Base.connection.adapter_name
-    when 'PostgreSQL'
-      result = result.first.presence
-      median = result['median'] if result
-      median.to_f if median
-    when 'Mysql2'
-      result.to_a.flatten.first
-    end
+    query = query.to_sql unless query.is_a?(String)
+    ActiveRecord::Base.connection.execute(query)
   end
 end
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index 183d538a867..31a5bb1f962 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -46,22 +46,19 @@ class Deployment < ActiveRecord::Base

   def update_merge_request_metrics
     if environment.update_merge_request_metrics?
-      query = project.merge_requests.
-              joins(:metrics).
-              where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil })
+      merge_requests = project.merge_requests.
+                       joins(:metrics).
+                       where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil }).
+                       where("merge_request_metrics.merged_at <= ?", self.created_at)

-      merge_requests =
-        if previous_deployment
-          query.where("merge_request_metrics.merged_at <= ? AND merge_request_metrics.merged_at >= ?",
-                      self.created_at,
-                      previous_deployment.created_at)
-        else
-          query.where("merge_request_metrics.merged_at <= ?", self.created_at)
-        end
+      if previous_deployment
+        merge_requests = merge_requests.where("merge_request_metrics.merged_at >= ?", previous_deployment.created_at)
+      end

       # Need to use `map` instead of `select` because MySQL doesn't allow `SELECT`ing from the same table
       # that we're updating.
-      MergeRequest::Metrics.where(merge_request_id: merge_requests.map(&:id), first_deployed_to_production_at: nil).
+      MergeRequest::Metrics.
+        where(merge_request_id: merge_requests.map(&:id), first_deployed_to_production_at: nil).
         update_all(first_deployed_to_production_at: self.created_at)
     end
   end
diff --git a/app/models/issue.rb b/app/models/issue.rb
index 06af94adf5e..6a264ace165 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -23,8 +23,6 @@ class Issue < ActiveRecord::Base

   has_many :events, as: :target, dependent: :destroy

-  has_one :metrics
-
   has_many :merge_requests_closing_issues, class_name: 'MergeRequestsClosingIssues'
   has_many :closed_by_merge_requests, through: :merge_requests_closing_issues, source: :merge_request

@@ -41,8 +39,6 @@ class Issue < ActiveRecord::Base
   scope :order_due_date_asc, -> { reorder('issues.due_date IS NULL, issues.due_date ASC') }
   scope :order_due_date_desc, -> { reorder('issues.due_date IS NULL, issues.due_date DESC') }

-  after_save :record_metrics
-
   attr_spammable :title, spam_title: true
   attr_spammable :description, spam_description: true

@@ -277,9 +273,4 @@ class Issue < ActiveRecord::Base
   def check_for_spam?
     project.public?
   end
-
-  def record_metrics
-    metrics = Metrics.find_or_create_by(issue_id: self.id)
-    metrics.record!
-  end
 end
diff --git a/app/models/issue/metrics.rb b/app/models/issue/metrics.rb
index 4436696cc1a..012d545c440 100644
--- a/app/models/issue/metrics.rb
+++ b/app/models/issue/metrics.rb
@@ -10,7 +10,7 @@ class Issue::Metrics < ActiveRecord::Base
       self.first_added_to_board_at = Time.now
     end

-    self.save if self.changed?
+    self.save
   end

   private
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index b3af3ff7c17..0ac291ce1a0 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -13,7 +13,6 @@ class MergeRequest < ActiveRecord::Base

   has_many :merge_request_diffs, dependent: :destroy
   has_one :merge_request_diff, -> { order('merge_request_diffs.id DESC') }
-  has_one :metrics

   has_many :events, as: :target, dependent: :destroy

@@ -35,8 +34,6 @@ class MergeRequest < ActiveRecord::Base
   # when creating new merge request
   attr_accessor :can_be_created, :compare_commits, :compare

-  after_save :record_metrics
-
   state_machine :state, initial: :opened do
     event :close do
       transition [:reopened, :opened] => :closed
@@ -515,7 +512,7 @@ class MergeRequest < ActiveRecord::Base
     transaction do
       self.merge_requests_closing_issues.delete_all
       closes_issues(current_user).each do |issue|
-        MergeRequestsClosingIssues.create!(merge_request: self, issue: issue)
+        self.merge_requests_closing_issues.create!(issue: issue)
       end
     end
   end
@@ -845,9 +842,4 @@ class MergeRequest < ActiveRecord::Base
       @conflicts_can_be_resolved_in_ui = false
     end
   end
-
-  def record_metrics
-    metrics = Metrics.find_or_create_by(merge_request_id: self.id)
-    metrics.record!
-  end
 end
diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb
index 697a2e303fb..99c49a020c9 100644
--- a/app/models/merge_request/metrics.rb
+++ b/app/models/merge_request/metrics.rb
@@ -6,6 +6,6 @@ class MergeRequest::Metrics < ActiveRecord::Base
       self.merged_at = Time.now
     end

-    self.save if self.changed?
+    self.save
   end
 end
diff --git a/config/routes.rb b/config/routes.rb
index 33b070188dc..c4eee59e7aa 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -780,7 +780,7 @@ Rails.application.routes.draw do

         resources :environments

-        resource :cycle_analytics
+        resource :cycle_analytics, only: [:show]

         resources :builds, only: [:index, :show], constraints: { id: /\d+/ } do
           collection do
diff --git a/lib/gitlab/database/date_time.rb b/lib/gitlab/database/date_time.rb
new file mode 100644
index 00000000000..b6a89f715fd
--- /dev/null
+++ b/lib/gitlab/database/date_time.rb
@@ -0,0 +1,27 @@
+module Gitlab
+  module Database
+    module DateTime
+      # Find the first of the `end_time_attrs` that isn't `NULL`. Subtract from it
+      # the first of the `start_time_attrs` that isn't NULL. `SELECT` the resulting interval
+      # along with an alias specified by the `as` parameter.
+      #
+      # Note: For MySQL, the interval is returned in seconds.
+      #       For PostgreSQL, the interval is returned as an INTERVAL type.
+      def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
+        diff_fn = if Gitlab::Database.postgresql?
+                    Arel::Nodes::Subtraction.new(
+                      Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
+                      Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
+                  elsif Gitlab::Database.mysql?
+                    Arel::Nodes::NamedFunction.new(
+                      "TIMESTAMPDIFF",
+                      [Arel.sql('second'),
+                       Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
+                       Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
+                  end
+
+        query_so_far.project(diff_fn.as(as))
+      end
+    end
+  end
+end
diff --git a/lib/gitlab/database/median.rb b/lib/gitlab/database/median.rb
index 3ede0c6acd9..21b4c043655 100644
--- a/lib/gitlab/database/median.rb
+++ b/lib/gitlab/database/median.rb
@@ -3,13 +3,22 @@ module Gitlab
   module Database
     module Median
       def median_datetime(arel_table, query_so_far, column_sym)
-        case ActiveRecord::Base.connection.adapter_name
-        when 'PostgreSQL'
+        if Gitlab::Database.postgresql?
           pg_median_datetime(arel_table, query_so_far, column_sym)
-        when 'Mysql2'
+        elsif Gitlab::Database.mysql?
           mysql_median_datetime(arel_table, query_so_far, column_sym)
-        else
-          raise NotImplementedError, "We haven't implemented a database median strategy for your database type."
+        end
+      end
+
+      def extract_median(results)
+        result = results.compact.first
+
+        if Gitlab::Database.postgresql?
+          result = result.first.presence
+          median = result['median'] if result
+          median.to_f if median
+        elsif Gitlab::Database.mysql?
+          result.to_a.flatten.first
         end
       end

@@ -17,17 +26,23 @@ module Gitlab
         query = arel_table.
           from(arel_table.project(Arel.sql('*')).order(arel_table[column_sym]).as(arel_table.table_name)).
           project(average([arel_table[column_sym]], 'median')).
-          where(Arel::Nodes::Between.new(Arel.sql("(select @row_id := @row_id + 1)"),
-                                         Arel::Nodes::And.new([Arel.sql('@ct/2.0'),
-                                                               Arel.sql('@ct/2.0 + 1')]))).
+          where(Arel::Nodes::Between.new(
+                  Arel.sql("(select @row_id := @row_id + 1)"),
+                  Arel::Nodes::And.new(
+                    [Arel.sql('@ct/2.0'),
+                     Arel.sql('@ct/2.0 + 1')]
+                  )
+                )).
           # Disallow negative values
           where(arel_table[column_sym].gteq(0))

-        [Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
-         Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
-         Arel.sql("set @row_id := 0;"),
-         query,
-         Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")]
+        [
+          Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
+          Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
+          Arel.sql("set @row_id := 0;"),
+          query,
+          Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")
+        ]
       end

       def pg_median_datetime(arel_table, query_so_far, column_sym)
@@ -42,14 +57,15 @@ module Gitlab
        #  9 |      2 |  3
        # 15 |      3 |  3
        cte_table = Arel::Table.new("ordered_records")
-        cte = Arel::Nodes::As.new(cte_table,
-                                  arel_table.
-                                    project(arel_table[column_sym].as(column_sym.to_s),
-                                            Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
-                                                                  Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
-                                            arel_table.project("COUNT(1)").as('ct')).
-                                    # Disallow negative values
-                                    where(arel_table[column_sym].gteq(zero_interval)))
+        cte = Arel::Nodes::As.new(
+          cte_table,
+          arel_table.
+            project(arel_table[column_sym].as(column_sym.to_s),
+                    Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
+                                          Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
+                    arel_table.project("COUNT(1)").as('ct')).
+            # Disallow negative values
+            where(arel_table[column_sym].gteq(zero_interval)))

         # From the CTE, select either the middle row or the middle two rows (this is accomplished
         # by 'where cte.row_id between cte.ct / 2.0 AND cte.ct / 2.0 + 1'). Find the average of the
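
For context, and not part of the patch above: a minimal sketch of how the refactored pieces compose at the call site. The `project` variable and the 90-day window are assumptions chosen for illustration; the stage methods and the `from:` keyword come from `CycleAnalytics` as shown in the diff.

# Illustrative sketch only, assuming an existing `project` and an arbitrary window.
# `subtract_datetimes` projects COALESCE(end) - COALESCE(start) on PostgreSQL, or
# TIMESTAMPDIFF(second, ...) on MySQL; `median_datetime` wraps that projection in a
# database-specific median query, and `extract_median` normalises the raw result.
cycle_analytics = CycleAnalytics.new(project, from: 90.days.ago)
cycle_analytics.issue   # median duration of the :issue stage, or nil when there is no data
cycle_analytics.review  # median duration of the :review stage, or nil when there is no data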