#!/usr/bin/env ruby
# frozen_string_literal: true

# We don't have auto-loading here
require_relative '../lib/gitlab'
require_relative '../lib/gitlab/popen'
require_relative '../lib/gitlab/popen/runner'

class StaticAnalysis
  ALLOWED_WARNINGS = [
    # https://github.com/browserslist/browserslist/blob/d0ec62eb48c41c218478cd3ac28684df051cc865/node.js#L329
    # warns if caniuse-lite package is older than 6 months. Ignore this
    # warning message so that GitLab backports don't fail.
    "Browserslist: caniuse-lite is outdated. Please run next command `yarn upgrade`"
  ].freeze

  Task = Struct.new(:command, :duration) do
    def cmd
      command.join(' ')
    end
  end

  NodeAssignment = Struct.new(:index, :tasks) do
    def total_duration
      return 0 if tasks.empty?

      tasks.sum(&:duration)
    end
  end
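
  # license_finder (below) points its --project-path at the jh/ subdirectory when
  # Gitlab.jh? is true (a JiHu checkout), and at the repository root otherwise.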
  def self.project_path
    project_root = File.expand_path('..', __dir__)

    if Gitlab.jh?
      "#{project_root}/jh"
    else
      project_root
    end
  end
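
  # The durations below are rough estimates in seconds; they only influence how
  # tasks are spread across parallel nodes in #tasks_to_run, not whether a task runs.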
  # `gettext:updated_check` and `gitlab:sidekiq:sidekiq_queues_yml:check` will fail on FOSS
  # installations (e.g. gitlab-org/gitlab-foss) since each checks a file that is
  # generated by an EE installation and can contain values that a FOSS installation
  # won't find. To work around this we only enable these tasks on EE installations.
  TASKS_WITH_DURATIONS_SECONDS = [
    Task.new(%w[bin/rake lint:haml], 562),
    # We need to disable the cache for this cop since it creates files under
    # tmp/feature_flags/*.used, and the cache would prevent these files from being created.
    Task.new(%w[bundle exec rubocop --only Gitlab/MarkUsedFeatureFlags --cache false], 400),
    (Gitlab.ee? ? Task.new(%w[bin/rake gettext:updated_check], 360) : nil),
    Task.new(%w[yarn run lint:eslint:all], 312),
    Task.new(%w[bundle exec rubocop --parallel], 60),
    Task.new(%w[yarn run lint:prettier], 160),
    Task.new(%w[bin/rake gettext:lint], 85),
    Task.new(%W[bundle exec license_finder --decisions-file config/dependency_decisions.yml --project-path #{project_path}], 20),
    Task.new(%w[bin/rake lint:static_verification], 35),
    Task.new(%w[bin/rake config_lint], 10),
    Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
    (Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),
    Task.new(%w[yarn run internal:stylelint], 8),
    Task.new(%w[scripts/lint-conflicts.sh], 1),
    Task.new(%w[yarn run block-dependencies], 1),
    Task.new(%w[scripts/lint-rugged], 1),
    Task.new(%w[scripts/gemfile_lock_changed.sh], 1)
  ].compact.freeze
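
  # Runs this CI node's share of the tasks. CI_NODE_TOTAL and CI_NODE_INDEX are
  # GitLab CI's predefined variables for `parallel:` jobs; when they are not set,
  # we fall back to a single node (index 1) that runs everything.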
  def run_tasks!(options = {})
    node_assignment = tasks_to_run((ENV['CI_NODE_TOTAL'] || 1).to_i)[(ENV['CI_NODE_INDEX'] || 1).to_i - 1]

    if options[:dry_run]
      puts "Dry-run mode!"
      return
    end

    static_analysis = Gitlab::Popen::Runner.new

    start_time = Time.now
    static_analysis.run(node_assignment.tasks.map(&:command)) do |command, &run|
      task = node_assignment.tasks.find { |task| task.command == command }

      puts
      puts "$ #{task.cmd}"

      result = run.call

      puts "==> Finished in #{result.duration} seconds (expected #{task.duration} seconds)"
      puts
    end

    puts
    puts '==================================================='
    puts "Node finished running all tasks in #{Time.now - start_time} seconds (expected #{node_assignment.total_duration})"
    puts
    puts
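
    # Exit status: 1 if any analysis failed, 2 if all analyses passed but emitted
    # warnings that are not in ALLOWED_WARNINGS, 0 otherwise.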
    if static_analysis.all_success_and_clean?
      puts 'All static analyses passed successfully.'
    elsif static_analysis.all_success?
      puts 'All static analyses passed successfully, but we have warnings:'
      puts

      emit_warnings(static_analysis)

      exit 2 if warning_count(static_analysis).nonzero?
    else
      puts 'Some static analyses failed:'

      emit_warnings(static_analysis)
      emit_errors(static_analysis)

      exit 1
    end
  end

  def emit_warnings(static_analysis)
    static_analysis.warned_results.each do |result|
      puts
      puts "**** #{result.cmd.join(' ')} had the following warning(s):"
      puts
      puts result.stderr
      puts
    end
  end

  def emit_errors(static_analysis)
    static_analysis.failed_results.each do |result|
      puts
      puts "**** #{result.cmd.join(' ')} failed with the following error(s):"
      puts
      puts result.stdout
      puts result.stderr
      puts
    end
  end

  def warning_count(static_analysis)
    static_analysis.warned_results
      .count { |result| !ALLOWED_WARNINGS.include?(result.stderr.strip) }
  end
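
  # Greedily packs tasks onto the requested number of CI nodes: tasks are taken in
  # descending-duration order and placed on the first node that stays under the
  # ideal per-node time; whatever is left over afterwards is handed to the
  # least-loaded node.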
  def tasks_to_run(node_total)
    total_time = TASKS_WITH_DURATIONS_SECONDS.sum(&:duration).to_f
    ideal_time_per_node = total_time / node_total
    tasks_by_duration_desc = TASKS_WITH_DURATIONS_SECONDS.sort_by { |a| -a.duration }
    nodes = Array.new(node_total) { |i| NodeAssignment.new(i + 1, []) }

    puts "Total expected time: #{total_time}; ideal time per job: #{ideal_time_per_node}.\n\n"
    puts "Tasks to distribute:"
    tasks_by_duration_desc.each { |task| puts "* #{task.cmd} (#{task.duration}s)" }

    # Distribute tasks optimally first
    puts "\nAssigning tasks optimally."
    distribute_tasks(tasks_by_duration_desc, nodes, ideal_time_per_node: ideal_time_per_node)

    # Distribute the remaining tasks across the nodes, visiting nodes in ascending
    # order of their current total duration
    leftover_tasks = tasks_by_duration_desc - nodes.flat_map(&:tasks)

    if leftover_tasks.any?
      puts "\n\nAssigning remaining tasks: #{leftover_tasks.flat_map(&:cmd)}"
      distribute_tasks(leftover_tasks, nodes.sort_by { |node| node.total_duration })
    end

    nodes.each do |node|
      puts "\nExpected duration for node #{node.index}: #{node.total_duration} seconds"
      node.tasks.each { |task| puts "* #{task.cmd} (#{task.duration}s)" }
    end

    nodes
  end

  def distribute_tasks(tasks, nodes, ideal_time_per_node: nil)
    condition =
      if ideal_time_per_node
        ->(task, node, ideal_time_per_node) { (task.duration + node.total_duration) <= ideal_time_per_node }
      else
        ->(*) { true }
      end

    tasks.each do |task|
      nodes.each do |node|
        if condition.call(task, node, ideal_time_per_node)
          assign_task_to_node(tasks, node, task)
          break
        end
      end
    end
  end

  def assign_task_to_node(remaining_tasks, node, task)
    node.tasks << task
    puts "Assigning #{task.command} (#{task.duration}s) to node ##{node.index}. Node total duration: #{node.total_duration}s."
  end
end
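
# Example invocations (assuming this file is checked out as scripts/static-analysis;
# the exact path is not verified here):
#
#   CI_NODE_TOTAL=4 CI_NODE_INDEX=2 scripts/static-analysis   # run this node's share of a 4-way split
#   scripts/static-analysis --dry-run                         # only print the computed task distribution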

if $0 == __FILE__
  options = {}

  if ARGV.include?('--dry-run')
    options[:dry_run] = true
  end

  StaticAnalysis.new.run_tasks!(options)
end