# frozen_string_literal: true
module Ci
|
2019-03-28 09:17:42 -04:00
|
|
|
class BuildTraceChunk < ApplicationRecord
|
2018-05-01 04:06:44 -04:00
|
|
|
include FastDestroyAll
|
2018-07-03 03:20:27 -04:00
|
|
|
include ::Gitlab::ExclusiveLeaseHelpers
|
2018-03-26 07:45:18 -04:00
|
|
|
extend Gitlab::Ci::Model
|
|
|
|
|
2018-04-26 02:06:04 -04:00
|
|
|
belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
|
2018-04-04 06:19:17 -04:00
|
|
|
|
|
|
|
default_value_for :data_store, :redis
|
|
|
|
|
2018-04-05 07:39:35 -04:00
|
|
|
CHUNK_SIZE = 128.kilobytes
|
2018-05-07 04:34:47 -04:00
|
|
|
WRITE_LOCK_RETRY = 10
|
2018-05-07 05:45:38 -04:00
|
|
|
WRITE_LOCK_SLEEP = 0.01.seconds
|
2018-05-07 04:34:47 -04:00
|
|
|
WRITE_LOCK_TTL = 1.minute
|
2018-04-04 06:19:17 -04:00
|
|
|
|
2018-11-28 07:27:25 -05:00
|
|
|
FailedToPersistDataError = Class.new(StandardError)
|
|
|
|
|
2018-06-15 02:48:03 -04:00
|
|
|
# Note: The ordering of this enum is related to the precedence of persist store.
|
2019-02-25 05:42:31 -05:00
|
|
|
# The bottom item takes the highest precedence, and the top item takes the lowest precedence.
|
2018-04-04 06:19:17 -04:00
|
|
|
enum data_store: {
|
|
|
|
redis: 1,
|
2018-06-07 04:04:55 -04:00
|
|
|
database: 2,
|
|
|
|
fog: 3
|
2018-04-04 06:19:17 -04:00
|
|
|
}
|
|
|
|
|
2018-05-01 04:06:44 -04:00
|
|
|
class << self
|
2018-06-07 04:04:55 -04:00
|
|
|
def all_stores
|
|
|
|
@all_stores ||= self.data_stores.keys
|
2018-04-24 09:13:46 -04:00
|
|
|
end
|
|
|
|
|
2018-07-03 01:33:11 -04:00
|
|
|
def persistable_store
|
2018-06-07 04:04:55 -04:00
|
|
|
# get first available store from the back of the list
|
|
|
|
all_stores.reverse.find { |store| get_store_class(store).available? }
|
2018-04-24 09:13:46 -04:00
|
|
|
end
|
|
|
|
|
2018-06-07 04:04:55 -04:00
|
|
|
def get_store_class(store)
|
|
|
|
@stores ||= {}
|
|
|
|
@stores[store] ||= "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new
|
2018-04-24 09:13:46 -04:00
|
|
|
end
|
2018-05-03 04:08:05 -04:00
|
|
|
|
|
|
|
##
|
|
|
|
# FastDestroyAll concerns
|
|
|
|
def begin_fast_destroy
|
2018-06-15 02:48:03 -04:00
|
|
|
all_stores.each_with_object({}) do |store, result|
|
|
|
|
relation = public_send(store) # rubocop:disable GitlabSecurity/PublicSend
|
2018-06-07 04:04:55 -04:00
|
|
|
keys = get_store_class(store).keys(relation)
|
|
|
|
|
|
|
|
result[store] = keys if keys.present?
|
|
|
|
end
|
2018-05-03 04:08:05 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
##
|
|
|
|
# FastDestroyAll concerns
|
|
|
|
def finalize_fast_destroy(keys)
|
2018-06-07 04:04:55 -04:00
|
|
|
keys.each do |store, value|
|
|
|
|
get_store_class(store).delete_keys(value)
|
|
|
|
end
|
2018-05-03 04:08:05 -04:00
|
|
|
end
|
2018-04-24 09:13:46 -04:00
|
|
|
end
|
|
|
|
|
2018-04-26 03:30:27 -04:00
|
|
|
##
|
|
|
|
# Data is memoized for optimizing #size and #end_offset
|
2018-04-04 06:19:17 -04:00
|
|
|
def data
|
2018-04-30 01:52:29 -04:00
|
|
|
@data ||= get_data.to_s
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
def truncate(offset = 0)
|
2018-05-04 04:42:37 -04:00
|
|
|
raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
|
|
|
|
return if offset == size # Skip the following process as it doesn't affect anything
|
|
|
|
|
|
|
|
self.append("", offset)
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
def append(new_data, offset)
|
2018-07-03 01:48:00 -04:00
|
|
|
raise ArgumentError, 'New data is missing' unless new_data
|
2018-05-04 04:42:37 -04:00
|
|
|
raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
|
2018-04-30 01:52:29 -04:00
|
|
|
raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
|
2018-04-04 06:19:17 -04:00
|
|
|
|
2018-11-23 11:25:11 -05:00
|
|
|
in_lock(*lock_params) do # Write operation is atomic
|
2018-06-15 02:48:03 -04:00
|
|
|
unsafe_set_data!(data.byteslice(0, offset) + new_data)
|
|
|
|
end
|
|
|
|
|
|
|
|
schedule_to_persist if full?
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
def size
|
|
|
|
data&.bytesize.to_i
|
|
|
|
end
|
|
|
|
|
|
|
|
def start_offset
|
|
|
|
chunk_index * CHUNK_SIZE
|
|
|
|
end
|
|
|
|
|
|
|
|
def end_offset
|
|
|
|
start_offset + size
|
|
|
|
end
|
|
|
|
|
|
|
|
def range
|
|
|
|
(start_offset...end_offset)
|
|
|
|
end
|
2018-06-15 02:48:03 -04:00
|
|
|
|
2018-06-18 04:56:16 -04:00
|
|
|
def persist_data!
|
2018-11-23 11:25:11 -05:00
|
|
|
in_lock(*lock_params) do # Write operation is atomic
|
2018-07-03 01:33:11 -04:00
|
|
|
unsafe_persist_to!(self.class.persistable_store)
|
2018-04-06 11:08:35 -04:00
|
|
|
end
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
private
|
|
|
|
|
2018-06-25 03:19:40 -04:00
|
|
|
def unsafe_persist_to!(new_store)
|
2018-06-07 04:04:55 -04:00
|
|
|
return if data_store == new_store.to_s
|
|
|
|
|
2018-11-28 07:27:25 -05:00
|
|
|
current_data = get_data
|
2018-06-07 04:04:55 -04:00
|
|
|
|
2018-11-28 07:27:25 -05:00
|
|
|
unless current_data&.bytesize.to_i == CHUNK_SIZE
|
2019-02-25 03:19:36 -05:00
|
|
|
raise FailedToPersistDataError, 'Data is not fulfilled in a bucket'
|
2018-06-07 04:04:55 -04:00
|
|
|
end
|
|
|
|
|
2018-11-28 07:27:25 -05:00
|
|
|
old_store_class = self.class.get_store_class(data_store)
|
|
|
|
|
|
|
|
self.raw_data = nil
|
|
|
|
self.data_store = new_store
|
|
|
|
unsafe_set_data!(current_data)
|
|
|
|
|
2018-06-07 04:04:55 -04:00
|
|
|
old_store_class.delete_data(self)
|
|
|
|
end
|
|
|
|
|
2018-04-26 03:30:27 -04:00
|
|
|
def get_data
|
2018-06-07 04:04:55 -04:00
|
|
|
self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
|
2018-06-25 06:59:28 -04:00
|
|
|
rescue Excon::Error::NotFound
|
|
|
|
# If the data store is :fog and the file does not exist in the object storage, this method returns nil.
|
2018-04-26 03:30:27 -04:00
|
|
|
end
|
|
|
|
|
2018-06-15 02:48:03 -04:00
|
|
|
def unsafe_set_data!(value)
|
2018-06-25 03:19:40 -04:00
|
|
|
raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE
|
2018-04-26 03:30:27 -04:00
|
|
|
|
2018-06-15 02:48:03 -04:00
|
|
|
self.class.get_store_class(data_store).set_data(self, value)
|
|
|
|
@data = value
|
2018-04-26 03:30:27 -04:00
|
|
|
|
2018-06-15 02:48:03 -04:00
|
|
|
save! if changed?
|
2018-04-26 03:30:27 -04:00
|
|
|
end
|
|
|
|
|
2018-06-07 04:04:55 -04:00
|
|
|
def schedule_to_persist
|
2018-06-18 04:56:16 -04:00
|
|
|
return if data_persisted?
|
2018-04-04 06:19:17 -04:00
|
|
|
|
2018-05-04 04:02:08 -04:00
|
|
|
Ci::BuildTraceChunkFlushWorker.perform_async(id)
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
|
|
|
|
2018-06-25 03:19:40 -04:00
|
|
|
def data_persisted?
|
|
|
|
!redis?
|
|
|
|
end
|
|
|
|
|
2018-05-07 04:34:47 -04:00
|
|
|
def full?
|
2018-04-04 06:19:17 -04:00
|
|
|
size == CHUNK_SIZE
|
|
|
|
end
|
|
|
|
|
2018-06-15 02:48:03 -04:00
|
|
|
def lock_params
|
|
|
|
["trace_write:#{build_id}:chunks:#{chunk_index}",
|
|
|
|
{ ttl: WRITE_LOCK_TTL,
|
2018-07-04 00:29:47 -04:00
|
|
|
retries: WRITE_LOCK_RETRY,
|
2018-06-15 02:48:03 -04:00
|
|
|
sleep_sec: WRITE_LOCK_SLEEP }]
|
2018-04-04 06:19:17 -04:00
|
|
|
end
|
2018-03-26 07:45:18 -04:00
|
|
|
end
|
|
|
|
end
|