Removed unused code

This commit is contained in:
Kamil Trzciński 2018-04-04 12:48:30 +02:00 committed by Shinya Maeda
parent cb3a0e33a0
commit de5194cdb8
13 changed files with 0 additions and 1411 deletions

View File

@@ -1,63 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module ChunkStore
          # Abstract interface for a single-chunk backing store (e.g. Redis,
          # Database). Concrete subclasses implement the read/write primitives;
          # this base class only captures the construction params and provides
          # the `filled?` convenience check.
          class Base
            attr_reader :params

            # @param identifiers [Array] store-specific identifiers (unused here;
            #   consumed by subclasses)
            # @param params [Hash] options; expects at least :buffer_size
            def initialize(*identifiers, **params)
              @params = params
            end

            # Release any resources held by the store.
            def close
              raise NotImplementedError
            end

            # Read the whole chunk.
            def get
              raise NotImplementedError
            end

            # Size in bytes of the stored chunk.
            def size
              raise NotImplementedError
            end

            # Write data to chunk store. Always overwrite.
            #
            # @param [String] data
            # @return [Fixnum] length of the data after writing
            def write!(data)
              raise NotImplementedError
            end

            # Append data to chunk store
            #
            # @param [String] data
            # @return [Fixnum] length of the appended
            def append!(data)
              raise NotImplementedError
            end

            # Truncate data to chunk store
            #
            # @param [Integer] offset
            def truncate!(offset)
              raise NotImplementedError
            end

            # Delete data from chunk store
            def delete!
              raise NotImplementedError
            end

            # Whether the chunk has reached the configured buffer size and can
            # be rotated to a longer-term store.
            def filled?
              size == params[:buffer_size]
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,92 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module ChunkStore
          # Chunk store backed by the ci_job_trace_chunks database table.
          # Chunks here are immutable: a row is written once at full buffer
          # size and can afterwards only be read or deleted.
          class Database < Base
            class << self
              # Opens a store for the given chunk, yields it, and always
              # closes it afterwards.
              def open(job_id, chunk_index, **params)
                raise ArgumentError unless job_id && chunk_index

                job_trace_chunk = ::Ci::JobTraceChunk
                  .find_or_initialize_by(job_id: job_id, chunk_index: chunk_index)
                store = self.new(job_trace_chunk, params)

                yield store
              ensure
                store&.close
              end

              def exist?(job_id, chunk_index)
                ::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index)
              end

              def chunks_count(job_id)
                ::Ci::JobTraceChunk.where(job_id: job_id).count
              end

              # Total byte size of all chunks stored for the job.
              def chunks_size(job_id)
                ::Ci::JobTraceChunk.where(job_id: job_id).pluck('data')
                  .inject(0) { |sum, data| sum + data.bytesize }
              end

              def delete_all(job_id)
                ::Ci::JobTraceChunk.destroy_all(job_id: job_id)
              end
            end

            attr_reader :job_trace_chunk

            def initialize(job_trace_chunk, **params)
              super

              @job_trace_chunk = job_trace_chunk
            end

            def close
              @job_trace_chunk = nil
            end

            def get
              job_trace_chunk.data
            end

            def size
              job_trace_chunk.data&.bytesize || 0
            end

            # Writes a full buffer worth of data. Partial chunks and
            # overwrites are rejected because database chunks are final.
            #
            # @param [String] data
            # @return [Integer] number of bytes written
            def write!(data)
              raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.bytesize
              raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data

              job_trace_chunk.data = data
              job_trace_chunk.save!

              data.bytesize
            end

            def append!(data)
              raise NotImplementedError
            end

            def truncate!(offset)
              raise NotImplementedError
            end

            # Destroys the persisted chunk row.
            # @raise [ActiveRecord::RecordNotFound] when the row was never saved
            def delete!
              raise ActiveRecord::RecordNotFound, 'Could not find deletable record' unless job_trace_chunk.persisted?

              job_trace_chunk.destroy!
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,132 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module ChunkStore
          # Chunk store backed by Redis (Gitlab::Redis::Cache). Used as the
          # fast, appendable buffer for the still-growing chunk of a live
          # trace.
          class Redis < Base
            class << self
              # Opens a store for the given chunk, yields it, and always
              # closes it afterwards.
              def open(job_id, chunk_index, **params)
                raise ArgumentError unless job_id && chunk_index

                buffer_key = self.buffer_key(job_id, chunk_index)
                store = self.new(buffer_key, params)

                yield store
              ensure
                store&.close
              end

              def exist?(job_id, chunk_index)
                Gitlab::Redis::Cache.with do |redis|
                  redis.exists(self.buffer_key(job_id, chunk_index))
                end
              end

              # Number of chunk keys stored for the job.
              # NOTE: the glob is '*' (not '?') so chunk indexes >= 10 are
              # matched as well; keys are colon-delimited, so the pattern
              # cannot leak into another job's keys.
              def chunks_count(job_id)
                Gitlab::Redis::Cache.with do |redis|
                  redis.scan_each(match: buffer_key(job_id, '*')).inject(0) do |sum, key|
                    sum + 1
                  end
                end
              end

              # Total byte size of all chunk values stored for the job.
              def chunks_size(job_id)
                Gitlab::Redis::Cache.with do |redis|
                  redis.scan_each(match: buffer_key(job_id, '*')).inject(0) do |sum, key|
                    sum + redis.strlen(key)
                  end
                end
              end

              def delete_all(job_id)
                Gitlab::Redis::Cache.with do |redis|
                  redis.scan_each(match: buffer_key(job_id, '*')) do |key|
                    redis.del(key)
                  end
                end
              end

              def buffer_key(job_id, chunk_index)
                "live_trace_buffer:#{job_id}:#{chunk_index}"
              end
            end

            BufferKeyNotFoundError = Class.new(StandardError)
            WriteError = Class.new(StandardError)

            attr_reader :buffer_key

            def initialize(buffer_key, **params)
              super

              @buffer_key = buffer_key
            end

            def close
              @buffer_key = nil
            end

            def get
              Gitlab::Redis::Cache.with do |redis|
                redis.get(buffer_key)
              end
            end

            def size
              Gitlab::Redis::Cache.with do |redis|
                redis.strlen(buffer_key)
              end
            end

            # Overwrites the chunk value.
            # @return [Integer] stored size in bytes after the write
            def write!(data)
              raise ArgumentError, 'Could not write empty data' unless data.present?

              Gitlab::Redis::Cache.with do |redis|
                unless redis.set(buffer_key, data) == 'OK'
                  raise WriteError, 'Failed to write'
                end

                redis.strlen(buffer_key)
              end
            end

            # Appends to an existing chunk value.
            # @return [Integer] number of bytes appended
            def append!(data)
              raise ArgumentError, 'Could not write empty data' unless data.present?

              Gitlab::Redis::Cache.with do |redis|
                raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key)

                original_size = size
                new_size = redis.append(buffer_key, data)
                appended_size = new_size - original_size

                raise WriteError, 'Failed to append' unless appended_size == data.bytesize

                appended_size
              end
            end

            def truncate!(offset)
              raise NotImplementedError
            end

            # Removes the chunk key.
            # @raise [BufferKeyNotFoundError] when the key does not exist
            def delete!
              Gitlab::Redis::Cache.with do |redis|
                raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key)

                unless redis.del(buffer_key) == 1
                  raise WriteError, 'Failed to delete'
                end
              end
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,274 +0,0 @@
##
# ChunkedIO Engine
#
# Choose a chunk_store with your purpose
# This class is designed that it's compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        # IO-compatible reader/writer over a sequence of fixed-size chunks.
        # Subclasses choose the backing store(s) by implementing #chunk_store
        # and #buffer_size.
        class ChunkedIO
          # extend ChunkedFile::Concerns::Opener
          include ChunkedFile::Concerns::Errors
          include ChunkedFile::Concerns::Hooks
          include ChunkedFile::Concerns::Callbacks
          prepend ChunkedFile::Concerns::Permissions

          attr_reader :size
          attr_reader :tell
          attr_reader :chunk, :chunk_range
          attr_reader :job_id
          attr_reader :mode

          alias_method :pos, :tell

          # @param job_id [Integer]
          # @param size [Integer, nil] total size; calculated from the store when nil
          # @param mode [String] IO-style mode string; 'w' (overwrite) is rejected
          def initialize(job_id, size = nil, mode = 'rb', &block)
            raise NotImplementedError, "Mode 'w' is not supported" if mode.include?('w')

            @size = size || calculate_size(job_id)
            @tell = 0
            @job_id = job_id
            @mode = mode

            if block_given?
              begin
                yield self
              ensure
                self.close
              end
            end
          end

          def close
          end

          def binmode
            # no-op
          end

          def binmode?
            true
          end

          # Moves the position, IO#seek style.
          # NOTE(review): returns the new position, whereas IO#seek returns 0
          # — confirm no caller relies on the IO contract.
          def seek(amount, where = IO::SEEK_SET)
            new_pos =
              case where
              when IO::SEEK_END
                size + amount
              when IO::SEEK_SET
                amount
              when IO::SEEK_CUR
                tell + amount
              else
                -1
              end

            raise ArgumentError, 'new position is outside of file' if new_pos < 0 || new_pos > size

            @tell = new_pos
          end

          def eof?
            tell == size
          end

          # Yields each line (as returned by #readline) until EOF.
          def each_line
            until eof?
              line = readline
              break if line.nil?

              yield(line)
            end
          end

          # Reads up to `length` bytes from the current position (all
          # remaining bytes when length is nil).
          # NOTE(review): `outbuf` is accepted for IO compatibility but ignored.
          def read(length = nil, outbuf = nil)
            out = ""

            until eof? || (length && out.bytesize >= length)
              data = get_chunk
              break if data.empty?

              out << data
              @tell += data.bytesize
            end

            # Trim overshoot from the last chunk read.
            out = out.byteslice(0, length) if length && out.bytesize > length
            out
          end

          # Reads bytes up to and including the next "\n" (or to EOF when no
          # newline is found).
          def readline
            out = ""

            until eof?
              data = get_chunk
              break if data.empty?

              new_line_pos = byte_position(data, "\n")

              if new_line_pos.nil?
                out << data
                @tell += data.bytesize
              else
                out << data.byteslice(0..new_line_pos)
                @tell += new_line_pos + 1
                break
              end
            end

            out
          end

          # Appends data when opened in append mode.
          # @raise [ArgumentError] on empty data
          def write(data)
            raise ArgumentError, 'Could not write empty data' unless data.present?

            if mode.include?('w')
              raise NotImplementedError, "Overwrite is not supported"
            elsif mode.include?('a')
              write_as_append(data)
            end
          end

          def truncate(offset)
            raise NotImplementedError
          end

          def flush
            # no-op
          end

          def present?
            chunks_count > 0
          end

          def delete
            chunk_store.delete_all
          end

          private

          # Whether the cached chunk covers the current position.
          def in_range?
            @chunk_range&.include?(tell)
          end

          # Returns the bytes of the current chunk starting at the current
          # offset, fetching (and caching) the chunk from the store if the
          # position moved outside the cached range.
          def get_chunk
            return '' if size <= 0 || eof?

            unless in_range?
              chunk_store.open(job_id, chunk_index, params_for_store) do |store|
                @chunk = store.get

                raise ReadError, 'Could not get a chunk' unless chunk && chunk.present?

                @chunk_range = (chunk_start...(chunk_start + chunk.bytesize))
              end
            end

            @chunk.byteslice(chunk_offset, buffer_size)
          end

          # Appends data at EOF, splitting it across chunk boundaries so no
          # single write exceeds buffer_size.
          def write_as_append(data)
            @tell = size

            data_size = data.bytesize
            new_tell = tell + data_size
            data_offset = 0

            until tell == new_tell
              writable_size = buffer_size - chunk_offset
              writable_data = data.byteslice(data_offset, writable_size)
              written_size = write_chunk(writable_data)

              data_offset += written_size
              @tell += written_size
              @size = [tell, size].max
            end

            data_size
          end

          # Writes one piece of data into the current chunk, appending when
          # the chunk already has content. Runs the :write_chunk callbacks
          # around the store operation.
          def write_chunk(data)
            written_size = 0

            chunk_store.open(job_id, chunk_index, params_for_store) do |store|
              with_callbacks(:write_chunk, store) do
                written_size = if store.size > 0 # rubocop:disable ZeroLengthPredicate
                                 store.append!(data)
                               else
                                 store.write!(data)
                               end

                raise WriteError, 'Written size mismatch' unless data.bytesize == written_size
              end
            end

            written_size
          end

          # Options passed to the chunk store for the given chunk index.
          def params_for_store(c_index = chunk_index)
            {
              buffer_size: buffer_size,
              chunk_start: c_index * buffer_size,
              chunk_index: c_index
            }
          end

          # Byte offset of the current position within its chunk.
          def chunk_offset
            tell % buffer_size
          end

          def chunk_start
            chunk_index * buffer_size
          end

          def chunk_end
            [chunk_start + buffer_size, size].min
          end

          # Index of the chunk containing the current position.
          def chunk_index
            (tell / buffer_size)
          end

          def chunks_count
            (size / buffer_size.to_f).ceil
          end

          # Whether the current position falls inside the final (possibly
          # partial) chunk.
          def last_chunk?
            ((size / buffer_size) * buffer_size..size).include?(tell)
          end

          # Backing store class; provided by subclasses.
          def chunk_store
            raise NotImplementedError
          end

          # Chunk size in bytes; provided by subclasses.
          def buffer_size
            raise NotImplementedError
          end

          def calculate_size(job_id)
            chunk_store.chunks_size(job_id)
          end

          # Byte index of pattern_byte within data, compensating for
          # String#index returning a character index on multi-byte strings.
          def byte_position(data, pattern_byte)
            index_as_string = data.index(pattern_byte)
            return nil unless index_as_string

            if data.getbyte(index_as_string) == pattern_byte.getbyte(0)
              index_as_string
            else
              # Multi-byte characters precede the match: walk forward byte by
              # byte to find the true byte offset.
              data2 = data.byteslice(index_as_string, 100)
              additional_pos = 0
              data2.each_byte do |b|
                break if b == pattern_byte.getbyte(0)

                additional_pos += 1
              end

              index_as_string + additional_pos
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,37 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module Concerns
          # Minimal before/after callback registry keyed by callback kind.
          # Including classes register method names via .before_callback /
          # .after_callback and wrap work in #with_callbacks.
          module Callbacks
            extend ActiveSupport::Concern

            included do
              class_attribute :_before_callbacks, :_after_callbacks, instance_writer: false
              self._before_callbacks = Hash.new []
              self._after_callbacks = Hash.new []
            end

            # Runs every registered before-callback for +kind+, then the
            # block, then every after-callback. Callbacks receive +args+.
            def with_callbacks(kind, *args)
              self.class._before_callbacks[kind].each do |callback|
                send(callback, *args) # rubocop:disable GitlabSecurity/PublicSend
              end

              yield

              self.class._after_callbacks[kind].each do |callback|
                send(callback, *args) # rubocop:disable GitlabSecurity/PublicSend
              end
            end

            module ClassMethods
              # Registers +callback+ (a method name) to run before +kind+.
              def before_callback(kind, callback)
                self._before_callbacks = _before_callbacks
                  .merge(kind => _before_callbacks[kind] + [callback])
              end

              # Registers +callback+ (a method name) to run after +kind+.
              def after_callback(kind, callback)
                self._after_callbacks = _after_callbacks
                  .merge(kind => _after_callbacks[kind] + [callback])
              end
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,18 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module Concerns
          # Error classes shared by ChunkedFile implementations.
          module Errors
            extend ActiveSupport::Concern

            included do
              # NOTE(review): constant assignment inside an `included` block is
              # lexically scoped, so these constants are defined once globally
              # rather than per including class — confirm this is intended.
              WriteError = Class.new(StandardError)
              ReadError = Class.new(StandardError)
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,23 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module Concerns
          # Adds a block-based `.open` factory that guarantees the instance
          # is closed when the block finishes, even on error.
          module Opener
            extend ActiveSupport::Concern

            class_methods do
              # Builds an instance with +args+, yields it, and always closes
              # it afterwards. Returns the block's result.
              def open(*args)
                io = new(*args)

                begin
                  yield io
                ensure
                  io&.close
                end
              end
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,93 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        module Concerns
          # Enforces read/write permissions derived from the IO mode string,
          # and takes an exclusive lease while the file is open for writing so
          # only one process can append at a time.
          module Permissions
            extend ActiveSupport::Concern

            WRITABLE_MODE = %w[a].freeze
            READABLE_MODE = %w[r +].freeze

            included do
              attr_reader :write_lock_uuid
            end

            # Obtains an exclusive write lease when opened in a writable mode.
            # @raise [IOError] when another process already holds the lease
            def initialize(job_id, size, mode = 'rb')
              if WRITABLE_MODE.any? { |m| mode.include?(m) }
                @write_lock_uuid = Gitlab::ExclusiveLease
                  .new(write_lock_key(job_id), timeout: 1.hour.to_i).try_obtain

                raise IOError, 'Already opened by another process' unless write_lock_uuid
              end

              super
            end

            # Releases the write lease, if one was obtained.
            def close
              Gitlab::ExclusiveLease.cancel(write_lock_key(job_id), write_lock_uuid) if write_lock_uuid

              super
            end

            def read(*args)
              can_read!

              super
            end

            def readline(*args)
              can_read!

              super
            end

            def each_line(*args)
              can_read!

              super
            end

            def write(*args)
              can_write!

              super
            end

            def truncate(*args)
              can_write!

              super
            end

            def delete(*args)
              can_write!

              super
            end

            private

            def can_read!
              raise IOError, 'not opened for reading' unless READABLE_MODE.any? { |m| mode.include?(m) }
            end

            def can_write!
              raise IOError, 'not opened for writing' unless WRITABLE_MODE.any? { |m| mode.include?(m) }
            end

            def write_lock_key(job_id)
              "live_trace:operation:write:#{job_id}"
            end
          end
        end
      end
    end
  end
end

View File

@@ -1,63 +0,0 @@
module Gitlab
  module Ci
    class Trace
      module ChunkedFile
        # Live (in-progress) trace file. The last, still-growing chunk is
        # buffered in Redis; once a chunk reaches buffer_size it is moved to
        # the database via the :write_chunk callback.
        class LiveTrace < ChunkedIO
          class << self
            # Whether any chunk exists for the job in either store.
            def exist?(job_id)
              ChunkedFile::ChunkStore::Redis.chunks_count(job_id) > 0 || ChunkedFile::ChunkStore::Database.chunks_count(job_id) > 0
            end
          end

          after_callback :write_chunk, :stash_to_database

          # Once data is filled into redis, move the data to database
          def stash_to_database(store)
            if store.filled?
              ChunkedFile::ChunkStore::Database.open(job_id, chunk_index, params_for_store) do |to_store|
                to_store.write!(store.get)
                store.delete!
              end
            end
          end

          # This is more efficient than iterating each chunk store and deleting
          def truncate(offset)
            if offset == 0
              delete
              @size = @tell = 0
            elsif offset == size
              # no-op
            else
              raise NotImplementedError, 'Unexpected operation'
            end
          end

          # Removes all chunks from both stores.
          def delete
            ChunkedFile::ChunkStore::Redis.delete_all(job_id)
            ChunkedFile::ChunkStore::Database.delete_all(job_id)
          end

          private

          # Sum of chunk sizes across both stores.
          def calculate_size(job_id)
            ChunkedFile::ChunkStore::Redis.chunks_size(job_id) +
              ChunkedFile::ChunkStore::Database.chunks_size(job_id)
          end

          # The last (growing) chunk lives in Redis; earlier, full chunks are
          # read from the database.
          def chunk_store
            if last_chunk?
              ChunkedFile::ChunkStore::Redis
            else
              ChunkedFile::ChunkStore::Database
            end
          end

          def buffer_size
            128.kilobytes
          end
        end
      end
    end
  end
end

View File

@@ -1,222 +0,0 @@
require 'spec_helper'

# Unit tests for the database-backed chunk store used by chunked CI traces.
describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database do
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:chunk_index) { 0 }
  let(:buffer_size) { 256 }
  let(:job_trace_chunk) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index) }
  let(:params) { { buffer_size: buffer_size } }
  let(:data) { 'A' * buffer_size }

  describe '.open' do
    subject { described_class.open(job_id, chunk_index, params) }

    it 'opens' do
      expect { |b| described_class.open(job_id, chunk_index, params, &b) }
        .to yield_successive_args(described_class)
    end

    context 'when job_id is nil' do
      let(:job_id) { nil }

      it { expect { subject }.to raise_error(ArgumentError) }
    end

    context 'when chunk_index is nil' do
      let(:chunk_index) { nil }

      it { expect { subject }.to raise_error(ArgumentError) }
    end
  end

  describe '.exist?' do
    subject { described_class.exist?(job_id, chunk_index) }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { is_expected.to be_truthy }
    end

    context 'when job_trace_chunk does not exist' do
      it { is_expected.to be_falsy }
    end
  end

  describe '.chunks_count' do
    subject { described_class.chunks_count(job_id) }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { is_expected.to eq(1) }

      context 'when two chunks exist' do
        let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
        let(:data_2) { 'B' * buffer_size }

        before do
          described_class.new(job_trace_chunk_2, params).write!(data_2)
        end

        it { is_expected.to eq(2) }
      end
    end

    context 'when job_trace_chunk does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '.chunks_size' do
    subject { described_class.chunks_size(job_id) }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { is_expected.to eq(data.length) }

      context 'when two chunks exist' do
        let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
        let(:data_2) { 'B' * buffer_size }
        let(:chunks_size) { data.length + data_2.length }

        before do
          described_class.new(job_trace_chunk_2, params).write!(data_2)
        end

        it { is_expected.to eq(chunks_size) }
      end
    end

    context 'when job_trace_chunk does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '.delete_all' do
    subject { described_class.delete_all(job_id) }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it 'deletes all' do
        expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1)
      end

      context 'when two chunks exist' do
        let(:job_trace_chunk_2) { ::Ci::JobTraceChunk.new(job_id: job_id, chunk_index: chunk_index + 1) }
        let(:data_2) { 'B' * buffer_size }

        before do
          described_class.new(job_trace_chunk_2, params).write!(data_2)
        end

        it 'deletes all' do
          expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2)
        end
      end
    end

    context 'when job_trace_chunk does not exist' do
      it 'deletes all' do
        expect { subject }.not_to change { described_class.chunks_count(job_id) }
      end
    end
  end

  describe '#get' do
    subject { described_class.new(job_trace_chunk, params).get }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { is_expected.to eq(data) }
    end

    context 'when job_trace_chunk does not exist' do
      it { is_expected.to be_nil }
    end
  end

  describe '#size' do
    subject { described_class.new(job_trace_chunk, params).size }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { is_expected.to eq(data.length) }
    end

    context 'when job_trace_chunk does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '#write!' do
    subject { described_class.new(job_trace_chunk, params).write!(data) }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it { expect { subject }.to raise_error('UPDATE (Overwriting data) is not supported') }
    end

    context 'when job_trace_chunk does not exist' do
      let(:expected_data) { ::Ci::JobTraceChunk.find_by(job_id: job_id, chunk_index: chunk_index).data }

      it 'writes' do
        is_expected.to eq(data.length)

        expect(expected_data).to eq(data)
      end
    end

    context 'when data is nil' do
      let(:data) { nil }

      it { expect { subject }.to raise_error('Partial writing is not supported') }
    end
  end

  describe '#delete!' do
    subject { described_class.new(job_trace_chunk, params).delete! }

    context 'when job_trace_chunk exists' do
      before do
        described_class.new(job_trace_chunk, params).write!(data)
      end

      it 'deletes' do
        expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index))
          .to be_truthy

        subject

        expect(::Ci::JobTraceChunk.exists?(job_id: job_id, chunk_index: chunk_index))
          .to be_falsy
      end
    end

    context 'when job_trace_chunk does not exist' do
      it 'raises an error' do
        expect { subject }.to raise_error('Could not find deletable record')
      end
    end
  end
end

View File

@@ -1,273 +0,0 @@
require 'spec_helper'

# Unit tests for the Redis-backed chunk store used by chunked CI traces.
describe Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis, :clean_gitlab_redis_cache do
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:chunk_index) { 0 }
  let(:buffer_size) { 128.kilobytes }
  let(:buffer_key) { described_class.buffer_key(job_id, chunk_index) }
  let(:params) { { buffer_size: buffer_size } }
  let(:data) { 'Here is the trace' }

  describe '.open' do
    subject { described_class.open(job_id, chunk_index, params) }

    it 'opens' do
      expect { |b| described_class.open(job_id, chunk_index, params, &b) }
        .to yield_successive_args(described_class)
    end

    context 'when job_id is nil' do
      let(:job_id) { nil }

      it { expect { subject }.to raise_error(ArgumentError) }
    end

    context 'when chunk_index is nil' do
      let(:chunk_index) { nil }

      it { expect { subject }.to raise_error(ArgumentError) }
    end
  end

  describe '.exist?' do
    subject { described_class.exist?(job_id, chunk_index) }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it { is_expected.to be_truthy }
    end

    context 'when buffer_key does not exist' do
      it { is_expected.to be_falsy }
    end
  end

  describe '.chunks_count' do
    subject { described_class.chunks_count(job_id) }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it { is_expected.to eq(1) }

      context 'when two chunks exist' do
        let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
        let(:data_2) { 'Another data' }

        before do
          described_class.new(buffer_key_2, params).write!(data_2)
        end

        it { is_expected.to eq(2) }
      end
    end

    context 'when buffer_key does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '.chunks_size' do
    subject { described_class.chunks_size(job_id) }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it { is_expected.to eq(data.length) }

      context 'when two chunks exist' do
        let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
        let(:data_2) { 'Another data' }
        let(:chunks_size) { data.length + data_2.length }

        before do
          described_class.new(buffer_key_2, params).write!(data_2)
        end

        it { is_expected.to eq(chunks_size) }
      end
    end

    context 'when buffer_key does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '.delete_all' do
    subject { described_class.delete_all(job_id) }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it 'deletes all' do
        expect { subject }.to change { described_class.chunks_count(job_id) }.by(-1)
      end

      context 'when two chunks exist' do
        let(:buffer_key_2) { described_class.buffer_key(job_id, chunk_index + 1) }
        let(:data_2) { 'Another data' }

        before do
          described_class.new(buffer_key_2, params).write!(data_2)
        end

        it 'deletes all' do
          expect { subject }.to change { described_class.chunks_count(job_id) }.by(-2)
        end
      end
    end

    context 'when buffer_key does not exist' do
      it 'deletes all' do
        expect { subject }.not_to change { described_class.chunks_count(job_id) }
      end
    end
  end

  describe '.buffer_key' do
    subject { described_class.buffer_key(job_id, chunk_index) }

    it { is_expected.to eq("live_trace_buffer:#{job_id}:#{chunk_index}") }
  end

  describe '#get' do
    subject { described_class.new(buffer_key, params).get }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it { is_expected.to eq(data) }
    end

    context 'when buffer_key does not exist' do
      it { is_expected.to be_nil }
    end
  end

  describe '#size' do
    subject { described_class.new(buffer_key, params).size }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it { is_expected.to eq(data.length) }
    end

    context 'when buffer_key does not exist' do
      it { is_expected.to eq(0) }
    end
  end

  describe '#write!' do
    subject { described_class.new(buffer_key, params).write!(data) }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!('Already data in the data')
      end

      it 'overwrites' do
        is_expected.to eq(data.length)

        Gitlab::Redis::Cache.with do |redis|
          expect(redis.get(buffer_key)).to eq(data)
        end
      end
    end

    context 'when buffer_key does not exist' do
      it 'writes' do
        is_expected.to eq(data.length)

        Gitlab::Redis::Cache.with do |redis|
          expect(redis.get(buffer_key)).to eq(data)
        end
      end
    end

    context 'when data is nil' do
      let(:data) { nil }

      it 'raises an error' do
        expect { described_class.new(buffer_key, params).write!(data) }
          .to raise_error('Could not write empty data')
      end
    end
  end

  describe '#append!' do
    subject { described_class.new(buffer_key, params).append!(data) }

    context 'when buffer_key exists' do
      let(:written_chunk) { 'Already data in the data' }

      before do
        described_class.new(buffer_key, params).write!(written_chunk)
      end

      it 'appends' do
        is_expected.to eq(data.length)

        Gitlab::Redis::Cache.with do |redis|
          expect(redis.get(buffer_key)).to eq(written_chunk + data)
        end
      end
    end

    context 'when buffer_key does not exist' do
      it 'raises an error' do
        expect { subject }.to raise_error(described_class::BufferKeyNotFoundError)
      end
    end

    context 'when data is nil' do
      let(:data) { nil }

      it 'raises an error' do
        expect { subject }.to raise_error('Could not write empty data')
      end
    end
  end

  describe '#delete!' do
    subject { described_class.new(buffer_key, params).delete! }

    context 'when buffer_key exists' do
      before do
        described_class.new(buffer_key, params).write!(data)
      end

      it 'deletes' do
        Gitlab::Redis::Cache.with do |redis|
          expect(redis.exists(buffer_key)).to be_truthy
        end

        subject

        Gitlab::Redis::Cache.with do |redis|
          expect(redis.exists(buffer_key)).to be_falsy
        end
      end
    end

    context 'when buffer_key does not exist' do
      it 'raises an error' do
        expect { subject }.to raise_error(described_class::BufferKeyNotFoundError)
      end
    end
  end
end

View File

@@ -1,32 +0,0 @@
require 'spec_helper'

# Runs the shared ChunkedIO examples once per chunk-store backend, stubbing
# the abstract #chunk_store / #buffer_size hooks.
describe Gitlab::Ci::Trace::ChunkedFile::ChunkedIO, :clean_gitlab_redis_cache do
  include ChunkedIOHelpers

  let(:chunked_io) { described_class.new(job_id, nil, mode) }
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:mode) { 'rb' }

  describe 'ChunkStore is Redis', :partial_support do
    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis] }

    before do
      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
    end

    it_behaves_like 'ChunkedIO shared tests'
  end

  describe 'ChunkStore is Database' do
    let(:chunk_stores) { [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database] }

    before do
      allow_any_instance_of(described_class).to receive(:chunk_store).and_return(chunk_stores.first)
      allow_any_instance_of(described_class).to receive(:buffer_size).and_return(128.kilobytes)
    end

    it_behaves_like 'ChunkedIO shared tests'
  end
end

View File

@@ -1,89 +0,0 @@
require 'spec_helper'

# Tests for LiveTrace, which stripes trace data across the Redis and
# Database chunk stores.
describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
  include ChunkedIOHelpers

  let(:chunked_io) { described_class.new(job_id, nil, mode) }
  let(:job) { create(:ci_build) }
  let(:job_id) { job.id }
  let(:mode) { 'rb' }

  let(:chunk_stores) do
    [Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Redis,
     Gitlab::Ci::Trace::ChunkedFile::ChunkStore::Database]
  end

  describe 'ChunkStores are Redis and Database', :partial_support do
    it_behaves_like 'ChunkedIO shared tests'
  end

  describe '.exist?' do
    subject { described_class.exist?(job_id) }

    context 'when a chunk exists in a store' do
      before do
        fill_trace_to_chunks(sample_trace_raw)
      end

      it { is_expected.to be_truthy }
    end

    context 'when chunks do not exist in any store' do
      it { is_expected.to be_falsey }
    end
  end

  describe '#truncate' do
    subject { chunked_io.truncate(offset) }

    let(:mode) { 'a+b' }

    before do
      fill_trace_to_chunks(sample_trace_raw)
    end

    context 'when offset is 0' do
      let(:offset) { 0 }

      it 'deletes all chunks' do
        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
      end
    end

    context 'when offset is size' do
      let(:offset) { sample_trace_raw.bytesize }

      it 'does nothing' do
        expect { subject }.not_to change { described_class.exist?(job_id) }
      end
    end

    context 'when offset is else' do
      let(:offset) { 10 }

      it 'raises an error' do
        expect { subject }.to raise_error('Unexpected operation')
      end
    end
  end

  describe '#delete' do
    subject { chunked_io.delete }

    context 'when a chunk exists in a store' do
      before do
        fill_trace_to_chunks(sample_trace_raw)
      end

      it 'deletes' do
        expect { subject }.to change { described_class.exist?(job_id) }.from(true).to(false)
      end
    end

    context 'when chunks do not exist in any store' do
      it 'deletes' do
        expect { subject }.not_to change { described_class.exist?(job_id) }
      end
    end
  end
end