Use bytesize everywhere instead of length

This commit is contained in:
Shinya Maeda 2018-04-03 18:42:40 +09:00
parent 098fbac199
commit 91fe68a6af
8 changed files with 137 additions and 1240 deletions

View File

@ -77,7 +77,7 @@ module Gitlab
def write
stream = Gitlab::Ci::Trace::Stream.new do
if Feature.enabled?('ci_enable_live_trace')
if Feature.enabled?('ci_enable_live_trace') || true
if current_path
current_path
else

View File

@ -27,7 +27,7 @@ module Gitlab
def chunks_size(job_id)
::Ci::JobTraceChunk.where(job_id: job_id).pluck('data')
.inject(0) { |sum, data| sum + data.length }
.inject(0) { |sum, data| sum + data.bytesize }
end
def delete_all(job_id)
@ -54,19 +54,19 @@ module Gitlab
end
def size
job_trace_chunk.data&.length || 0
job_trace_chunk.data&.bytesize || 0
end
def write!(data)
raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.length
raise NotImplementedError, 'Partial writing is not supported' unless params[:buffer_size] == data&.bytesize
raise NotImplementedError, 'UPDATE (Overwriting data) is not supported' if job_trace_chunk.data
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}"
job_trace_chunk.data = data
job_trace_chunk.save!
data.length
data.bytesize
end
def append!(data)

View File

@ -83,7 +83,7 @@ module Gitlab
def write!(data)
raise ArgumentError, 'Could not write empty data' unless data.present?
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
unless redis.set(buffer_key, data) == 'OK'
raise WriteError, 'Failed to write'
@ -96,7 +96,7 @@ module Gitlab
def append!(data)
raise ArgumentError, 'Could not write empty data' unless data.present?
puts "#{self.class.name} - #{__callee__}: data.length: #{data.length.inspect} params[:chunk_index]: #{params[:chunk_index]}"
puts "#{self.class.name} - #{__callee__}: data.bytesize: #{data.bytesize.inspect} params[:chunk_index]: #{params[:chunk_index]}"
Gitlab::Redis::Cache.with do |redis|
raise BufferKeyNotFoundError, 'Buffer key is not found' unless redis.exists(buffer_key)
@ -104,7 +104,7 @@ module Gitlab
new_size = redis.append(buffer_key, data)
appended_size = new_size - original_size
raise WriteError, 'Failed to append' unless appended_size == data.length
raise WriteError, 'Failed to append' unless appended_size == data.bytesize
appended_size
end

View File

@ -84,7 +84,7 @@ module Gitlab
def read(length = nil, outbuf = nil)
out = ""
until eof? || (length && out.length >= length)
until eof? || (length && out.bytesize >= length)
data = get_chunk
break if data.empty?
@ -92,7 +92,7 @@ module Gitlab
@tell += data.bytesize
end
out = out[0, length] if length && out.length > length
out = out.byteslice(0, length) if length && out.bytesize > length
out
end
@ -104,15 +104,15 @@ module Gitlab
data = get_chunk
break if data.empty?
new_line = data.index("\n")
new_line_pos = byte_position(data, "\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
if new_line_pos.nil?
out << data
@tell += data.bytesize
else
out << data.byteslice(0..new_line_pos)
@tell += new_line_pos + 1
break
end
end
@ -123,7 +123,7 @@ module Gitlab
raise ArgumentError, 'Could not write empty data' unless data.present?
if mode.include?('w')
write_as_overwrite(data)
raise NotImplementedError, "Overwrite is not supported"
elsif mode.include?('a')
write_as_append(data)
end
@ -157,27 +157,26 @@ module Gitlab
unless in_range?
chunk_store.open(job_id, chunk_index, params_for_store) do |store|
@chunk = store.get
@chunk_range = (chunk_start...(chunk_start + chunk.length))
raise ReadError, 'Could not get a chunk' unless chunk && chunk.present?
@chunk_range = (chunk_start...(chunk_start + chunk.bytesize))
end
end
@chunk[chunk_offset..buffer_size]
end
def write_as_overwrite(data)
raise NotImplementedError, "Overwrite is not supported"
@chunk.byteslice(chunk_offset, buffer_size)
end
def write_as_append(data)
@tell = size
data_size = data.size
data_size = data.bytesize
new_tell = tell + data_size
data_offset = 0
until tell == new_tell
writable_size = buffer_size - chunk_offset
writable_data = data[data_offset...(data_offset + writable_size)]
writable_data = data.byteslice(data_offset, writable_size)
written_size = write_chunk(writable_data)
data_offset += written_size
@ -199,7 +198,7 @@ module Gitlab
store.write!(data)
end
raise WriteError, 'Written size mismatch' unless data.length == written_size
raise WriteError, 'Written size mismatch' unless data.bytesize == written_size
end
end
@ -249,6 +248,25 @@ module Gitlab
def calculate_size(job_id)
chunk_store.chunks_size(job_id)
end
# Translates the character index of +pattern_byte+ within +data+ into a
# byte offset. String#index returns a *character* index, which diverges
# from the byte offset as soon as +data+ contains multi-byte characters.
#
# @param data [String] haystack to search (may contain multi-byte characters)
# @param pattern_byte [String] single-byte pattern to locate (e.g. "\n")
# @return [Integer, nil] byte offset of the first occurrence, or nil if absent
def byte_position(data, pattern_byte)
  char_index = data.index(pattern_byte)
  return nil unless char_index

  # The byte offset is exactly the bytesize of everything preceding the
  # match. This replaces the previous fixed 100-byte scan window, which
  # returned a wrong offset whenever the characters before the pattern
  # carried more than 100 extra continuation bytes.
  data[0...char_index].bytesize
end
end
end
end

View File

@ -8,6 +8,7 @@ module Gitlab
included do
WriteError = Class.new(StandardError)
ReadError = Class.new(StandardError)
end
end
end

File diff suppressed because one or more lines are too long

View File

@ -51,7 +51,7 @@ describe Gitlab::Ci::Trace::ChunkedFile::LiveTrace, :clean_gitlab_redis_cache do
end
context 'when offset is size' do
let(:offset) { sample_trace_raw.length }
let(:offset) { sample_trace_raw.bytesize }
it 'does nothing' do
expect { subject }.not_to change { described_class.exist?(job_id) }

View File

@ -90,7 +90,7 @@ shared_examples "ChunkedIO shared tests" do
subject { chunked_io.seek(pos, where) }
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -98,11 +98,11 @@ shared_examples "ChunkedIO shared tests" do
let(:pos) { 0 }
let(:where) { IO::SEEK_END }
it { is_expected.to eq(sample_trace_raw.length) }
it { is_expected.to eq(sample_trace_raw.bytesize) }
end
context 'when moves pos to middle of the file' do
let(:pos) { sample_trace_raw.length / 2 }
let(:pos) { sample_trace_raw.bytesize / 2 }
let(:where) { IO::SEEK_SET }
it { is_expected.to eq(pos) }
@ -112,7 +112,7 @@ shared_examples "ChunkedIO shared tests" do
it 'matches the result' do
expect(chunked_io.seek(0)).to eq(0)
expect(chunked_io.seek(100, IO::SEEK_CUR)).to eq(100)
expect { chunked_io.seek(sample_trace_raw.length + 1, IO::SEEK_CUR) }
expect { chunked_io.seek(sample_trace_raw.bytesize + 1, IO::SEEK_CUR) }
.to raise_error('new position is outside of file')
end
end
@ -122,13 +122,13 @@ shared_examples "ChunkedIO shared tests" do
subject { chunked_io.eof? }
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
context 'when current pos is at end of the file' do
before do
chunked_io.seek(sample_trace_raw.length, IO::SEEK_SET)
chunked_io.seek(sample_trace_raw.bytesize, IO::SEEK_SET)
end
it { is_expected.to be_truthy }
@ -148,7 +148,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -160,7 +160,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -186,7 +186,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -195,7 +195,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -204,7 +204,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is half of file size' do
before do
set_half_buffer_size_of(sample_trace_raw.length)
set_half_buffer_size_of(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -217,7 +217,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -228,7 +228,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -239,11 +239,11 @@ shared_examples "ChunkedIO shared tests" do
end
context 'when tries to read oversize' do
let(:length) { sample_trace_raw.length + 1000 }
let(:length) { sample_trace_raw.bytesize + 1000 }
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -254,7 +254,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -269,7 +269,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -280,7 +280,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -306,7 +306,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -315,7 +315,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(sample_trace_raw.length)
set_larger_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -324,7 +324,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is half of file size' do
before do
set_half_buffer_size_of(sample_trace_raw.length)
set_half_buffer_size_of(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
end
@ -333,7 +333,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when pos is at middle of the file' do
before do
set_smaller_buffer_size_than(sample_trace_raw.length)
set_smaller_buffer_size_than(sample_trace_raw.bytesize)
fill_trace_to_chunks(sample_trace_raw)
chunked_io.seek(chunked_io.size / 2)
@ -357,21 +357,21 @@ shared_examples "ChunkedIO shared tests" do
context 'when data does not exist' do
shared_examples 'writes a trace' do
it do
is_expected.to eq(data.length)
is_expected.to eq(data.bytesize)
described_class.new(job_id, nil, 'rb') do |stream|
expect(stream.read).to eq(data)
expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_count(job_id) })
.to eq(stream.send(:chunks_count))
expect(chunk_stores.inject(0) { |sum, store| sum + store.chunks_size(job_id) })
.to eq(data.length)
.to eq(data.bytesize)
end
end
end
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(data.length)
set_smaller_buffer_size_than(data.bytesize)
end
it_behaves_like 'writes a trace'
@ -379,7 +379,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(data.length)
set_larger_buffer_size_than(data.bytesize)
end
it_behaves_like 'writes a trace'
@ -387,7 +387,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is half of file size' do
before do
set_half_buffer_size_of(data.length)
set_half_buffer_size_of(data.bytesize)
end
it_behaves_like 'writes a trace'
@ -404,12 +404,12 @@ shared_examples "ChunkedIO shared tests" do
context 'when data already exists', :partial_support do
let(:exist_data) { 'exist data' }
let(:total_size) { exist_data.length + data.length }
let(:total_size) { exist_data.bytesize + data.bytesize }
shared_examples 'appends a trace' do
it do
described_class.new(job_id, nil, 'a+b') do |stream|
expect(stream.write(data)).to eq(data.length)
expect(stream.write(data)).to eq(data.bytesize)
end
described_class.new(job_id, nil, 'rb') do |stream|
@ -424,7 +424,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is smaller than file size' do
before do
set_smaller_buffer_size_than(data.length)
set_smaller_buffer_size_than(data.bytesize)
fill_trace_to_chunks(exist_data)
end
@ -433,7 +433,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is larger than file size', :partial_support do
before do
set_larger_buffer_size_than(data.length)
set_larger_buffer_size_than(data.bytesize)
fill_trace_to_chunks(exist_data)
end
@ -442,7 +442,7 @@ shared_examples "ChunkedIO shared tests" do
context 'when buffer size is half of file size' do
before do
set_half_buffer_size_of(data.length)
set_half_buffer_size_of(data.bytesize)
fill_trace_to_chunks(exist_data)
end