Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2020-02-02 06:08:56 +00:00
parent 78214bf742
commit 208e2e308c
6 changed files with 38 additions and 21 deletions


@@ -83,9 +83,9 @@ class Blob < SimpleDelegator
     new(blob, project)
   end
 
-  def self.lazy(project, commit_id, path)
+  def self.lazy(project, commit_id, path, blob_size_limit: Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE)
     BatchLoader.for([commit_id, path]).batch(key: project.repository) do |items, loader, args|
-      args[:key].blobs_at(items).each do |blob|
+      args[:key].blobs_at(items, blob_size_limit: blob_size_limit).each do |blob|
         loader.call([blob.commit_id, blob.path], blob) if blob
       end
     end
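
A minimal usage sketch of the lazy API above, assuming a `project` and `commit_id` like the ones in the specs further down; the batching behaviour is what those specs assert:

  # Both blobs belong to the same repository, so BatchLoader groups them into a
  # single `blobs_at` call, issued only when the first blob is actually accessed.
  changelog    = Blob.lazy(project, commit_id, 'CHANGELOG')
  contributing = Blob.lazy(project, commit_id, 'CONTRIBUTING.md')

  changelog.data     # one batched `blobs_at(items, blob_size_limit: ...)` call loads both
  contributing.data  # already loaded by that batch, no second round trip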


@@ -516,10 +516,12 @@ class Repository
   end
 
   # items is an Array like: [[oid, path], [oid1, path1]]
-  def blobs_at(items)
+  def blobs_at(items, blob_size_limit: Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE)
     return [] unless exists?
 
-    raw_repository.batch_blobs(items).map { |blob| Blob.decorate(blob, project) }
+    raw_repository.batch_blobs(items, blob_size_limit: blob_size_limit).map do |blob|
+      Blob.decorate(blob, project)
+    end
   end
 
   def root_ref
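
Call-shape sketch for the updated `blobs_at`; the `items` pair is a placeholder, the 10 MB default matches the value the blob spec below expects (`10 * 1024 * 1024`), and the smaller limit is the one the diff code in this commit passes in:

  items = [[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']]

  repository.blobs_at(items)                               # default: up to 10 MB of data per blob
  repository.blobs_at(items, blob_size_limit: 100 * 1024)  # diff rendering asks for only the first 100 KB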


@@ -0,0 +1,5 @@
+---
+title: Load maximum 1mb blob data for a diff file
+merge_request: 24160
+author:
+type: performance


@@ -350,6 +350,16 @@ module Gitlab
       private
 
+      def fetch_blob(sha, path)
+        return unless sha
+
+        # Load only patch_hard_limit_bytes number of bytes for the blob
+        # Because otherwise, it is too large to be displayed
+        Blob.lazy(
+          repository.project, sha, path,
+          blob_size_limit: Gitlab::Git::Diff.patch_hard_limit_bytes)
+      end
+
       def total_blob_lines(blob)
         @total_lines ||= begin
           line_count = blob.lines.size
@@ -385,15 +395,11 @@
       end
 
       def new_blob_lazy
-        return unless new_content_sha
-
-        Blob.lazy(repository.project, new_content_sha, file_path)
+        fetch_blob(new_content_sha, file_path)
       end
 
       def old_blob_lazy
-        return unless old_content_sha
-
-        Blob.lazy(repository.project, old_content_sha, old_path)
+        fetch_blob(old_content_sha, old_path)
       end
 
       def simple_viewer_class


@@ -169,18 +169,18 @@ describe Gitlab::Diff::File do
     end
   end
 
-  describe '#old_blob' do
-    it 'returns blob of commit of base commit' do
+  describe '#old_blob and #new_blob' do
+    it 'returns blob of base commit and the new commit' do
+      items = [
+        [diff_file.new_content_sha, diff_file.new_path], [diff_file.old_content_sha, diff_file.old_path]
+      ]
+
+      expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: 100 * 1024).and_call_original
+
       old_data = diff_file.old_blob.data
-
-      expect(old_data).to include('raise "System commands must be given as an array of strings"')
-    end
-  end
-
-  describe '#new_blob' do
-    it 'returns blob of new commit' do
       data = diff_file.new_blob.data
 
+      expect(old_data).to include('raise "System commands must be given as an array of strings"')
       expect(data).to include('raise RuntimeError, "System commands must be given as an array of strings"')
     end
   end
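
A rough sense of the performance win, using only the two limits that appear in the specs in this commit and assuming both sides of a diff file previously hit the default display cap:

  old_limit = 10 * 1024 * 1024   # MAX_DATA_DISPLAY_SIZE, the default the new keyword preserves
  new_limit = 100 * 1024         # Gitlab::Git::Diff.patch_hard_limit_bytes in the spec above

  2 * old_limit  # => 20_971_520 bytes per diff file in the worst case before
  2 * new_limit  # =>    204_800 bytes now, roughly a 100x reduction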


@@ -22,6 +22,7 @@ describe Blob do
     let(:same_project) { Project.find(project.id) }
     let(:other_project) { create(:project, :repository) }
     let(:commit_id) { 'e63f41fe459e62e1228fcef60d7189127aeba95a' }
+    let(:blob_size_limit) { 10 * 1024 * 1024 }
 
     it 'does not fetch blobs when none are accessed' do
       expect(project.repository).not_to receive(:blobs_at)
@@ -30,7 +31,9 @@
     end
 
     it 'fetches all blobs for the same repository when one is accessed' do
-      expect(project.repository).to receive(:blobs_at).with([[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']]).once.and_call_original
+      expect(project.repository).to receive(:blobs_at)
+        .with([[commit_id, 'CHANGELOG'], [commit_id, 'CONTRIBUTING.md']], blob_size_limit: blob_size_limit)
+        .once.and_call_original
       expect(other_project.repository).not_to receive(:blobs_at)
 
       changelog = described_class.lazy(project, commit_id, 'CHANGELOG')
@@ -53,7 +56,8 @@
       readme = described_class.lazy(project, commit_id, 'README.md')
 
-      expect(project.repository).to receive(:blobs_at).with([[commit_id, 'README.md']]).once.and_call_original
+      expect(project.repository).to receive(:blobs_at)
+        .with([[commit_id, 'README.md']], blob_size_limit: blob_size_limit).once.and_call_original
 
       readme.id
     end