ace833b31d
Example: for issues that are closed, links will now show '[closed]' after the issue number. This is done as a post-processing step after the markdown has been loaded from the cache, since the status of the issue may change between the cache being populated and the content being displayed. To avoid an N+1 query problem when rendering notes, ObjectRenderer populates the cache of referenced issuables for all notes at once, before the post-processing phase. As part of this change, the Banzai BaseParser#grouped_objects_for_nodes method has been refactored to return a Hash keyed by the node itself, since this was a common usage pattern for this method.
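
The snippet below is a minimal sketch, not the actual GitLab post-processing filter, of how a node-keyed hash returned by the extractor could drive the '[closed]' annotation. It assumes `doc` is a Nokogiri HTML fragment of cached markdown and `extractor` is a Banzai::IssuableExtractor instance, as exercised in the spec further down; the CSS selector and the `closed?` check are illustrative assumptions.

# Minimal sketch, assuming `doc` (Nokogiri fragment of cached markdown)
# and `extractor` (Banzai::IssuableExtractor) are already available.
# extract returns a Hash keyed by the link node itself, so each node can
# be annotated in place without issuing further queries per node.
reference_nodes = doc.css('a.gfm[data-reference-type]')
issuables = extractor.extract(reference_nodes)

issuables.each do |node, issuable|
  # The state is read at render time, after the cached HTML was loaded,
  # so an issue closed since caching is still labelled correctly.
  node.content = "#{node.content} [closed]" if issuable.closed?
end
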
require 'spec_helper'

describe Banzai::IssuableExtractor, lib: true do
  let(:project) { create(:empty_project) }
  let(:user) { create(:user) }
  let(:extractor) { described_class.new(project, user) }
  let(:issue) { create(:issue, project: project) }
  let(:merge_request) { create(:merge_request, source_project: project) }

  let(:issue_link) do
    html_to_node(
      "<a href='' data-issue='#{issue.id}' data-reference-type='issue' class='gfm'>text</a>"
    )
  end

  let(:merge_request_link) do
    html_to_node(
      "<a href='' data-merge-request='#{merge_request.id}' data-reference-type='merge_request' class='gfm'>text</a>"
    )
  end

  def html_to_node(html)
    Nokogiri::HTML.fragment(
      html
    ).children[0]
  end

  it 'returns instances of issuables for nodes with references' do
    result = extractor.extract([issue_link, merge_request_link])

    expect(result).to eq(issue_link => issue, merge_request_link => merge_request)
  end

  describe 'caching' do
    before do
      RequestStore.begin!
    end

    after do
      RequestStore.end!
      RequestStore.clear!
    end

    it 'saves records to cache' do
      extractor.extract([issue_link, merge_request_link])

      second_call_queries = ActiveRecord::QueryRecorder.new do
        extractor.extract([issue_link, merge_request_link])
      end.count

      expect(second_call_queries).to eq 0
    end
  end
end