gitlab-org--gitlab-foss/spec/models/event_spec.rb

require 'spec_helper'

describe Event do
  describe "Associations" do
    it { is_expected.to belong_to(:project) }
    it { is_expected.to belong_to(:target) }
  end

  describe "Respond to" do
    it { is_expected.to respond_to(:author_name) }
    it { is_expected.to respond_to(:author_email) }
    it { is_expected.to respond_to(:issue_title) }
    it { is_expected.to respond_to(:merge_request_title) }
    it { is_expected.to respond_to(:commits) }
  end

  describe 'Callbacks' do
    let(:project) { create(:project) }

    describe 'after_create :reset_project_activity' do
      it 'calls the reset_project_activity method' do
        expect_any_instance_of(described_class).to receive(:reset_project_activity)

        create_push_event(project, project.owner)
      end
    end

    describe 'after_create :set_last_repository_updated_at' do
      context 'with a push event' do
        it 'updates the project last_repository_updated_at' do
          project.update(last_repository_updated_at: 1.year.ago)

          create_push_event(project, project.owner)
          project.reload

          expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
        end
      end

      context 'without a push event' do
        it 'does not update the project last_repository_updated_at' do
          project.update(last_repository_updated_at: 1.year.ago)

          create(:closed_issue_event, project: project, author: project.owner)
          project.reload

          expect(project.last_repository_updated_at).to be_within(1.minute).of(1.year.ago)
        end
      end
    end
  end

  describe "Push event" do
    let(:project) { create(:project, :private) }
    let(:user) { project.owner }
    let(:event) { create_push_event(project, user) }

    it do
      expect(event.push?).to be_truthy
      expect(event.visible_to_user?(user)).to be_truthy
      expect(event.visible_to_user?(nil)).to be_falsey
      expect(event.tag?).to be_falsey
      expect(event.branch_name).to eq("master")
      expect(event.author).to eq(user)
    end
  end

  describe '#membership_changed?' do
    context "created" do
      subject { build(:event, :created).membership_changed? }
      it { is_expected.to be_falsey }
    end

    context "updated" do
      subject { build(:event, :updated).membership_changed? }
      it { is_expected.to be_falsey }
    end

    context "expired" do
      subject { build(:event, :expired).membership_changed? }
      it { is_expected.to be_truthy }
    end

    context "left" do
      subject { build(:event, :left).membership_changed? }
      it { is_expected.to be_truthy }
    end

    context "joined" do
      subject { build(:event, :joined).membership_changed? }
      it { is_expected.to be_truthy }
    end
  end

  describe '#note?' do
    subject { described_class.new(project: target.project, target: target) }

    context 'issue note event' do
      let(:target) { create(:note_on_issue) }

      it { is_expected.to be_note }
    end

    context 'merge request diff note event' do
      let(:target) { create(:legacy_diff_note_on_merge_request) }

      it { is_expected.to be_note }
    end
  end

  describe '#visible_to_user?' do
    let(:project) { create(:project, :public) }
    let(:non_member) { create(:user) }
    let(:member) { create(:user) }
    let(:guest) { create(:user) }
    let(:author) { create(:author) }
    let(:assignee) { create(:user) }
    let(:admin) { create(:admin) }
    let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) }
    let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) }
    let(:note_on_commit) { create(:note_on_commit, project: project) }
    let(:note_on_issue) { create(:note_on_issue, noteable: issue, project: project) }
    let(:note_on_confidential_issue) { create(:note_on_issue, noteable: confidential_issue, project: project) }
    let(:event) { described_class.new(project: project, target: target, author_id: author.id) }
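
    # Grant explicit roles so each persona below exercises a different
    # permission level when the event's visibility is checked.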
    before do
      project.team << [member, :developer]
      project.team << [guest, :guest]
    end

    context 'commit note event' do
      let(:target) { note_on_commit }

      it do
        aggregate_failures do
          expect(event.visible_to_user?(non_member)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq true
          expect(event.visible_to_user?(admin)).to eq true
        end
      end

      context 'private project' do
        let(:project) { create(:project, :private) }

        it do
          aggregate_failures do
            expect(event.visible_to_user?(non_member)).to eq false
            expect(event.visible_to_user?(member)).to eq true
            expect(event.visible_to_user?(guest)).to eq false
            expect(event.visible_to_user?(admin)).to eq true
          end
        end
      end
    end

    context 'issue event' do
      context 'for non confidential issues' do
        let(:target) { issue }

        it do
          expect(event.visible_to_user?(non_member)).to eq true
          expect(event.visible_to_user?(author)).to eq true
          expect(event.visible_to_user?(assignee)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq true
          expect(event.visible_to_user?(admin)).to eq true
        end
      end

      context 'for confidential issues' do
        let(:target) { confidential_issue }

        it do
          expect(event.visible_to_user?(non_member)).to eq false
          expect(event.visible_to_user?(author)).to eq true
          expect(event.visible_to_user?(assignee)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq false
          expect(event.visible_to_user?(admin)).to eq true
        end
      end
    end

    context 'issue note event' do
      context 'on non confidential issues' do
        let(:target) { note_on_issue }

        it do
          expect(event.visible_to_user?(non_member)).to eq true
          expect(event.visible_to_user?(author)).to eq true
          expect(event.visible_to_user?(assignee)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq true
          expect(event.visible_to_user?(admin)).to eq true
        end
      end

      context 'on confidential issues' do
        let(:target) { note_on_confidential_issue }

        it do
          expect(event.visible_to_user?(non_member)).to eq false
          expect(event.visible_to_user?(author)).to eq true
          expect(event.visible_to_user?(assignee)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq false
          expect(event.visible_to_user?(admin)).to eq true
        end
      end
    end

    context 'merge request diff note event' do
      let(:project) { create(:project, :public) }
      let(:merge_request) { create(:merge_request, source_project: project, author: author, assignee: assignee) }
      let(:note_on_merge_request) { create(:legacy_diff_note_on_merge_request, noteable: merge_request, project: project) }
      let(:target) { note_on_merge_request }

      it do
        expect(event.visible_to_user?(non_member)).to eq true
        expect(event.visible_to_user?(author)).to eq true
        expect(event.visible_to_user?(assignee)).to eq true
        expect(event.visible_to_user?(member)).to eq true
        expect(event.visible_to_user?(guest)).to eq true
        expect(event.visible_to_user?(admin)).to eq true
      end

      context 'private project' do
        let(:project) { create(:project, :private) }

        it do
          expect(event.visible_to_user?(non_member)).to eq false
          expect(event.visible_to_user?(author)).to eq true
          expect(event.visible_to_user?(assignee)).to eq true
          expect(event.visible_to_user?(member)).to eq true
          expect(event.visible_to_user?(guest)).to eq false
          expect(event.visible_to_user?(admin)).to eq true
        end
      end
    end
  end

  describe '.limit_recent' do
    let!(:event1) { create(:closed_issue_event) }
    let!(:event2) { create(:closed_issue_event) }

    describe 'without an explicit limit' do
      subject { described_class.limit_recent }

      it { is_expected.to eq([event2, event1]) }
    end

    describe 'with an explicit limit' do
      subject { described_class.limit_recent(1) }

      it { is_expected.to eq([event2]) }
    end
  end

  describe '#reset_project_activity' do
    let(:project) { create(:project) }

    context 'when a project was updated less than 1 hour ago' do
      it 'does not update the project' do
        project.update(last_activity_at: Time.now)

        expect(project).not_to receive(:update_column)
          .with(:last_activity_at, a_kind_of(Time))

        create_push_event(project, project.owner)
      end
    end

    context 'when a project was updated more than 1 hour ago' do
      it 'updates the project' do
        project.update(last_activity_at: 1.year.ago)

        create_push_event(project, project.owner)
        project.reload

        expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
      end
    end
  end

  describe '#authored_by?' do
    let(:event) { build(:event) }

    it 'returns true when the event author and user are the same' do
      expect(event.authored_by?(event.author)).to eq(true)
    end

    it 'returns false when passing nil as an argument' do
      expect(event.authored_by?(nil)).to eq(false)
    end

    it 'returns false when the given user is not the author of the event' do
      user = double(:user, id: -1)

      expect(event.authored_by?(user)).to eq(false)
    end
  end
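
  # Shared helper: builds a :push_event together with a :push_event_payload so
  # the push-related callbacks and predicates above have real push metadata
  # (ref, commit count, target commit) to work with.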
  def create_push_event(project, user)
    event = create(:push_event, project: project, author: user)

    create(:push_event_payload,
           event: event,
           commit_to: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
           commit_count: 0,
           ref: 'master')

    event
  end
end