# gitlab-org/gitlab-foss: spec/services/event_create_service_spec.rb

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state do
include SnowplowHelpers
let(:service) { described_class.new }
let_it_be(:user, reload: true) { create :user }
let_it_be(:project) { create(:project) }
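# Shared example: running `subject` must increment the RedisHLL unique-events
# count for `event_action` by exactly one (counting window: yesterday to today).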
shared_examples 'it records the event in the event counter' do
specify do
tracking_params = { event_action: event_action, date_from: Date.yesterday, date_to: Date.today }
expect { subject }
.to change { Gitlab::UsageDataCounters::TrackUniqueEvents.count_unique_events(**tracking_params) }
.by(1)
end
end
describe 'Issues' do
describe '#open_issue' do
let(:issue) { create(:issue) }
it { expect(service.open_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.open_issue(issue, issue.author) }.to change { Event.count }
end
end
describe '#close_issue' do
let(:issue) { create(:issue) }
it { expect(service.close_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.close_issue(issue, issue.author) }.to change { Event.count }
end
end
describe '#reopen_issue' do
let(:issue) { create(:issue) }
it { expect(service.reopen_issue(issue, issue.author)).to be_truthy }
it "creates new event" do
expect { service.reopen_issue(issue, issue.author) }.to change { Event.count }
end
end
end
describe 'Merge Requests', :snowplow do
describe '#open_mr' do
subject(:open_mr) { service.open_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(open_mr).to be_truthy }
it "creates new event" do
expect { open_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'created' }
let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
end
end
end
describe '#close_mr' do
subject(:close_mr) { service.close_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(close_mr).to be_truthy }
it "creates new event" do
expect { close_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'closed' }
let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
end
end
end
describe '#merge_mr' do
subject(:merge_mr) { service.merge_mr(merge_request, merge_request.author) }
let(:merge_request) { create(:merge_request) }
it { expect(merge_mr).to be_truthy }
it "creates new event" do
expect { merge_mr }.to change { Event.count }
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'merged' }
let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
end
end
end
describe '#reopen_mr' do
let(:merge_request) { create(:merge_request) }
it { expect(service.reopen_mr(merge_request, merge_request.author)).to be_truthy }
it "creates new event" do
expect { service.reopen_mr(merge_request, merge_request.author) }.to change { Event.count }
end
end
describe '#approve_mr' do
let(:merge_request) { create(:merge_request) }
it { expect(service.approve_mr(merge_request, user)).to be_truthy }
it 'creates new event' do
expect { service.approve_mr(merge_request, user) }
.to change { Event.approved_action.where(target: merge_request).count }.by(1)
end
end
end
describe 'Milestone' do
describe '#open_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.open_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.open_milestone(milestone, user) }.to change { Event.count }
end
end
describe '#close_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.close_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.close_milestone(milestone, user) }.to change { Event.count }
end
end
describe '#destroy_milestone' do
let(:milestone) { create(:milestone) }
it { expect(service.destroy_milestone(milestone, user)).to be_truthy }
it "creates new event" do
expect { service.destroy_milestone(milestone, user) }.to change { Event.count }
end
end
end
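# Shared behaviour for #push and #bulk_push: the payload service class under
# test is passed in as `service_class`, and the error case asserts that the
# event row and its payload row are rolled back together.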
shared_examples_for 'service for creating a push event' do |service_class|
it 'creates a new event' do
expect { subject }.to change { Event.count }
end
it 'creates the push event payload' do
expect(service_class).to receive(:new)
.with(an_instance_of(PushEvent), push_data)
.and_call_original
subject
end
it 'updates user last activity' do
expect { subject }.to change { user.last_activity_on }.to(Date.today)
end
it 'caches the last push event for the user' do
expect_next_instance_of(Users::LastPushEventService) do |instance|
expect(instance).to receive(:cache_last_push_event).with(an_instance_of(PushEvent))
end
subject
end
it 'does not create any event data when an error is raised' do
payload_service = double(:service)
allow(payload_service).to receive(:execute)
.and_raise(RuntimeError)
allow(service_class).to receive(:new)
.and_return(payload_service)
expect { subject }.to raise_error(RuntimeError)
expect(Event.count).to eq(0)
expect(PushEventPayload.count).to eq(0)
end
end
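# Wiki events are deduplicated by fingerprint: calling the service twice with
# the same (meta, action, fingerprint) tuple must return the existing event
# rather than create a duplicate, as the idempotency example below verifies.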
describe '#wiki_event' do
let_it_be(:user) { create(:user) }
let_it_be(:wiki_page) { create(:wiki_page) }
let_it_be(:meta) { create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
let(:fingerprint) { generate(:sha) }
def create_event
service.wiki_event(meta, user, action, fingerprint)
end
where(:action) { Event::WIKI_ACTIONS.map { |action| [action] } }
with_them do
subject { create_event }
it 'creates the event' do
expect(create_event).to have_attributes(
wiki_page?: true,
valid?: true,
persisted?: true,
action: action.to_s,
wiki_page: wiki_page,
author: user,
fingerprint: fingerprint
)
end
it 'is idempotent', :aggregate_failures do
event = nil
expect { event = create_event }.to change(Event, :count).by(1)
duplicate = nil
expect { duplicate = create_event }.not_to change(Event, :count)
expect(duplicate).to eq(event)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::WIKI_ACTION }
end
end
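# Any Event action outside WIKI_ACTIONS must be rejected outright: the
# service raises instead of recording an event with an illegal action.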
(Event.actions.keys - Event::WIKI_ACTIONS).each do |bad_action|
context "The action is #{bad_action}" do
let(:action) { bad_action }
it 'raises an error' do
expect { create_event }.to raise_error(described_class::IllegalActionError)
end
end
end
end
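# `push_data` mirrors a post-receive hook payload; the all-zero `before` SHA
# is git's convention for a newly created ref.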
describe '#push', :snowplow do
let(:push_data) do
{
commits: [
{
id: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
message: 'This is a commit'
}
],
before: '0000000000000000000000000000000000000000',
after: '1cf19a015df3523caf0a1f9d40c98a267d6a2fc2',
total_commits_count: 1,
ref: 'refs/heads/my-branch'
}
end
subject { service.push(project, user, push_data) }
it_behaves_like 'service for creating a push event', PushEventPayloadService
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::PUSH_ACTION }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.to_s }
let(:action) { :push }
let(:namespace) { project.namespace }
let(:feature_flag_name) { :route_hll_to_snowplow }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
event: 'action_active_users_project_repo').to_context]
end
end
end
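# A bulk push (many refs updated in one push) records only aggregate ref data
# (count and type) instead of a per-commit list.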
describe '#bulk_push', :snowplow do
let(:push_data) do
{
action: :created,
ref_count: 4,
ref_type: :branch
}
end
subject { service.bulk_push(project, user, push_data) }
it_behaves_like 'service for creating a push event', BulkPushEventPayloadService
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::PUSH_ACTION }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:category) { described_class.to_s }
let(:action) { :push }
let(:namespace) { project.namespace }
let(:feature_flag_name) { :route_hll_to_snowplow }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
event: 'action_active_users_project_repo').to_context]
end
end
end
describe 'Project' do
describe '#join_project' do
subject { service.join_project(project, user) }
it { is_expected.to be_truthy }
it { expect { subject }.to change { Event.count }.from(0).to(1) }
end
describe '#expired_leave_project' do
subject { service.expired_leave_project(project, user) }
it { is_expected.to be_truthy }
it { expect { subject }.to change { Event.count }.from(0).to(1) }
end
end
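# Design-management events: #save_designs takes separate `create:` and
# `update:` collections and returns the events it created, so the specs below
# expect one event per design plus RedisHLL and Snowplow tracking.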
describe 'design events', :snowplow do
let_it_be(:design) { create(:design, project: project) }
let_it_be(:author) { user }
before do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
end
describe '#save_designs' do
let_it_be(:updated) { create_list(:design, 5) }
let_it_be(:created) { create_list(:design, 3) }
subject(:result) { service.save_designs(author, create: created, update: updated) }
specify { expect { result }.to change { Event.count }.by(8) }
# An additional query due to event tracking
specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 3 created design events' do
ids = result.pluck('id')
events = Event.created_action.where(id: ids)
expect(events.map(&:design)).to match_array(created)
end
it 'creates 5 updated design events' do
ids = result.pluck('id')
events = Event.updated_action.where(id: ids)
expect(events.map(&:design)).to match_array(updated)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
it 'records correct create payload with Snowplow event' do
service.save_designs(author, create: [design])
expect_snowplow_event(
category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'create',
namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
)
end
it 'records correct update payload with Snowplow event' do
service.save_designs(author, update: [design])
expect_snowplow_event(
category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'update',
namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
)
end
context 'when FF is disabled' do
before do
stub_feature_flags(route_hll_to_snowplow_phase2: false)
end
it "doesn't emit snowplow events", :snowplow do
subject
expect_no_snowplow_event
end
end
end
describe '#destroy_designs' do
let_it_be(:designs) { create_list(:design, 5) }
let_it_be(:author) { create(:user) }
subject(:result) { service.destroy_designs(designs, author) }
specify { expect { result }.to change { Event.count }.by(5) }
# An additional query due to event tracking
specify { expect { result }.not_to exceed_query_limit(2) }
it 'creates 5 destroyed design events' do
ids = result.pluck('id')
events = Event.destroyed_action.where(id: ids)
expect(events.map(&:design)).to match_array(designs)
end
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
it 'records correct payload with Snowplow event' do
service.destroy_designs([design], author)
expect_snowplow_event(
category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
action: 'destroy',
namespace: design.project.namespace,
user: author,
project: design.project,
label: 'design_users'
)
end
context 'when FF is disabled' do
before do
stub_feature_flags(route_hll_to_snowplow_phase2: false)
end
it "doesn't emit snowplow events", :snowplow do
subject
expect_no_snowplow_event
end
end
end
end
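# Note events always create an Event, but only diff notes on merge requests
# count toward the merge request unique-action metric; plain notes leave the
# counter untouched, as the two contexts below show.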
describe '#leave_note', :snowplow do
subject(:leave_note) { service.leave_note(note, author) }
let(:note) { create(:note) }
let(:author) { create(:user) }
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
it { expect(leave_note).to be_truthy }
it "creates new event" do
expect { leave_note }.to change { Event.count }.by(1)
end
context 'when it is a diff note' do
let(:note) { create(:diff_note_on_merge_request) }
it_behaves_like "it records the event in the event counter"
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:note) { create(:diff_note_on_merge_request) }
let(:category) { described_class.name }
let(:action) { 'commented' }
let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:namespace) { project.namespace }
let(:project) { note.project }
let(:user) { author }
let(:context) do
[Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
end
end
end
context 'when it is not a diff note' do
it 'does not change the unique action counter' do
counter_class = Gitlab::UsageDataCounters::TrackUniqueEvents
tracking_params = { event_action: event_action, date_from: Date.yesterday, date_to: Date.today }
expect { subject }.not_to change { counter_class.count_unique_events(**tracking_params) }
end
end
end
end