require 'spec_helper'

# Request specs for the v3 Users API (users listing, SSH keys, emails,
# block/unblock, events, and user creation). `v3_api` builds a
# "/api/v3/..." URL authenticated as the given user; `json_response`
# is the parsed body of the last request (both from the project's
# spec support helpers).
describe API::V3::Users do
  let(:user) { create(:user) }
  let(:admin) { create(:admin) }
  let(:key) { create(:key, user: user) }
  let(:email) { create(:email, user: user) }
  let(:ldap_blocked_user) { create(:omniauth_user, provider: 'ldapmain', state: 'ldap_blocked') }

  describe 'GET /users' do
    context 'when authenticated' do
      it 'returns an array of users' do
        get v3_api('/users', user)

        expect(response).to have_gitlab_http_status(200)
        expect(response).to include_pagination_headers
        expect(json_response).to be_an Array
        username = user.username
        expect(json_response.detect do |user|
          user['username'] == username
        end['username']).to eq(username)
      end
    end

    context 'when authenticated as user' do
      it 'does not reveal the `is_admin` flag of the user' do
        get v3_api('/users', user)

        expect(json_response.first.keys).not_to include 'is_admin'
      end
    end

    context 'when authenticated as admin' do
      it 'reveals the `is_admin` flag of the user' do
        get v3_api('/users', admin)

        expect(json_response.first.keys).to include 'is_admin'
      end
    end
  end

  describe 'GET /user/:id/keys' do
    # Force-create the admin record before each example.
    before { admin }

    context 'when unauthenticated' do
      it 'returns authentication error' do
        get v3_api("/users/#{user.id}/keys")
        expect(response).to have_gitlab_http_status(401)
      end
    end

    context 'when authenticated' do
      it 'returns 404 for non-existing user' do
        get v3_api('/users/999999/keys', admin)
        expect(response).to have_gitlab_http_status(404)
        expect(json_response['message']).to eq('404 User Not Found')
      end

      it 'returns array of ssh keys' do
        user.keys << key
        user.save

        get v3_api("/users/#{user.id}/keys", admin)

        expect(response).to have_gitlab_http_status(200)
        expect(json_response).to be_an Array
        expect(json_response.first['title']).to eq(key.title)
      end
    end

    context "scopes" do
      # `path` and `api_call` feed the shared scope examples.
      let(:user) { admin }
      let(:path) { "/users/#{user.id}/keys" }
      let(:api_call) { method(:v3_api) }

      before do
        user.keys << key
        user.save
      end

      include_examples 'allows the "read_user" scope'
    end
  end

  describe 'GET /user/:id/emails' do
    before { admin }

    context 'when unauthenticated' do
      it 'returns authentication error' do
        get v3_api("/users/#{user.id}/emails")
        expect(response).to have_gitlab_http_status(401)
      end
    end

    context 'when authenticated' do
      it 'returns 404 for non-existing user' do
        get v3_api('/users/999999/emails', admin)
        expect(response).to have_gitlab_http_status(404)
        expect(json_response['message']).to eq('404 User Not Found')
      end

      it 'returns array of emails' do
        user.emails << email
        user.save

        get v3_api("/users/#{user.id}/emails", admin)

        expect(response).to have_gitlab_http_status(200)
        expect(json_response).to be_an Array
        expect(json_response.first['email']).to eq(email.email)
      end

      it "returns a 404 for invalid ID" do
        put v3_api("/users/ASDF/emails", admin)

        expect(response).to have_gitlab_http_status(404)
      end
    end
  end

  describe "GET /user/keys" do
    context "when unauthenticated" do
      it "returns authentication error" do
        get v3_api("/user/keys")
        expect(response).to have_gitlab_http_status(401)
      end
    end

    context "when authenticated" do
      it "returns array of ssh keys" do
        user.keys << key
        user.save

        get v3_api("/user/keys", user)

        expect(response).to have_gitlab_http_status(200)
        expect(json_response).to be_an Array
        expect(json_response.first["title"]).to eq(key.title)
      end
    end
  end

  describe "GET /user/emails" do
    context "when unauthenticated" do
      it "returns authentication error" do
        get v3_api("/user/emails")
        expect(response).to have_gitlab_http_status(401)
      end
    end

    context "when authenticated" do
      it "returns array of emails" do
        user.emails << email
        user.save

        get v3_api("/user/emails", user)

        expect(response).to have_gitlab_http_status(200)
        expect(json_response).to be_an Array
        expect(json_response.first["email"]).to eq(email.email)
      end
    end
  end

  describe 'PUT /users/:id/block' do
    before { admin }

    it 'blocks existing user' do
      put v3_api("/users/#{user.id}/block", admin)
      expect(response).to have_gitlab_http_status(200)
      expect(user.reload.state).to eq('blocked')
    end

    it 'does not re-block ldap blocked users' do
      put v3_api("/users/#{ldap_blocked_user.id}/block", admin)
      expect(response).to have_gitlab_http_status(403)
      expect(ldap_blocked_user.reload.state).to eq('ldap_blocked')
    end

    it 'does not be available for non admin users' do
      put v3_api("/users/#{user.id}/block", user)
      expect(response).to have_gitlab_http_status(403)
      expect(user.reload.state).to eq('active')
    end

    it 'returns a 404 error if user id not found' do
      put v3_api('/users/9999/block', admin)
      expect(response).to have_gitlab_http_status(404)
      expect(json_response['message']).to eq('404 User Not Found')
    end
  end

  describe 'PUT /users/:id/unblock' do
    let(:blocked_user) { create(:user, state: 'blocked') }

    before { admin }

    it 'unblocks existing user' do
      put v3_api("/users/#{user.id}/unblock", admin)
      expect(response).to have_gitlab_http_status(200)
      expect(user.reload.state).to eq('active')
    end

    it 'unblocks a blocked user' do
      put v3_api("/users/#{blocked_user.id}/unblock", admin)
      expect(response).to have_gitlab_http_status(200)
      expect(blocked_user.reload.state).to eq('active')
    end

    it 'does not unblock ldap blocked users' do
      put v3_api("/users/#{ldap_blocked_user.id}/unblock", admin)
      expect(response).to have_gitlab_http_status(403)
      expect(ldap_blocked_user.reload.state).to eq('ldap_blocked')
    end

    it 'does not be available for non admin users' do
      put v3_api("/users/#{user.id}/unblock", user)
      expect(response).to have_gitlab_http_status(403)
      expect(user.reload.state).to eq('active')
    end

    it 'returns a 404 error if user id not found' do
      # FIX: this example previously hit '/users/9999/block' — the *block*
      # endpoint — inside the unblock suite (copy-paste slip). Point it at
      # the endpoint actually under test; the expected 404 is unchanged.
      put v3_api('/users/9999/unblock', admin)
      expect(response).to have_gitlab_http_status(404)
      expect(json_response['message']).to eq('404 User Not Found')
    end

    it "returns a 404 for invalid ID" do
      # FIX: same copy-paste slip — previously "/users/ASDF/block".
      put v3_api("/users/ASDF/unblock", admin)

      expect(response).to have_gitlab_http_status(404)
    end
  end

  describe 'GET /users/:id/events' do
    # Shadows the outer `user` on purpose so the event author is local
    # to this describe block.
    let(:user) { create(:user) }
    let(:project) { create(:project) }
    let(:note) { create(:note_on_issue, note: 'What an awesome day!', project: project) }

    before do
      project.add_user(user, :developer)
      EventCreateService.new.leave_note(note, user)
    end

    context "as a user than cannot see the event's project" do
      it 'returns no events' do
        other_user = create(:user)

        # NOTE(review): uses `api` (not `v3_api`) — presumably inherited
        # from the v4 spec during migration; confirm which version this
        # should exercise before "fixing" it.
        get api("/users/#{user.id}/events", other_user)

        expect(response).to have_gitlab_http_status(200)
        expect(json_response).to be_empty
      end
    end

    context "as a user than can see the event's project" do
      context 'when the list of events includes push events' do
        let(:event) { create(:push_event, author: user, project: project) }
        # `let!` so the payload exists before the request in `before` runs.
        let!(:payload) { create(:push_event_payload, event: event) }
        let(:payload_hash) { json_response[0]['push_data'] }

        before do
          # NOTE(review): `api` vs `v3_api` — see note above.
          get api("/users/#{user.id}/events?action=pushed", user)
        end

        it 'responds with HTTP 200 OK' do
          expect(response).to have_gitlab_http_status(200)
        end

        it 'includes the push payload as a Hash' do
          expect(payload_hash).to be_an_instance_of(Hash)
        end

        it 'includes the push payload details' do
          expect(payload_hash['commit_count']).to eq(payload.commit_count)
          expect(payload_hash['action']).to eq(payload.action)
          expect(payload_hash['ref_type']).to eq(payload.ref_type)
          expect(payload_hash['commit_to']).to eq(payload.commit_to)
        end
      end

      context 'joined event' do
        it 'returns the "joined" event' do
          get v3_api("/users/#{user.id}/events", user)

          expect(response).to have_gitlab_http_status(200)
          expect(response).to include_pagination_headers
          expect(json_response).to be_an Array

          comment_event = json_response.find { |e| e['action_name'] == 'commented on' }

          expect(comment_event['project_id'].to_i).to eq(project.id)
          expect(comment_event['author_username']).to eq(user.username)
          expect(comment_event['note']['id']).to eq(note.id)
          expect(comment_event['note']['body']).to eq('What an awesome day!')

          joined_event = json_response.find { |e| e['action_name'] == 'joined' }

          expect(joined_event['project_id'].to_i).to eq(project.id)
          expect(joined_event['author_username']).to eq(user.username)
          expect(joined_event['author']['name']).to eq(user.name)
        end
      end

      context 'when there are multiple events from different projects' do
        let(:second_note) { create(:note_on_issue, project: create(:project)) }
        let(:third_note) { create(:note_on_issue, project: project) }

        before do
          second_note.project.add_user(user, :developer)

          [second_note, third_note].each do |note|
            EventCreateService.new.leave_note(note, user)
          end
        end

        it 'returns events in the correct order (from newest to oldest)' do
          get v3_api("/users/#{user.id}/events", user)

          comment_events = json_response.select { |e| e['action_name'] == 'commented on' }

          expect(comment_events[0]['target_id']).to eq(third_note.id)
          expect(comment_events[1]['target_id']).to eq(second_note.id)
          expect(comment_events[2]['target_id']).to eq(note.id)
        end
      end
    end

    it 'returns a 404 error if not found' do
      get v3_api('/users/420/events', user)

      expect(response).to have_gitlab_http_status(404)
      expect(json_response['message']).to eq('404 User Not Found')
    end
  end

  describe 'POST /users' do
    it 'creates confirmed user when confirm parameter is false' do
      optional_attributes = { confirm: false }
      attributes = attributes_for(:user).merge(optional_attributes)

      post v3_api('/users', admin), attributes

      user_id = json_response['id']
      new_user = User.find(user_id)

      expect(new_user).to be_confirmed
    end

    it 'does not reveal the `is_admin` flag of the user' do
      post v3_api('/users', admin), attributes_for(:user)

      expect(json_response['is_admin']).to be_nil
    end

    context "scopes" do
      let(:user) { admin }
      let(:path) { '/users' }
      let(:api_call) { method(:v3_api) }

      include_examples 'does not allow the "read_user" scope'
    end
  end
end