Merge branch 'master' into 22643-manual-job-page

* master: (50 commits)
  Prevent some specs from mangling the gitlab-shell checkout
  Line up search dropdown with other nav dropdowns
  Fix onion-skin re-entering state
  Remove related links in MR widget when empty state
  Show inline edit button for issues
  Fix tags in the Activity tab not being clickable
  Fix shortcut links on help page
  Don't link LFS-objects multiple times.
  [CE->EE] Fix spec/lib/gitlab/git/gitlab_projects_spec.rb
  Tidy up the documentation of Gitlab HA/Gitlab Application
  Make sure two except won't overwrite each other
  Update axios.md
  Remove transitionend event from GL dropdown
  Preserve gem path so that we use the same gems
  Load commit in batches for pipelines#index
  BlobViewer::PackageJson - if private link to homepage
  Do not generate links for private NPM modules in blob view
  Fix missing WHERE clause in 20171106135924_issues_milestone_id_foreign_key migration
  Inverse the has_multiple_clusters? helper usage
  Remove block styling from search dropdown
  ...
Commit 800ab47a53 by Filipa Lacerda, 2017-12-20 09:47:43 +00:00
109 changed files with 1791 additions and 915 deletions

View File

@ -1,5 +1,10 @@
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.14-chrome-63.0-node-8.x-yarn-1.2-postgresql-9.6"
.dedicated-runner: &dedicated-runner
retry: 1
tags:
- gitlab-org
.default-cache: &default-cache
key: "ruby-235-with-yarn"
paths:
@ -42,11 +47,6 @@ stages:
- post-cleanup
# Predefined scopes
.dedicated-runner: &dedicated-runner
retry: 1
tags:
- gitlab-org
.tests-metadata-state: &tests-metadata-state
<<: *dedicated-runner
variables:
@ -80,11 +80,15 @@ stages:
except:
- /(^qa[\/-].*|.*-qa$)/
.except-docs-and-qa: &except-docs-and-qa
except:
- /(^docs[\/-].*|.*-docs$)/
- /(^qa[\/-].*|.*-qa$)/
.rspec-metadata: &rspec-metadata
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *except-docs
<<: *except-qa
stage: test
script:
- JOB_NAME=( $CI_JOB_NAME )
@ -121,9 +125,8 @@ stages:
.spinach-metadata: &spinach-metadata
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *except-docs
<<: *except-qa
stage: test
script:
- JOB_NAME=( $CI_JOB_NAME )
@ -162,6 +165,7 @@ stages:
# Trigger a package build in omnibus-gitlab repository
#
package-qa:
<<: *dedicated-runner
image: ruby:2.4-alpine
before_script: []
stage: build
@ -175,6 +179,7 @@ package-qa:
# Review docs base
.review-docs: &review-docs
<<: *dedicated-runner
<<: *except-qa
image: ruby:2.4-alpine
before_script:
@ -220,8 +225,7 @@ review-docs-cleanup:
# Retrieve knapsack and rspec_flaky reports
retrieve-tests-metadata:
<<: *tests-metadata-state
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
stage: prepare
cache:
key: tests_metadata
@ -284,9 +288,9 @@ flaky-examples-check:
- scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT
setup-test-env:
<<: *use-pg
<<: *dedicated-runner
<<: *except-docs
<<: *use-pg
stage: prepare
cache:
<<: *default-cache
@ -375,19 +379,18 @@ spinach-mysql 3 4: *spinach-metadata-mysql
SETUP_DB: "false"
.rake-exec: &rake-exec
<<: *ruby-static-analysis
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
<<: *ruby-static-analysis
stage: test
script:
- bundle exec rake $CI_JOB_NAME
static-analysis:
<<: *ruby-static-analysis
<<: *dedicated-runner
<<: *except-docs
<<: *ruby-static-analysis
stage: test
script:
- scripts/static-analysis
@ -441,8 +444,7 @@ ee_compat_check:
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
script:
@ -456,11 +458,16 @@ db:migrate:reset-mysql:
<<: *db-migrate-reset
<<: *use-mysql
db:check-schema-pg:
<<: *db-migrate-reset
<<: *use-pg
script:
- source scripts/schema_changed.sh
.migration-paths: &migration-paths
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *except-docs
<<: *except-qa
stage: test
variables:
SETUP_DB: "false"
@ -486,8 +493,7 @@ migration:path-mysql:
.db-rollback: &db-rollback
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
script:
@ -504,8 +510,7 @@ db:rollback-mysql:
.db-seed_fu: &db-seed_fu
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
variables:
@ -530,17 +535,10 @@ db:seed_fu-mysql:
<<: *db-seed_fu
<<: *use-mysql
db:check-schema-pg:
<<: *db-migrate-reset
<<: *use-pg
script:
- source scripts/schema_changed.sh
# Frontend-related jobs
gitlab:assets:compile:
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: test
dependencies: []
@ -561,11 +559,10 @@ gitlab:assets:compile:
- webpack-report/
karma:
<<: *use-pg
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
<<: *use-pg
stage: test
variables:
BABEL_ENV: "coverage"
@ -604,6 +601,7 @@ codequality:
paths: [codeclimate.json]
qa:internal:
<<: *dedicated-runner
<<: *except-docs
stage: test
variables:
@ -616,8 +614,7 @@ qa:internal:
coverage:
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: post-test
services: []
@ -636,8 +633,7 @@ coverage:
lint:javascript:report:
<<: *dedicated-runner
<<: *except-docs
<<: *except-qa
<<: *except-docs-and-qa
<<: *pull-cache
stage: post-test
dependencies:
@ -695,9 +691,9 @@ cache gems:
- master@gitlab-org/gitlab-ee
gitlab_git_test:
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *pull-cache
<<: *except-docs
<<: *except-qa
variables:
SETUP_DB: "false"
script:

View File

@ -263,7 +263,7 @@ gem 'gettext_i18n_rails', '~> 1.8.0'
gem 'gettext_i18n_rails_js', '~> 1.2.0'
gem 'gettext', '~> 3.2.2', require: false, group: :development
gem 'batch-loader'
gem 'batch-loader', '~> 1.2.1'
# Perf bar
gem 'peek', '~> 1.0.1'

View File

@ -78,7 +78,7 @@ GEM
thread_safe (~> 0.3, >= 0.3.1)
babosa (1.0.2)
base32 (0.3.2)
batch-loader (1.1.1)
batch-loader (1.2.1)
bcrypt (3.1.11)
bcrypt_pbkdf (1.0.0)
benchmark-ips (2.3.0)
@ -988,7 +988,7 @@ DEPENDENCIES
awesome_print (~> 1.2.0)
babosa (~> 1.0.2)
base32 (~> 0.3.0)
batch-loader
batch-loader (~> 1.2.1)
bcrypt_pbkdf (~> 1.0)
benchmark-ips (~> 2.3.0)
better_errors (~> 2.1.0)

View File

@ -176,6 +176,7 @@ export default class ImageFile {
left: dragTrackWidth
});
$frameAdded.css('opacity', 1);
framePadding = parseInt($frameAdded.css('right').replace('px', ''), 10);
_this.initDraggable($dragger, framePadding, function(e, left) {

View File

@ -1,8 +1,8 @@
/* global CommentsStore */
/* global notes */
import Vue from 'vue';
import collapseIcon from '../icons/collapse_icon.svg';
import Notes from '../../notes';
import userAvatarImage from '../../vue_shared/components/user_avatar/user_avatar_image.vue';
const DiffNoteAvatars = Vue.extend({
@ -129,7 +129,7 @@ const DiffNoteAvatars = Vue.extend({
},
methods: {
clickedAvatar(e) {
notes.onAddDiffNote(e);
Notes.instance.onAddDiffNote(e);
// Toggle the active state of the toggle all button
this.toggleDiscussionsToggleState();

View File

@ -0,0 +1,13 @@
import Mousetrap from 'mousetrap';
function addMousetrapClick(el, key) {
el.addEventListener('click', () => Mousetrap.trigger(key));
}
function domContentLoaded() {
addMousetrapClick(document.querySelector('.js-trigger-shortcut'), '?');
addMousetrapClick(document.querySelector('.js-trigger-search-bar'), 's');
}
document.addEventListener('DOMContentLoaded', domContentLoaded);

View File

@ -300,7 +300,7 @@ GitLabDropdown = (function() {
return function(data) {
_this.fullData = data;
_this.parseData(_this.fullData);
_this.focusTextInput(true);
_this.focusTextInput();
if (_this.options.filterable && _this.filter && _this.filter.input && _this.filter.input.val() && _this.filter.input.val().trim() !== '') {
return _this.filter.input.trigger('input');
}
@ -790,24 +790,16 @@ GitLabDropdown = (function() {
return [selectedObject, isMarking];
};
GitLabDropdown.prototype.focusTextInput = function(triggerFocus = false) {
GitLabDropdown.prototype.focusTextInput = function() {
if (this.options.filterable) {
this.dropdown.one('transitionend', () => {
const initialScrollTop = $(window).scrollTop();
const initialScrollTop = $(window).scrollTop();
if (this.dropdown.is('.open')) {
this.filterInput.focus();
}
if (this.dropdown.is('.open')) {
this.filterInput.focus();
}
if ($(window).scrollTop() < initialScrollTop) {
$(window).scrollTop(initialScrollTop);
}
});
if (triggerFocus) {
// This triggers after a ajax request
// in case of slow requests, the dropdown transition could already be finished
this.dropdown.trigger('transitionend');
if ($(window).scrollTop() < initialScrollTop) {
$(window).scrollTop(initialScrollTop);
}
}
};

View File

@ -1,4 +1,4 @@
/* global Notes */
import Notes from './notes';
export default () => {
const dataEl = document.querySelector('.js-notes-data');
@ -10,5 +10,7 @@ export default () => {
autocomplete,
} = JSON.parse(dataEl.innerHTML);
window.notes = new Notes(notesUrl, notesIds, now, diffView, autocomplete);
// Create a singleton so that we don't need to assign
// into the window object, we can just access the current instance with Notes.instance
Notes.initialize(notesUrl, notesIds, now, diffView, autocomplete);
};

View File

@ -32,7 +32,7 @@ export default {
showInlineEditButton: {
type: Boolean,
required: false,
default: false,
default: true,
},
showDeleteButton: {
type: Boolean,

View File

@ -79,7 +79,7 @@
v-tooltip
v-if="showInlineEditButton && canUpdate"
type="button"
class="btn btn-default btn-edit btn-svg"
class="btn btn-default btn-edit btn-svg js-issuable-edit"
v-html="pencilIcon"
title="Edit title and description"
data-placement="bottom"

View File

@ -1,5 +1,4 @@
import Vue from 'vue';
import eventHub from './event_hub';
import issuableApp from './components/app.vue';
import '../vue_shared/vue_resource_interceptor';
@ -7,12 +6,6 @@ document.addEventListener('DOMContentLoaded', () => {
const initialDataEl = document.getElementById('js-issuable-app-initial-data');
const props = JSON.parse(initialDataEl.innerHTML.replace(/&quot;/g, '"'));
$('.js-issuable-edit').on('click', (e) => {
e.preventDefault();
eventHub.$emit('open.form');
});
return new Vue({
el: document.getElementById('js-issuable-app'),
components: {

View File

@ -45,9 +45,7 @@ import './layout_nav';
import LazyLoader from './lazy_loader';
import './line_highlighter';
import initLogoAnimation from './logo';
import './merge_request_tabs';
import './milestone_select';
import './notes';
import './preview_markdown';
import './projects_dropdown';
import './render_gfm';

View File

@ -1,5 +1,4 @@
/* eslint-disable no-new, class-methods-use-this */
/* global notes */
import Cookies from 'js-cookie';
import Flash from './flash';
@ -16,6 +15,7 @@ import initDiscussionTab from './image_diff/init_discussion_tab';
import Diff from './diff';
import { localTimeAgo } from './lib/utils/datetime_utility';
import syntaxHighlight from './syntax_highlight';
import Notes from './notes';
/* eslint-disable max-len */
// MergeRequestTabs
@ -324,7 +324,7 @@ export default class MergeRequestTabs {
if (anchor && anchor.length > 0) {
const notesContent = anchor.closest('.notes_content');
const lineType = notesContent.hasClass('new') ? 'new' : 'old';
notes.toggleDiffNote({
Notes.instance.toggleDiffNote({
target: anchor,
lineType,
forceShow: true,

View File

@ -37,6 +37,12 @@ const MAX_VISIBLE_COMMIT_LIST_COUNT = 3;
const REGEX_QUICK_ACTIONS = /^\/\w+.*$/gm;
export default class Notes {
static initialize(notes_url, note_ids, last_fetched_at, view, enableGFM = true) {
if (!this.instance) {
this.instance = new Notes(notes_url, note_ids, last_fetched_at, view, enableGFM);
}
}
constructor(notes_url, note_ids, last_fetched_at, view, enableGFM = true) {
this.updateTargetButtons = this.updateTargetButtons.bind(this);
this.updateComment = this.updateComment.bind(this);

View File

@ -51,7 +51,10 @@ export default class Shortcuts {
}
onToggleHelp(e) {
e.preventDefault();
if (e.preventDefault) {
e.preventDefault();
}
Shortcuts.toggleHelp(this.enabledHelp);
}
@ -112,6 +115,9 @@ export default class Shortcuts {
static focusSearch(e) {
$('#search').focus();
e.preventDefault();
if (e.preventDefault) {
e.preventDefault();
}
}
}

View File

@ -62,7 +62,7 @@ export default {
return this.mr.hasCI;
},
shouldRenderRelatedLinks() {
return !!this.mr.relatedLinks;
return !!this.mr.relatedLinks && !this.mr.isNothingToMergeState;
},
shouldRenderDeployments() {
return this.mr.deployments.length;

View File

@ -1,30 +1,32 @@
import { stateKey } from './state_maps';
export default function deviseState(data) {
if (data.project_archived) {
return 'archived';
return stateKey.archived;
} else if (data.branch_missing) {
return 'missingBranch';
return stateKey.missingBranch;
} else if (!data.commits_count) {
return 'nothingToMerge';
return stateKey.nothingToMerge;
} else if (this.mergeStatus === 'unchecked') {
return 'checking';
return stateKey.checking;
} else if (data.has_conflicts) {
return 'conflicts';
return stateKey.conflicts;
} else if (data.work_in_progress) {
return 'workInProgress';
return stateKey.workInProgress;
} else if (this.onlyAllowMergeIfPipelineSucceeds && this.isPipelineFailed) {
return 'pipelineFailed';
return stateKey.pipelineFailed;
} else if (this.hasMergeableDiscussionsState) {
return 'unresolvedDiscussions';
return stateKey.unresolvedDiscussions;
} else if (this.isPipelineBlocked) {
return 'pipelineBlocked';
return stateKey.pipelineBlocked;
} else if (this.hasSHAChanged) {
return 'shaMismatch';
return stateKey.shaMismatch;
} else if (this.mergeWhenPipelineSucceeds) {
return this.mergeError ? 'autoMergeFailed' : 'mergeWhenPipelineSucceeds';
return this.mergeError ? stateKey.autoMergeFailed : stateKey.mergeWhenPipelineSucceeds;
} else if (!this.canMerge) {
return 'notAllowedToMerge';
return stateKey.notAllowedToMerge;
} else if (this.canBeMerged) {
return 'readyToMerge';
return stateKey.readyToMerge;
}
return null;
}

View File

@ -1,5 +1,6 @@
import Timeago from 'timeago.js';
import { getStateKey } from '../dependencies';
import { stateKey } from './state_maps';
import { formatDate } from '../../lib/utils/datetime_utility';
export default class MergeRequestStore {
@ -120,6 +121,10 @@ export default class MergeRequestStore {
}
}
get isNothingToMergeState() {
return this.state === stateKey.nothingToMerge;
}
static getEventObject(event) {
return {
author: MergeRequestStore.getAuthorObject(event),

View File

@ -31,6 +31,23 @@ const statesToShowHelpWidget = [
'autoMergeFailed',
];
export const stateKey = {
archived: 'archived',
missingBranch: 'missingBranch',
nothingToMerge: 'nothingToMerge',
checking: 'checking',
conflicts: 'conflicts',
workInProgress: 'workInProgress',
pipelineFailed: 'pipelineFailed',
unresolvedDiscussions: 'unresolvedDiscussions',
pipelineBlocked: 'pipelineBlocked',
shaMismatch: 'shaMismatch',
autoMergeFailed: 'autoMergeFailed',
mergeWhenPipelineSucceeds: 'mergeWhenPipelineSucceeds',
notAllowedToMerge: 'notAllowedToMerge',
readyToMerge: 'readyToMerge',
};
export default {
stateToComponentMap,
statesToShowHelpWidget,

View File

@ -9,12 +9,6 @@
padding-left: $contextual-sidebar-width;
}
// Override position: absolute
.right-sidebar {
position: fixed;
height: calc(100% - #{$header-height});
}
.issues-bulk-update.right-sidebar.right-sidebar-expanded .issuable-sidebar-header {
padding: 10px 0 15px;
}

View File

@ -16,27 +16,18 @@
@mixin set-visible {
transform: translateY(0);
visibility: visible;
opacity: 1;
transition-duration: 100ms, 150ms, 25ms;
transition-delay: 35ms, 50ms, 25ms;
display: block;
}
@mixin set-invisible {
transform: translateY(-10px);
visibility: hidden;
opacity: 0;
transition-property: opacity, transform, visibility;
transition-duration: 70ms, 250ms, 250ms;
transition-timing-function: linear, $dropdown-animation-timing;
transition-delay: 25ms, 50ms, 0ms;
display: none;
}
.open {
.dropdown-menu,
.dropdown-menu-nav {
@include set-visible;
display: block;
min-height: 40px;
@media (max-width: $screen-xs-max) {
@ -55,6 +46,11 @@
}
}
// Get search dropdown to line up with other nav dropdowns
.search-input-container .dropdown-menu {
margin-top: 11px;
}
.dropdown-toggle {
padding: 6px 8px 6px 10px;
background-color: $white-light;
@ -214,7 +210,6 @@
.dropdown-menu,
.dropdown-menu-nav {
@include set-invisible;
display: block;
position: absolute;
width: auto;
top: 100%;

View File

@ -90,11 +90,6 @@
.right-sidebar {
border-left: 1px solid $border-color;
height: calc(100% - #{$header-height});
&.affix {
position: fixed;
top: $header-height;
}
}
.with-performance-bar .right-sidebar.affix {

View File

@ -122,7 +122,7 @@
}
.right-sidebar {
position: absolute;
position: fixed;
top: $header-height;
bottom: 0;
right: 0;
@ -502,7 +502,7 @@
top: $header-height + $performance-bar-height;
.issuable-sidebar {
height: calc(100% - #{$header-height} - #{$performance-bar-height});
height: calc(100% - #{$performance-bar-height});
}
}

View File

@ -108,13 +108,6 @@ input[type="checkbox"]:hover {
// Custom dropdown positioning
.dropdown-menu {
transition-property: opacity, transform;
transition-duration: 250ms, 250ms;
transition-delay: 0ms, 25ms;
transition-timing-function: $dropdown-animation-timing;
transform: translateY(0);
opacity: 0;
display: block;
left: -5px;
}
@ -152,13 +145,6 @@ input[type="checkbox"]:hover {
background-color: $nav-badge-bg;
border-color: $border-color;
}
.dropdown-menu {
transition-duration: 100ms, 75ms;
transition-delay: 75ms, 100ms;
transform: translateY(7px);
opacity: 1;
}
}
&.has-value {

View File

@ -55,7 +55,6 @@ module IssuableActions
def destroy
Issuable::DestroyService.new(issuable.project, current_user).execute(issuable)
TodoService.new.destroy_issuable(issuable, current_user)
name = issuable.human_class_name
flash[:notice] = "The #{name} was successfully deleted."

View File

@ -39,6 +39,7 @@ class Projects::Clusters::GcpController < Projects::ApplicationController
params.require(:cluster).permit(
:enabled,
:name,
:environment_scope,
provider_gcp_attributes: [
:gcp_project_id,
:zone,

View File

@ -26,6 +26,7 @@ class Projects::Clusters::UserController < Projects::ApplicationController
params.require(:cluster).permit(
:enabled,
:name,
:environment_scope,
platform_kubernetes_attributes: [
:namespace,
:api_url,

View File

@ -87,6 +87,7 @@ class Projects::ClustersController < Projects::ApplicationController
if cluster.managed?
params.require(:cluster).permit(
:enabled,
:environment_scope,
platform_kubernetes_attributes: [
:namespace
]
@ -95,6 +96,7 @@ class Projects::ClustersController < Projects::ApplicationController
params.require(:cluster).permit(
:enabled,
:name,
:environment_scope,
platform_kubernetes_attributes: [
:api_url,
:token,

View File

@ -1,9 +1,11 @@
class Projects::PipelineSchedulesController < Projects::ApplicationController
before_action :schedule, except: [:index, :new, :create]
before_action :play_rate_limit, only: [:play]
before_action :authorize_play_pipeline_schedule!, only: [:play]
before_action :authorize_read_pipeline_schedule!
before_action :authorize_create_pipeline_schedule!, only: [:new, :create]
before_action :authorize_update_pipeline_schedule!, except: [:index, :new, :create]
before_action :authorize_update_pipeline_schedule!, except: [:index, :new, :create, :play]
before_action :authorize_admin_pipeline_schedule!, only: [:destroy]
def index
@ -40,6 +42,18 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
end
end
def play
job_id = RunPipelineScheduleWorker.perform_async(schedule.id, current_user.id)
if job_id
flash[:notice] = "Successfully scheduled a pipeline to run. Go to the <a href=\"#{project_pipelines_path(@project)}\">Pipelines page</a> for details.".html_safe
else
flash[:alert] = 'Unable to schedule a pipeline to run immediately'
end
redirect_to pipeline_schedules_path(@project)
end
def take_ownership
if schedule.update(owner: current_user)
redirect_to pipeline_schedules_path(@project)
@ -60,6 +74,17 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
private
def play_rate_limit
return unless current_user
limiter = ::Gitlab::ActionRateLimiter.new(action: :play_pipeline_schedule)
return unless limiter.throttled?([current_user, schedule], 1)
flash[:alert] = 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
redirect_to pipeline_schedules_path(@project)
end
def schedule
@schedule ||= project.pipeline_schedules.find(params[:id])
end
@ -70,6 +95,10 @@ class Projects::PipelineSchedulesController < Projects::ApplicationController
variables_attributes: [:id, :key, :value, :_destroy] )
end
def authorize_play_pipeline_schedule!
return access_denied! unless can?(current_user, :play_pipeline_schedule, schedule)
end
def authorize_update_pipeline_schedule!
return access_denied! unless can?(current_user, :update_pipeline_schedule, schedule)
end

View File

@ -29,6 +29,8 @@ class Projects::PipelinesController < Projects::ApplicationController
@pipelines_count = PipelinesFinder
.new(project).execute.count
@pipelines.map(&:commit) # List commits for batch loading
respond_to do |format|
format.html
format.json do

View File

@ -0,0 +1,5 @@
module ClustersHelper
def has_multiple_clusters?(project)
false
end
end

View File

@ -182,6 +182,11 @@ module GitlabRoutingHelper
edit_project_pipeline_schedule_path(project, schedule)
end
def play_pipeline_schedule_path(schedule, *args)
project = schedule.project
play_project_pipeline_schedule_path(project, schedule, *args)
end
def take_ownership_pipeline_schedule_path(schedule, *args)
project = schedule.project
take_ownership_project_pipeline_schedule_path(project, schedule, *args)

View File

@ -27,10 +27,17 @@ module BlobViewer
private
def package_name_from_json(key)
prepare!
def json_data
@json_data ||= begin
prepare!
JSON.parse(blob.data)
rescue
{}
end
end
JSON.parse(blob.data)[key] rescue nil
def package_name_from_json(key)
json_data[key]
end
def package_name_from_method_call(name)

View File

@ -16,7 +16,25 @@ module BlobViewer
@package_name ||= package_name_from_json('name')
end
def package_type
private? ? 'private package' : super
end
def package_url
private? ? homepage : npm_url
end
private
def private?
!!json_data['private']
end
def homepage
json_data['homepage']
end
def npm_url
"https://www.npmjs.com/package/#{package_name}"
end
end
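For illustration, a minimal stand-alone sketch of the `json_data` / `private?` logic above, showing why a private module gets its `homepage` instead of an npmjs.com link (the sample JSON is hypothetical):

```ruby
require 'json'

# Hypothetical package.json blob for a private module.
json_data = JSON.parse('{"name": "internal-lib", "private": true, "homepage": "https://example.com/internal-lib"}')

package_name = json_data['name']
package_url  = json_data['private'] ? json_data['homepage'] : "https://www.npmjs.com/package/#{package_name}"

puts package_url  # => https://example.com/internal-lib rather than an npmjs.com link
```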

View File

@ -287,8 +287,12 @@ module Ci
Ci::Pipeline.truncate_sha(sha)
end
# NOTE: This is loaded lazily and will never be nil, even if the commit
# cannot be found.
#
# Use constructs like: `pipeline.commit.present?`
def commit
@commit ||= project.commit_by(oid: sha)
@commit ||= Commit.lazy(project, sha)
end
def branch?
@ -338,12 +342,9 @@ module Ci
end
def latest?
return false unless ref
return false unless ref && commit.present?
commit = project.commit(ref)
return false unless commit
commit.sha == sha
project.commit(ref) == commit
end
def retried

View File

@ -86,6 +86,20 @@ class Commit
def valid_hash?(key)
!!(/\A#{COMMIT_SHA_PATTERN}\z/ =~ key)
end
def lazy(project, oid)
BatchLoader.for({ project: project, oid: oid }).batch do |items, loader|
items_by_project = items.group_by { |i| i[:project] }
items_by_project.each do |project, commit_ids|
oids = commit_ids.map { |i| i[:oid] }
project.repository.commits_by(oids: oids).each do |commit|
loader.call({ project: commit.project, oid: commit.id }, commit) if commit
end
end
end
end
end
attr_accessor :raw
@ -103,7 +117,7 @@ class Commit
end
def ==(other)
(self.class === other) && (raw == other.raw)
other.is_a?(self.class) && raw == other.raw
end
def self.reference_prefix
@ -224,8 +238,8 @@ class Commit
notes.includes(:author)
end
def method_missing(m, *args, &block)
@raw.__send__(m, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
def method_missing(method, *args, &block)
@raw.__send__(method, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
end
def respond_to_missing?(method, include_private = false)
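Taken together with the `@pipelines.map(&:commit)` line added to `Projects::PipelinesController#index` earlier in this diff, a hedged sketch of how the batched loading above plays out (assumes a console session with a `project` in scope):

```ruby
pipelines = PipelinesFinder.new(project).execute  # as in Projects::PipelinesController#index
pipelines.map(&:commit)  # each #commit is now a BatchLoader proxy via Commit.lazy; no Git calls yet

# The first real use of any commit resolves the whole batch: BatchLoader groups the collected
# { project:, oid: } keys by project and issues one Repository#commits_by(oids: [...]) call
# per project, which uses Gitlab::Git::Commit.batch_by_oid (Gitaly ListCommitsByOid) underneath.
pipelines.each { |pipeline| puts pipeline.commit.title if pipeline.commit.present? }
```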

View File

@ -22,12 +22,9 @@ class DiffDiscussion < Discussion
def merge_request_version_params
return unless for_merge_request?
return {} if active?
if on_merge_request_commit?
{ commit_id: commit_id }
else
noteable.version_params_for(position.diff_refs)
version_params.tap do |params|
params[:commit_id] = commit_id if on_merge_request_commit?
end
end
@ -37,4 +34,12 @@ class DiffDiscussion < Discussion
position: position.to_json
)
end
private
def version_params
return {} if active?
noteable.version_params_for(position.diff_refs)
end
end

View File

@ -118,6 +118,18 @@ class Repository
@commit_cache[oid] = find_commit(oid)
end
def commits_by(oids:)
return [] unless oids.present?
commits = Gitlab::Git::Commit.batch_by_oid(raw_repository, oids)
if commits.present?
Commit.decorate(commits, @project)
else
[]
end
end
def commits(ref, path: nil, limit: nil, offset: nil, skip_merges: false, after: nil, before: nil)
options = {
repo: raw_repository,
@ -221,6 +233,12 @@ class Repository
branch_names.include?(branch_name)
end
def tag_exists?(tag_name)
return false unless raw_repository
tag_names.include?(tag_name)
end
def ref_exists?(ref)
!!raw_repository&.ref_exists?(ref)
rescue ArgumentError

View File

@ -2,16 +2,18 @@ module Ci
class PipelinePolicy < BasePolicy
delegate { @subject.project }
condition(:protected_ref) do
access = ::Gitlab::UserAccess.new(@user, project: @subject.project)
if @subject.tag?
!access.can_create_tag?(@subject.ref)
else
!access.can_update_branch?(@subject.ref)
end
end
condition(:protected_ref) { ref_protected?(@user, @subject.project, @subject.tag?, @subject.ref) }
rule { protected_ref }.prevent :update_pipeline
def ref_protected?(user, project, tag, ref)
access = ::Gitlab::UserAccess.new(user, project: project)
if tag
!access.can_create_tag?(ref)
else
!access.can_update_branch?(ref)
end
end
end
end

View File

@ -2,13 +2,23 @@ module Ci
class PipelineSchedulePolicy < PipelinePolicy
alias_method :pipeline_schedule, :subject
condition(:protected_ref) do
ref_protected?(@user, @subject.project, @subject.project.repository.tag_exists?(@subject.ref), @subject.ref)
end
condition(:owner_of_schedule) do
can?(:developer_access) && pipeline_schedule.owned_by?(@user)
end
rule { can?(:developer_access) }.policy do
enable :play_pipeline_schedule
end
rule { can?(:master_access) | owner_of_schedule }.policy do
enable :update_pipeline_schedule
enable :admin_pipeline_schedule
end
rule { protected_ref }.prevent :play_pipeline_schedule
end
end
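In effect (a hedged reading of the rules above): any developer can play a schedule, masters or the schedule's owner can also update and administer it, and a ref that is protected against the user blocks playing. The schedules table partial later in this diff gates the Play button with the standard ability check:

```ruby
# As used in the pipeline schedules table partial:
can?(current_user, :play_pipeline_schedule, pipeline_schedule)
# => false when the protected_ref condition applies, since
#    `rule { protected_ref }.prevent :play_pipeline_schedule` wins over the enable rule.
```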

View File

@ -1,8 +1,10 @@
module Issuable
class DestroyService < IssuableBaseService
def execute(issuable)
if issuable.destroy
issuable.update_project_counter_caches
TodoService.new.destroy_target(issuable) do |issuable|
if issuable.destroy
issuable.update_project_counter_caches
end
end
end
end

View File

@ -1,7 +1,9 @@
module Notes
class DestroyService < BaseService
def execute(note)
note.destroy
TodoService.new.destroy_target(note) do |note|
note.destroy
end
end
end
end

View File

@ -5,7 +5,7 @@ module Projects
if fork_source = @project.fork_source
fork_source.lfs_objects.find_each do |lfs_object|
lfs_object.projects << @project
lfs_object.projects << @project unless lfs_object.projects.include?(@project)
end
refresh_forks_count(fork_source)

View File

@ -31,12 +31,20 @@ class TodoService
mark_pending_todos_as_done(issue, current_user)
end
# When we destroy an issuable we should:
# When we destroy a todo target we should:
#
# * refresh the todos count cache for the current user
# * refresh the todos count cache for all users with todos on the target
#
def destroy_issuable(issuable, user)
user.update_todos_count_cache
# This needs to yield back to the caller to destroy the target, because it
# collects the todo users before the todos themselves are deleted, then
# updates the todo counts for those users.
#
def destroy_target(target)
todo_users = User.where(id: target.todos.pending.select(:user_id)).to_a
yield target
todo_users.each(&:update_todos_count_cache)
end
# When we reassign an issue we should:

View File

@ -7,7 +7,8 @@
%span.pushed #{event.action_name} #{event.ref_type}
%strong
- commits_link = project_commits_path(project, event.ref_name)
= link_to_if project.repository.branch_exists?(event.ref_name), event.ref_name, commits_link, class: 'ref-name'
- should_link = event.tag? ? project.repository.tag_exists?(event.ref_name) : project.repository.branch_exists?(event.ref_name)
= link_to_if should_link, event.ref_name, commits_link, class: 'ref-name'
= render "events/event_scope", event: event

View File

@ -1,3 +1,5 @@
= webpack_bundle_tag 'docs'
%div
- if current_application_settings.help_page_text.present?
= markdown_field(current_application_settings, :help_page_text)
@ -37,8 +39,12 @@
Quick help
%ul.well-list
%li= link_to 'See our website for getting help', support_url
%li= link_to 'Use the search bar on the top of this page', '#', onclick: 'Shortcuts.focusSearch(event)'
%li= link_to 'Use shortcuts', '#', onclick: 'Shortcuts.toggleHelp()'
%li
%button.btn-blank.btn-link.js-trigger-search-bar{ type: 'button' }
Use the search bar on the top of this page
%li
%button.btn-blank.btn-link.js-trigger-shortcut{ type: 'button' }
Use shortcuts
- unless current_application_settings.help_page_hide_commercial_content?
%li= link_to 'Get a support subscription', 'https://about.gitlab.com/pricing/'
%li= link_to 'Compare GitLab editions', 'https://about.gitlab.com/features/#compare'

View File

@ -6,6 +6,6 @@
- if viewer.package_name
and defines a #{viewer.package_type} named
%strong<
= link_to viewer.package_name, viewer.package_url, target: '_blank', rel: 'noopener noreferrer'
= link_to_if viewer.package_url.present?, viewer.package_name, viewer.package_url, target: '_blank', rel: 'noopener noreferrer'
= link_to 'Learn more', viewer.manager_url, target: '_blank', rel: 'noopener noreferrer'

View File

@ -7,6 +7,9 @@
.form-group
= field.label :name, s_('ClusterIntegration|Cluster name')
= field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
.form-group
= field.label :environment_scope, s_('ClusterIntegration|Environment scope')
= field.text_field :environment_scope, class: 'form-control', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
= field.fields_for :provider_gcp, @cluster.provider_gcp do |provider_gcp_field|
.form-group

View File

@ -8,6 +8,11 @@
= form_for @cluster, url: namespace_project_cluster_path(@project.namespace, @project, @cluster), as: :cluster do |field|
= form_errors(@cluster)
.form-group
= field.label :environment_scope, s_('ClusterIntegration|Environment scope')
= field.text_field :environment_scope, class: 'form-control js-select-on-focus', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
= field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
.form-group
= platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')

View File

@ -3,6 +3,9 @@
.form-group
= field.label :name, s_('ClusterIntegration|Cluster name')
= field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
.form-group
= field.label :environment_scope, s_('ClusterIntegration|Environment scope')
= field.text_field :environment_scope, class: 'form-control', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
= field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
.form-group

View File

@ -4,6 +4,10 @@
= field.label :name, s_('ClusterIntegration|Cluster name')
= field.text_field :name, class: 'form-control', placeholder: s_('ClusterIntegration|Cluster name')
.form-group
= field.label :environment_scope, s_('ClusterIntegration|Environment scope')
= field.text_field :environment_scope, class: 'form-control js-select-on-focus', readonly: !has_multiple_clusters?(@project), placeholder: s_('ClusterIntegration|Environment scope')
= field.fields_for :platform_kubernetes, @cluster.platform_kubernetes do |platform_kubernetes_field|
.form-group
= platform_kubernetes_field.label :api_url, s_('ClusterIntegration|API URL')

View File

@ -39,8 +39,6 @@
= icon('caret-down')
.dropdown-menu.dropdown-menu-align-right.hidden-lg
%ul
- if can_update_issue
%li= link_to 'Edit', edit_project_issue_path(@project, @issue), class: 'js-issuable-edit'
- unless current_user == @issue.author
%li= link_to 'Report abuse', new_abuse_report_path(user_id: @issue.author.id, ref_url: issue_url(@issue))
- if can_update_issue
@ -52,9 +50,6 @@
%li.divider
%li= link_to 'New issue', new_project_issue_path(@project), title: 'New issue', id: 'new_issue_link'
- if can_update_issue
= link_to 'Edit', edit_project_issue_path(@project, @issue), class: 'hidden-xs hidden-sm btn btn-grouped js-issuable-edit'
= render 'shared/issuable/close_reopen_button', issuable: @issue, can_update: can_update_issue
- if can_report_spam

View File

@ -26,10 +26,12 @@
= pipeline_schedule.owner&.name
%td
.pull-right.btn-group
- if can?(current_user, :play_pipeline_schedule, pipeline_schedule)
= link_to play_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('Play'), class: 'btn' do
= icon('play')
- if can?(current_user, :update_pipeline_schedule, pipeline_schedule)
= link_to take_ownership_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('PipelineSchedules|Take ownership'), class: 'btn' do
= s_('PipelineSchedules|Take ownership')
- if can?(current_user, :update_pipeline_schedule, pipeline_schedule)
= link_to edit_pipeline_schedule_path(pipeline_schedule), title: _('Edit'), class: 'btn' do
= icon('pencil')
- if can?(current_user, :admin_pipeline_schedule, pipeline_schedule)

View File

@ -1,6 +1,6 @@
#js-pipeline-header-vue.pipeline-header-container
- if @commit
- if @commit.present?
.commit-box
%h3.commit-title
= markdown(@commit.title, pipeline: :single_line)
@ -8,28 +8,28 @@
%pre.commit-description
= preserve(markdown(@commit.description, pipeline: :single_line))
.info-well
- if @commit.status
.well-segment.pipeline-info
.icon-container
= icon('clock-o')
= pluralize @pipeline.total_size, "job"
- if @pipeline.ref
from
= link_to @pipeline.ref, project_ref_path(@project, @pipeline.ref), class: "ref-name"
- if @pipeline.duration
in
= time_interval_in_words(@pipeline.duration)
- if @pipeline.queued_duration
= "(queued for #{time_interval_in_words(@pipeline.queued_duration)})"
.info-well
- if @commit.status
.well-segment.pipeline-info
.icon-container
= icon('clock-o')
= pluralize @pipeline.total_size, "job"
- if @pipeline.ref
from
= link_to @pipeline.ref, project_ref_path(@project, @pipeline.ref), class: "ref-name"
- if @pipeline.duration
in
= time_interval_in_words(@pipeline.duration)
- if @pipeline.queued_duration
= "(queued for #{time_interval_in_words(@pipeline.queued_duration)})"
.well-segment.branch-info
.icon-container.commit-icon
= custom_icon("icon_commit")
= link_to @commit.short_id, project_commit_path(@project, @pipeline.sha), class: "commit-sha js-details-short"
= link_to("#", class: "js-details-expand hidden-xs hidden-sm") do
%span.text-expander
\...
%span.js-details-content.hide
= link_to @pipeline.sha, project_commit_path(@project, @pipeline.sha), class: "commit-sha commit-hash-full"
= clipboard_button(text: @pipeline.sha, title: "Copy commit SHA to clipboard")
.well-segment.branch-info
.icon-container.commit-icon
= custom_icon("icon_commit")
= link_to @commit.short_id, project_commit_path(@project, @pipeline.sha), class: "commit-sha js-details-short"
= link_to("#", class: "js-details-expand hidden-xs hidden-sm") do
%span.text-expander
\...
%span.js-details-content.hide
= link_to @pipeline.sha, project_commit_path(@project, @pipeline.sha), class: "commit-sha commit-hash-full"
= clipboard_button(text: @pipeline.sha, title: "Copy commit SHA to clipboard")

View File

@ -8,16 +8,17 @@
= image_tag 'illustrations/issues.svg'
.col-xs-12
.text-content
- if has_button && current_user
- if current_user
%h4
= _("The Issue Tracker is the place to add things that need to be improved or solved in a project")
%p
= _("Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable.")
.text-center
- if project_select_button
= render 'shared/new_project_item_select', path: 'issues/new', label: 'New issue', type: :issues
- else
= link_to 'New issue', button_path, class: 'btn btn-success', title: 'New issue', id: 'new_issue_link'
- if has_button
.text-center
- if project_select_button
= render 'shared/new_project_item_select', path: 'issues/new', label: 'New issue', type: :issues
- else
= link_to 'New issue', button_path, class: 'btn btn-success', title: 'New issue', id: 'new_issue_link'
- else
%h4.text-center= _("There are no issues to show")
%p

View File

@ -1,14 +1,10 @@
- max_render = 3
- max = [max_render, issue.assignees.length].min
- max_render = 4
- assignees_rendering_overflow = issue.assignees.size > max_render
- render_count = assignees_rendering_overflow ? max_render - 1 : max_render
- more_assignees_count = issue.assignees.size - render_count
- issue.assignees.take(max).each do |assignee|
- issue.assignees.take(render_count).each do |assignee|
= link_to_member(@project, assignee, name: false, title: "Assigned to :name")
- if issue.assignees.length > max_render
- counter = issue.assignees.length - max_render
%span{ class: 'avatar-counter has-tooltip', data: { container: 'body', placement: 'bottom', 'line-type' => 'old', 'original-title' => "+#{counter} more assignees" } }
- if counter < 99
= "+#{counter}"
- else
99+
- if more_assignees_count.positive?
%span{ class: 'avatar-counter has-tooltip', data: { container: 'body', placement: 'bottom', 'line-type' => 'old', 'original-title' => "+#{more_assignees_count} more assignees" } } +#{more_assignees_count}

View File

@ -39,6 +39,7 @@
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
- pipeline_creation:run_pipeline_schedule
- pipeline_default:build_coverage
- pipeline_default:build_trace_sections
- pipeline_default:pipeline_metrics

View File

@ -13,7 +13,7 @@ class ExpirePipelineCacheWorker
store.touch(project_pipelines_path(project))
store.touch(project_pipeline_path(project, pipeline))
store.touch(commit_pipelines_path(project, pipeline.commit)) if pipeline.commit
store.touch(commit_pipelines_path(project, pipeline.commit)) unless pipeline.commit.nil?
store.touch(new_merge_request_pipelines_path(project))
each_pipelines_merge_request_path(project, pipeline) do |path|
store.touch(path)
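The switch from `if pipeline.commit` to `unless pipeline.commit.nil?` matters because `Ci::Pipeline#commit` now returns a BatchLoader proxy (see `Commit.lazy` above). A sketch of the distinction, assuming batch-loader's delegating-proxy semantics:

```ruby
commit = pipeline.commit  # a BatchLoader proxy, even when the SHA cannot be found
if commit                 # always taken: `if` tests the proxy object itself, no method is called
  # ...
end
commit.nil?      # delegated to the loaded value, so true when the commit is missing
commit.present?  # false in the same case, as the NOTE in Ci::Pipeline recommends
```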

View File

@ -0,0 +1,22 @@
class RunPipelineScheduleWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_creation
def perform(schedule_id, user_id)
schedule = Ci::PipelineSchedule.find_by(id: schedule_id)
user = User.find_by(id: user_id)
return unless schedule && user
run_pipeline_schedule(schedule, user)
end
def run_pipeline_schedule(schedule, user)
Ci::CreatePipelineService.new(schedule.project,
user,
ref: schedule.ref)
.execute(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: schedule)
end
end

View File

@ -0,0 +1,5 @@
---
title: Fix tags in the Activity tab not being clickable
merge_request: 15996
author: Mario de la Ossa
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Do not generate NPM links for private NPM modules in blob view
merge_request: 16002
author: Mario de la Ossa
type: added

View File

@ -0,0 +1,5 @@
---
title: List of avatars should never show +1
merge_request: 15972
author: Jacopo Beschi @jacopo-beschi
type: added

View File

@ -0,0 +1,5 @@
---
title: Reset todo counters when the target is deleted
merge_request: 15807
author:
type: fixed

View File

@ -0,0 +1,6 @@
---
title: Don't link LFS objects to a project when unlinking forks when they were already
linked
merge_request: 16006
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix shortcut links on help page
merge_request:
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Fix onion-skin re-entering state
merge_request:
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Remove related links in MR widget when empty state
merge_request:
author:
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Add button to run scheduled pipeline immediately
merge_request:
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Move edit button to second row on issue page (and change it to a pencil icon)
merge_request:
author:
type: changed

View File

@ -179,6 +179,7 @@ constraints(ProjectUrlConstrainer.new) do
resources :pipeline_schedules, except: [:show] do
member do
post :play
post :take_ownership
end
end

View File

@ -36,6 +36,7 @@ var config = {
cycle_analytics: './cycle_analytics/cycle_analytics_bundle.js',
commit_pipelines: './commit/pipelines/pipelines_bundle.js',
deploy_keys: './deploy_keys/index.js',
docs: './docs/docs_bundle.js',
diff_notes: './diff_notes/diff_notes_bundle.js',
environments: './environments/environments_bundle.js',
environments_folder: './environments/folder/environments_folder_bundle.js',

View File

@ -16,6 +16,7 @@ class IssuesMilestoneIdForeignKey < ActiveRecord::Migration
def self.with_orphaned_milestones
where('NOT EXISTS (SELECT true FROM milestones WHERE milestones.id = issues.milestone_id)')
.where('milestone_id IS NOT NULL')
end
end

View File

@ -1,6 +1,6 @@
# Configuring GitLab for HA
Assuming you have already configured a database, Redis, and NFS, you can
Assuming you have already configured a [database](database.md), [Redis](redis.md), and [NFS](nfs.md), you can
configure the GitLab application server(s) now. Complete the steps below
for each GitLab application server in your environment.
@ -48,34 +48,33 @@ for each GitLab application server in your environment.
data locations. See [NFS documentation](nfs.md) for `/etc/gitlab/gitlab.rb`
configuration values for various scenarios. The example below assumes you've
added NFS mounts in the default data locations.
```ruby
external_url 'https://gitlab.example.com'
# Prevent GitLab from starting if NFS data mounts are not available
high_availability['mountpoint'] = '/var/opt/gitlab/git-data'
# Disable components that will not be on the GitLab application server
postgresql['enable'] = false
redis['enable'] = false
roles ['application_role']
# PostgreSQL connection details
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host'] = '10.1.0.5' # IP/hostname of database server
gitlab_rails['db_password'] = 'DB password'
# Redis connection details
gitlab_rails['redis_port'] = '6379'
gitlab_rails['redis_host'] = '10.1.0.6' # IP/hostname of Redis server
gitlab_rails['redis_password'] = 'Redis Password'
```
> **Note:** To maintain uniformity of links across HA clusters, the `external_url`
on the first application server as well as the additional application
servers should point to the external url that users will use to access GitLab.
> **Note:** To maintain uniformity of links across HA clusters, the `external_url`
on the first application server as well as the additional application
servers should point to the external url that users will use to access GitLab.
In a typical HA setup, this will be the url of the load balancer which will
route traffic to all GitLab application servers in the HA cluster.
route traffic to all GitLab application servers in the HA cluster.
1. Run `sudo gitlab-ctl reconfigure` to compile the configuration.

View File

@ -11,7 +11,7 @@ This exported module should be used instead of directly using `axios` to ensure
## Usage
```javascript
import axios from '~/lib/utils/axios_utils';
import axios from './lib/utils/axios_utils';
axios.get(url)
.then((response) => {

View File

@ -163,3 +163,11 @@ For Windows, you can use `wincred` or Microsoft's [Git Credential Manager for Wi
More details about various methods of storing the user credentials can be found
on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage).
### LFS objects are missing on push
GitLab checks files to detect LFS pointers on push. If LFS pointers are detected, GitLab tries to verify that those files already exist in LFS on GitLab.
Verify that LFS is installed locally and consider a manual push with `git lfs push --all`.
If you are storing LFS files outside of GitLab, you can disable LFS on the project by setting `lfs_enabled: false` with the [projects API](../../api/projects.md#edit-project).

View File

@ -0,0 +1,47 @@
module Gitlab
# This class implements a simple rate limiter that can be used to throttle
# certain actions. Unlike Rack Attack and Rack::Throttle, which operate at
# the middleware level, this can be used at the controller level.
class ActionRateLimiter
TIME_TO_EXPIRE = 60 # 1 min
attr_accessor :action, :expiry_time
def initialize(action:, expiry_time: TIME_TO_EXPIRE)
@action = action
@expiry_time = expiry_time
end
# Increments the given cache key by 1 with the given expiration time.
# Returns the incremented value.
#
# key - An array of ActiveRecord instances
def increment(key)
value = 0
Gitlab::Redis::Cache.with do |redis|
cache_key = action_key(key)
value = redis.incr(cache_key)
redis.expire(cache_key, expiry_time) if value == 1
end
value
end
# Increments the given key and returns true if the action should
# be throttled.
#
# key - An array of ActiveRecord instances
# threshold_value - The maximum number of times this action should occur in the given time interval
def throttled?(key, threshold_value)
self.increment(key) > threshold_value
end
private
def action_key(key)
serialized = key.map { |obj| "#{obj.class.model_name.to_s.underscore}:#{obj.id}" }.join(":")
"action_rate_limiter:#{action}:#{serialized}"
end
end
end
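A minimal usage sketch of `Gitlab::ActionRateLimiter`, mirroring the `play_rate_limit` before_action added to `Projects::PipelineSchedulesController` earlier in this diff:

```ruby
limiter = ::Gitlab::ActionRateLimiter.new(action: :play_pipeline_schedule)

# Allow at most one play per (user, schedule) pair within TIME_TO_EXPIRE (60 s);
# the key array is serialized by action_key into a Redis cache key.
if limiter.throttled?([current_user, schedule], 1)
  flash[:alert] = 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
end
```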

View File

@ -228,6 +228,19 @@ module Gitlab
end
end
end
# Only to be used when the object ids will not necessarily have a
# relation to each other. The last 10 commits for a branch for example,
# should go through .where
def batch_by_oid(repo, oids)
repo.gitaly_migrate(:list_commits_by_oid) do |is_enabled|
if is_enabled
repo.gitaly_commit_client.list_commits_by_oid(oids)
else
oids.map { |oid| find(repo, oid) }.compact
end
end
end
end
def initialize(repository, raw_commit, head = nil)
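Per the comment above, a hedged contrast of when each path applies (the SHAs and ref name are illustrative; the comment's `.where` listing is shown here via `Repository#commits`, whose signature appears earlier in this diff):

```ruby
raw = project.repository.raw_repository

# Unrelated object ids (e.g. the head commits of many pipelines): one batched lookup.
Gitlab::Git::Commit.batch_by_oid(raw, %w[3eb0d50a cafe1234])

# Contiguous history of a single ref: use the regular listing path instead.
project.repository.commits('master', limit: 10)
```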

View File

@ -169,6 +169,15 @@ module Gitlab
consume_commits_response(response)
end
def list_commits_by_oid(oids)
request = Gitaly::ListCommitsByOidRequest.new(repository: @gitaly_repo, oid: oids)
response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
rescue GRPC::Unknown # If no repository is found, happens mainly during testing
[]
end
def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0)
request = Gitaly::CommitsByMessageRequest.new(
repository: @gitaly_repo,

View File

@ -1,7 +1,8 @@
#!/usr/bin/env ruby
gitaly_dir = 'tmp/tests/gitaly'
env = { 'HOME' => File.expand_path('tmp/tests') }
env = { 'HOME' => File.expand_path('tmp/tests'),
'GEM_PATH' => Gem.path.join(':') }
args = %W[#{gitaly_dir}/gitaly #{gitaly_dir}/config.toml]
# Print the PID of the spawned process

View File

@ -874,7 +874,7 @@ describe Projects::IssuesController do
end
it 'delegates the update of the todos count cache to TodoService' do
expect_any_instance_of(TodoService).to receive(:destroy_issuable).with(issue, owner).once
expect_any_instance_of(TodoService).to receive(:destroy_target).with(issue).once
delete :destroy, namespace_id: project.namespace, project_id: project, id: issue.iid
end

View File

@ -468,7 +468,7 @@ describe Projects::MergeRequestsController do
end
it 'delegates the update of the todos count cache to TodoService' do
expect_any_instance_of(TodoService).to receive(:destroy_issuable).with(merge_request, owner).once
expect_any_instance_of(TodoService).to receive(:destroy_target).with(merge_request).once
delete :destroy, namespace_id: project.namespace, project_id: project, id: merge_request.iid
end

View File

@ -3,10 +3,12 @@ require 'spec_helper'
describe Projects::PipelineSchedulesController do
include AccessMatchersForController
set(:project) { create(:project, :public) }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
set(:project) { create(:project, :public, :repository) }
set(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) }
describe 'GET #index' do
render_views
let(:scope) { nil }
let!(:inactive_pipeline_schedule) do
create(:ci_pipeline_schedule, :inactive, project: project)
@ -96,7 +98,7 @@ describe Projects::PipelineSchedulesController do
end
end
context 'when variables_attributes has two variables and duplicted' do
context 'when variables_attributes has two variables and duplicated' do
let(:schedule) do
basic_param.merge({
variables_attributes: [{ key: 'AAA', value: 'AAA123' }, { key: 'AAA', value: 'BBB123' }]
@ -364,6 +366,65 @@ describe Projects::PipelineSchedulesController do
end
end
describe 'POST #play', :clean_gitlab_redis_cache do
set(:user) { create(:user) }
let(:ref) { 'master' }
before do
project.add_developer(user)
sign_in(user)
end
context 'when an anonymous user makes the request' do
before do
sign_out(user)
end
it 'does not allow pipeline to be executed' do
expect(RunPipelineScheduleWorker).not_to receive(:perform_async)
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
expect(response).to have_gitlab_http_status(404)
end
end
context 'when a developer makes the request' do
it 'executes a new pipeline' do
expect(RunPipelineScheduleWorker).to receive(:perform_async).with(pipeline_schedule.id, user.id).and_return('job-123')
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
expect(flash[:notice]).to start_with 'Successfully scheduled a pipeline to run'
expect(response).to have_gitlab_http_status(302)
end
it 'prevents users from scheduling the same pipeline repeatedly' do
2.times do
post :play, namespace_id: project.namespace.to_param, project_id: project, id: pipeline_schedule.id
end
expect(flash.to_a.size).to eq(2)
expect(flash[:alert]).to eq 'You cannot play this scheduled pipeline at the moment. Please wait a minute.'
expect(response).to have_gitlab_http_status(302)
end
end
context 'when a developer attempts to schedule a protected ref' do
it 'does not allow pipeline to be executed' do
create(:protected_branch, project: project, name: ref)
protected_schedule = create(:ci_pipeline_schedule, project: project, ref: ref)
expect(RunPipelineScheduleWorker).not_to receive(:perform_async)
post :play, namespace_id: project.namespace.to_param, project_id: project, id: protected_schedule.id
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'DELETE #destroy' do
set(:user) { create(:user) }

View File

@ -17,13 +17,10 @@ describe Projects::PipelinesController do
describe 'GET index.json' do
before do
branch_head = project.commit
parent = branch_head.parent
create(:ci_empty_pipeline, status: 'pending', project: project, sha: branch_head.id)
create(:ci_empty_pipeline, status: 'running', project: project, sha: branch_head.id)
create(:ci_empty_pipeline, status: 'created', project: project, sha: parent.id)
create(:ci_empty_pipeline, status: 'success', project: project, sha: parent.id)
%w(pending running created success).each_with_index do |status, index|
sha = project.commit("HEAD~#{index}")
create(:ci_empty_pipeline, status: status, project: project, sha: sha)
end
end
subject do
@ -46,7 +43,7 @@ describe Projects::PipelinesController do
context 'when performing gitaly calls', :request_store do
it 'limits the Gitaly requests' do
expect { subject }.to change { Gitlab::GitalyClient.get_request_count }.by(8)
expect { subject }.to change { Gitlab::GitalyClient.get_request_count }.by(3)
end
end
end

View File

@ -32,6 +32,24 @@ describe 'Help Pages' do
it_behaves_like 'help page', prefix: '/gitlab'
end
context 'quick link shortcuts', :js do
before do
visit help_path
end
it 'focuses search bar' do
find('.js-trigger-search-bar').click
expect(page).to have_selector('#search:focus')
end
it 'opens shortcuts help dialog' do
find('.js-trigger-shortcut').click
expect(page).to have_selector('#modal-shortcuts')
end
end
end
context 'in a production environment with version check enabled', :js do

View File

@ -24,7 +24,7 @@ feature 'Issue Detail', :js do
visit project_issue_path(project, issue)
wait_for_requests
click_link 'Edit'
page.find('.js-issuable-edit').click
fill_in 'issuable-title', with: 'issue title'
click_button 'Save'
wait_for_requests

File diff suppressed because it is too large

View File

@ -10,8 +10,6 @@ feature 'image diff notes', :js do
project.team << [user, :master]
sign_in user
page.driver.set_cookie('sidebar_collapsed', 'true')
# Stub helper to return any blob file as image from public app folder.
# This is necessary to run this specs since we don't display repo images in capybara.
allow_any_instance_of(DiffHelper).to receive(:diff_file_blob_raw_path).and_return('/apple-touch-icon.png')
@ -141,13 +139,13 @@ feature 'image diff notes', :js do
end
it 'allows expanding/collapsing the discussion notes' do
page.all('.js-diff-notes-toggle')[0].trigger('click')
page.all('.js-diff-notes-toggle')[1].trigger('click')
page.all('.js-diff-notes-toggle')[0].click
page.all('.js-diff-notes-toggle')[1].click
expect(page).not_to have_content('image diff test comment')
page.all('.js-diff-notes-toggle')[0].trigger('click')
page.all('.js-diff-notes-toggle')[1].trigger('click')
page.all('.js-diff-notes-toggle')[0].click
page.all('.js-diff-notes-toggle')[1].click
expect(page).to have_content('image diff test comment')
end
@ -196,13 +194,31 @@ feature 'image diff notes', :js do
expect(find('.onion-skin-frame')['style']).to match('width: 228px; height: 240px;')
end
it 'resets onion skin view mode opacity when toggling between view modes' do
find('.view-modes-menu .onion-skin').click
# Simulate dragging onion-skin slider
drag_and_drop_by(find('.dragger'), -30, 0)
expect(find('.onion-skin-frame .frame.added', visible: false)['style']).not_to match('opacity: 1;')
find('.view-modes-menu .swipe').click
find('.view-modes-menu .onion-skin').click
expect(find('.onion-skin-frame .frame.added', visible: false)['style']).to match('opacity: 1;')
end
end
def drag_and_drop_by(element, right_by, down_by)
page.driver.browser.action.drag_and_drop_by(element.native, right_by, down_by).perform
end
def create_image_diff_note
find('.js-add-image-diff-note-button', match: :first).click
page.all('.js-add-image-diff-note-button')[0].click
find('.diff-content .note-textarea').native.send_keys('image diff test comment')
click_button 'Comment'
wait_for_requests
end
end
def create_image_diff_note
find('.js-add-image-diff-note-button', match: :first).click
page.all('.js-add-image-diff-note-button')[0].trigger('click')
find('.diff-content .note-textarea').native.send_keys('image diff test comment')
click_button 'Comment'
wait_for_requests
end

View File

@ -2,15 +2,15 @@ require 'spec_helper'
feature 'project owner sees a link to create a license file in empty project', :js do
let(:project_master) { create(:user) }
let(:project) { create(:project) }
let(:project) { create(:project_empty_repo) }
background do
project.team << [project_master, :master]
project.add_master(project_master)
sign_in(project_master)
end
scenario 'project master creates a license file from a template' do
visit project_path(project)
click_link 'Create empty bare repository'
click_on 'LICENSE'
expect(page).to have_content('New file')
@ -26,8 +26,6 @@ feature 'project owner sees a link to create a license file in empty project', :
expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
fill_in :commit_message, with: 'Add a LICENSE file', visible: true
# Remove pre-receive hook so we can push without auth
FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive'))
click_button 'Commit changes'
expect(current_path).to eq(

View File

@ -32,9 +32,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
page.within('.js-issuable-actions') do
click_on 'Edit'
end
page.find('.js-issuable-edit').click
fill_in :'issuable-title', with: 'test issue title'
end
@ -77,9 +75,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
page.within('.js-issuable-actions') do
click_on 'Edit'
end
page.find('.js-issuable-edit').click
fill_in :'issuable-title', with: 'test issue title'
fill_in :'issue-description', with: prior_description
end

View File

@ -4,18 +4,17 @@ feature 'Master views tags' do
let(:user) { create(:user) }
before do
project.team << [user, :master]
project.add_master(user)
sign_in(user)
end
context 'when project has no tags' do
let(:project) { create(:project_empty_repo) }
before do
visit project_path(project)
click_on 'README'
fill_in :commit_message, with: 'Add a README file', visible: true
# Remove pre-receive hook so we can push without auth
FileUtils.rm_f(File.join(project.repository.path, 'hooks', 'pre-receive'))
click_button 'Commit changes'
visit project_tags_path(project)
end

View File

@ -41,6 +41,7 @@ describe NotesHelper do
describe '#discussion_path' do
let(:project) { create(:project, :repository) }
let(:anchor) { discussion.line_code }
context 'for a merge request discussion' do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) }
@ -151,6 +152,15 @@ describe NotesHelper do
expect(helper.discussion_path(discussion)).to be_nil
end
end
context 'for a contextual commit discussion' do
let(:commit) { merge_request.commits.last }
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, commit_id: commit.id).to_discussion }
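# A diff note carrying a commit_id is presumably routed to the merge request diffs page
# scoped to that commit, rather than to the bare commit path used further below.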
it 'returns the merge request diff discussion scoped in the commit' do
expect(helper.discussion_path(discussion)).to eq(diffs_project_merge_request_path(project, merge_request, commit_id: commit.id, anchor: anchor))
end
end
end
context 'for a commit discussion' do
@ -160,7 +170,7 @@ describe NotesHelper do
let(:discussion) { create(:diff_note_on_commit, project: project).to_discussion }
it 'returns the commit path with the line code' do
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: discussion.line_code))
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: anchor))
end
end
@ -168,7 +178,7 @@ describe NotesHelper do
let(:discussion) { create(:legacy_diff_note_on_commit, project: project).to_discussion }
it 'returns the commit path with the line code' do
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: discussion.line_code))
expect(helper.discussion_path(discussion)).to eq(project_commit_path(project, commit, anchor: anchor))
end
end

View File

@ -1,11 +1,9 @@
/* global Notes */
import 'autosize';
import '~/gl_form';
import '~/lib/utils/text_utility';
import '~/render_gfm';
import '~/render_math';
import '~/notes';
import Notes from '~/notes';
const upArrowKeyCode = 38;

View File

@ -1,5 +1,4 @@
/* eslint-disable no-var, comma-dangle, object-shorthand */
/* global Notes */
import * as urlUtils from '~/lib/utils/url_utility';
import MergeRequestTabs from '~/merge_request_tabs';
@ -7,7 +6,7 @@ import '~/commit/pipelines/pipelines_bundle';
import '~/breakpoints';
import '~/lib/utils/common_utils';
import Diff from '~/diff';
import '~/notes';
import Notes from '~/notes';
import 'vendor/jquery.scrollTo';
(function () {
@ -279,8 +278,8 @@ import 'vendor/jquery.scrollTo';
loadFixtures('merge_requests/diff_comment.html.raw');
$('body').attr('data-page', 'projects:merge_requests:show');
window.gl.ImageFile = () => {};
window.notes = new Notes('', []);
spyOn(window.notes, 'toggleDiffNote').and.callThrough();
Notes.initialize('', []);
spyOn(Notes.instance, 'toggleDiffNote').and.callThrough();
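// Notes appears to be consumed here as an ES module singleton: Notes.initialize wires it up
// and Notes.instance is what the expectations below spy on, replacing the old window.notes global.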
});
afterEach(() => {
@ -338,7 +337,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).toHaveBeenCalledWith({
expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({
target: jasmine.any(Object),
lineType: 'old',
forceShow: true,
@ -349,7 +348,7 @@ import 'vendor/jquery.scrollTo';
spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist');
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled();
expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
});
});
@ -359,7 +358,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteLineNumId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled();
expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
});
});
});
@ -393,7 +392,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).toHaveBeenCalledWith({
expect(Notes.instance.toggleDiffNote).toHaveBeenCalledWith({
target: jasmine.any(Object),
lineType: 'new',
forceShow: true,
@ -404,7 +403,7 @@ import 'vendor/jquery.scrollTo';
spyOn(urlUtils, 'getLocationHash').and.returnValue('note_something-that-does-not-exist');
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled();
expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
});
});
@ -414,7 +413,7 @@ import 'vendor/jquery.scrollTo';
this.class.loadDiff('/foo/bar/merge_requests/1/diffs');
expect(noteLineNumId.length).toBeGreaterThan(0);
expect(window.notes.toggleDiffNote).not.toHaveBeenCalled();
expect(Notes.instance.toggleDiffNote).not.toHaveBeenCalled();
});
});
});

View File

@ -1,12 +1,10 @@
/* eslint-disable space-before-function-paren, no-unused-expressions, no-var, object-shorthand, comma-dangle, max-len */
/* global Notes */
import * as urlUtils from '~/lib/utils/url_utility';
import 'autosize';
import '~/gl_form';
import '~/lib/utils/text_utility';
import '~/render_gfm';
import '~/notes';
import Notes from '~/notes';
(function() {
window.gon || (window.gon = {});

View File

@ -2,6 +2,7 @@ import Vue from 'vue';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import mockData from './mock_data';
import mountComponent from '../helpers/vue_mount_component_helper';
@ -344,4 +345,31 @@ describe('mrWidgetOptions', () => {
expect(comps['mr-widget-merge-when-pipeline-succeeds']).toBeDefined();
});
});
describe('rendering relatedLinks', () => {
beforeEach((done) => {
vm.mr.relatedLinks = {
assignToMe: null,
closing: `
<a class="close-related-link" href="#'>
Close
</a>
`,
mentioned: '',
};
Vue.nextTick(done);
});
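// relatedLinks should presumably render whenever links are present, except once the MR
// reaches the nothingToMerge state, which the second example below drives via stateKey.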
it('renders if there are relatedLinks', () => {
expect(vm.$el.querySelector('.close-related-link')).toBeDefined();
});
it('does not render if state is nothingToMerge', (done) => {
vm.mr.state = stateKey.nothingToMerge;
Vue.nextTick(() => {
expect(vm.$el.querySelector('.close-related-link')).toBeNull();
done();
});
});
});
});

View File

@ -1,4 +1,5 @@
import MergeRequestStore from '~/vue_merge_request_widget/stores/mr_widget_store';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import mockData from '../mock_data';
describe('MergeRequestStore', () => {
@ -52,5 +53,17 @@ describe('MergeRequestStore', () => {
expect(store.isPipelineSkipped).toBe(false);
});
});
describe('isNothingToMergeState', () => {
it('returns true when nothingToMerge', () => {
store.state = stateKey.nothingToMerge;
expect(store.isNothingToMergeState).toEqual(true);
});
it('returns false when not nothingToMerge', () => {
store.state = 'state';
expect(store.isNothingToMergeState).toEqual(false);
});
});
});
});

View File

@ -0,0 +1,29 @@
require 'spec_helper'
describe Gitlab::ActionRateLimiter do
let(:redis) { double('redis') }
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:key) { [user, project] }
let(:cache_key) { "action_rate_limiter:test_action:user:#{user.id}:project:#{project.id}" }
subject { described_class.new(action: :test_action, expiry_time: 100) }
before do
allow(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
end
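# throttled?(key, threshold) presumably increments a per-action Redis counter, setting its
# expiry on first use, and only reports true once the count exceeds the threshold.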
it 'increases the throttle count and sets the expire time' do
expect(redis).to receive(:incr).with(cache_key).and_return(1)
expect(redis).to receive(:expire).with(cache_key, 100)
expect(subject.throttled?(key, 1)).to be false
end
it 'returns true if the key is throttled' do
expect(redis).to receive(:incr).with(cache_key).and_return(2)
expect(redis).not_to receive(:expire)
expect(subject.throttled?(key, 1)).to be true
end
end

View File

@ -41,7 +41,8 @@ describe Gitlab::Git::GitlabProjects do
end
it "fails if the source path doesn't exist" do
expect(logger).to receive(:error).with("mv-project failed: source path <#{tmp_repos_path}/bad-src.git> does not exist.")
expected_source_path = File.join(tmp_repos_path, 'bad-src.git')
expect(logger).to receive(:error).with("mv-project failed: source path <#{expected_source_path}> does not exist.")
result = build_gitlab_projects(tmp_repos_path, 'bad-src.git').mv_project('repo.git')
expect(result).to be_falsy
@ -50,7 +51,8 @@ describe Gitlab::Git::GitlabProjects do
it 'fails if the destination path already exists' do
FileUtils.mkdir_p(File.join(tmp_repos_path, 'already-exists.git'))
message = "mv-project failed: destination path <#{tmp_repos_path}/already-exists.git> already exists."
expected_destination_path = File.join(tmp_repos_path, 'already-exists.git')
message = "mv-project failed: destination path <#{expected_destination_path}> already exists."
expect(logger).to receive(:error).with(message)
expect(gl_projects.mv_project('already-exists.git')).to be_falsy

View File

@ -22,4 +22,51 @@ describe BlobViewer::PackageJson do
expect(subject.package_name).to eq('module-name')
end
end
describe '#package_url' do
it 'returns the package URL' do
expect(subject).to receive(:prepare!)
expect(subject.package_url).to eq("https://www.npmjs.com/package/#{subject.package_name}")
end
end
describe '#package_type' do
it 'returns "package"' do
expect(subject).to receive(:prepare!)
expect(subject.package_type).to eq('package')
end
end
context 'when package.json has "private": true' do
let(:data) do
<<-SPEC.strip_heredoc
{
"name": "module-name",
"version": "10.3.1",
"private": true,
"homepage": "myawesomepackage.com"
}
SPEC
end
let(:blob) { fake_blob(path: 'package.json', data: data) }
subject { described_class.new(blob) }
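# With "private": true the viewer is presumably expected to skip the npmjs.com link and
# fall back to the manifest's homepage field, as the two examples below check.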
describe '#package_url' do
it 'returns homepage if any' do
expect(subject).to receive(:prepare!)
expect(subject.package_url).to eq('myawesomepackage.com')
end
end
describe '#package_type' do
it 'returns "private package"' do
expect(subject).to receive(:prepare!)
expect(subject.package_type).to eq('private package')
end
end
end
end

View File

@ -13,6 +13,45 @@ describe Commit do
it { is_expected.to include_module(StaticModel) }
end
describe '.lazy' do
set(:project) { create(:project, :repository) }
context 'when the commits are found' do
let(:oids) do
%w(
498214de67004b1da3d820901307bed2a68a8ef6
c642fe9b8b9f28f9225d7ea953fe14e74748d53b
6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
048721d90c449b244b7b4c53a9186b04330174ec
281d3a76f31c812dbf48abce82ccf6860adedd81
)
end
subject { oids.map { |oid| described_class.lazy(project, oid) } }
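# Commit.lazy defers loading: touching any attribute should resolve every queued oid
# through a single batched repository.commits_by call, preserving the requested order.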
it 'batches requests for commits' do
expect(project.repository).to receive(:commits_by).once.and_call_original
subject.first.title
subject.last.title
end
it 'maintains ordering' do
subject.each_with_index do |commit, i|
expect(commit.id).to eq(oids[i])
end
end
end
context 'when not found' do
it 'returns nil as commit' do
commit = described_class.lazy(project, 'deadbeef').__sync
expect(commit).to be_nil
end
end
end
describe '#author' do
it 'looks up the author in a case-insensitive way' do
user = create(:user, email: commit.author_email.upcase)

Some files were not shown because too many files have changed in this diff