Eliminate duplicated words
Signed-off-by: Takuya Noguchi <takninnovationresearch@gmail.com>
commit e902f462c2
parent d093bbaec5
42 changed files with 49 additions and 49 deletions
@@ -124,7 +124,7 @@ export default class FileTemplateMediator {
 
   selectTemplateFile(selector, query, data) {
     selector.renderLoading();
-    // in case undo menu is already already there
+    // in case undo menu is already there
    this.destroyUndoMenu();
    this.fetchFileTemplate(selector.config.type, query, data)
      .then(file => {
@@ -22,7 +22,7 @@ export default class Labels {
   updateColorPreview() {
     const previewColor = $('input#label_color').val();
     return $('div.label-color-preview').css('background-color', previewColor);
-    // Updates the the preview color with the hex-color input
+    // Updates the preview color with the hex-color input
   }
 
   // Updates the preview color with a click on a suggested color
@@ -3,7 +3,7 @@
 # Overrides `as_json` and `to_json` to raise an exception when called in order
 # to prevent accidentally exposing attributes
 #
-# Not that that would ever happen... but just in case.
+# Not that would ever happen... but just in case.
 module BlocksJsonSerialization
   extend ActiveSupport::Concern
 
@@ -190,7 +190,7 @@ class Namespace < ActiveRecord::Base
       .base_and_ancestors
   end
 
-  # returns all ancestors upto but excluding the the given namespace
+  # returns all ancestors upto but excluding the given namespace
   # when no namespace is given, all ancestors upto the top are returned
   def ancestors_upto(top = nil)
     Gitlab::GroupHierarchy.new(self.class.where(id: id))
@@ -7,7 +7,7 @@ module Commits
   # - user: `User` that will be the committer
   # - params:
   # - branch_name: `String` the branch that will be committed into
-  # - start_branch: `String` the branch that will will started from
+  # - start_branch: `String` the branch that will be started from
   # - patches: `Gitlab::Git::Patches::Collection` that contains the patches
   def initialize(*args)
     super
@@ -50,7 +50,7 @@ class NotificationService
 
   # Always notify the user about gpg key added
   #
-  # This is a security email so it will be sent even if the user user disabled
+  # This is a security email so it will be sent even if the user disabled
   # notifications
   def new_gpg_key(gpg_key)
     if gpg_key.user&.can?(:receive_notifications)
@@ -26,7 +26,7 @@
     %h4.prepend-top-0
       Feed token
     %p
-      Your feed token is used to authenticate you when your RSS reader loads a personalized RSS feed or when when your calendar application loads a personalized calendar, and is included in those feed URLs.
+      Your feed token is used to authenticate you when your RSS reader loads a personalized RSS feed or when your calendar application loads a personalized calendar, and is included in those feed URLs.
     %p
       It cannot be used to access any other data.
   .col-lg-8.feed-token-reset
@@ -12,5 +12,5 @@ require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
 begin
   require 'bootsnap/setup'
 rescue LoadError
-  # bootsnap is optional dependency, so if we don't have it it's fine
+  # bootsnap is an optional dependency, so if we don't have it, it's fine
 end
@@ -12,7 +12,7 @@ class AlterWebHookLogsIndexes < ActiveRecord::Migration
   disable_ddl_transaction!
 
   # "created_at" comes first so the Sidekiq worker pruning old webhook logs can
-  # use a composite index index.
+  # use a composite index.
   #
   # We leave the old standalone index on "web_hook_id" in place so future code
   # that doesn't care about "created_at" can still use that index.
@@ -8,7 +8,7 @@ module Gitlab
     # Class that rewrites markdown links for uploads
     #
     # Using a pattern defined in `FileUploader` it copies files to a new
-    # project and rewrites all links to uploads in in a given text.
+    # project and rewrites all links to uploads in a given text.
     #
     #
     class UploadsRewriter
@@ -441,7 +441,7 @@ module Gitlab
       gitaly_ref_client.find_ref_name(sha, ref_path)
     end
 
-    # Get refs hash which key is is the commit id
+    # Get refs hash which key is the commit id
     # and value is a Gitlab::Git::Tag or Gitlab::Git::Branch
     # Note that both inherit from Gitlab::Git::Ref
     def refs_hash
@@ -251,7 +251,7 @@ module Gitlab
 
     def validate_variable_usage(errors, translation, required_variables)
       # We don't need to validate when the message is empty.
-      # In this case we fall back to the default, which has all the the
+      # In this case we fall back to the default, which has all the
      # required variables.
      return if translation.empty?
 
@@ -6,10 +6,10 @@
 # used for rendering Markdown) are completely unnecessary and may even lead to
 # transaction timeouts.
 #
-# To ensure importing merge requests requests has a minimal impact and can
-# complete in a reasonable time we bypass all the hooks by inserting the row
-# and then retrieving it. We then only perform the additional work that is
-# strictly necessary.
+# To ensure importing merge requests has a minimal impact and can complete in
+# a reasonable time we bypass all the hooks by inserting the row and then
+# retrieving it. We then only perform the additional work that is strictly
+# necessary.
 module Gitlab
   module Import
     class MergeRequestCreator
@@ -152,7 +152,7 @@ describe Projects::BlobController do
       expect(match_line['meta_data']).to have_key('new_pos')
     end
 
-    it 'does not add top match line when when "since" is equal 1' do
+    it 'does not add top match line when "since" is equal 1' do
       do_get(since: 1, to: 10, offset: 10, from_merge_request: true)
 
       match_line = JSON.parse(response.body).first
@@ -42,7 +42,7 @@ describe 'Merge request > User assigns themselves' do
       visit project_merge_request_path(project, merge_request)
     end
 
-    it 'does not not show assignment link' do
+    it 'does not show assignment link' do
       expect(page).not_to have_content 'Assign yourself'
     end
   end
@@ -325,7 +325,7 @@ describe 'Merge request > User resolves diff notes and discussions', :js do
       end
     end
 
-    it 'allows user user to mark all discussions as resolved' do
+    it 'allows user to mark all discussions as resolved' do
       page.all('.discussion-reply-holder', count: 2).each do |reply_holder|
         page.within reply_holder do
           click_button 'Resolve discussion'
@ -157,7 +157,7 @@ describe "User creates wiki page" do
|
|||
expect(page).to have_field("wiki[message]", with: "Create home")
|
||||
end
|
||||
|
||||
it "creates a page from from the home page" do
|
||||
it "creates a page from the home page" do
|
||||
page.within(".wiki-form") do
|
||||
fill_in(:wiki_content, with: "My awesome wiki!")
|
||||
|
||||
|
|
|
@@ -12,7 +12,7 @@ describe PipelineSchedulesFinder do
   context 'when the scope is nil' do
     let(:params) { { scope: nil } }
 
-    it 'selects all pipeline pipeline schedules' do
+    it 'selects all pipeline schedules' do
      expect(subject.count).to be(2)
      expect(subject).to include(active_schedule, inactive_schedule)
    end
spec/fixtures/emails/paragraphs.eml (vendored)
@@ -17,7 +17,7 @@ X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
  13 Jun 2013 14:03:48 -0700 (PDT)
 X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
 
-Is there any reason the *old* candy can't be be kept in silos while the new candy
+Is there any reason the *old* candy can't be kept in silos while the new candy
 is imported into *new* silos?
 
 The thing about candy is it stays delicious for a long time -- we can just keep
@@ -123,7 +123,7 @@ describe 'create_tokens' do
       create_tokens
     end
 
-    it 'sets the the keys to the values from the environment and secrets.yml' do
+    it 'sets the keys to the values from the environment and secrets.yml' do
       create_tokens
 
       expect(secrets.secret_key_base).to eq('secret_key_base')
@@ -29,7 +29,7 @@ describe('ide component', () => {
     resetStore(vm.$store);
   });
 
-  it('does not render right right when no files open', () => {
+  it('does not render right when no files open', () => {
     expect(vm.$el.querySelector('.panel-right')).toBeNull();
   });
 
@@ -59,7 +59,7 @@ describe('IDE branches actions', () => {
   });
 
   describe('receiveBranchesError', () => {
-    it('should should commit error', done => {
+    it('should commit error', done => {
      testAction(
        receiveBranchesError,
        { search: TEST_SEARCH },
@@ -39,7 +39,7 @@ describe('IDE merge requests actions', () => {
   });
 
   describe('receiveMergeRequestsError', () => {
-    it('should should commit error', done => {
+    it('should commit error', done => {
      testAction(
        receiveMergeRequestsError,
        { type: 'created', search: '' },
@@ -28,7 +28,7 @@ describe Banzai::Filter::AbsoluteLinkFilter do
     end
 
     context 'if relative_url_root is set' do
-      it 'joins the url without without doubling the path' do
+      it 'joins the url without doubling the path' do
        allow(Gitlab.config.gitlab).to receive(:url).and_return("#{fake_url}/gitlab/")
        doc = filter(link("/gitlab/foo", 'gfm'), only_path_context)
        expect(doc.at_css('a')['href']).to eq "#{fake_url}/gitlab/foo"
@@ -498,7 +498,7 @@ describe Gitlab::Auth::OAuth::User do
       end
     end
 
-    describe 'ensure backwards compatibility with with sync email from provider option' do
+    describe 'ensure backwards compatibility with sync email from provider option' do
       let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
 
       before do
@@ -4,7 +4,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
   set(:project) { create(:project) }
 
   describe '#satisfied_by?' do
-    describe 'paths matching matching' do
+    describe 'paths matching' do
      let(:pipeline) do
        build(:ci_empty_pipeline, project: project,
              ref: 'master',
@@ -37,7 +37,7 @@ describe Gitlab::Ci::Config::External::File::Local do
   end
 
   describe '#content' do
-    context 'with a a valid file' do
+    context 'with a valid file' do
      let(:local_file_content) do
        <<~HEREDOC
          before_script:
@@ -93,7 +93,7 @@ describe Gitlab::Ci::Pipeline::Expression::Lexeme::String do
   end
 
   describe '#evaluate' do
-    it 'returns string value it is is present' do
+    it 'returns string value if it is present' do
      string = described_class.new('my string')
 
      expect(string.evaluate).to eq 'my string'
@@ -135,7 +135,7 @@ describe Gitlab::ContributionsCalendar do
       expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3)
     end
 
-    context 'when the user cannot read read cross project' do
+    context 'when the user cannot read cross project' do
      before do
        allow(Ability).to receive(:allowed?).and_call_original
        expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
@@ -50,7 +50,7 @@ describe Gitlab::CrossProjectAccess::CheckInfo do
     expect(info.should_run?(dummy_controller)).to be_truthy
   end
 
-  it 'returns the the opposite of #should_skip? when the check is a skip' do
+  it 'returns the opposite of #should_skip? when the check is a skip' do
    info = described_class.new({}, nil, nil, true)
 
    expect(info).to receive(:should_skip?).with(dummy_controller).and_return(false)
@@ -101,7 +101,7 @@ describe Gitlab::CrossProjectAccess::CheckInfo do
     expect(info.should_skip?(dummy_controller)).to be_truthy
   end
 
-  it 'returns the the opposite of #should_run? when the check is not a skip' do
+  it 'returns the opposite of #should_run? when the check is not a skip' do
    info = described_class.new({}, nil, nil, false)
 
    expect(info).to receive(:should_run?).with(dummy_controller).and_return(false)
@@ -165,7 +165,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
   end
 
   describe '#rename_namespace_dependencies' do
-    it "moves the the repository for a project in the namespace" do
+    it "moves the repository for a project in the namespace" do
      create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
      expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
 
@@ -16,7 +16,7 @@ describe Gitlab::Diff::InlineDiffMarker do
       end
     end
 
-    context "when the text text is not html safe" do
+    context "when the text is not html safe" do
      let(:rich) { "abc 'def' differs" }
 
      it 'marks the range' do
@@ -49,7 +49,7 @@ describe Gitlab::Email::ReplyParser do
       expect(test_parse_body(fixture_file("emails/paragraphs.eml")))
         .to eq(
           <<-BODY.strip_heredoc.chomp
-            Is there any reason the *old* candy can't be be kept in silos while the new candy
+            Is there any reason the *old* candy can't be kept in silos while the new candy
            is imported into *new* silos?
 
            The thing about candy is it stays delicious for a long time -- we can just keep
@@ -82,7 +82,7 @@ describe Gitlab::Git::MergeBase do
   end
 
   describe '#unknown_refs', :missing_ref do
-    it 'returns the the refs passed that are not part of the repository' do
+    it 'returns the refs passed that are not part of the repository' do
      expect(merge_base.unknown_refs).to contain_exactly('aaaa')
    end
 
@@ -28,7 +28,7 @@ describe Gitlab::MultiCollectionPaginator do
     expect(paginator.paginate(1)).to eq(all_projects.take(3))
   end
 
-  it 'fils the second page with a mixture of of the first & second collection' do
+  it 'fils the second page with a mixture of the first & second collection' do
    first_collection_element = all_projects.last
    second_collection_elements = all_groups.take(2)
 
@@ -45,11 +45,11 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
       is_expected.to eq(%w[redis database fog])
     end
 
-    it 'returns redis store as the the lowest precedence' do
+    it 'returns redis store as the lowest precedence' do
      expect(subject.first).to eq('redis')
    end
 
-    it 'returns fog store as the the highest precedence' do
+    it 'returns fog store as the highest precedence' do
      expect(subject.last).to eq('fog')
    end
  end
@@ -22,13 +22,13 @@ describe List do
   end
 
   describe '#destroy' do
-    it 'can be destroyed when when list_type is set to label' do
+    it 'can be destroyed when list_type is set to label' do
      subject = create(:list)
 
      expect(subject.destroy).to be_truthy
    end
 
-    it 'can not be destroyed when when list_type is set to closed' do
+    it 'can not be destroyed when list_type is set to closed' do
      subject = create(:closed_list)
 
      expect(subject.destroy).to be_falsey
@@ -70,7 +70,7 @@ describe Ci::PipelineSchedulePolicy, :models do
       pipeline_schedule.update(owner: user)
     end
 
-    it 'includes abilities to do do all operations on pipeline schedule' do
+    it 'includes abilities to do all operations on pipeline schedule' do
      expect(policy).to be_allowed :play_pipeline_schedule
      expect(policy).to be_allowed :update_pipeline_schedule
      expect(policy).to be_allowed :admin_pipeline_schedule
@@ -82,7 +82,7 @@ describe Ci::PipelineSchedulePolicy, :models do
       project.add_maintainer(user)
     end
 
-    it 'includes abilities to do do all operations on pipeline schedule' do
+    it 'includes abilities to do all operations on pipeline schedule' do
      expect(policy).to be_allowed :play_pipeline_schedule
      expect(policy).to be_allowed :update_pipeline_schedule
      expect(policy).to be_allowed :admin_pipeline_schedule
@@ -223,7 +223,7 @@ describe ProjectPolicy do
       expect_disallowed(*other_write_abilities)
     end
 
-    it 'does not disable other other abilities' do
+    it 'does not disable other abilities' do
      expect_allowed(*(regular_abilities - feature_write_abilities - other_write_abilities))
    end
  end
@@ -95,7 +95,7 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do
               sha: '1234abcdef', status: 'success')
       end
 
-      it 'it does not merge merge request' do
+      it 'it does not merge request' do
        expect(MergeWorker).not_to receive(:perform_async)
        service.trigger(old_pipeline)
      end
@@ -7,7 +7,7 @@ describe Users::SetStatusService do
   subject(:service) { described_class.new(current_user, params) }
 
   describe '#execute' do
-    context 'when when params are set' do
+    context 'when params are set' do
      let(:params) { { emoji: 'taurus', message: 'a random status' } }
 
      it 'creates a status' do
@@ -123,7 +123,7 @@ module ExportFileHelper
     false
   end
 
-  # Compares model attributes with those those found in the hash
+  # Compares model attributes with those found in the hash
  # and returns true if there is a match, ignoring some excluded attributes.
  def safe_model?(model, excluded_attributes, parent)
    excluded_attributes += associations_for(model)