Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-01-08 03:10:42 +00:00
parent d06b4740da
commit bc6937b797
16 changed files with 362 additions and 30 deletions

View file

@ -52,6 +52,10 @@ module AlertManagement
endpoint_identifier == LEGACY_IDENTIFIER
end
# Whether a change to the token attribute is pending save.
# NOTE(review): delegates to attribute_changed?(:token) instead of relying
# on the generated `token_changed?` dirty helper — presumably because the
# token accessor is overridden (e.g. by attr_encrypted), which makes the
# generated helper unreliable; confirm against the enclosing model.
def token_changed?
  attribute_changed?(:token)
end
# Blank token assignment triggers token reset
def prevent_token_assignment
if token.present? && token_changed?

View file

@ -247,7 +247,7 @@ module Clusters
def prevent_modification
return if provided_by_user?
if api_url_changed? || token_changed? || ca_pem_changed?
if api_url_changed? || attribute_changed?(:token) || ca_pem_changed?
errors.add(:base, _('Cannot modify managed Kubernetes cluster'))
return false
end

View file

@ -188,7 +188,7 @@ class PagesDomain < ApplicationRecord
def user_provided_key=(key)
self.key = key
self.certificate_source = 'user_provided' if key_changed?
self.certificate_source = 'user_provided' if attribute_changed?(:key)
end
def user_provided_certificate
@ -207,7 +207,7 @@ class PagesDomain < ApplicationRecord
def gitlab_provided_key=(key)
self.key = key
self.certificate_source = 'gitlab_provided' if key_changed?
self.certificate_source = 'gitlab_provided' if attribute_changed?(:key)
end
def pages_virtual_domain

View file

@ -11,4 +11,8 @@ class AlertsServiceData < ApplicationRecord
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-gcm'
# Whether a change to the token attribute is pending save.
# NOTE(review): uses attribute_changed?(:token) rather than the generated
# dirty helper — presumably because attr_encrypted (configured above)
# redefines the token accessors; confirm this assumption in the model.
def token_changed?
  attribute_changed?(:token)
end
end

View file

@ -308,7 +308,7 @@ class RemoteMirror < ApplicationRecord
end
def mirror_url_changed?
url_changed? || credentials_changed?
url_changed? || attribute_changed?(:credentials)
end
def saved_change_to_mirror_url?

View file

@ -0,0 +1,5 @@
---
title: Improve the reliability and observability of the container registry client
merge_request: 50750
author:
type: changed

View file

@ -0,0 +1,56 @@
# frozen_string_literal: true
class CreateElasticReindexingSubtasks < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class ReindexingTask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_tasks'
end
class ReindexingSubtask < ActiveRecord::Base
self.table_name = 'elastic_reindexing_subtasks'
end
def up
unless table_exists?(:elastic_reindexing_subtasks)
create_table :elastic_reindexing_subtasks do |t|
t.references :elastic_reindexing_task, foreign_key: { on_delete: :cascade }, null: false
t.text :alias_name, null: false
t.text :index_name_from, null: false
t.text :index_name_to, null: false
t.text :elastic_task, null: false
t.integer :documents_count_target
t.integer :documents_count
t.timestamps_with_timezone null: false
end
end
add_text_limit :elastic_reindexing_subtasks, :index_name_from, 255
add_text_limit :elastic_reindexing_subtasks, :index_name_to, 255
add_text_limit :elastic_reindexing_subtasks, :elastic_task, 255
add_text_limit :elastic_reindexing_subtasks, :alias_name, 255
ReindexingTask.find_each do |task|
next if task.index_name_from.blank? || task.index_name_to.blank? || task.elastic_task.blank?
next if ReindexingSubtask.where(elastic_reindexing_task_id: task.id).exists?
ReindexingSubtask.create(
elastic_reindexing_task_id: task.id,
documents_count_target: task.documents_count_target,
documents_count: task.documents_count,
alias_name: 'gitlab-production',
index_name_from: task.index_name_from,
index_name_to: task.index_name_to,
elastic_task: task.elastic_task
)
end
end
def down
drop_table :elastic_reindexing_subtasks
end
end

View file

@ -0,0 +1 @@
164bcc838beb7d51775f8b813b92d3ec7080d4c7937d6ad16cf973131b45359e

View file

@ -11947,6 +11947,32 @@ CREATE SEQUENCE draft_notes_id_seq
ALTER SEQUENCE draft_notes_id_seq OWNED BY draft_notes.id;
CREATE TABLE elastic_reindexing_subtasks (
id bigint NOT NULL,
elastic_reindexing_task_id bigint NOT NULL,
alias_name text NOT NULL,
index_name_from text NOT NULL,
index_name_to text NOT NULL,
elastic_task text NOT NULL,
documents_count_target integer,
documents_count integer,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
CONSTRAINT check_4910adc798 CHECK ((char_length(elastic_task) <= 255)),
CONSTRAINT check_88f56216a4 CHECK ((char_length(alias_name) <= 255)),
CONSTRAINT check_a1fbd9faa9 CHECK ((char_length(index_name_from) <= 255)),
CONSTRAINT check_f456494bd8 CHECK ((char_length(index_name_to) <= 255))
);
CREATE SEQUENCE elastic_reindexing_subtasks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE elastic_reindexing_subtasks_id_seq OWNED BY elastic_reindexing_subtasks.id;
CREATE TABLE elastic_reindexing_tasks (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -18387,6 +18413,8 @@ ALTER TABLE ONLY diff_note_positions ALTER COLUMN id SET DEFAULT nextval('diff_n
ALTER TABLE ONLY draft_notes ALTER COLUMN id SET DEFAULT nextval('draft_notes_id_seq'::regclass);
ALTER TABLE ONLY elastic_reindexing_subtasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_subtasks_id_seq'::regclass);
ALTER TABLE ONLY elastic_reindexing_tasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_tasks_id_seq'::regclass);
ALTER TABLE ONLY emails ALTER COLUMN id SET DEFAULT nextval('emails_id_seq'::regclass);
@ -19548,6 +19576,9 @@ ALTER TABLE ONLY diff_note_positions
ALTER TABLE ONLY draft_notes
ADD CONSTRAINT draft_notes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY elastic_reindexing_subtasks
ADD CONSTRAINT elastic_reindexing_subtasks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY elastic_reindexing_tasks
ADD CONSTRAINT elastic_reindexing_tasks_pkey PRIMARY KEY (id);
@ -21393,6 +21424,8 @@ CREATE INDEX index_draft_notes_on_discussion_id ON draft_notes USING btree (disc
CREATE INDEX index_draft_notes_on_merge_request_id ON draft_notes USING btree (merge_request_id);
CREATE INDEX index_elastic_reindexing_subtasks_on_elastic_reindexing_task_id ON elastic_reindexing_subtasks USING btree (elastic_reindexing_task_id);
CREATE UNIQUE INDEX index_elastic_reindexing_tasks_on_in_progress ON elastic_reindexing_tasks USING btree (in_progress) WHERE in_progress;
CREATE INDEX index_elastic_reindexing_tasks_on_state ON elastic_reindexing_tasks USING btree (state);
@ -25361,6 +25394,9 @@ ALTER TABLE ONLY requirements
ALTER TABLE ONLY snippet_repositories
ADD CONSTRAINT fk_rails_f21f899728 FOREIGN KEY (shard_id) REFERENCES shards(id) ON DELETE RESTRICT;
ALTER TABLE ONLY elastic_reindexing_subtasks
ADD CONSTRAINT fk_rails_f2cc190164 FOREIGN KEY (elastic_reindexing_task_id) REFERENCES elastic_reindexing_tasks(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_chat_data
ADD CONSTRAINT fk_rails_f300456b63 FOREIGN KEY (chat_name_id) REFERENCES chat_names(id) ON DELETE CASCADE;

View file

@ -283,7 +283,7 @@ To disable the Elasticsearch integration:
1. Expand the **Advanced Search** section and uncheck **Elasticsearch indexing**
and **Search with Elasticsearch enabled**.
1. Click **Save changes** for the changes to take effect.
1. (Optional) Delete the existing index:
1. (Optional) Delete the existing indexes:
```shell
# Omnibus installations
@ -347,7 +347,8 @@ To reclaim the `gitlab-production` index name, you need to first create a `secon
To create a secondary index, run the following Rake task. The `SKIP_ALIAS`
environment variable will disable the automatic creation of the Elasticsearch
alias, which would conflict with the existing index under `$PRIMARY_INDEX`:
alias, which would conflict with the existing index under `$PRIMARY_INDEX`, and will
not create a separate Issue index:
```shell
# Omnibus installation
@ -523,8 +524,8 @@ The following are some available Rake tasks:
| [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects and queues Sidekiq jobs to index them in the background. |
| [`sudo gitlab-rake gitlab:elastic:index_projects_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Determines the overall status of the indexing. It is done by counting the total number of indexed projects, dividing by a count of the total number of projects, then multiplying by 100. |
| [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Deletes all instances of IndexStatus for all projects. Note that this command will result in a complete wipe of the index, and it should be used with caution. |
| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index and assigns an alias for it on the Elasticsearch side only if it doesn't already exist. |
| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab index and alias (if exists) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates empty indexes (the default index and a separate issues index) and assigns an alias for each on the Elasticsearch side only if it doesn't already exist. |
| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab indexes and aliases (if they exist) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:recreate_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index[<TARGET_NAME>]` and `gitlab:elastic:create_empty_index[<TARGET_NAME>]`. |
| [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
| [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects are not indexed. |

View file

@ -22,6 +22,23 @@ module ContainerRegistry
# Taken from: FaradayMiddleware::FollowRedirects
REDIRECT_CODES = Set.new [301, 302, 303, 307]
RETRY_EXCEPTIONS = [Faraday::Request::Retry::DEFAULT_EXCEPTIONS, Faraday::ConnectionFailed].flatten.freeze
RETRY_OPTIONS = {
max: 1,
interval: 5,
exceptions: RETRY_EXCEPTIONS
}.freeze
ERROR_CALLBACK_OPTIONS = {
callback: -> (env, exception) do
Gitlab::ErrorTracking.log_exception(
exception,
class: name,
url: env[:url]
)
end
}.freeze
def self.supports_tag_delete?
registry_config = Gitlab.config.registry
return false unless registry_config.enabled && registry_config.api_url.present?
@ -97,12 +114,12 @@ module ContainerRegistry
end
def upload_blob(name, content, digest)
upload = faraday.post("/v2/#{name}/blobs/uploads/")
upload = faraday(timeout_enabled: false).post("/v2/#{name}/blobs/uploads/")
return upload unless upload.success?
location = URI(upload.headers['location'])
faraday.put("#{location.path}?#{location.query}") do |req|
faraday(timeout_enabled: false).put("#{location.path}?#{location.query}") do |req|
req.params['digest'] = digest
req.headers['Content-Type'] = 'application/octet-stream'
req.body = content
@ -137,7 +154,7 @@ module ContainerRegistry
end
def put_tag(name, reference, manifest)
response = faraday.put("/v2/#{name}/manifests/#{reference}") do |req|
response = faraday(timeout_enabled: false).put("/v2/#{name}/manifests/#{reference}") do |req|
req.headers['Content-Type'] = DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
req.body = Gitlab::Json.pretty_generate(manifest)
end
@ -158,6 +175,8 @@ module ContainerRegistry
yield(conn) if block_given?
conn.request(:retry, RETRY_OPTIONS)
conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS)
conn.adapter :net_http
end
@ -188,8 +207,8 @@ module ContainerRegistry
faraday_redirect.get(uri)
end
def faraday
@faraday ||= faraday_base do |conn|
def faraday(timeout_enabled: true)
@faraday ||= faraday_base(timeout_enabled: timeout_enabled) do |conn|
initialize_connection(conn, @options, &method(:accept_manifest))
end
end
@ -205,12 +224,22 @@ module ContainerRegistry
def faraday_redirect
@faraday_redirect ||= faraday_base do |conn|
conn.request :json
conn.request(:retry, RETRY_OPTIONS)
conn.request(:gitlab_error_callback, ERROR_CALLBACK_OPTIONS)
conn.adapter :net_http
end
end
def faraday_base(&block)
Faraday.new(@base_uri, headers: { user_agent: "GitLab/#{Gitlab::VERSION}" }, &block)
def faraday_base(timeout_enabled: true, &block)
request_options = timeout_enabled ? Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS : nil
Faraday.new(
@base_uri,
headers: { user_agent: "GitLab/#{Gitlab::VERSION}" },
request: request_options,
&block
)
end
def delete_if_exists(path)

7
lib/gitlab/faraday.rb Normal file
View file

@ -0,0 +1,7 @@
# frozen_string_literal: true

module Gitlab
  module Faraday
    # Registers the error-spy middleware under the :gitlab_error_callback
    # key so connections can opt in via:
    #
    #   conn.request(:gitlab_error_callback, callback: -> (env, exception) { ... })
    #
    # The gitlab_ prefix namespaces the key to avoid clashing with
    # middleware registered by Faraday itself or by other gems.
    ::Faraday::Request.register_middleware(gitlab_error_callback: -> { ::Gitlab::Faraday::ErrorCallback })
  end
end

View file

@ -0,0 +1,44 @@
# frozen_string_literal: true

module Gitlab
  module Faraday
    # Simple Faraday middleware that catches any error raised during the request and runs the configured callback.
    # (https://lostisland.github.io/faraday/middleware/)
    #
    # By default, a no-op callback is set up.
    #
    # Note that the error is not swallowed: it will be re-raised. In that regard, this callback acts more
    # like an error spy than anything else.
    #
    # The callback has access to the request `env` and the exception instance. For more details, see
    # https://lostisland.github.io/faraday/middleware/custom
    #
    # This middleware is registered under the :gitlab_error_callback key:
    #
    # Faraday.new do |conn|
    #   conn.request(
    #     :gitlab_error_callback,
    #     callback: -> (env, exception) { Rails.logger.debug("Error #{exception.class.name} when trying to contact #{env[:url]}" ) }
    #   )
    #   conn.adapter(:net_http)
    # end
    class ErrorCallback < ::Faraday::Middleware
      def initialize(app, options = nil)
        super(app)
        # Normalize the raw options hash (or nil) into an Options struct.
        @options = ::Gitlab::Faraday::ErrorCallback::Options.from(options) # rubocop: disable CodeReuse/ActiveRecord
      end

      # Runs the request; on any StandardError, invokes the callback (if
      # configured) with the request env and the exception, then re-raises.
      def call(env)
        @app.call(env)
      rescue => e
        @options.callback&.call(env, e)

        raise
      end

      # Thin wrapper around Faraday::Options exposing the :callback member.
      class Options < ::Faraday::Options.new(:callback)
        def callback
          self[:callback]
        end
      end
    end
  end
end

View file

@ -10398,6 +10398,9 @@ msgstr ""
msgid "Elasticsearch HTTP client timeout value in seconds."
msgstr ""
msgid "Elasticsearch indexing"
msgstr ""
msgid "Elasticsearch indexing restrictions"
msgstr ""
@ -20371,6 +20374,9 @@ msgstr ""
msgid "Pause"
msgstr ""
msgid "Pause Elasticsearch indexing"
msgstr ""
msgid "Pause replication"
msgstr ""
@ -23196,7 +23202,7 @@ msgstr ""
msgid "Regulate approvals by authors/committers. Affects all projects."
msgstr ""
msgid "Reindexing status"
msgid "Reindexing Status: %{status}"
msgstr ""
msgid "Rejected (closed)"
@ -26687,9 +26693,6 @@ msgstr ""
msgid "State your message to activate"
msgstr ""
msgid "State: %{last_reindexing_task_state}"
msgstr ""
msgid "Static Application Security Testing (SAST)"
msgstr ""
@ -28212,6 +28215,9 @@ msgstr ""
msgid "There are no variables yet."
msgstr ""
msgid "There are pending advanced search migrations. Indexing must remain paused until the migrations are completed."
msgstr ""
msgid "There are running deployments on the environment. Please retry later."
msgstr ""
@ -30138,7 +30144,7 @@ msgstr ""
msgid "Until"
msgstr ""
msgid "Unused, previous index '%{index_name}' will be deleted after %{time} automatically."
msgid "Unused, previous indices: %{index_names} will be deleted after %{time} automatically."
msgstr ""
msgid "Unverified"

View file

@ -26,7 +26,54 @@ RSpec.describe ContainerRegistry::Client do
}
end
shared_examples '#repository_manifest' do |manifest_type|
let(:expected_faraday_headers) { { user_agent: "GitLab/#{Gitlab::VERSION}" } }
let(:expected_faraday_request_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
shared_examples 'handling timeouts' do
let(:retry_options) do
ContainerRegistry::Client::RETRY_OPTIONS.merge(
interval: 0.1,
interval_randomness: 0,
backoff_factor: 0
)
end
before do
stub_request(method, url).to_timeout
end
it 'handles network timeouts' do
actual_retries = 0
retry_options_with_block = retry_options.merge(
retry_block: -> (_, _, _, _) { actual_retries += 1 }
)
stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options_with_block)
expect { subject }.to raise_error(Faraday::ConnectionFailed)
expect(actual_retries).to eq(retry_options_with_block[:max])
end
it 'logs the error' do
stub_const('ContainerRegistry::Client::RETRY_OPTIONS', retry_options)
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.exactly(retry_options[:max] + 1)
.times
.with(
an_instance_of(Faraday::ConnectionFailed),
class: described_class.name,
url: URI(url)
)
expect { subject }.to raise_error(Faraday::ConnectionFailed)
end
end
shared_examples 'handling repository manifest' do |manifest_type|
let(:method) { :get }
let(:url) { 'http://container-registry/v2/group/test/manifests/mytag' }
let(:manifest) do
{
"schemaVersion" => 2,
@ -48,7 +95,7 @@ RSpec.describe ContainerRegistry::Client do
end
it 'GET /v2/:name/manifests/mytag' do
stub_request(:get, "http://container-registry/v2/group/test/manifests/mytag")
stub_request(method, url)
.with(headers: {
'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json',
'Authorization' => "bearer #{token}",
@ -56,14 +103,24 @@ RSpec.describe ContainerRegistry::Client do
})
.to_return(status: 200, body: manifest.to_json, headers: { content_type: manifest_type })
expect(client.repository_manifest('group/test', 'mytag')).to eq(manifest)
expect_new_faraday
expect(subject).to eq(manifest)
end
it_behaves_like 'handling timeouts'
end
it_behaves_like '#repository_manifest', described_class::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
it_behaves_like '#repository_manifest', described_class::OCI_MANIFEST_V1_TYPE
describe '#repository_manifest' do
subject { client.repository_manifest('group/test', 'mytag') }
it_behaves_like 'handling repository manifest', described_class::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
it_behaves_like 'handling repository manifest', described_class::OCI_MANIFEST_V1_TYPE
end
describe '#blob' do
let(:method) { :get }
let(:url) { 'http://container-registry/v2/group/test/blobs/sha256:0123456789012345' }
let(:blob_headers) do
{
'Accept' => 'application/octet-stream',
@ -78,16 +135,20 @@ RSpec.describe ContainerRegistry::Client do
}
end
subject { client.blob('group/test', 'sha256:0123456789012345') }
it 'GET /v2/:name/blobs/:digest' do
stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345")
stub_request(method, url)
.with(headers: blob_headers)
.to_return(status: 200, body: "Blob")
expect(client.blob('group/test', 'sha256:0123456789012345')).to eq('Blob')
expect_new_faraday
expect(subject).to eq('Blob')
end
it 'follows 307 redirect for GET /v2/:name/blobs/:digest' do
stub_request(:get, "http://container-registry/v2/group/test/blobs/sha256:0123456789012345")
stub_request(method, url)
.with(headers: blob_headers)
.to_return(status: 307, body: '', headers: { Location: 'http://redirected' })
# We should probably use hash_excluding here, but that requires an update to WebMock:
@ -98,10 +159,12 @@ RSpec.describe ContainerRegistry::Client do
end
.to_return(status: 200, body: "Successfully redirected")
response = client.blob('group/test', 'sha256:0123456789012345')
expect_new_faraday(times: 2)
expect(response).to eq('Successfully redirected')
expect(subject).to eq('Successfully redirected')
end
it_behaves_like 'handling timeouts'
end
describe '#upload_blob' do
@ -111,6 +174,8 @@ RSpec.describe ContainerRegistry::Client do
it 'starts the upload and posts the blob' do
stub_upload('path', 'content', 'sha256:123')
expect_new_faraday(timeout: false)
expect(subject).to be_success
end
end
@ -173,6 +238,8 @@ RSpec.describe ContainerRegistry::Client do
.with(body: "{\n \"foo\": \"bar\"\n}", headers: manifest_headers)
.to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:123' })
expect_new_faraday(timeout: false)
expect(subject).to eq 'sha256:123'
end
end
@ -375,4 +442,17 @@ RSpec.describe ContainerRegistry::Client do
headers: { 'Allow' => 'DELETE' }
)
end
def expect_new_faraday(times: 1, timeout: true)
request_options = timeout ? expected_faraday_request_options : nil
expect(Faraday)
.to receive(:new)
.with(
'http://container-registry',
headers: expected_faraday_headers,
request: request_options
).and_call_original
.exactly(times)
.times
end
end

View file

@ -0,0 +1,59 @@
# frozen_string_literal: true

require 'spec_helper'

# Spec for the error-spy Faraday middleware: the callback must observe the
# request env and exception, and the original error must always re-raise.
RSpec.describe Gitlab::Faraday::ErrorCallback do
  let(:app) { double(:app) }
  let(:middleware) { described_class.new(app, {}) }

  describe '#call' do
    let(:env) { { url: 'http://target.url' } }

    subject { middleware.call(env) }

    context 'with no errors' do
      before do
        expect(app).to receive(:call).with(env).and_return('success')
      end

      # The middleware is transparent on success.
      it { is_expected.to eq('success') }
    end

    context 'with errors' do
      before do
        expect(app).to receive(:call).and_raise(ArgumentError, 'Kaboom!')
      end

      context 'with no callback' do
        it 'uses the default callback' do
          expect { subject }.to raise_error(ArgumentError, 'Kaboom!')
        end
      end

      context 'with a custom callback' do
        # FIX(review): the previous version declared
        # `let(:options) { { callback: callback } }` which referenced an
        # undefined `callback` (NameError if ever evaluated) and was dead
        # code — the example shadowed it with local `options`/`middleware`
        # variables. Build the middleware explicitly instead.
        it 'uses the custom callback' do
          count = 0
          target_url = nil
          exception_class = nil

          callback = proc do |env, exception|
            count += 1
            target_url = env[:url].to_s
            exception_class = exception.class.name
          end

          middleware = described_class.new(app, { callback: callback })

          expect(callback).to receive(:call).and_call_original
          expect { middleware.call(env) }.to raise_error(ArgumentError, 'Kaboom!')
          expect(count).to eq(1)
          expect(target_url).to eq('http://target.url')
          expect(exception_class).to eq(ArgumentError.name)
        end
      end
    end
  end
end