data pipeline mocks
fog/fog-aws, commit 8f79598297 (parent 67e0e28ab4)
14 changed files with 256 additions and 74 deletions
@@ -9,6 +9,7 @@ module Fog
      request_path 'fog/aws/requests/data_pipeline'
      request :activate_pipeline
      request :create_pipeline
      request :deactivate_pipeline
      request :delete_pipeline
      request :describe_pipelines
      request :list_pipelines
@@ -22,8 +23,58 @@ module Fog
      collection :pipelines

      class Mock
        include Fog::AWS::CredentialFetcher::ConnectionMethods

        def self.data
          @data ||= Hash.new do |hash, region|
            hash[region] = Hash.new do |region_hash, key|
              region_hash[key] = {
                :pipelines => {},
                :pipeline_definitions => {},
              }
            end
          end
        end

        def self.reset
          @data = nil
        end

        def data
          self.class.data[@region][@aws_access_key_id]
        end

        def reset
          self.class.reset
        end

        attr_accessor :region

        def initialize(options={})
          Fog::Mock.not_implemented
          @region = options[:region] || "us-east-1"
          @aws_access_key_id = options[:aws_access_key_id]
          @aws_secret_access_key = options[:aws_secret_access_key]
        end

        def stringify_keys(object)
          case object
          when Hash
            object.each_with_object({}) { |(k, v), a| a[k.to_s] = stringify_keys(v) }
          when Array
            object.map { |v| stringify_keys(v) }
          else
            object
          end
        end

        def find_pipeline(id)
          pipeline = self.data[:pipelines].values.detect { |p| p["pipelineId"] == id }

          if pipeline.nil? || pipeline[:deleted]
            raise Fog::AWS::DataPipeline::NotFound.new("Pipeline with id: #{id} does not exist")
          end

          pipeline
        end
      end
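The class-level store above gives every (region, access key) pair its own lazily created bucket, so mocked accounts never see each other's pipelines. A standalone Ruby sketch of the same Hash.new default-block pattern (sample keys are illustrative, not part of fog-aws):

# Lazily-initialized, per-region / per-credential store, as in Mock.data above.
store = Hash.new do |hash, region|
  hash[region] = Hash.new do |region_hash, key|
    region_hash[key] = { :pipelines => {}, :pipeline_definitions => {} }
  end
end

store["us-east-1"]["KEY_A"][:pipelines]["token"] = { "pipelineId" => "df-Example0000000000001" }
p store["us-east-1"]["KEY_B"][:pipelines] # => {}  (different credentials, separate bucket)
p store["eu-west-1"]["KEY_A"][:pipelines] # => {}  (different region, separate bucket)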
@@ -116,7 +167,19 @@ module Fog
        end

        def _request(params)
          @connection.request(params)
          response = @connection.request(params)

          unless response.body.empty?
            response.body = Fog::JSON.decode(response.body)
          end

          response
        rescue Excon::Error::BadRequest => error
          match = Fog::AWS::Errors.match_error(error)
          raise if match.empty?
          if %w(PipelineNotFoundException PipelineDeletedException).include?(match[:code])
            raise Fog::AWS::DataPipeline::NotFound.slurp(error, match[:message])
          end
        end
      end
    end
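The reworked _request decodes JSON bodies in place and maps the two "pipeline is gone" error codes onto Fog::AWS::DataPipeline::NotFound, re-raising anything the matcher cannot parse. A plain-Ruby sketch of that decision; the match hash stands in for what Fog::AWS::Errors.match_error is shown returning above, and the method name here is illustrative:

NOT_FOUND_CODES = %w(PipelineNotFoundException PipelineDeletedException)

# Decide what the rescue clause does with a matched error.
def error_disposition(match)
  return :reraise_original if match.empty?                  # unrecognised body: re-raise
  NOT_FOUND_CODES.include?(match[:code]) ? :raise_not_found : :swallow
end

p error_disposition({})                                     # => :reraise_original
p error_disposition(:code => "PipelineDeletedException")    # => :raise_not_found
p error_disposition(:code => "ValidationException")         # => :swallow (falls through, returns nil)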
@@ -170,6 +170,10 @@ module Fog
        "sir-#{Fog::Mock.random_letters_and_numbers(8)}"
      end

      def self.data_pipeline_id
        "df-#{Fog::Mock.random_letters_and_numbers(19).capitalize}"
      end

      def self.spot_product_descriptions
        [
          'Linux/UNIX',
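The new Fog::AWS::Mock.data_pipeline_id mimics the shape of real Data Pipeline ids: a df- prefix followed by 19 random alphanumerics with the first character upcased. A dependency-free sketch of an equivalent generator (the lowercase-letters-and-digits character set is an assumption based on fog's other mock ids):

CHARS = [*'a'..'z', *'0'..'9']

def fake_data_pipeline_id
  "df-#{Array.new(19) { CHARS.sample }.join.capitalize}"
end

puts fake_data_pipeline_id # e.g. "df-K3m0q9x2c8v1b5z4n7a"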
@@ -14,16 +14,20 @@ module Fog

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.ActivatePipeline' },
            :headers => { 'X-Amz-Target' => 'DataPipeline.ActivatePipeline' }
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def activate_pipeline(id)
          Fog::Mock.not_implemented
          response = Excon::Response.new

          pipeline = find_pipeline(id)
          pipeline[:active] = true

          response.body = {}
          response
        end
      end
    end
@@ -24,14 +24,35 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.CreatePipeline' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def create_pipeline(unique_id, name, description=nil, tags=nil)
          Fog::Mock.not_implemented
          response = Excon::Response.new

          if existing_pipeline = self.data[:pipelines][unique_id]
            response.body = {"pipelineId" => existing_pipeline["pipelineId"]}
          else
            pipeline_id = Fog::AWS::Mock.data_pipeline_id
            mapped_tags = if tags
                            tags.map { |k,v| {"key" => k.to_s, "value" => v.to_s}}
                          else
                            []
                          end

            pipeline = {
              "name" => name,
              "description" => description,
              "fields" => mapped_tags,
              "pipelineId" => pipeline_id,
            }

            self.data[:pipelines][unique_id] = pipeline

            response.body = {"pipelineId" => pipeline_id}
          end
          response
        end
      end
    end
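With create_pipeline mocked, the service can be exercised end to end without touching AWS. A usage sketch, assuming fog-aws with this commit is loaded and that arbitrary credential strings are acceptable in mock mode, as is usual for Fog mocks:

require 'fog/aws'

Fog.mock!

dp = Fog::AWS::DataPipeline.new(
  :aws_access_key_id     => "fake",
  :aws_secret_access_key => "fake",
  :region                => "us-east-1"
)

created = dp.create_pipeline("my-unique-token", "example-pipeline", "demo pipeline", {"env" => "test"})
pipeline_id = created.body["pipelineId"]    # e.g. "df-..."

dp.activate_pipeline(pipeline_id)           # flips the mock record's :active flag
p dp.list_pipelines.body["pipelineIdList"]  # => [{"id"=>"df-...", "name"=>"example-pipeline"}]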
lib/fog/aws/requests/data_pipeline/deactivate_pipeline.rb (new file, 36 lines)
@@ -0,0 +1,36 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Deactivate a pipeline
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_DeactivatePipeline.html
        # ==== Parameters
        # * PipelineId <~String> - The ID of the pipeline to deactivate
        # * cancelActive <~Boolean> - Indicates whether to cancel any running objects. The default is true, which sets the state of any running objects to CANCELED. If this value is false, the pipeline is deactivated after all running objects finish.
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>:
        def deactivate_pipeline(id, cancel_active=true)
          params = { 'pipelineId' => id, 'cancelActive' => cancel_active }

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.DeactivatePipeline' }
          })
        end
      end

      class Mock
        def deactivate_pipeline(id, cancel_active=true)
          response = Excon::Response.new

          pipeline = find_pipeline(id)
          pipeline[:active] = false

          response.body = {}
          response
        end
      end
    end
  end
end
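Continuing the mock-mode sketch above, deactivation is the mirror of activation; cancel_active defaults to true, matching the API behaviour of setting running objects to CANCELED:

dp.deactivate_pipeline(pipeline_id)          # mock: sets the record's :active flag to false
dp.deactivate_pipeline(pipeline_id, false)   # Real would send cancelActive=false; the mock effect is the same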
@@ -22,7 +22,12 @@ module Fog

      class Mock
        def delete_pipeline(id)
          Fog::Mock.not_implemented
          response = Excon::Response.new

          pipeline = find_pipeline(id)
          pipeline[:deleted] = true

          true
        end
      end
    end
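Deletion in the mock is a soft delete: the record stays in the store, but find_pipeline treats it as gone, so later calls raise the NotFound error defined above. Continuing the same sketch:

dp.delete_pipeline(pipeline_id)              # => true; marks the mock record :deleted
p dp.describe_pipelines([pipeline_id]).body  # => {"pipelineDescriptionList"=>[]}

begin
  dp.activate_pipeline(pipeline_id)
rescue Fog::AWS::DataPipeline::NotFound => e
  puts e.message                             # "Pipeline with id: df-... does not exist"
end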
@@ -23,14 +23,28 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.DescribeObjects' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def describe_objects(id, objects, options={})
          Fog::Mock.not_implemented
          response = Excon::Response.new

          find_pipeline(id)

          pipeline_objects = self.data[:pipeline_definitions][id]["pipelineObjects"].select { |o| objects.include?(o["id"]) }

          response.body = {
            "hasMoreResults" => false,
            "marker" => options[:marker],
            "pipelineObjects" => [
              {
                "fields" => pipeline_objects
              }
            ]
          }

          response
        end
      end
    end
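The mocked describe_objects keeps only the stored pipelineObjects whose ids were requested and wraps them in a single "fields" entry. A plain-Ruby illustration over sample data (the object hashes are made up for the example):

stored = [
  { "id" => "Default",    "name" => "Default" },
  { "id" => "Schedule",   "name" => "Schedule" },
  { "id" => "S3Activity", "name" => "S3Activity" },
]
requested = %w(Default S3Activity)

selected = stored.select { |o| requested.include?(o["id"]) }
body = { "hasMoreResults" => false, "marker" => nil, "pipelineObjects" => [{ "fields" => selected }] }
p body["pipelineObjects"].first["fields"].map { |o| o["id"] } # => ["Default", "S3Activity"]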
@@ -17,14 +17,14 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.DescribePipelines' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def describe_pipelines(ids)
          Fog::Mock.not_implemented
          response = Excon::Response.new
          response.body = {"pipelineDescriptionList" => self.data[:pipelines].values.select { |p| !p[:deleted] && ids.include?(p["pipelineId"]) } }
          response
        end
      end
    end
@@ -18,14 +18,17 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.GetPipelineDefinition' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def get_pipeline_definition(id, objects)
          Fog::Mock.not_implemented
        def get_pipeline_definition(id)
          response = Excon::Response.new

          pipeline = find_pipeline(id)

          response.body = self.data[:pipeline_definitions][id] || {"pipelineObjects" => []}
          response
        end
      end
    end
@@ -17,14 +17,14 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.ListPipelines' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def list_pipelines(options={})
          Fog::Mock.not_implemented
          response = Excon::Response.new
          response.body = {"pipelineIdList" => self.data[:pipelines].values.map { |p| {"id" => p["pipelineId"], "name" => p["name"]} } }
          response
        end
      end
    end
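The mocked list_pipelines reduces each stored record to the id/name pair the real ListPipelines action returns. A plain-Ruby sketch over sample data:

pipelines = {
  "token-1" => { "pipelineId" => "df-Aaaa111", "name" => "nightly-etl" },
  "token-2" => { "pipelineId" => "df-Bbbb222", "name" => "weekly-report" },
}

id_list = pipelines.values.map { |p| { "id" => p["pipelineId"], "name" => p["name"] } }
p({ "pipelineIdList" => id_list })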
@@ -1,35 +1,7 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Put raw pipeline definition JSON
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html
        # ==== Parameters
        # * PipelineId <~String> - The ID of the pipeline
        # * PipelineObjects <~String> - Objects in the pipeline
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>:
        def put_pipeline_definition(id, objects)
          params = {
            'pipelineId' => id,
            'pipelineObjects' => transform_objects(objects),
          }

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.PutPipelineDefinition' },
          })

          Fog::JSON.decode(response.body)
        end

        # Take a list of pipeline object hashes as specified in the Data Pipeline JSON format
        # and transform it into the format expected by the API
        def transform_objects(objects)
          objects.map { |object| JSONObject.new(object).to_api }
        end

      module Shared
        class JSONObject
          def initialize(object)
            @json_fields = object.clone
@@ -53,7 +25,7 @@ module Fog

          def field_for_kv(key, value)
            if value.is_a?(Hash)
              { 'key' => key, 'refValue' => value['ref'] }
              { 'key' => key, 'refValue' => value['ref'], 'stringValue' => value['stringValue'] }

            elsif value.is_a?(Array)
              value.map { |subvalue| field_for_kv(key, subvalue) }
@@ -64,11 +36,60 @@ module Fog
            end
          end
        end

        # Take a list of pipeline object hashes as specified in the Data Pipeline JSON format
        # and transform it into the format expected by the API
        def transform_objects(objects)
          objects.map { |object| JSONObject.new(object).to_api }
        end
      end

      class Real
        include Shared
        # Put raw pipeline definition JSON
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html
        # ==== Parameters
        # * PipelineId <~String> - The ID of the pipeline
        # * PipelineObjects <~String> - Objects in the pipeline
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>:
        def put_pipeline_definition(id, pipeline_objects, options={})
          params = {
            'pipelineId' => id,
            'pipelineObjects' => transform_objects(pipeline_objects),
          }.merge(options)

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.PutPipelineDefinition' },
          })
        end
      end

      class Mock
        def put_pipeline_definition(id, objects)
          Fog::Mock.not_implemented
        include Shared

        def put_pipeline_definition(id, pipeline_objects, _options={})
          response = Excon::Response.new
          options = _options.dup

          pipeline = find_pipeline(id)

          stringified_objects = if pipeline_objects.any?
                                  transform_objects(stringify_keys(pipeline_objects))
                                else
                                  options.each { |k,v| options[k] = transform_objects(stringify_keys(v)) }
                                end

          if stringified_objects.is_a?(Array)
            stringified_objects = {"pipelineObjects" => stringified_objects}
          end

          self.data[:pipeline_definitions][id] = stringified_objects

          response.body = {"errored" => false, "validationErrors" => [], "validationWarnings" => []}
          response
        end
      end
    end
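The change to field_for_kv keeps stringValue alongside refValue for hash-valued fields, and the transform_objects/JSONObject pair now lives in the Shared module so both Real and Mock can reuse it. A standalone sketch of just the key/value mapping; the else branch is an assumption (it is not shown in the hunks above), and the sample field names are illustrative:

def field_for_kv(key, value)
  if value.is_a?(Hash)
    { 'key' => key, 'refValue' => value['ref'], 'stringValue' => value['stringValue'] }
  elsif value.is_a?(Array)
    value.map { |subvalue| field_for_kv(key, subvalue) } # one field per element
  else
    { 'key' => key, 'stringValue' => value }             # assumed fallback for plain values
  end
end

p field_for_kv('schedule', 'ref' => 'DefaultSchedule')
# => {"key"=>"schedule", "refValue"=>"DefaultSchedule", "stringValue"=>nil}
p field_for_kv('input', [{ 'ref' => 'S3Input' }, { 'ref' => 'S3Backup' }])
# => one field hash per array element, each keyed "input"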
@@ -23,14 +23,17 @@ module Fog
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.QueryObjects' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def query_objects(id, sphere, options={})
          Fog::Mock.not_implemented
          response = Excon::Response.new

          find_pipeline(id)

          response.body = {"hasMoreResults" => false, "ids" => ["Default"]}
          response
        end
      end
    end
@@ -34,6 +34,8 @@ class AWS
        "hasMoreResults" => Fog::Nullable::Boolean,
        "marker" => Fog::Nullable::String,
        "pipelineObjects" => [
          {
            "fields" => [
              {
                'id' => String,
                'name' => String,
@@ -41,6 +43,8 @@ class AWS
              }
            ]
          }
        ]
      }

      DESCRIBE_PIPELINES = {
        "pipelineDescriptionList" => [
@@ -65,7 +69,9 @@ class AWS
            "name" => String,
            "fields" => FIELDS,
          }
        ]
        ],
        "parameterObjects" => Fog::Nullable::Array,
        "parameterValues" => Fog::Nullable::Array,
      }
    end
  end
@@ -1,6 +1,4 @@
Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
  pending if Fog.mocking?

  @pipeline_id = nil

  tests('success') do
@@ -9,18 +7,18 @@ Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
      name = 'fog-test-pipeline-name'
      description = 'Fog test pipeline'

      result = Fog::AWS[:data_pipeline].create_pipeline(unique_id, name, description, {})
      result = Fog::AWS[:data_pipeline].create_pipeline(unique_id, name, description, {}).body
      @pipeline_id = result['pipelineId']
      result
    end

    tests("#list_pipelines").formats(AWS::DataPipeline::Formats::LIST_PIPELINES) do
      Fog::AWS[:data_pipeline].list_pipelines()
      Fog::AWS[:data_pipeline].list_pipelines.body
    end

    tests("#describe_pipelines").formats(AWS::DataPipeline::Formats::DESCRIBE_PIPELINES) do
      ids = [@pipeline_id]
      Fog::AWS[:data_pipeline].describe_pipelines(ids)
      Fog::AWS[:data_pipeline].describe_pipelines(ids).body
    end

    tests("#put_pipeline_definition").formats(AWS::DataPipeline::Formats::PUT_PIPELINE_DEFINITION) do
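The tests now call .body themselves because the requests, mocked and real alike, hand back the Excon response object instead of a pre-decoded hash. A minimal illustration of that shape (Excon::Response accepts its attributes as a hash):

require 'excon'

response = Excon::Response.new(:body => { "pipelineId" => "df-Example123" })
p response.body                # => {"pipelineId"=>"df-Example123"}
p response.body["pipelineId"]  # => "df-Example123"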
@@ -39,35 +37,39 @@ Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
        },
      ]

      Fog::AWS[:data_pipeline].put_pipeline_definition(@pipeline_id, objects)
      Fog::AWS[:data_pipeline].put_pipeline_definition(@pipeline_id, objects).body
    end

    tests("#activate_pipeline") do
      Fog::AWS[:data_pipeline].activate_pipeline(@pipeline_id)
    end

    tests("#deactivate_pipeline") do
      Fog::AWS[:data_pipeline].deactivate_pipeline(@pipeline_id)
    end

    tests("#get_pipeline_definition").formats(AWS::DataPipeline::Formats::GET_PIPELINE_DEFINITION) do
      Fog::AWS[:data_pipeline].get_pipeline_definition(@pipeline_id)
      Fog::AWS[:data_pipeline].get_pipeline_definition(@pipeline_id).body
    end

    tests("#query_objects") do
      tests("for COMPONENTs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'COMPONENT')
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'COMPONENT').body
      end

      tests("for INSTANCEs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'INSTANCE')
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'INSTANCE').body
      end

      tests("for ATTEMPTs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT')
        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT').body
      end
    end

    tests('#describe_objects').formats(AWS::DataPipeline::Formats::DESCRIBE_OBJECTS) do
      attempts = Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT')
      attempts = Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT').body
      object_ids = attempts['ids'][0..5]
      Fog::AWS[:data_pipeline].describe_objects(@pipeline_id, object_ids)
      Fog::AWS[:data_pipeline].describe_objects(@pipeline_id, object_ids).body
    end

    tests("#delete_pipeline").returns(true) do