1
0
Fork 0
mirror of https://github.com/fog/fog.git synced 2022-11-09 13:51:43 -05:00

Merge branch 'master' of github.com:fog/fog into cdn

This commit is contained in:
Kyle Rames 2013-02-19 08:42:07 -06:00
commit 02e4eb749f
26 changed files with 644 additions and 158 deletions

View file

@ -13,6 +13,7 @@ module Fog
service(:compute, 'aws/compute', 'Compute')
service(:cloud_formation, 'aws/cloud_formation', 'CloudFormation')
service(:cloud_watch, 'aws/cloud_watch', 'CloudWatch')
service(:data_pipeline, 'aws/data_pipeline', 'DataPipeline')
service(:dynamodb, 'aws/dynamodb', 'DynamoDB')
service(:dns, 'aws/dns', 'DNS')
service(:elasticache, 'aws/elasticache', 'Elasticache')

View file

@ -0,0 +1,114 @@
require 'fog/aws'
module Fog
  module AWS
    # Fog service definition for AWS Data Pipeline.
    #
    # Registers the request methods, the Pipeline model and the Pipelines
    # collection, and provides the Real (signed HTTP) and Mock connection
    # classes.
    class DataPipeline < Fog::Service
      extend Fog::AWS::CredentialFetcher::ServiceMethods

      requires :aws_access_key_id, :aws_secret_access_key
      recognizes :region, :host, :path, :port, :scheme, :persistent, :use_iam_profile, :aws_session_token, :aws_credentials_expire_at

      request_path 'fog/aws/requests/data_pipeline'
      request :activate_pipeline
      request :create_pipeline
      request :delete_pipeline
      request :describe_pipelines
      request :list_pipelines
      request :put_pipeline_definition

      model_path 'fog/aws/models/data_pipeline'
      model :pipeline
      collection :pipelines

      # Mocking is not implemented for Data Pipeline; constructing a mocked
      # service raises immediately.
      class Mock
        def initialize(options={})
          Fog::Mock.not_implemented
        end
      end

      class Real
        attr_reader :region

        include Fog::AWS::CredentialFetcher::ConnectionMethods
        # Initialize connection to DataPipeline
        #
        # ==== Notes
        # options parameter must include values for :aws_access_key_id and
        # :aws_secret_access_key in order to create a connection
        #
        # ==== Examples
        # datapipeline = DataPipeline.new(
        #   :aws_access_key_id => your_aws_access_key_id,
        #   :aws_secret_access_key => your_aws_secret_access_key
        # )
        #
        # ==== Parameters
        # * options<~Hash> - config arguments for connection. Defaults to {}.
        # * region<~String> - optional region to use. For instance, 'eu-west-1', 'us-east-1' and etc.
        #
        # ==== Returns
        # * DataPipeline object with connection to AWS.
        def initialize(options={})
          @use_iam_profile = options[:use_iam_profile]
          @connection_options = options[:connection_options] || {}
          # Data Pipeline API version (not currently sent; kept for signing/reference).
          @version = '2012-10-29'
          @region = options[:region] || 'us-east-1'
          @host = options[:host] || "datapipeline.#{@region}.amazonaws.com"
          @path = options[:path] || '/'
          @persistent = options[:persistent] || false
          @port = options[:port] || 443
          @scheme = options[:scheme] || 'https'
          @connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}#{@path}", @persistent, @connection_options)
          setup_credentials(options)
        end

        # NOTE(review): `security_groups` is not defined on this service;
        # this looks like a copy/paste leftover from the Compute service and
        # would raise NoMethodError if called — verify before relying on it.
        def owner_id
          @owner_id ||= security_groups.get('default').owner_id
        end

        # Drop and re-establish the underlying HTTP connection.
        def reload
          @connection.reset
        end

        private

        # Capture AWS credentials from options and build the Signature V4
        # signer used by #request.
        def setup_credentials(options)
          @aws_access_key_id = options[:aws_access_key_id]
          @aws_secret_access_key = options[:aws_secret_access_key]
          @aws_session_token = options[:aws_session_token]
          @aws_credentials_expire_at = options[:aws_credentials_expire_at]
          @signer = Fog::AWS::SignatureV4.new(@aws_access_key_id, @aws_secret_access_key, @region, 'datapipeline')
        end

        # Issue a signed JSON-over-HTTP request to the Data Pipeline endpoint.
        # Callers supply :body (JSON string) and :headers (including the
        # X-Amz-Target operation header); common parameters, content headers
        # and the SigV4 Authorization header are merged in here.
        def request(params)
          refresh_credentials_if_expired
          # Params for all DataPipeline requests
          params.merge!({
            :expects => 200,
            :host => @host,
            :method => :post,
            :path => '/',
          })
          date = Fog::Time.now
          params[:headers] = {
            'Date' => date.to_date_header,
            'Host' => @host,
            'X-Amz-Date' => date.to_iso8601_basic,
            'Content-Type' => 'application/x-amz-json-1.1',
            'Content-Length' => params[:body].bytesize.to_s,
          }.merge!(params[:headers] || {})
          params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token
          params[:headers]['Authorization'] = @signer.sign(params, date)
          response = @connection.request(params)
          response
        end
      end
    end
  end
end

View file

@ -0,0 +1,67 @@
require 'fog/core/model'
module Fog
  module AWS
    class DataPipeline
      # Model for a single AWS Data Pipeline.
      class Pipeline < Fog::Model
        identity :id, :aliases => 'pipelineId'
        attribute :name
        attribute :description
        attribute :user_id, :aliases => 'userId'
        attribute :account_id, :aliases => 'accountId'
        attribute :state, :aliases => 'pipelineState'
        attribute :unique_id, :aliases => 'uniqueId'

        # Build a model from an API response hash.
        #
        # DescribePipelines returns metadata as a list of
        # {'key' => '@name', 'stringValue' => ...} field hashes; lift the
        # string-valued entries into top-level attributes (stripping the
        # leading '@') before normal attribute merging.
        def initialize(attributes={})
          if attributes.include?('fields')
            string_fields = attributes['fields'].select { |f| f.include?('stringValue') }
            field_attributes = Hash[string_fields.map { |f| [f['key'][/^@(.+)$/, 1], f['stringValue']] }]
            merge_attributes(field_attributes)
          end

          super
        end

        # Create the pipeline. Requires :name and :unique_id; the
        # description attribute is forwarded when set (previously it was
        # silently dropped).
        #
        # ==== Returns
        # * true (the CreatePipeline response is merged into attributes)
        def save
          requires :name
          requires :unique_id

          data = service.create_pipeline(unique_id, name, description)
          merge_attributes(data)

          true
        end

        # Activate the pipeline so its stored definition starts running.
        def activate
          requires :id

          service.activate_pipeline(id)

          true
        end

        # Upload a pipeline definition (array of pipeline object hashes).
        def put(objects)
          requires :id

          service.put_pipeline_definition(id, objects)

          true
        end

        # Delete the pipeline.
        def destroy
          requires :id

          service.delete_pipeline(id)

          true
        end
      end
    end
  end
end

View file

@ -0,0 +1,36 @@
require 'fog/core/collection'
require 'fog/aws/models/data_pipeline/pipeline'
module Fog
  module AWS
    class DataPipeline
      # Collection of Data Pipeline pipelines.
      class Pipelines < Fog::Collection
        model Fog::AWS::DataPipeline::Pipeline

        # Fetch every pipeline visible to the account.
        #
        # Pages through ListPipelines, passing the pagination marker back on
        # each subsequent call, then loads full descriptions for all
        # collected IDs. (The previous implementation never forwarded the
        # marker, so it would loop forever whenever 'hasMoreResults' was
        # true.)
        #
        # ==== Returns
        # * self, loaded with Pipeline models
        def all
          ids = []
          marker = nil

          begin
            options = marker ? { :marker => marker } : {}
            result = service.list_pipelines(options)
            marker = result['marker']
            ids.concat(result['pipelineIdList'].map { |item| item['id'] })
          end while result['hasMoreResults'] && marker

          load(service.describe_pipelines(ids)['pipelineDescriptionList'])
        end

        # Fetch a single pipeline by ID.
        #
        # ==== Returns
        # * Pipeline model, or nil when the pipeline was deleted or is unknown
        def get(id)
          data = service.describe_pipelines([id])['pipelineDescriptionList'].first
          new(data)
        rescue Excon::Errors::BadRequest => error
          data = Fog::JSON.decode(error.response.body)
          # Re-raise anything other than "gone"/"never existed".
          raise unless data['__type'] == 'PipelineDeletedException' || data['__type'] == 'PipelineNotFoundException'

          nil
        end
      end
    end
  end
end

View file

@ -0,0 +1,35 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Activate a pipeline so its definition begins executing.
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_ActivatePipeline.html
        # ==== Parameters
        # * id <~String> - The ID of the pipeline to activate
        # ==== Returns
        # * Hash - decoded ActivatePipeline response body
        def activate_pipeline(id)
          body = Fog::JSON.encode('pipelineId' => id)
          headers = { 'X-Amz-Target' => 'DataPipeline.ActivatePipeline' }

          Fog::JSON.decode(request(:body => body, :headers => headers).body)
        end
      end

      class Mock
        def activate_pipeline(id)
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -0,0 +1,41 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Create a pipeline
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_CreatePipeline.html
        # ==== Parameters
        # * unique_id <~String> - A unique, idempotency-token ID for the pipeline
        # * name <~String> - The name of the pipeline
        # * description <~String> - Optional description of the pipeline
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>: decoded CreatePipeline response ('pipelineId')
        def create_pipeline(unique_id, name, description=nil)
          params = {
            'uniqueId' => unique_id,
            'name' => name,
          }
          # The API expects lowerCamelCase JSON keys; the previous
          # capitalized 'Description' key was ignored by the service, so the
          # description never got set.
          params['description'] = description if description

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.CreatePipeline' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def create_pipeline(unique_id, name, description=nil)
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -0,0 +1,35 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Delete a pipeline permanently.
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_DeletePipeline.html
        # ==== Parameters
        # * id <~String> - The id of the pipeline to delete
        # ==== Returns
        # * Hash - decoded DeletePipeline response body
        def delete_pipeline(id)
          response = request(
            :body => Fog::JSON.encode('pipelineId' => id),
            :headers => { 'X-Amz-Target' => 'DataPipeline.DeletePipeline' }
          )

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def delete_pipeline(id)
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -0,0 +1,36 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Retrieve metadata for one or more pipelines.
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_DescribePipelines.html
        # ==== Parameters
        # * ids <~Array<String>> - IDs of pipelines to retrieve information for
        # ==== Returns
        # * Hash - decoded DescribePipelines response body
        def describe_pipelines(ids)
          response = request(
            :body => Fog::JSON.encode('pipelineIds' => ids),
            :headers => { 'X-Amz-Target' => 'DataPipeline.DescribePipelines' }
          )

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def describe_pipelines(ids)
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -0,0 +1,36 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # List all pipelines
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_ListPipelines.html
        # ==== Parameters
        # * options<~Hash>:
        #   * :marker <~String> - The starting point for the results to be returned.
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>: decoded ListPipelines response
        #     ('pipelineIdList', 'hasMoreResults', 'marker')
        def list_pipelines(options={})
          params = {}
          # The API expects the lowerCamelCase key 'marker'; the previous
          # capitalized 'Marker' key was ignored, breaking pagination.
          params['marker'] = options[:marker] if options[:marker]

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.ListPipelines' },
          })

          Fog::JSON.decode(response.body)
        end
      end

      class Mock
        def list_pipelines(options={})
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -0,0 +1,72 @@
module Fog
  module AWS
    class DataPipeline
      class Real
        # Put raw pipeline definition JSON
        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html
        # ==== Parameters
        # * id <~String> - The ID of the pipeline
        # * objects <~Array<Hash>> - Objects in the pipeline (Data Pipeline JSON format)
        # ==== Returns
        # * response<~Excon::Response>:
        #   * body<~Hash>: decoded PutPipelineDefinition response
        def put_pipeline_definition(id, objects)
          params = {
            'pipelineId' => id,
            'pipelineObjects' => transform_objects(objects),
          }

          response = request({
            :body => Fog::JSON.encode(params),
            :headers => { 'X-Amz-Target' => 'DataPipeline.PutPipelineDefinition' },
          })

          Fog::JSON.decode(response.body)
        end

        private

        # Take a list of pipeline object hashes as specified in the Data
        # Pipeline JSON format and transform it into the format expected by
        # the API:
        # * Hash values become {'key' => k, 'refValue' => value['ref']}
        # * Array values fan out into one stringValue field per element
        # * everything else becomes a single stringValue field
        #
        # Unlike the previous implementation, this no longer mutates the
        # caller's hashes (it used to delete 'id'/'name' from them, so a
        # second call with the same objects produced different output).
        def transform_objects(objects)
          objects.map do |object|
            fields = []

            object.each do |key, value|
              next if key == 'id' || key == 'name'

              case value
              when Hash
                fields << { 'key' => key, 'refValue' => value['ref'] }
              when Array
                value.each { |v| fields << { 'key' => key, 'stringValue' => v } }
              else
                fields << { 'key' => key, 'stringValue' => value }
              end
            end

            {
              'id' => object['id'],
              'name' => object['name'] || object['id'],
              'fields' => fields,
            }
          end
        end
      end

      class Mock
        def put_pipeline_definition(id, objects)
          Fog::Mock.not_implemented
        end
      end
    end
  end
end

View file

@ -15,6 +15,8 @@ class AWS < Fog::Bin
Fog::AWS::CloudWatch
when :compute
Fog::Compute::AWS
when :data_pipeline
Fog::AWS::DataPipeline
when :ddb, :dynamodb
Fog::AWS::DynamoDB
when :dns
@ -68,6 +70,8 @@ class AWS < Fog::Bin
when :compute
Fog::Logger.warning("AWS[:compute] is not recommended, use Compute[:aws] for portability")
Fog::Compute.new(:provider => 'AWS')
when :data_pipeline
Fog::AWS::DataPipeline
when :ddb, :dynamodb
Fog::AWS::DynamoDB.new
when :dns

View file

@ -7,6 +7,8 @@ class OpenStack < Fog::Bin
Fog::Compute::OpenStack
when :identity
Fog::Identity::OpenStack
when :image
Fog::Image::OpenStack
when :network
Fog::Network::OpenStack
when :storage
@ -23,14 +25,17 @@ class OpenStack < Fog::Bin
Fog::Logger.warning("OpenStack[:compute] is not recommended, use Compute[:openstack] for portability")
Fog::Compute.new(:provider => 'OpenStack')
when :identity
Fog::Logger.warning("OpenStack[:identity] is not recommended, use Compute[:openstack] for portability")
Fog::Compute.new(:provider => 'OpenStack')
Fog::Logger.warning("OpenStack[:identity] is not recommended, use Identity[:openstack] for portability")
Fog::Identity.new(:provider => 'OpenStack')
when :image
Fog::Logger.warning("OpenStack[:image] is not recommended, use Image[:openstack] for portability")
Fog::Image.new(:provider => 'OpenStack')
when :network
Fog::Logger.warning("OpenStack[:network] is not recommended, use Network[:openstack] for portability")
Fog::Network.new(:provider => 'OpenStack')
when :storage
Fog::Logger.warning("OpenStack[:storage] is not recommended, use Storage[:openstack] for portability")
Fog::Network.new(:provider => 'OpenStack')
Fog::Storage.new(:provider => 'OpenStack')
else
raise ArgumentError, "Unrecognized service: #{key.inspect}"
end

View file

@ -7,19 +7,12 @@ module Fog
def self.new(attributes)
attributes = attributes.dup # prevent delete from having side effects
case provider = attributes.delete(:provider).to_s.downcase.to_sym
when :aws
require 'fog/aws/cdn'
Fog::CDN::AWS.new(attributes)
when :hp
require 'fog/hp/cdn'
Fog::CDN::HP.new(attributes)
when :rackspace
require 'fog/rackspace/cdn'
Fog::CDN::Rackspace.new(attributes)
else
raise ArgumentError.new("#{provider} is not a recognized cdn provider")
provider = attributes.delete(:provider).to_s.downcase.to_sym
if self.providers.include?(provider)
require "fog/#{provider}/cdn"
return Fog::CDN.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} is not a recognized cdn provider")
end
def self.providers

View file

@ -9,46 +9,11 @@ module Fog
attributes = attributes.dup # prevent delete from having side effects
provider = attributes.delete(:provider).to_s.downcase.to_sym
case provider
when :aws
require 'fog/aws/compute'
Fog::Compute::AWS.new(attributes)
when :bluebox
require 'fog/bluebox/compute'
Fog::Compute::Bluebox.new(attributes)
when :brightbox
require 'fog/brightbox/compute'
Fog::Compute::Brightbox.new(attributes)
when :cloudstack
require 'fog/cloudstack/compute'
Fog::Compute::Cloudstack.new(attributes)
when :clodo
require 'fog/clodo/compute'
Fog::Compute::Clodo.new(attributes)
when :ecloud
require 'fog/ecloud/compute'
Fog::Compute::Ecloud.new(attributes)
when :glesys
require 'fog/glesys/compute'
Fog::Compute::Glesys.new(attributes)
when :gogrid
require 'fog/go_grid/compute'
Fog::Compute::GoGrid.new(attributes)
when :hp
require 'fog/hp/compute'
Fog::Compute::HP.new(attributes)
when :ibm
require 'fog/ibm/compute'
Fog::Compute::IBM.new(attributes)
when :joyent
require 'fog/joyent/compute'
Fog::Compute::Joyent.new(attributes)
when :libvirt
require 'fog/libvirt/compute'
Fog::Compute::Libvirt.new(attributes)
when :linode
require 'fog/linode/compute'
Fog::Compute::Linode.new(attributes)
when :new_servers
require 'fog/bare_metal_cloud/compute'
Fog::Logger.deprecation "`new_servers` is deprecated. Please use `bare_metal_cloud` instead."
@ -56,15 +21,6 @@ module Fog
when :baremetalcloud
require 'fog/bare_metal_cloud/compute'
Fog::Compute::BareMetalCloud.new(attributes)
when :ninefold
require 'fog/ninefold/compute'
Fog::Compute::Ninefold.new(attributes)
when :openstack
require 'fog/openstack/compute'
Fog::Compute::OpenStack.new(attributes)
when :ovirt
require 'fog/ovirt/compute'
Fog::Compute::Ovirt.new(attributes)
when :rackspace
version = attributes.delete(:version)
version = version.to_s.downcase.to_sym unless version.nil?
@ -76,31 +32,17 @@ module Fog
require 'fog/rackspace/compute'
Fog::Compute::Rackspace.new(attributes)
end
when :serverlove
require 'fog/serverlove/compute'
Fog::Compute::Serverlove.new(attributes)
when :stormondemand
require 'fog/storm_on_demand/compute'
Fog::Compute::StormOnDemand.new(attributes)
when :vcloud
require 'fog/vcloud/compute'
Fog::Vcloud::Compute.new(attributes)
when :virtualbox
require 'fog/virtual_box/compute'
Fog::Compute::VirtualBox.new(attributes)
when :vmfusion
require 'fog/vmfusion/compute'
Fog::Compute::Vmfusion.new(attributes)
when :voxel
require 'fog/voxel/compute'
Fog::Compute::Voxel.new(attributes)
when :vsphere
require 'fog/vsphere/compute'
Fog::Compute::Vsphere.new(attributes)
when :xenserver
require 'fog/xenserver/compute'
Fog::Compute::XenServer.new(attributes)
else
if self.providers.include?(provider)
require "fog/#{provider}/compute"
return Fog::Compute.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} is not a recognized compute provider")
end
end

View file

@ -7,37 +7,14 @@ module Fog
def self.new(attributes)
attributes = attributes.dup # prevent delete from having side effects
case provider = attributes.delete(:provider).to_s.downcase.to_sym
when :aws
require 'fog/aws/dns'
Fog::DNS::AWS.new(attributes)
when :bluebox
require 'fog/bluebox/dns'
Fog::DNS::Bluebox.new(attributes)
when :dnsimple
require 'fog/dnsimple/dns'
Fog::DNS::DNSimple.new(attributes)
when :dnsmadeeasy
require 'fog/dnsmadeeasy/dns'
Fog::DNS::DNSMadeEasy.new(attributes)
when :dreamhost
require 'fog/dreamhost/dns'
Fog::DNS::Dreamhost.new(attributes)
when :dynect
require 'fog/dynect/dns'
Fog::DNS::Dynect.new(attributes)
when :linode
require 'fog/linode/dns'
Fog::DNS::Linode.new(attributes)
when :zerigo
require 'fog/zerigo/dns'
Fog::DNS::Zerigo.new(attributes)
when :rackspace
require 'fog/rackspace/dns'
Fog::DNS::Rackspace.new(attributes)
else
raise ArgumentError.new("#{provider} is not a recognized dns provider")
provider = attributes.delete(:provider).to_s.downcase.to_sym
if self.providers.include?(provider)
require "fog/#{provider}/dns"
return Fog::DNS.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} is not a recognized dns provider")
end
def self.providers

View file

@ -11,16 +11,17 @@ module Fog
when :rackspace
require 'fog/rackspace/identity'
Fog::Rackspace::Identity.new(attributes)
when :openstack
require 'fog/openstack/identity'
Fog::Identity::OpenStack.new(attributes)
else
if self.providers.include?(provider)
require "fog/#{provider}/identity"
return Fog::Identity.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} has no identity service")
end
end
def self.providers
Fog.services[:idenity]
Fog.services[:identity]
end
end

View file

@ -7,13 +7,12 @@ module Fog
def self.new(attributes)
attributes = attributes.dup # Prevent delete from having side effects
case provider = attributes.delete(:provider).to_s.downcase.to_sym
when :openstack
require 'fog/openstack/image'
Fog::Image::OpenStack.new(attributes)
else
raise ArgumentError.new("#{provider} has no identity service")
provider = attributes.delete(:provider).to_s.downcase.to_sym
if self.providers.include?(provider)
require "fog/#{provider}/image"
return Fog::Image.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} has no identity service")
end
def self.providers

View file

@ -9,13 +9,12 @@ module Fog
attributes = attributes.dup # Prevent delete from having side effects
provider = attributes.delete(:provider).to_s.downcase.to_sym
case provider
when :openstack
require 'fog/openstack/network'
Fog::Network::OpenStack.new(attributes)
else
raise ArgumentError.new("#{provider} has no network service")
if self.providers.include?(provider)
require "fog/#{provider}/network"
return Fog::Network.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} has no network service")
end
def self.providers

View file

@ -42,6 +42,7 @@ module Fog
end
service(:compute , 'openstack/compute' , 'Compute' )
service(:image, 'openstack/image', 'Image')
service(:identity, 'openstack/identity', 'Identity')
service(:network, 'openstack/network', 'Network')
service(:storage, 'openstack/storage', 'Storage')

View file

@ -8,37 +8,15 @@ module Fog
def self.new(attributes)
attributes = attributes.dup # prevent delete from having side effects
case provider = attributes.delete(:provider).to_s.downcase.to_sym
when :atmos
require 'fog/atmos/storage'
Fog::Storage::Atmos.new(attributes)
when :aws
require 'fog/aws/storage'
Fog::Storage::AWS.new(attributes)
when :google
require 'fog/google/storage'
Fog::Storage::Google.new(attributes)
when :hp
require 'fog/hp/storage'
Fog::Storage::HP.new(attributes)
when :ibm
require 'fog/ibm/storage'
Fog::Storage::IBM.new(attributes)
when :internetarchive
require 'fog/internet_archive/storage'
Fog::Storage::InternetArchive.new(attributes)
when :local
require 'fog/local/storage'
Fog::Storage::Local.new(attributes)
when :ninefold
require 'fog/ninefold/storage'
Fog::Storage::Ninefold.new(attributes)
when :rackspace
require 'fog/rackspace/storage'
Fog::Storage::Rackspace.new(attributes)
when :openstack
require 'fog/openstack/storage'
Fog::Storage::OpenStack.new(attributes)
else
if self.providers.include?(provider)
require "fog/#{provider}/storage"
return Fog::Storage.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} is not a recognized storage provider")
end
end

View file

@ -7,13 +7,13 @@ module Fog
def self.new(attributes)
attributes = attributes.dup # Prevent delete from having side effects
case provider = attributes.delete(:provider).to_s.downcase.to_sym
when :openstack
require 'fog/openstack/volume'
Fog::Volume::OpenStack.new(attributes)
else
raise ArgumentError.new("#{provider} has no identity service")
provider = attributes.delete(:provider).to_s.downcase.to_sym
if self.providers.include?(provider)
require "fog/#{provider}/volume"
return Fog::Volume.const_get(Fog.providers[provider]).new(attributes)
end
raise ArgumentError.new("#{provider} has no identity service")
end
def self.providers

View file

@ -5,7 +5,7 @@ module Fog
def vm_reconfig_hardware(options = {})
raise ArgumentError, "hardware_spec is a required parameter" unless options.has_key? 'hardware_spec'
raise ArgumentError, "instance_uuid is a required parameter" unless options.has_key? 'instance_uuid'
vm_mob_ref = get_vm_by_ref(options['instance_uuid'])
vm_mob_ref = get_vm_ref(options['instance_uuid'])
task = vm_mob_ref.ReconfigVM_Task(:spec => RbVmomi::VIM.VirtualMachineConfigSpec(options['hardware_spec']))
task.wait_for_completion
{ 'task_state' => task.info.state }

View file

@ -0,0 +1,8 @@
# Shared model lifecycle tests (save/destroy) for the Data Pipeline model.
# Skipped under mocking because the service has no Mock implementation.
# NOTE(review): this label duplicates the collection test file's label —
# presumably intentional grouping; verify.
Shindo.tests("AWS::DataPipeline | pipelines", ['aws', 'data_pipeline']) do
  pending if Fog.mocking?

  unique_id = uniq_id

  model_tests(Fog::AWS[:data_pipeline].pipelines, { :id => unique_id, :name => "#{unique_id}-name", :unique_id => unique_id }) do
    # Block until the pipeline reports any state before the shared tests continue.
    @instance.wait_for { state }
  end
end

View file

@ -0,0 +1,8 @@
Shindo.tests("AWS::DataPipeline | pipelines", ['aws', 'data_pipeline']) do
pending if Fog.mocking?
unique_id = uniq_id
collection_tests(Fog::AWS[:data_pipeline].pipelines, { :id => unique_id, :name => "#{unique_id}-name", :unique_id => unique_id }) do
@instance.wait_for { state }
end
end

View file

@ -0,0 +1,44 @@
# Shindo response-format matchers for the AWS Data Pipeline request tests.
class AWS
  module DataPipeline
    module Formats
      # Shape of the CreatePipeline response body.
      BASIC = {
        'pipelineId' => String,
      }

      # Shape of the ListPipelines response body.
      LIST_PIPELINES = {
        "hasMoreResults" => Fog::Nullable::Boolean,
        "marker" => Fog::Nullable::String,
        "pipelineIdList" => [
          {
            "id" => String,
            "name" => String,
          }
        ]
      }

      # Shape of the DescribePipelines response body.
      DESCRIBE_PIPELINES = {
        "pipelineDescriptionList" => [
          {
            "description" => Fog::Nullable::String,
            "name" => String,
            "pipelineId" => String,
            "fields" => [
              {
                "key" => String,
                "refValue" => Fog::Nullable::String,
                "stringValue" => Fog::Nullable::String,
              }
            ]
          }
        ]
      }

      # Shape of the PutPipelineDefinition response body.
      PUT_PIPELINE_DEFINITION = {
        "errored" => Fog::Boolean,
        "validationErrors" => Fog::Nullable::Array,
      }
    end
  end
end

View file

@ -0,0 +1,54 @@
# Request-level integration tests for the Data Pipeline API. The tests are
# order-dependent: create_pipeline captures @pipeline_id, which every later
# test reuses. Skipped under mocking (no Mock implementation).
Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
  pending if Fog.mocking?

  # Set by the create_pipeline test and consumed by the rest of the run.
  @pipeline_id = nil

  tests('success') do
    tests("#create_pipeline").formats(AWS::DataPipeline::Formats::BASIC) do
      unique_id = 'fog-test-pipeline-unique-id'
      name = 'fog-test-pipeline-name'
      description = 'Fog test pipeline'

      result = Fog::AWS[:data_pipeline].create_pipeline(unique_id, name, description)
      @pipeline_id = result['pipelineId']

      result
    end

    tests("#list_pipelines").formats(AWS::DataPipeline::Formats::LIST_PIPELINES) do
      Fog::AWS[:data_pipeline].list_pipelines()
    end

    tests("#describe_pipelines").formats(AWS::DataPipeline::Formats::DESCRIBE_PIPELINES) do
      ids = [@pipeline_id]
      Fog::AWS[:data_pipeline].describe_pipelines(ids)
    end

    tests("#put_pipeline_definition").formats(AWS::DataPipeline::Formats::PUT_PIPELINE_DEFINITION) do
      # Minimal two-object definition: a schedule plus a default object
      # that references it.
      objects = [
        {
          "id" => "Nightly",
          "type" => "Schedule",
          "startDateTime" => Time.now.strftime("%Y-%m-%dT%H:%M:%S"),
          "period" => "24 hours",
        },
        {
          "id" => "Default",
          "role" => "role-dumps",
          "resourceRole" => "role-dumps-inst",
          "schedule" => { "ref" => "Nightly" },
        },
      ]

      Fog::AWS[:data_pipeline].put_pipeline_definition(@pipeline_id, objects)
    end

    tests("#activate_pipeline") do
      Fog::AWS[:data_pipeline].activate_pipeline(@pipeline_id)
    end

    tests("#delete_pipeline") do
      Fog::AWS[:data_pipeline].delete_pipeline(@pipeline_id)
    end
  end
end