
merged in 0.11.0 release

bdorry 2011-09-07 10:25:08 -04:00
commit 5719b5376a
1183 changed files with 7528 additions and 2392 deletions

.gitignore

@ -4,9 +4,10 @@
*.sw?
.bundle
.DS_Store
docs/_site/*
coverage
doc/*
docs/_site/*
docs/about/supported_services.markdown
Gemfile.lock
rdoc
pkg


@ -46,6 +46,15 @@ end
task :default => :test
namespace :test do
task :dynect do
[false].each do |mock|
sh("export FOG_MOCK=#{mock} && bundle exec shindont tests/dns/requests/dynect")
#sh("export FOG_MOCK=#{mock} && bundle exec shindont tests/dns/models/")
end
end
end
task :examples do
sh("export FOG_MOCK=false && bundle exec shindont examples")
# some don't provide mocks so we'll leave this out for now
@ -137,7 +146,6 @@ task :release => :build do
sh "git tag v#{version}"
sh "git push origin master"
sh "git push origin v#{version}"
Rake::Task[:build].invoke # rebuild with updated changelog
sh "gem push pkg/#{name}-#{version}.gem"
Rake::Task[:docs].invoke
end
@ -182,6 +190,18 @@ task :changelog do
changelog << ('=' * changelog[0].length)
changelog << ''
require 'multi_json'
github_data = MultiJson.decode(Excon.get('http://github.com/api/v2/json/repos/show/geemus/fog').body)
data = github_data['repository'].reject {|key, value| !['forks', 'open_issues', 'watchers'].include?(key)}
rubygems_data = MultiJson.decode(Excon.get('https://rubygems.org/api/v1/gems/fog.json').body)
data['downloads'] = rubygems_data['downloads']
stats = []
for key in data.keys.sort
stats << "'#{key}' => #{data[key]}"
end
changelog << "Stats! { #{stats.join(', ')} }"
changelog << ''
last_sha = `cat changelog.txt | head -1`.split(' ').last
shortlog = `git shortlog #{last_sha}..HEAD`
changes = {}
@ -208,7 +228,11 @@ task :changelog do
for committer, commits in committers.to_a.sort {|x,y| y[1] <=> x[1]}
if [
'Aaron Suggs',
'Brian Hartsock',
'Christopher Oliver',
'Dylan Egan',
'geemus',
'Henry Addison',
'Lincoln Stoll',
'Luqman Amjad',
'nightshade427',
@ -238,6 +262,7 @@ task :changelog do
end
task :docs do
Rake::Task[:supported_services_docs].invoke
Rake::Task[:upload_fog_io].invoke
Rake::Task[:upload_rdoc].invoke
@ -256,6 +281,71 @@ task :docs do
Formatador.display_line
end
task :supported_services_docs do
support, shared = {}, []
for key, values in Fog.services
unless values.length == 1
shared |= [key]
values.each do |value|
support[value] ||= {}
support[value][key] = '+'
end
else
value = values.first
support[value] ||= {}
support[value][:other] ||= []
support[value][:other] << key
end
end
shared.sort! {|x,y| x.to_s <=> y.to_s}
columns = [:provider] + shared + [:other]
data = []
for key in support.keys.sort {|x,y| x.to_s <=> y.to_s}
data << { :provider => key }.merge!(support[key])
end
table = ''
table << "<table border='1'>\n"
table << " <tr>"
for column in columns
table << "<th>#{column}</th>"
end
table << "</tr>\n"
for datum in data
table << " <tr>"
for column in columns
if value = datum[column]
case value
when Array
table << "<td>#{value.join(', ')}</td>"
when '+'
table << "<td style='text-align: center;'>#{value}</td>"
else
table << "<th>#{value}</th>"
end
else
table << "<td></td>"
end
end
table << "</tr>\n"
end
table << "</table>\n"
File.open('docs/about/supported_services.markdown', 'w') do |file|
file.puts <<-METADATA
---
layout: default
title: Supported Services
---
METADATA
file.puts(table)
end
end
task :upload_fog_io do
# connect to storage provider
Fog.credential = :geemus


@ -5,7 +5,11 @@ require 'yaml'
Fog.credential = ARGV.first ? ARGV.first.to_sym : nil
Fog.mock! if ENV['FOG_MOCK']
if Fog.credentials.empty?
Fog::Errors.missing_credentials
begin
Fog::Errors.missing_credentials
rescue Fog::Errors::LoadError => error
abort error.message
end
end
require 'fog/bin'


@ -1,3 +1,159 @@
0.11.0 08/18/2011 73bcee507a4732e071c58d85793b7f307eb377dc
==========================================================
Stats! { 'downloads' => 202791, 'forks' => 237, 'open_issues' => 20, 'watchers' => 1427 }
MVP! Brian Hartsock
[aws|cdn]
Added commands for streaming distribution lists. thanks Christopher Oliver
[aws|compute]
describe security groups parser was not taking into account ipPermissionsEgress and therefore returning unexpected results when the account had VPC groups. thanks Christopher Oliver
Added missing 'platform' attribute to server model and describe instances request. thanks Christopher Oliver
[aws|iam]
fix 'Path' handling for get_group response. thanks Nick Osborn
add missing update_server_certificate request. thanks Nick Osborn
[aws|rds]
Allow string or symbol hash keys. thanks Aaron Suggs
[aws|s3]
Added basic tests for get_bucket, fixed a bug in get_bucket with delimiter option, tests succeed for both mocked and real situation. thanks Erik Terpstra
policy should be base64 encoded and not contain new lines. thanks Fujimura Daisuke
Require of 'multi_json' was missing. thanks Fujimura Daisuke
[compute]
add server base class to contain shared stuff (scp/ssh). thanks geemus
[compute|aws]
Whitespace removal. thanks Dylan Egan
Allow image mocks to support state (except failed). thanks Dylan Egan
fix pluralization of modify_image_attribute. thanks geemus
update modify image/snapshot attribute to match latest API. thanks geemus
add modify_image_attribute. thanks geemus
add support for saving assigned tags at server creation time. thanks geemus
add docs for new options on run_instances. thanks geemus
guard tag creation against empty tag set. thanks geemus
fixes for bootstrap and placing attributes json. thanks geemus
identity not needed for setup. thanks geemus
fix for running nice with mocked test run. thanks geemus
[compute|brightbox]
Updated test for new expected response from server. thanks Paul Thornthwaite
Updated Account format test to allow valid_credit_card flag. thanks Paul Thornthwaite
Added IPv6 address to format now it is exposed to API. thanks Paul Thornthwaite
DRY up request method. thanks Paul Thornthwaite
Corrected attribute name. thanks Paul Thornthwaite
[compute|voxel]
position in format is string, not integer. thanks geemus
[dns]
dry generate_unique_domain to tests helper. thanks geemus
[dns|dynect]
cleanup of initial implementation. thanks geemus
fixes to play nice with mocked test runs. thanks geemus
[dns|zerigo]
add host/port/scheme to recognizes. thanks geemus
[docs]
add task to build/publish supported services matrix. thanks geemus
alphasort doc tasks. thanks geemus
[misc]
if creating an s3 directory (bucket), one needs to pass in :location as well as have the aws connection set to the correct region... thanks Adam Greene
Write files as binary (otherwise UTF-8/ASCII errors can occur); check if File exists before trying to delete it (paperclip sometimes deletes files twice); check if Directory exists before trying to "cd" into it. thanks Andre Meij
Fix issue 464, add howto for European Rackspace cloud. thanks Andre Meij
Initial support for adding/deleting a load balancer (requests only). thanks Brian Hartsock
Complete lifecycle for a load balancer. thanks Brian Hartsock
Improved error handling. thanks Brian Hartsock
Model and collection for load balancers. thanks Brian Hartsock
Fixed issues with loading all LB params. thanks Brian Hartsock
Requests for nodes. thanks Brian Hartsock
Rackspace Load Balancers: model classes for nodes. thanks Brian Hartsock
Rackspace Load Balancers: requests for virtual ips. thanks Brian Hartsock
Added virtual IP models. thanks Brian Hartsock
Rackspace LB: Made some updates from the pull request. thanks Brian Hartsock
Rackspace LB: protocols, algorithms, and connection logging. thanks Brian Hartsock
Added access list requests. thanks Brian Hartsock
Rackspace LB: Added session persistence requests. thanks Brian Hartsock
Rackspace LB: Connection throttling requests. thanks Brian Hartsock
Rackspace LB: Fixed issues with connection logging model. thanks Brian Hartsock
Rackspace LB: Health Monitor requests. thanks Brian Hartsock
Rackspace LB: account usage request. thanks Brian Hartsock
Rackspace LB: Load Balancer Usage requests. thanks Brian Hartsock
Rackspace LB: Added model capabilities for a lot of additional actions. thanks Brian Hartsock
Rackspace LB: models for access lists. thanks Brian Hartsock
Rackspace LB: account usage call. thanks Brian Hartsock
Rackspace LB: Refactoring and cleanup. thanks Brian Hartsock
register_image mocking support. thanks Dylan Egan
Remove GENTOO_AMI. thanks Dylan Egan
Store it under the ID, not the name. thanks Dylan Egan
Allow tag filtering for images. thanks Dylan Egan
Set imageOwnerAlias to self. Not 100% on this, but it will allow you to search for images with 'owner-alias' => 'self'. thanks Dylan Egan
Back to using Owner. A couple of tests for it too. thanks Dylan Egan
Added support for delimiter option in Fog::Storage::AWS::Mock object. thanks Erik Terpstra
Link to EBS snapshots blog post. thanks Gavin Sandie
Add force stop functionality to AWS Instance. thanks John Ferlito
Changed LoadError to Fog::Errors::LoadError when missing configuration; when running from the command line, rescue the exception and print the help message. thanks Mark A. Miller
Fix VirtualBox in compute case statement. thanks Mark A. Miller
Update to the latest VirtualBox gem while we're at it for good measure. thanks Mark A. Miller
add dynect DNS provider with session request implemented. thanks Michael Hale
add dynect provider and cleanup extra requires. thanks Michael Hale
enable mocking for dynect. thanks Michael Hale
parse session request and fix mock for tests. thanks Michael Hale
whoops forgot to add these files. thanks Michael Hale
temporary rake task for convenient testing. thanks Michael Hale
include /REST in all requests. thanks Michael Hale
change API-Token to Auth-Token. thanks Michael Hale
add zone request. thanks Michael Hale
fix API-Token in mock session request. thanks Michael Hale
always run both mock and non-mock tests. thanks Michael Hale
parse the list of zones returned. thanks Michael Hale
require builder in dynect. thanks Michael Hale
WIP: add stub model classes. thanks Michael Hale
tests passing. thanks Michael Hale
rename zone request to zones. thanks Michael Hale
add zone request to show information for a single zone. thanks Michael Hale
hook up zones model. thanks Michael Hale
hook up zones.get. thanks Michael Hale
dynect: add a bunch of stuff: node_list, list_any_records, handle 307 job redirect,. thanks Michael Hale
dynect: nicer filter api for records. thanks Michael Hale
Escape source object name when copying. thanks Pratik Naik
provide #providers for shared services. thanks geemus
[rackspace|load balancers]
fixed broken tests. thanks Brian Hartsock
[rackspace|load_balancers]
fixes to play nice with mock test runs. thanks geemus
fix typo in tests. thanks geemus
[rackspace|loadbalancers]
cleanup. thanks geemus
[release]
add newest MVP to changelog task MVP omit list. thanks geemus
add stats to changelog. thanks geemus
remove un-needed rebuild of gem. thanks geemus
[storage]
fix deprecated get_url usage. thanks geemus
[storage|aws]
simplify region accessor. thanks geemus
[storage|ninefold]
remove debug output. thanks geemus
[tests]
non-destructively generate id for get('fake') == nil tests. thanks geemus
0.10.0 07/25/2011 9ca8cffc000c417a792235438c12855a277fe1ce
==========================================================


@ -61,6 +61,7 @@
<li><a href="/about/getting_started.html">Getting Started</a></li>
<li><a href="/about/press.html">Press</a></li>
<li><a href="/about/structure.html">Structure</a></li>
<li><a href="/about/supported_services.html">Supported Services</a></li>
<li><a href="/about/users.html">Users</a></li>
</ul>


@ -81,6 +81,10 @@ Rackspace has <a href="http://www.rackspacecloud.com/cloud_hosting_products/serv
:rackspace_api_key => RACKSPACE_API_KEY
})
If you work with the European cloud from Rackspace, you have to add the following:
:rackspace_auth_url => "lon.auth.api.rackspacecloud.com"
We will skip over learning how to do this the 'Rackspace Way' and instead jump right to using bootstrap to get their smallest Ubuntu 10.04 LTS server.
server = connection.servers.bootstrap
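
Pulling those pieces together, a full European-cloud connection might look like the sketch below. This is a minimal, hypothetical example: the credential constants are placeholders for your own values, and the :rackspace_auth_url value is the London endpoint mentioned above.

    require 'fog'

    # Placeholder credentials; substitute your own Rackspace username and API key.
    connection = Fog::Compute.new({
      :provider           => 'Rackspace',
      :rackspace_username => RACKSPACE_USERNAME,
      :rackspace_api_key  => RACKSPACE_API_KEY,
      :rackspace_auth_url => 'lon.auth.api.rackspacecloud.com' # European (London) auth endpoint
    })

    # bootstrap provisions the default smallest Ubuntu server and sets up ssh access
    server = connection.servers.bootstrap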


@ -66,7 +66,7 @@ You can add more specifics if you need to, but reasonable defaults make it just
## No Zerigo? No Problem
If you already have an account with another service you can just as easily use this same code with different credentials. fog currently supports <a href="http://aws.amazon.com/route53/">AWS Route 53</a>, <a href="http://bluebox.net">Blue Box</a>, <a href="http://dnsimple.com">DNSimple</a>, <a href="http://www.linode.com">Linode</a>, <a href="http://www.slicehost.com">Slicehost</a> and <a href="http://www.zerigo.com/managed-dns">Zerigo</a>; so you can have your pick. As an example you can connect to AWS instead of Zerigo:
If you already have an account with another service you can just as easily use this same code with different credentials. fog currently supports <a href="http://aws.amazon.com/route53/">AWS Route 53</a>, <a href="http://bluebox.net">Blue Box</a>, <a href="http://dnsimple.com">DNSimple</a>, <a href="http://www.linode.com">Linode</a>, <a href="http://www.rackspace.com">Rackspace</a>, <a href="http://www.slicehost.com">Slicehost</a> and <a href="http://www.zerigo.com/managed-dns">Zerigo</a>; so you can have your pick. As an example you can connect to AWS instead of Zerigo:
dns = Fog::DNS.new({
:provider => 'AWS',
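
The snippet above is cut off by the diff context; a complete connection along these lines might look like the following sketch. The credential keys match the `requires :aws_access_key_id, :aws_secret_access_key` declarations elsewhere in this commit; the constants are placeholders.

    dns = Fog::DNS.new({
      :provider              => 'AWS',
      :aws_access_key_id     => AWS_ACCESS_KEY_ID,
      :aws_secret_access_key => AWS_SECRET_ACCESS_KEY
    })

    # the same model layer then talks to Route 53
    dns.zones.each { |zone| puts zone.domain }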


@ -85,29 +85,29 @@ Now you've got a bunch of files in S3: your resume, some code samples,
and maybe some pictures of your cat doing funny stuff. Since this is
all of vital importance, you need to back it up.
# copy each file to local disk
directory.files.each do |s3_file|
File.open(s3_file.key, 'w') do |local_file|
local_file.write(s3_file.body)
end
end
One caveat: it's way more efficient to do this:
# do two things per file
directory.files.each do |file|
do_one_thing(file)
do_another_thing(file)
end
than it is to do this:
# do two things per file
directory.files.each do |file|
do_one_thing(file)
end.each do |file|
do_another_thing(file)
end
The reason is that the list of files might be large. Really
large. Eat-all-your-RAM-and-ask-for-more large. Therefore, every time
@ -145,6 +145,10 @@ Rackspace has <a href="http://www.rackspacecloud.com/cloud_hosting_products/file
:rackspace_api_key => RACKSPACE_API_KEY
})
If you work with the European cloud from Rackspace, you have to add the following:
:rackspace_auth_url => "lon.auth.api.rackspacecloud.com"
Then create, save, destroy as per fog-for-AWS. The `:public => true` option when creating directories (see above) is important for Rackspace; without it your folder and files won't be shared to Rackspace's CDN, and hence won't reach your users. Similarly, the `:public => true` option on files is important for AWS and Google, or they will be private.
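
For illustration, here is a minimal, hypothetical sketch of the flag in use; `storage` is assumed to be an existing Fog::Storage connection and the key names are made up.

    # create a world-readable directory (bucket/container)
    directory = storage.directories.create(
      :key    => 'fog-demo-public-assets',  # hypothetical name
      :public => true                       # CDN-shared on Rackspace, public-read on AWS/Google
    )

    # files need the flag too, or AWS and Google keep them private
    file = directory.files.create(
      :key    => 'resume.html',
      :body   => File.open('resume.html'),
      :public => true
    )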
## Local Storage


@ -6,8 +6,8 @@ Gem::Specification.new do |s|
## If your rubyforge_project name is different, then edit it and comment out
## the sub! line in the Rakefile
s.name = 'fog'
s.version = '0.10.0'
s.date = '2011-07-25'
s.version = '0.11.0'
s.date = '2011-08-18'
s.rubyforge_project = 'fog'
## Make sure your summary is short. The description may be as long
@ -53,7 +53,8 @@ Gem::Specification.new do |s|
s.add_development_dependency('rdoc')
s.add_development_dependency('rspec', '~>1.3.1')
s.add_development_dependency('shindo', '~>0.3.4')
s.add_development_dependency('virtualbox', '~>0.8.3')
s.add_development_dependency('virtualbox', '~>0.9.1')
# s.add_development_dependency('ruby-libvirt','~>0.4.0')
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {spec,tests}/*`.split("\n")


@ -3,7 +3,7 @@ require File.join(File.dirname(__FILE__), 'fog', 'core')
module Fog
unless const_defined?(:VERSION)
VERSION = '0.10.0'
VERSION = '0.11.0'
end
end


@ -1,6 +1,4 @@
require 'fog/core'
require 'openssl' # For RSA key pairs
require 'base64' # For console output
require(File.expand_path(File.join(File.dirname(__FILE__), 'core')))
module Fog
module AWS
@ -8,11 +6,11 @@ module Fog
extend Fog::Provider
service(:auto_scaling, 'aws/auto_scaling')
service(:cdn, 'cdn/aws')
service(:compute, 'compute/aws')
service(:cdn, 'aws/cdn')
service(:compute, 'aws/compute')
service(:cloud_formation, 'aws/cloud_formation')
service(:cloud_watch, 'aws/cloud_watch')
service(:dns, 'dns/aws')
service(:dns, 'aws/dns')
service(:elb, 'aws/elb')
service(:iam, 'aws/iam')
service(:rds, 'aws/rds')
@ -20,7 +18,7 @@ module Fog
service(:simpledb, 'aws/simpledb')
service(:sns, 'aws/sns')
service(:sqs, 'aws/sqs')
service(:storage, 'storage/aws')
service(:storage, 'aws/storage')
def self.indexed_param(key, values)
params = {}


@ -1,3 +1,5 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
module Fog
module AWS
class AutoScaling < Fog::Service


@ -1,3 +1,6 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
require 'fog/cdn'
module Fog
module CDN
class AWS < Fog::Service
@ -5,9 +8,9 @@ module Fog
requires :aws_access_key_id, :aws_secret_access_key
recognizes :host, :path, :port, :scheme, :version, :persistent
model_path 'fog/cdn/models/aws'
model_path 'fog/aws/cdn/models'
request_path 'fog/cdn/requests/aws'
request_path 'fog/aws/requests/cdn'
request 'delete_distribution'
request 'delete_streaming_distribution'
request 'get_distribution'


@ -1,3 +1,5 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
module Fog
module AWS
class CloudFormation < Fog::Service


@ -1,3 +1,5 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
module Fog
module AWS
class CloudWatch < Fog::Service


@ -1,3 +1,6 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
require 'fog/compute'
module Fog
module Compute
class AWS < Fog::Service
@ -5,7 +8,7 @@ module Fog
requires :aws_access_key_id, :aws_secret_access_key
recognizes :endpoint, :region, :host, :path, :port, :scheme, :persistent
model_path 'fog/compute/models/aws'
model_path 'fog/aws/models/compute'
model :address
collection :addresses
model :flavor
@ -27,7 +30,7 @@ module Fog
model :spot_request
collection :spot_requests
request_path 'fog/compute/requests/aws'
request_path 'fog/aws/requests/compute'
request :allocate_address
request :associate_address
request :attach_volume
@ -70,7 +73,8 @@ module Fog
request :get_console_output
request :get_password_data
request :import_key_pair
request :modify_image_attributes
request :modify_image_attribute
request :modify_instance_attribute
request :modify_snapshot_attribute
request :purchase_reserved_instances_offering
request :reboot_instances
@ -85,6 +89,16 @@ module Fog
request :monitor_instances
request :unmonitor_instances
# deprecation
class Real
def modify_image_attributes(*params)
Fog::Logger.warning("modify_image_attributes is deprecated, use modify_image_attribute instead [light_black](#{caller.first})[/]")
modify_image_attribute(*params)
end
end
class Mock
def self.data


@ -1,3 +1,6 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
require 'fog/dns'
module Fog
module DNS
class AWS < Fog::Service
@ -5,13 +8,13 @@ module Fog
requires :aws_access_key_id, :aws_secret_access_key
recognizes :host, :path, :port, :scheme, :version, :persistent
model_path 'fog/dns/models/aws'
model_path 'fog/aws/models/dns'
model :record
collection :records
model :zone
collection :zones
request_path 'fog/dns/requests/aws'
request_path 'fog/aws/requests/dns'
request :create_hosted_zone
request :get_hosted_zone
request :delete_hosted_zone


@ -1,9 +1,12 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
module Fog
module AWS
class ELB < Fog::Service
class IdentifierTaken < Fog::Errors::Error; end
class InvalidInstance < Fog::Errors::Error; end
class Throttled < Fog::Errors::Error; end
requires :aws_access_key_id, :aws_secret_access_key
recognizes :region, :host, :path, :port, :scheme, :persistent
@ -165,6 +168,8 @@ module Fog
raise Fog::AWS::ELB::IdentifierTaken.slurp(error, match[2])
when 'InvalidInstance'
raise Fog::AWS::ELB::InvalidInstance.slurp(error, match[2])
when 'Throttling'
raise Fog::AWS::ELB::Throttled.slurp(error, match[2])
else
raise
end


@ -1,3 +1,5 @@
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'aws'))
module Fog
module AWS
class IAM < Fog::Service
@ -16,8 +18,8 @@ module Fog
request :create_access_key
request :create_account_alias
request :create_group
request :create_user
request :create_login_profile
request :create_user
request :delete_access_key
request :delete_account_alias
request :delete_group
@ -27,17 +29,17 @@ module Fog
request :delete_signing_certificate
request :delete_user
request :delete_user_policy
request :get_login_profile
request :get_user
request :get_user_policy
request :get_group
request :get_group_policy
request :get_login_profile
request :get_server_certificate
request :get_user
request :get_user_policy
request :list_access_keys
request :list_account_aliases
request :list_group_policies
request :list_groups
request :list_groups_for_user
request :list_group_policies
request :list_server_certificates
request :list_signing_certificates
request :list_user_policies
@ -48,8 +50,9 @@ module Fog
request :update_access_key
request :update_group
request :update_login_profile
request :update_user
request :update_server_certificate
request :update_signing_certificate
request :update_user
request :upload_server_certificate
request :upload_signing_certificate


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws//address'
require 'fog/aws/models/compute/address'
module Fog
module Compute
@ -56,7 +56,7 @@ module Fog
def all(filters = filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('public-ip' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('public-ip' => []) instead [light_black](#{caller.first})[/]")
filters = {'public-ip' => [*filters]}
end
self.filters = filters


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/flavor'
require 'fog/aws/models/compute/flavor'
module Fog
module Compute


@ -13,6 +13,7 @@ module Fog
attribute :description
attribute :location, :aliases => 'imageLocation'
attribute :owner_id, :aliases => 'imageOwnerId'
attribute :owner_alias, :aliases => 'imageOwnerAlias'
attribute :state, :aliases => 'imageState'
attribute :type, :aliases => 'imageType'
attribute :is_public, :aliases => 'isPublic'
@ -36,6 +37,10 @@ module Fog
end
end
def ready?
state == 'available'
end
end
end


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/image'
require 'fog/aws/models/compute/image'
module Fog
module Compute


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/key_pair'
require 'fog/aws/models/compute/key_pair'
module Fog
module Compute
@ -51,7 +51,7 @@ module Fog
def all(filters = filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('key-name' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('key-name' => []) instead [light_black](#{caller.first})[/]")
filters = {'key-name' => [*filters]}
end
self.filters = filters


@ -38,7 +38,7 @@ module Fog
# "Server"=>"AmazonEC2"}
#
def authorize_group_and_owner(group, owner)
def authorize_group_and_owner(group, owner = nil)
requires :name
connection.authorize_security_group_ingress(
@ -130,7 +130,7 @@ module Fog
# "Server"=>"AmazonEC2"}
#
def revoke_group_and_owner(group, owner)
def revoke_group_and_owner(group, owner = nil)
requires :name
connection.revoke_security_group_ingress(


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/security_group'
require 'fog/aws/models/compute/security_group'
module Fog
module Compute
@ -57,7 +57,7 @@ module Fog
def all(filters = filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('group-name' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('group-name' => []) instead [light_black](#{caller.first})[/]")
filters = {'group-name' => [*filters]}
end
self.filters = filters


@ -1,10 +1,10 @@
require 'fog/core/model'
require 'fog/compute/models/server'
module Fog
module Compute
class AWS
class Server < Fog::Model
class Server < Fog::Compute::Server
extend Fog::Deprecation
deprecate :ip_address, :public_ip_address
@ -168,17 +168,29 @@ module Fog
data = connection.run_instances(image_id, 1, 1, options)
merge_attributes(data.body['instancesSet'].first)
if self.tags
for key, value in self.tags
connection.tags.create(
:key => key,
:resource_id => self.identity,
:value => value
)
end
end
true
end
def setup(credentials = {})
requires :identity, :public_ip_address, :username
requires :public_ip_address, :username
require 'multi_json'
require 'net/ssh'
commands = [
%{mkdir .ssh},
%{passwd -l #{username}},
%{echo "#{MultiJson.encode(attributes)}" >> ~/attributes.json}
%{echo "#{MultiJson.encode(Fog::JSON.sanitize(attributes))}" >> ~/attributes.json}
]
if public_key
commands << %{echo "#{public_key}" >> ~/.ssh/authorized_keys}
@ -200,31 +212,15 @@ module Fog
Fog::SSH.new(public_ip_address, username, credentials).run(commands)
end
def ssh(commands)
requires :identity, :public_ip_address, :username
options = {}
options[:key_data] = [private_key] if private_key
Fog::SSH.new(public_ip_address, username, options).run(commands)
end
def scp(local_path, remote_path, upload_options = {})
requires :public_ip_address, :username
scp_options = {}
scp_options[:key_data] = [private_key] if private_key
Fog::SCP.new(public_ip_address, username, scp_options).upload(local_path, remote_path, upload_options)
end
def start
requires :id
connection.start_instances(id)
true
end
def stop
def stop(force = false)
requires :id
connection.stop_instances(id)
connection.stop_instances(id, force)
true
end


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/server'
require 'fog/aws/models/compute/server'
module Fog
module Compute
@ -57,7 +57,7 @@ module Fog
def all(filters = self.filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('instance-id' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('instance-id' => []) instead [light_black](#{caller.first})[/]")
filters = {'instance-id' => [*filters]}
end
self.filters = filters


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/snapshot'
require 'fog/aws/models/compute/snapshot'
module Fog
module Compute
@ -19,7 +19,7 @@ module Fog
def all(filters = filters, options = {})
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('snapshot-id' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('snapshot-id' => []) instead [light_black](#{caller.first})[/]")
filters = {'snapshot-id' => [*filters]}
end
self.filters = filters


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/spot_request'
require 'fog/aws/models/compute/spot_request'
module Fog
module Compute
@ -17,7 +17,7 @@ module Fog
def all(filters = self.filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('spot-instance-request-id' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('spot-instance-request-id' => []) instead [light_black](#{caller.first})[/]")
filters = {'spot-instance-request-id' => [*filters]}
end
self.filters = filters


@ -10,7 +10,7 @@ module Fog
attribute :value
attribute :resource_id, :aliases => 'resourceId'
attribute :resource_type, :aliases => 'resourceType'
def initialize(attributes = {})
super


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/tag'
require 'fog/aws/models/compute/tag'
module Fog
module Compute


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/compute/models/aws/volume'
require 'fog/aws/models/compute/volume'
module Fog
module Compute
@ -64,7 +64,7 @@ module Fog
def all(filters = filters)
unless filters.is_a?(Hash)
Formatador.display_line("[yellow][WARN] all with #{filters.class} param is deprecated, use all('volume-id' => []) instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("all with #{filters.class} param is deprecated, use all('volume-id' => []) instead [light_black](#{caller.first})[/]")
filters = {'volume-id' => [*filters]}
end
self.filters = filters


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/dns/models/aws/record'
require 'fog/aws/models/dns/record'
module Fog
module DNS


@ -1,5 +1,5 @@
require 'fog/core/model'
# require 'fog/dns/models/aws/records'
# require 'fog/aws/models/dns/records'
module Fog
module DNS


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/dns/models/aws/zone'
require 'fog/aws/models/dns/zone'
module Fog
module DNS


@ -1,4 +1,5 @@
require 'fog/core/model'
require 'fog/core/current_machine'
module Fog
module AWS
@ -43,6 +44,16 @@ module Fog
authorize_ingress({'CIDRIP' => cidrip})
end
# Add the current machine to the RDS security group.
def authorize_me
authorize_ip_address(Fog::CurrentMachine.ip_address)
end
# Add the ip address to the RDS security group.
def authorize_ip_address(ip)
authorize_cidrip("#{ip}/32")
end
def authorize_ingress(opts)
data = connection.authorize_db_security_group_ingress(id, opts).body['AuthorizeDBSecurityGroupIngressResult']['DBSecurityGroup']
merge_attributes(data)


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/storage/models/aws/directory'
require 'fog/aws/models/storage/directory'
module Fog
module Storage


@ -1,5 +1,5 @@
require 'fog/core/model'
require 'fog/storage/models/aws/files'
require 'fog/aws/models/storage/files'
module Fog
module Storage
@ -86,9 +86,7 @@ module Fog
if @acl
options['x-amz-acl'] = @acl
end
if @location
options['LocationConstraint'] = @location
end
options['LocationConstraint'] = @location || self.connection.region
connection.put_bucket(key, options)
true
end


@ -46,9 +46,9 @@ module Fog
@directory
end
def copy(target_directory_key, target_file_key)
def copy(target_directory_key, target_file_key, options = {})
requires :directory, :key
connection.copy_object(directory.key, key, target_directory_key, target_file_key)
connection.copy_object(directory.key, key, target_directory_key, target_file_key, options)
target_directory = connection.directories.new(:key => target_directory_key)
target_directory.files.get(target_file_key)
end
@ -104,7 +104,7 @@ module Fog
def save(options = {})
requires :body, :directory, :key
if options != {}
Formatador.display_line("[yellow][WARN] options param is deprecated, use acl= instead[/] [light_black](#{caller.first})[/]")
Fog::Logger.warning("options param is deprecated, use acl= instead [light_black](#{caller.first})[/]")
end
options['x-amz-acl'] ||= @acl if @acl
options['Cache-Control'] = cache_control if cache_control
@ -125,7 +125,7 @@ module Fog
def url(expires)
requires :key
collection.get_url(key, expires)
collection.get_https_url(key, expires)
end
private


@ -1,5 +1,5 @@
require 'fog/core/collection'
require 'fog/storage/models/aws/file'
require 'fog/aws/models/storage/file'
module Fog
module Storage


@ -7,8 +7,9 @@ module Fog
def reset
@block_device_mapping = {}
@image = { 'blockDeviceMapping' => [], 'productCodes' => [], 'tagSet' => {} }
@image = { 'blockDeviceMapping' => [], 'productCodes' => [], 'stateReason' => {}, 'tagSet' => {} }
@response = { 'imagesSet' => [] }
@state_reason = {}
@tag = {}
end
@ -17,6 +18,8 @@ module Fog
case name
when 'blockDeviceMapping'
@in_block_device_mapping = true
when 'stateReason'
@in_state_reason = true
when 'tagSet'
@in_tag_set = true
end
@ -45,9 +48,18 @@ module Fog
@image['blockDeviceMapping'] << @block_device_mapping
@block_device_mapping = {}
end
elsif @in_state_reason
case name
when 'code', 'message'
@state_reason[name] = value
when 'stateReason'
@image['stateReason'] = @state_reason
@state_reason = {}
@in_state_reason = false
end
else
case name
when 'architecture', 'description', 'imageId', 'imageLocation', 'imageOwnerId', 'imageState', 'imageType', 'kernelId', 'name', 'platform', 'ramdiskId', 'rootDeviceType','rootDeviceName','virtualizationType'
when 'architecture', 'description', 'hypervisor', 'imageId', 'imageLocation', 'imageOwnerAlias', 'imageOwnerId', 'imageState', 'imageType', 'kernelId', 'name', 'platform', 'ramdiskId', 'rootDeviceType','rootDeviceName','virtualizationType'
@image[name] = value
when 'isPublic'
if value == 'true'
@ -57,7 +69,7 @@ module Fog
end
when 'item'
@response['imagesSet'] << @image
@image = { 'blockDeviceMapping' => [], 'productCodes' => [], 'tagSet' => {} }
@image = { 'blockDeviceMapping' => [], 'productCodes' => [], 'stateReason' => {}, 'tagSet' => {} }
when 'productCode'
@image['productCodes'] << value
when 'requestId'


@ -23,7 +23,7 @@ module Fog
def end_element(name)
case name
when 'Arn'
when 'Arn', 'Path'
if @in_group
@response['Group'][name] = value
elsif @in_users
@ -35,7 +35,7 @@ module Fog
@response['Group'][name] = value
when 'Users'
@in_users = false
when 'UserId', 'UserName', 'Path'
when 'UserId', 'UserName'
@user[name] = value
when 'member'
@response['Users'] << @user

Some files were not shown because too many files have changed in this diff.