mirror of
https://github.com/fog/fog.git
synced 2022-11-09 13:51:43 -05:00
[storage|aws] Make Files#each iterate all files, not just the first page.
Removed Directory#each_file; it's not the API that people expect. This may break code that expects Files#each to only go a page at a time; those callers should switch to using #each_file_this_page.
This commit is contained in:
parent
3299e16fb5
commit
1827f00c3a
2 changed files with 14 additions and 18 deletions
|
@@ -46,21 +46,6 @@ module Fog
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# Yields every file in this directory, transparently following S3's
# paginated listing (via :marker / is_truncated) rather than stopping
# after the first page of results.
#
# Returns an Enumerator over the same traversal when no block is given.
def each_file
  if block_given?
    # Fetch the first page, then keep requesting subsequent pages,
    # resuming each request from the last key seen, until S3 reports
    # the listing is no longer truncated.
    page_of_files = files.all
    page_of_files.each_nowarn { |f| yield f }

    while page_of_files.is_truncated
      page_of_files = files.all(:marker => page_of_files.last.key)
      page_of_files.each_nowarn { |f| yield f }
    end
  else
    # Enumerable::Enumerator was removed in Ruby 1.9; enum_for is the
    # portable spelling (available since 1.8.7) with the same behavior.
    enum_for(:each_file)
  end
end
|
|
||||||
|
|
||||||
def payer
|
def payer
|
||||||
requires :key
|
requires :key
|
||||||
data = connection.get_request_payment(key)
|
data = connection.get_request_payment(key)
|
||||||
|
|
|
@@ -39,10 +39,21 @@ module Fog
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# Preserve the old page-at-a-time iteration under an explicit name for
# callers that relied on #each only covering a single listing page.
alias :each_file_this_page :each

# Iterates over ALL files in the bucket, walking S3's paginated listing
# instead of stopping after the first page.
#
# Returns self (the collection), matching the Enumerable#each contract.
def each
  return self unless block_given?

  page = dup.all
  page.each_file_this_page { |f| yield f }

  # Keep fetching pages while S3 marks the listing as truncated,
  # resuming each request from the last key we have seen.
  while page.is_truncated
    page = page.all(:marker => page.last.key)
    page.each_file_this_page { |f| yield f }
  end

  self
end
|
||||||
|
|
||||||
def get(key, options = {}, &block)
|
def get(key, options = {}, &block)
|
||||||
|
|
Loading…
Add table
Reference in a new issue