Gzip files on page caching

Signed-off-by: José Valim <jose.valim@gmail.com>

parent 7c42b9321a
commit 7b1ac55f50

3 changed files with 90 additions and 5 deletions
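As a quick orientation before the diff, here is a hedged sketch of the controller-level usage this commit enables; the controller and action names are illustrative only, while the +:gzip+ values and the default level come straight from the change:

<ruby>
class ProductsController < ApplicationController  # hypothetical controller
  # Writes both products.html and products.html.gz, compressed with
  # Zlib::BEST_COMPRESSION unless told otherwise.
  caches_page :index

  # Skip the pre-gzipped copy, e.g. when the response is already compressed.
  caches_page :image, :gzip => false

  # Pick a compression level by Zlib constant name.
  caches_page :show, :gzip => :best_speed
end
</ruby>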
@@ -66,24 +66,30 @@ module ActionController #:nodoc:
 
       instrument_page_cache :expire_page, path do
         File.delete(path) if File.exist?(path)
+        File.delete(path + '.gz') if File.exist?(path + '.gz')
       end
     end
 
     # Manually cache the +content+ in the key determined by +path+. Example:
     #   cache_page "I'm the cached content", "/lists/show"
-    def cache_page(content, path, extension = nil)
+    def cache_page(content, path, extension = nil, gzip = Zlib::BEST_COMPRESSION)
       return unless perform_caching
       path = page_cache_path(path, extension)
 
       instrument_page_cache :write_page, path do
         FileUtils.makedirs(File.dirname(path))
         File.open(path, "wb+") { |f| f.write(content) }
+        if gzip
+          Zlib::GzipWriter.open(path + '.gz', gzip) { |f| f.write(content) }
+        end
       end
     end
 
     # Caches the +actions+ using the page-caching approach that'll store the cache in a path within the page_cache_directory that
     # matches the triggering url.
     #
+    # You can disable gzipping by setting +:gzip+ option to false.
+    #
     # Usage:
     #
     #   # cache the index action
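The core of the change is the extra +Zlib::GzipWriter+ call above: next to every cached page, a pre-compressed sibling is written. A minimal standalone sketch of that write path, with a made-up path and content:

<ruby>
require 'fileutils'
require 'zlib'

content = "<html><body>I'm the cached content</body></html>"
path    = "tmp/page_cache/lists/show.html"  # hypothetical cache path
gzip    = Zlib::BEST_COMPRESSION            # the new default level

FileUtils.makedirs(File.dirname(path))
# Plain copy, served to clients that do not accept gzip.
File.open(path, "wb+") { |f| f.write(content) }
# Pre-compressed sibling, e.g. for nginx's gzip_static.
Zlib::GzipWriter.open(path + '.gz', gzip) { |f| f.write(content) } if gzip
</ruby>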
@@ -91,10 +97,28 @@ module ActionController #:nodoc:
     #
     #   # cache the index action except for JSON requests
     #   caches_page :index, :if => Proc.new { |c| !c.request.format.json? }
+    #
+    #   # don't gzip images
+    #   caches_page :image, :gzip => false
     def caches_page(*actions)
       return unless perform_caching
       options = actions.extract_options!
-      after_filter({:only => actions}.merge(options)) { |c| c.cache_page }
+
+      gzip_level = options.fetch(:gzip, :best_compression)
+      if gzip_level
+        gzip_level = case gzip_level
+        when Symbol
+          Zlib.const_get(gzip_level.to_s.upcase)
+        when Fixnum
+          gzip_level
+        else
+          Zlib::BEST_COMPRESSION
+        end
+      end
+
+      after_filter({:only => actions}.merge(options)) do |c|
+        c.cache_page(nil, nil, gzip_level)
+      end
     end
 
     private
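The option handling above is what turns +:gzip => :best_speed+ into a numeric +Zlib+ level. A small standalone sketch of that lookup (the helper name is invented, and +Integer+ stands in for the +Fixnum+ check used in the diff):

<ruby>
require 'zlib'

# Symbols are resolved to Zlib constants, integers pass through,
# anything else falls back to Zlib::BEST_COMPRESSION.
def resolve_gzip_level(gzip)
  case gzip
  when Symbol  then Zlib.const_get(gzip.to_s.upcase)
  when Integer then gzip
  else Zlib::BEST_COMPRESSION
  end
end

resolve_gzip_level(:best_speed)       # => 1 (Zlib::BEST_SPEED)
resolve_gzip_level(:best_compression) # => 9 (Zlib::BEST_COMPRESSION)
resolve_gzip_level(6)                 # => 6
</ruby>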
@@ -136,7 +160,7 @@
     # Manually cache the +content+ in the key determined by +options+. If no content is provided, the contents of response.body is used.
     # If no options are provided, the url of the current request being handled is used. Example:
     #   cache_page "I'm the cached content", :controller => "lists", :action => "show"
-    def cache_page(content = nil, options = nil)
+    def cache_page(content = nil, options = nil, gzip = Zlib::BEST_COMPRESSION)
       return unless self.class.perform_caching && caching_allowed?
 
       path = case options
@@ -152,7 +176,7 @@
           extension = ".#{type_symbol}"
         end
 
-      self.class.cache_page(content || response.body, path, extension)
+      self.class.cache_page(content || response.body, path, extension, gzip)
     end
 
   end
@@ -17,6 +17,9 @@ class PageCachingTestController < CachingController
   caches_page :ok, :no_content, :if => Proc.new { |c| !c.request.format.json? }
   caches_page :found, :not_found
   caches_page :about_me
+  caches_page :default_gzip
+  caches_page :no_gzip, :gzip => false
+  caches_page :gzip_level, :gzip => :best_speed
 
 
   def ok
@@ -40,6 +43,18 @@ class PageCachingTestController < CachingController
     cache_page("Super soaker", "/index.html")
   end
 
+  def default_gzip
+    render :text => "Text"
+  end
+
+  def no_gzip
+    render :text => "PNG"
+  end
+
+  def gzip_level
+    render :text => "Big text"
+  end
+
   def expire_custom_path
     expire_page("/index.html")
     head :ok
@@ -115,6 +130,30 @@ class PageCachingTest < ActionController::TestCase
     assert !File.exist?("#{FILE_STORE_PATH}/index.html")
   end
 
+  def test_should_gzip_cache
+    get :custom_path
+    assert File.exist?("#{FILE_STORE_PATH}/index.html.gz")
+
+    get :expire_custom_path
+    assert !File.exist?("#{FILE_STORE_PATH}/index.html.gz")
+  end
+
+  def test_should_allow_to_disable_gzip
+    get :no_gzip
+    assert File.exist?("#{FILE_STORE_PATH}/page_caching_test/no_gzip.html")
+    assert !File.exist?("#{FILE_STORE_PATH}/page_caching_test/no_gzip.html.gz")
+  end
+
+  def test_should_use_best_gzip_by_default
+    @controller.expects(:cache_page).with(nil, nil, Zlib::BEST_COMPRESSION)
+    get :default_gzip
+  end
+
+  def test_should_set_gzip_level
+    @controller.expects(:cache_page).with(nil, nil, Zlib::BEST_SPEED)
+    get :gzip_level
+  end
+
   def test_should_cache_without_trailing_slash_on_url
     @controller.class.cache_page 'cached content', '/page_caching_test/trailing_slash'
     assert File.exist?("#{FILE_STORE_PATH}/page_caching_test/trailing_slash.html")
@@ -224,7 +263,7 @@ class ActionCachingTestController < CachingController
     @cache_this = MockTime.now.to_f.to_s
     render :text => @cache_this
   end
-
+
   def record_not_found
     raise ActiveRecord::RecordNotFound, "oops!"
   end
@@ -64,6 +64,28 @@ end
 
 If you want a more complicated expiration scheme, you can use cache sweepers to expire cached objects when things change. This is covered in the section on Sweepers.
 
+By default, page caching automatically gzips each cached file (for example, to +products.html.gz+ if the user requests +/products+) to reduce the size of the transmitted data (web servers are typically configured to use a moderate compression ratio as a compromise, but since precompression happens only once, the maximum compression ratio can be used).
+
+Nginx is able to serve compressed content directly from disk by enabling +gzip_static+:
+
+<plain>
+location / {
+  gzip_static on; # to serve pre-gzipped version
+}
+</plain>
+
+You can disable gzipping by setting the +:gzip+ option to +false+ (for example, if the action returns an image):
+
+<ruby>
+caches_page :image, :gzip => false
+</ruby>
+
+Or you can set a custom gzip compression level (level names are taken from the +Zlib+ constants):
+
+<ruby>
+caches_page :image, :gzip => :best_speed
+</ruby>
+
 NOTE: Page caching ignores all parameters. For example +/products?page=1+ will be written out to the filesystem as +products.html+ with no reference to the +page+ parameter. Thus, if someone requests +/products?page=2+ later, they will get the cached first page. A workaround for this limitation is to include the parameters in the page's path, e.g. +/products/page/1+.
 
 INFO: Page caching runs in an after filter. Thus, invalid requests won't generate spurious cache entries as long as you halt them. Typically, a redirection in some before filter that checks request preconditions does the job.
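Because the plain and gzipped copies are written from the same response body, decompressing the +.gz+ sibling should give back the cached page byte for byte. A hedged sanity check with illustrative paths:

<ruby>
require 'zlib'

html    = File.read("public/products.html")
gzipped = Zlib::GzipReader.open("public/products.html.gz") { |gz| gz.read }

html == gzipped # => true
</ruby>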