require 'benchmark'

module ActiveSupport
  module Cache
    # Returns a cache store instance for the given option. Accepts a symbol
    # naming a store class (e.g. :memory_store), an existing store object,
    # or nil (which falls back to a MemoryStore).
    def self.lookup_store(*store_option)
      store, *parameters = *([ store_option ].flatten)

      case store
      when Symbol
        store_class_name = (store == :drb_store ? "DRbStore" : store.to_s.camelize)
        store_class = ActiveSupport::Cache.const_get(store_class_name)
        store_class.new(*parameters)
      when nil
        ActiveSupport::Cache::MemoryStore.new
      else
        store
      end
    end
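
    # Illustrative examples for +lookup_store+ above (not from the original
    # source); +MyStore+ is a hypothetical store object:
    #
    #   ActiveSupport::Cache.lookup_store                                # => ActiveSupport::Cache::MemoryStore instance
    #   ActiveSupport::Cache.lookup_store(:memory_store)                 # => ActiveSupport::Cache::MemoryStore instance
    #   ActiveSupport::Cache.lookup_store(:mem_cache_store, "localhost") # => MemCacheStore for that server
    #   ActiveSupport::Cache.lookup_store(MyStore.new)                   # => the given object is returned as-is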

    # Builds the full cache key for +key+, optionally prefixed with +namespace+
    # and with ENV["RAILS_CACHE_ID"] or ENV["RAILS_APP_VERSION"] when set.
    def self.expand_cache_key(key, namespace = nil)
      expanded_cache_key = namespace ? "#{namespace}/" : ""

      if ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
        expanded_cache_key << "#{ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]}/"
      end

      expanded_cache_key << case
        when key.respond_to?(:cache_key)
          key.cache_key
        when key.is_a?(Array)
          key.collect { |element| expand_cache_key(element) }.to_param
        when key
          key.to_param
      end.to_s

      expanded_cache_key
    end
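
    # Illustrative examples for +expand_cache_key+ above (not from the original
    # source), assuming RAILS_CACHE_ID and RAILS_APP_VERSION are unset:
    #
    #   ActiveSupport::Cache.expand_cache_key(:views, "controller")  # => "controller/views"
    #   ActiveSupport::Cache.expand_cache_key([:posts, 5])           # => "posts/5"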

    # Abstract base class for all cache store implementations. Concrete stores
    # (MemoryStore, FileStore, MemCacheStore, ...) override the read/write/delete
    # operations and call +super+ so each operation is still logged.
    class Store
      cattr_accessor :logger

      # Silences logging for this store instance.
      def silence!
        @silence = true
        self
      end

      # Fetches data from the cache using the given key. If data exists for the
      # key, it is returned. Otherwise, if a block is given, the block's return
      # value is written to the cache under the key and returned.
      #
      # Pass <tt>:force => true</tt> to force a cache miss.
      def fetch(key, options = {})
        @logger_off = true
        if !options[:force] && value = read(key, options)
          @logger_off = false
          log("hit", key, options)
          value
        elsif block_given?
          @logger_off = false
          log("miss", key, options)

          value = nil
          seconds = Benchmark.realtime { value = yield }

          @logger_off = true
          write(key, value, options)
          @logger_off = false

          log("write (will save #{'%.2f' % (seconds * 1000)}ms)", key, nil)

          value
        end
      end
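
      # Illustrative example for +fetch+ above (not from the original source):
      #
      #   cache = ActiveSupport::Cache.lookup_store(:memory_store)
      #   cache.fetch("city") { "Duckburgh" }   # miss: runs the block, caches "Duckburgh", returns it
      #   cache.fetch("city") { "unused" }      # hit: returns "Duckburgh" without running the block
      #   cache.fetch("city", :force => true) { "Duckburgh" }  # forces a miss and rewrites the entry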

      # Logs a "read" for +key+; concrete stores override this to return the
      # cached value and call +super+ to keep the log entry.
      def read(key, options = nil)
        log("read", key, options)
      end

      # Logs a "write" for +key+; concrete stores override this to persist
      # +value+ and call +super+.
      def write(key, value, options = nil)
        log("write", key, options)
      end

      # Logs a "delete" for +key+; concrete stores override this to remove the
      # entry and call +super+.
      def delete(key, options = nil)
        log("delete", key, options)
      end

      # Logs a "delete matched" for +matcher+; stores that support it override
      # this to delete all entries whose keys match the pattern.
      def delete_matched(matcher, options = nil)
        log("delete matched", matcher.inspect, options)
      end

      # Logs an "exist?" check for +key+; concrete stores override this to
      # return true if an entry is present.
      def exist?(key, options = nil)
        log("exist?", key, options)
      end

      # Increments the value stored under +key+ by +amount+ and writes it back.
      # Returns nil if the key is missing.
      def increment(key, amount = 1)
        log("incrementing", key, amount)
        if num = read(key)
          write(key, num + amount)
        else
          nil
        end
      end

      # Decrements the value stored under +key+ by +amount+ and writes it back.
      # Returns nil if the key is missing.
      def decrement(key, amount = 1)
        log("decrementing", key, amount)
        if num = read(key)
          write(key, num - amount)
        else
          nil
        end
      end
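
      # Illustrative example for +increment+/+decrement+ above (not from the
      # original source):
      #
      #   cache = ActiveSupport::Cache.lookup_store(:memory_store)
      #   cache.write("counter", 1)
      #   cache.increment("counter")     # stores 2
      #   cache.decrement("counter", 2)  # stores 0
      #   cache.increment("missing")     # => nil (key not present)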

      private
        # Writes a debug line to the configured logger unless logging has been
        # silenced for this store or temporarily switched off.
        def log(operation, key, options)
          logger.debug("Cache #{operation}: #{key}#{options ? " (#{options.inspect})" : ""}") if logger && !@silence && !@logger_off
        end
    end
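
    # A minimal sketch (hypothetical, for illustration only) of how a concrete
    # store subclasses Store: override read/write/delete and call +super+ so
    # each operation is still logged.
    #
    #   class HashStore < Store
    #     def initialize
    #       @data = {}
    #     end
    #
    #     def read(key, options = nil)
    #       super
    #       @data[key]
    #     end
    #
    #     def write(key, value, options = nil)
    #       super
    #       @data[key] = value
    #     end
    #
    #     def delete(key, options = nil)
    #       super
    #       @data.delete(key)
    #     end
    #   end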
  end
end

require 'active_support/cache/file_store'
require 'active_support/cache/memory_store'
require 'active_support/cache/drb_store'
require 'active_support/cache/mem_cache_store'
require 'active_support/cache/compressed_mem_cache_store'