lib/active_support/cache.rb in activesupport-3.1.12 vs lib/active_support/cache.rb in activesupport-3.2.0.rc1

- old
+ new

@@ -14,90 +14,89 @@
   # See ActiveSupport::Cache::Store for documentation.
   module Cache
     autoload :FileStore, 'active_support/cache/file_store'
     autoload :MemoryStore, 'active_support/cache/memory_store'
     autoload :MemCacheStore, 'active_support/cache/mem_cache_store'
-    autoload :SynchronizedMemoryStore, 'active_support/cache/synchronized_memory_store'
-    autoload :CompressedMemCacheStore, 'active_support/cache/compressed_mem_cache_store'
+    autoload :NullStore, 'active_support/cache/null_store'

     # These options mean something to all cache implementations. Individual cache
     # implementations may support additional options.
     UNIVERSAL_OPTIONS = [:namespace, :compress, :compress_threshold, :expires_in, :race_condition_ttl]

     module Strategy
       autoload :LocalCache, 'active_support/cache/strategy/local_cache'
     end

-    # Creates a new CacheStore object according to the given options.
-    #
-    # If no arguments are passed to this method, then a new
-    # ActiveSupport::Cache::MemoryStore object will be returned.
-    #
-    # If you pass a Symbol as the first argument, then a corresponding cache
-    # store class under the ActiveSupport::Cache namespace will be created.
-    # For example:
-    #
-    #   ActiveSupport::Cache.lookup_store(:memory_store)
-    #   # => returns a new ActiveSupport::Cache::MemoryStore object
-    #
-    #   ActiveSupport::Cache.lookup_store(:mem_cache_store)
-    #   # => returns a new ActiveSupport::Cache::MemCacheStore object
-    #
-    # Any additional arguments will be passed to the corresponding cache store
-    # class's constructor:
-    #
-    #   ActiveSupport::Cache.lookup_store(:file_store, "/tmp/cache")
-    #   # => same as: ActiveSupport::Cache::FileStore.new("/tmp/cache")
-    #
-    # If the first argument is not a Symbol, then it will simply be returned:
-    #
-    #   ActiveSupport::Cache.lookup_store(MyOwnCacheStore.new)
-    #   # => returns MyOwnCacheStore.new
-    def self.lookup_store(*store_option)
-      store, *parameters = *Array.wrap(store_option).flatten
+    class << self
+      # Creates a new CacheStore object according to the given options.
+      #
+      # If no arguments are passed to this method, then a new
+      # ActiveSupport::Cache::MemoryStore object will be returned.
+      #
+      # If you pass a Symbol as the first argument, then a corresponding cache
+      # store class under the ActiveSupport::Cache namespace will be created.
+      # For example:
+      #
+      #   ActiveSupport::Cache.lookup_store(:memory_store)
+      #   # => returns a new ActiveSupport::Cache::MemoryStore object
+      #
+      #   ActiveSupport::Cache.lookup_store(:mem_cache_store)
+      #   # => returns a new ActiveSupport::Cache::MemCacheStore object
+      #
+      # Any additional arguments will be passed to the corresponding cache store
+      # class's constructor:
+      #
+      #   ActiveSupport::Cache.lookup_store(:file_store, "/tmp/cache")
+      #   # => same as: ActiveSupport::Cache::FileStore.new("/tmp/cache")
+      #
+      # If the first argument is not a Symbol, then it will simply be returned:
+      #
+      #   ActiveSupport::Cache.lookup_store(MyOwnCacheStore.new)
+      #   # => returns MyOwnCacheStore.new
+      def lookup_store(*store_option)
+        store, *parameters = *Array.wrap(store_option).flatten

-      case store
-      when Symbol
-        store_class_name = store.to_s.camelize
-        store_class =
-          begin
-            require "active_support/cache/#{store}"
-          rescue LoadError => e
-            raise "Could not find cache store adapter for #{store} (#{e})"
-          else
-            ActiveSupport::Cache.const_get(store_class_name)
-          end
-        store_class.new(*parameters)
-      when nil
-        ActiveSupport::Cache::MemoryStore.new
-      else
-        store
+        case store
+        when Symbol
+          store_class_name = store.to_s.camelize
+          store_class =
+            begin
+              require "active_support/cache/#{store}"
+            rescue LoadError => e
+              raise "Could not find cache store adapter for #{store} (#{e})"
+            else
+              ActiveSupport::Cache.const_get(store_class_name)
+            end
+          store_class.new(*parameters)
+        when nil
+          ActiveSupport::Cache::MemoryStore.new
+        else
+          store
+        end
       end
-    end

-    def self.expand_cache_key(key, namespace = nil)
-      expanded_cache_key = namespace ? "#{namespace}/" : ""
+      def expand_cache_key(key, namespace = nil)
+        expanded_cache_key = namespace ? "#{namespace}/" : ""

-      prefix = ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
-      if prefix
-        expanded_cache_key << "#{prefix}/"
+        prefix = ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
+        if prefix
+          expanded_cache_key << "#{prefix}/"
+        end
+
+        expanded_cache_key << retrieve_cache_key(key)
+        expanded_cache_key
       end

-      expanded_cache_key <<
-        if key.respond_to?(:cache_key)
-          key.cache_key
-        elsif key.is_a?(Array)
-          if key.size > 1
-            key.collect { |element| expand_cache_key(element) }.to_param
-          else
-            key.first.to_param
-          end
-        elsif key
-          key.to_param
-        end.to_s
+      private

-      expanded_cache_key
+      def retrieve_cache_key(key)
+        case
+        when key.respond_to?(:cache_key) then key.cache_key
+        when key.is_a?(Array)            then ['Array', *key.map { |element| retrieve_cache_key(element) }].to_param
+        else                                  key.to_param
+        end.to_s
+      end
     end

     # An abstract cache store class. There are multiple cache store
     # implementations, each having its own additional features. See the classes
     # under the ActiveSupport::Cache module, e.g.
@@ -114,45 +113,46 @@
     #   cache.read("city")   # => nil
     #   cache.write("city", "Duckburgh")
     #   cache.read("city")   # => "Duckburgh"
     #
     # Keys are always translated into Strings and are case sensitive. When an
-    # object is specified as a key, its +cache_key+ method will be called if it
-    # is defined. Otherwise, the +to_param+ method will be called. Hashes and
-    # Arrays can be used as keys. The elements will be delimited by slashes
-    # and Hashes elements will be sorted by key so they are consistent.
+    # object is specified as a key and has a +cache_key+ method defined, this
+    # method will be called to define the key. Otherwise, the +to_param+
+    # method will be called. Hashes and Arrays can also be used as keys. The
+    # elements will be delimited by slashes, and the elements within a Hash
+    # will be sorted by key so they are consistent.
     #
     #   cache.read("city") == cache.read(:city)   # => true
     #
     # Nil values can be cached.
     #
-    # If your cache is on a shared infrastructure, you can define a namespace for
-    # your cache entries. If a namespace is defined, it will be prefixed on to every
-    # key. The namespace can be either a static value or a Proc. If it is a Proc, it
-    # will be invoked when each key is evaluated so that you can use application logic
-    # to invalidate keys.
+    # If your cache is on a shared infrastructure, you can define a namespace
+    # for your cache entries. If a namespace is defined, it will be prefixed on
+    # to every key. The namespace can be either a static value or a Proc. If it
+    # is a Proc, it will be invoked when each key is evaluated so that you can
+    # use application logic to invalidate keys.
     #
     #   cache.namespace = lambda { @last_mod_time }  # Set the namespace to a variable
     #   @last_mod_time = Time.now  # Invalidate the entire cache by changing namespace
     #
     #
-    # Caches can also store values in a compressed format to save space and reduce
-    # time spent sending data. Since there is some overhead, values must be large
-    # enough to warrant compression. To turn on compression either pass
-    # <tt>:compress => true</tt> in the initializer or to +fetch+ or +write+.
-    # To specify the threshold at which to compress values, set
-    # <tt>:compress_threshold</tt>. The default threshold is 32K.
+    # Caches can also store values in a compressed format to save space and
+    # reduce time spent sending data. Since there is overhead, values must be
+    # large enough to warrant compression. To turn on compression either pass
+    # <tt>:compress => true</tt> in the initializer or as an option to +fetch+
+    # or +write+. To specify the threshold at which to compress values, set the
+    # <tt>:compress_threshold</tt> option. The default threshold is 16K.
     class Store
       cattr_accessor :logger, :instance_writer => true

       attr_reader :silence, :options
       alias :silence? :silence

       # Create a new cache. The options will be passed to any write method calls except
       # for :namespace which can be used to set the global namespace for the cache.
-      def initialize (options = nil)
+      def initialize(options = nil)
         @options = options ? options.dup : {}
       end

       # Silence the logger.
       def silence!
@@ -178,15 +178,15 @@
       end

       # Fetches data from the cache, using the given key. If there is data in
       # the cache with the given key, then that data is returned.
       #
-      # If there is no such data in the cache (a cache miss occurred),
-      # then nil will be returned. However, if a block has been passed, then
-      # that block will be run in the event of a cache miss. The return value
-      # of the block will be written to the cache under the given cache key,
-      # and that return value will be returned.
+      # If there is no such data in the cache (a cache miss), then nil will be
+      # returned. However, if a block has been passed, that block will be run
+      # in the event of a cache miss. The return value of the block will be
+      # written to the cache under the given cache key, and that return value
+      # will be returned.
       #
       #   cache.write("today", "Monday")
       #   cache.fetch("today")  # => "Monday"
       #
       #   cache.fetch("city")   # => nil
@@ -203,14 +203,15 @@
       #
       # Setting <tt>:compress</tt> will store a large cache entry set by the call
       # in a compressed format.
       #
       #
-      # Setting <tt>:expires_in</tt> will set an expiration time on the cache. All caches
-      # support auto expiring content after a specified number of seconds. This value can
-      # be specified as an option to the construction in which call all entries will be
-      # affected. Or it can be supplied to the +fetch+ or +write+ method for just one entry.
+      # Setting <tt>:expires_in</tt> will set an expiration time on the cache.
+      # All caches support auto-expiring content after a specified number of
+      # seconds. This value can be specified as an option to the constructor
+      # (in which case all entries will be affected), or it can be supplied to
+      # the +fetch+ or +write+ method to effect just one entry.
       #
       #   cache = ActiveSupport::Cache::MemoryStore.new(:expires_in => 5.minutes)
       #   cache.write(key, value, :expires_in => 1.minute)  # Set a lower value for one entry
       #
       # Setting <tt>:race_condition_ttl</tt> is very useful in situations where a cache entry
@@ -536,15 +537,15 @@
       class << self
         # Create an entry with internal attributes set. This method is intended to be
         # used by implementations that store cache entries in a native format instead
         # of as serialized Ruby objects.
-        def create (raw_value, created_at, options = {})
+        def create(raw_value, created_at, options = {})
           entry = new(nil)
           entry.instance_variable_set(:@value, raw_value)
           entry.instance_variable_set(:@created_at, created_at.to_f)
-          entry.instance_variable_set(:@compressed, !!options[:compressed])
+          entry.instance_variable_set(:@compressed, options[:compressed])
           entry.instance_variable_set(:@expires_in, options[:expires_in])
           entry
         end
       end
@@ -553,35 +554,33 @@
       def initialize(value, options = {})
        @compressed = false
        @expires_in = options[:expires_in]
        @expires_in = @expires_in.to_f if @expires_in
        @created_at = Time.now.to_f
-        unless value.nil?
-          if should_compress?(value, options)
-            @value = Zlib::Deflate.deflate(Marshal.dump(value))
+        if value.nil?
+          @value = nil
+        else
+          @value = Marshal.dump(value)
+          if should_compress?(@value, options)
+            @value = Zlib::Deflate.deflate(@value)
             @compressed = true
-          else
-            @value = value
           end
-        else
-          @value = nil
         end
       end

       # Get the raw value. This value may be serialized and compressed.
       def raw_value
         @value
       end

       # Get the value stored in the cache.
       def value
-        unless @value.nil?
-          val = compressed? ? Marshal.load(Zlib::Inflate.inflate(@value)) : @value
-          unless val.frozen?
-            val.freeze rescue nil
-          end
-          val
+        # If the original value was exactly false @value is still true because
+        # it is marshalled and eventually compressed. Both operations yield
+        # strings.
+        if @value
+          Marshal.load(compressed? ? Zlib::Inflate.inflate(@value) : @value)
         end
       end

       def compressed?
         @compressed
@@ -610,24 +609,19 @@
       # Returns the size of the cached value. This could be less than value.size
       # if the data is compressed.
       def size
         if @value.nil?
           0
-        elsif @value.respond_to?(:bytesize)
-          @value.bytesize
         else
-          Marshal.dump(@value).bytesize
+          @value.bytesize
         end
       end

       private

-        def should_compress?(value, options)
-          if options[:compress] && value
-            unless value.is_a?(Numeric)
-              compress_threshold = options[:compress_threshold] || DEFAULT_COMPRESS_LIMIT
-              serialized_value = value.is_a?(String) ? value : Marshal.dump(value)
-              return true if serialized_value.size >= compress_threshold
-            end
+        def should_compress?(serialized_value, options)
+          if options[:compress]
+            compress_threshold = options[:compress_threshold] || DEFAULT_COMPRESS_LIMIT
+            return true if serialized_value.size >= compress_threshold
           end
           false
         end
     end
   end
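
In practice, the Symbol-based lookup shown in the first hunk resolves a store class by requiring "active_support/cache/#{store}" and constantizing the camelized name, so the new NullStore autoload becomes reachable as :null_store. A minimal usage sketch, assuming the activesupport 3.2 gem is installed and on the load path:

  require 'active_support'
  require 'active_support/cache'

  memory = ActiveSupport::Cache.lookup_store(:memory_store, :expires_in => 300)
  memory.write('city', 'Duckburgh')
  memory.fetch('city')                 # => "Duckburgh"
  memory.fetch('today') { 'Monday' }   # => "Monday" (block runs on a miss and is written back)

  null = ActiveSupport::Cache.lookup_store(:null_store)   # autoload added in 3.2
  null.write('city', 'Duckburgh')
  null.read('city')                    # => nil, NullStore never retains anything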
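
The new private retrieve_cache_key also changes how Array keys expand: the expanded key is prefixed with 'Array' and every element is expanded recursively, where the old expand_cache_key simply joined the elements. A self-contained sketch of the two strategies in plain Ruby; old_expand and new_retrieve are hypothetical names, to_param is approximated with to_s and join('/'), and the cache_key branch is omitted:

  # Old (3.1) strategy: join the expanded elements, no marker for the Array itself.
  def old_expand(key)
    if key.is_a?(Array)
      key.size > 1 ? key.map { |element| old_expand(element) }.join('/') : key.first.to_s
    else
      key.to_s
    end
  end

  # New (3.2) strategy: prefix with 'Array' and recurse into every element.
  def new_retrieve(key)
    if key.is_a?(Array)
      (['Array'] + key.map { |element| new_retrieve(element) }).join('/')
    else
      key.to_s
    end
  end

  old_expand([:posts, 5])    # => "posts/5"
  new_retrieve([:posts, 5])  # => "Array/posts/5"  ('Array' prefix is the 3.2 change)
  new_retrieve(:posts)       # => "posts"          (non-Array keys expand as before)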
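
The Entry hunks all follow from one decision: the value is marshalled up front, and only the serialized string is (optionally) deflated. That is why should_compress? now receives the serialized value, why size can always use @value.bytesize, and why #value can test @value for truthiness even when false was cached. A minimal sketch of that write/read path; pack and unpack are illustrative stand-ins, not the real Entry API, and DEFAULT_COMPRESS_LIMIT mirrors the new 16K default:

  require 'zlib'

  DEFAULT_COMPRESS_LIMIT = 16 * 1024   # 16K, the new default threshold

  def pack(value, options = {})
    serialized = Marshal.dump(value)   # always a non-nil String, even for a false payload
    threshold  = options[:compress_threshold] || DEFAULT_COMPRESS_LIMIT
    if options[:compress] && serialized.size >= threshold
      [Zlib::Deflate.deflate(serialized), true]   # store the deflated serialized form
    else
      [serialized, false]
    end
  end

  def unpack(raw, compressed)
    Marshal.load(compressed ? Zlib::Inflate.inflate(raw) : raw)
  end

  raw, compressed = pack('x' * (64 * 1024), :compress => true)
  compressed                    # => true, the serialized form crossed the 16K threshold
  unpack(raw, compressed).size  # => 65536

  raw, compressed = pack(false)
  unpack(raw, compressed)       # => false; raw is a marshalled String, so a truthiness check stays safe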