lib/active_support/cache.rb in activesupport-4.0.13 vs lib/active_support/cache.rb in activesupport-4.1.0.beta1
- old (activesupport-4.0.13)
+ new (activesupport-4.1.0.beta1)
@@ -1,23 +1,23 @@
require 'benchmark'
require 'zlib'
require 'active_support/core_ext/array/extract_options'
require 'active_support/core_ext/array/wrap'
require 'active_support/core_ext/benchmark'
-require 'active_support/core_ext/class/attribute_accessors'
+require 'active_support/core_ext/module/attribute_accessors'
require 'active_support/core_ext/numeric/bytes'
require 'active_support/core_ext/numeric/time'
require 'active_support/core_ext/object/to_param'
require 'active_support/core_ext/string/inflections'
module ActiveSupport
# See ActiveSupport::Cache::Store for documentation.
module Cache
- autoload :FileStore, 'active_support/cache/file_store'
- autoload :MemoryStore, 'active_support/cache/memory_store'
+ autoload :FileStore, 'active_support/cache/file_store'
+ autoload :MemoryStore, 'active_support/cache/memory_store'
autoload :MemCacheStore, 'active_support/cache/mem_cache_store'
- autoload :NullStore, 'active_support/cache/null_store'
+ autoload :NullStore, 'active_support/cache/null_store'
# These options mean something to all cache implementations. Individual cache
# implementations may support additional options.
UNIVERSAL_OPTIONS = [:namespace, :compress, :compress_threshold, :expires_in, :race_condition_ttl]
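
All of these universal options can be given either to the store constructor or to an individual call; the snippet below is an illustrative sketch (the store choice, key and cached value are invented here), not part of either gem.

    cache = ActiveSupport::Cache::MemoryStore.new(namespace: "app-v1")
    cache.fetch("settings", expires_in: 12.hours, race_condition_ttl: 10) do
      { theme: "dark" }  # recomputed only when the entry is missing or expired
    end
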
@@ -86,29 +86,28 @@
expanded_cache_key << retrieve_cache_key(key)
expanded_cache_key
end
private
+ def retrieve_cache_key(key)
+ case
+ when key.respond_to?(:cache_key) then key.cache_key
+ when key.is_a?(Array) then key.map { |element| retrieve_cache_key(element) }.to_param
+ when key.respond_to?(:to_a) then retrieve_cache_key(key.to_a)
+ else key.to_param
+ end.to_s
+ end
- def retrieve_cache_key(key)
- case
- when key.respond_to?(:cache_key) then key.cache_key
- when key.is_a?(Array) then key.map { |element| retrieve_cache_key(element) }.to_param
- when key.respond_to?(:to_a) then retrieve_cache_key(key.to_a)
- else key.to_param
- end.to_s
- end
-
- # Obtains the specified cache store class, given the name of the +store+.
- # Raises an error when the store class cannot be found.
- def retrieve_store_class(store)
- require "active_support/cache/#{store}"
- rescue LoadError => e
- raise "Could not find cache store adapter for #{store} (#{e})"
- else
- ActiveSupport::Cache.const_get(store.to_s.camelize)
- end
+ # Obtains the specified cache store class, given the name of the +store+.
+ # Raises an error when the store class cannot be found.
+ def retrieve_store_class(store)
+ require "active_support/cache/#{store}"
+ rescue LoadError => e
+ raise "Could not find cache store adapter for #{store} (#{e})"
+ else
+ ActiveSupport::Cache.const_get(store.to_s.camelize)
+ end
end
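
retrieve_store_class backs the public ActiveSupport::Cache.lookup_store helper (defined earlier in this file, outside the hunk), which resolves a store by name and hands any remaining arguments to its constructor. A rough sketch of both the success and the failure path, using the error message shown above:

    ActiveSupport::Cache.lookup_store(:memory_store, size: 32.megabytes)
    # => #<ActiveSupport::Cache::MemoryStore ...>

    ActiveSupport::Cache.lookup_store(:no_such_store)
    # => RuntimeError: Could not find cache store adapter for no_such_store (...)
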
# An abstract cache store class. There are multiple cache store
# implementations, each having its own additional features. See the classes
# under the ActiveSupport::Cache module, e.g.
@@ -151,11 +150,10 @@
# large enough to warrant compression. To turn on compression either pass
# <tt>compress: true</tt> in the initializer or as an option to +fetch+
# or +write+. To specify the threshold at which to compress values, set the
# <tt>:compress_threshold</tt> option. The default threshold is 16K.
class Store
-
cattr_accessor :logger, :instance_writer => true
attr_reader :silence, :options
alias :silence? :silence
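
To make the compression options described in the comment above concrete (the threshold and payload sizes here are arbitrary example values):

    # Deflate every value larger than 4KB written through this store.
    cache = ActiveSupport::Cache::MemoryStore.new(compress: true,
                                                  compress_threshold: 4.kilobytes)

    # Or opt in for a single oversized entry only.
    large_payload = "x" * 100_000
    cache.write("report", large_payload, compress: true)
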
@@ -226,11 +224,11 @@
# cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 5.minutes)
# cache.write(key, value, expires_in: 1.minute) # Set a lower value for one entry
#
# Setting <tt>:race_condition_ttl</tt> is very useful in situations where
# a cache entry is used very frequently and is under heavy load. If a
- # cache expires and due to heavy load seven different processes will try
+ # cache expires and due to heavy load several different processes will try
# to read data natively and then they all will try to write to cache. To
# avoid that case the first process to find an expired cache entry will
# bump the cache expiration time by the value set in <tt>:race_condition_ttl</tt>.
# Yes, this process is extending the time for a stale value by another few
# seconds. Because of extended life of the previous cache, other processes
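
A hedged sketch of the pattern this comment block describes (compute_price_from_database is an invented placeholder for the slow regeneration work):

    # The first process to hit a stale entry extends it by race_condition_ttl
    # seconds and regenerates it; other processes keep serving the slightly
    # stale value in the meantime.
    cache = ActiveSupport::Cache::MemoryStore.new
    cache.fetch("product-price", expires_in: 5.minutes, race_condition_ttl: 10) do
      compute_price_from_database  # invented placeholder for the slow work
    end
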
@@ -350,15 +348,44 @@
end
end
results
end
+ # Fetches data from the cache, using the given keys. If there is data in
+ # the cache with the given keys, then that data is returned. Otherwise,
+ # the supplied block is called for each key for which there was no data,
+ # and the result will be written to the cache and returned.
+ #
+ # Options are passed to the underlying cache implementation.
+ #
+ # Returns an array with the data for each of the names. For example:
+ #
+ # cache.write("bim", "bam")
+ # cache.fetch_multi("bim", "boom") {|key| key * 2 }
+ # # => ["bam", "boomboom"]
+ #
+ def fetch_multi(*names)
+ options = names.extract_options!
+ options = merged_options(options)
+
+ results = read_multi(*names, options)
+
+ names.map do |name|
+ results.fetch(name) do
+ value = yield name
+ write(name, value, options)
+ value
+ end
+ end
+ end
+
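
A usage sketch for the new method: options given after the keys are merged once and applied to every write, the block runs only for the keys that missed, and results come back in key order.

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.write("alpha", "a")
    cache.fetch_multi("alpha", "beta", expires_in: 10.minutes) { |key| key.upcase }
    # => ["a", "BETA"]  -- the block ran only for the missing "beta"
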
# Writes the value to the cache, with the key.
#
# Options are passed to the underlying cache implementation.
def write(name, value, options = nil)
options = merged_options(options)
+
instrument(:write, name, options) do
entry = Entry.new(value, options)
write_entry(namespaced_key(name, options), entry, options)
end
end
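
For instance (key, value and expiry are arbitrary):

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.write("greeting", "hello", expires_in: 1.hour)
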
@@ -366,23 +393,25 @@
# Deletes an entry in the cache. Returns +true+ if an entry is deleted.
#
# Options are passed to the underlying cache implementation.
def delete(name, options = nil)
options = merged_options(options)
+
instrument(:delete, name) do
delete_entry(namespaced_key(name, options), options)
end
end
# Return +true+ if the cache contains an entry for the given key.
#
# Options are passed to the underlying cache implementation.
def exist?(name, options = nil)
options = merged_options(options)
+
instrument(:exist?, name) do
entry = read_entry(namespaced_key(name, options), options)
- entry && !entry.expired?
+ (entry && !entry.expired?) || false
end
end
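
The || false appended here makes exist? return a strict boolean; previously a missing key produced nil. For example:

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.write("present", 1)
    cache.exist?("present")  # => true
    cache.exist?("missing")  # => false (was nil before this change)
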
# Delete all entries with keys matching the pattern.
#
@@ -555,10 +584,11 @@
def save_block_result_to_cache(name, options)
result = instrument(:generate, name, options) do |payload|
yield(name)
end
+
write(name, result, options)
result
end
end
@@ -578,10 +608,11 @@
@value = compress(value)
@compressed = true
else
@value = value
end
+
@created_at = Time.now.to_f
@expires_in = options[:expires_in]
@expires_in = @expires_in.to_f if @expires_in
end
@@ -591,11 +622,11 @@
end
# Check if the entry is expired. The +expires_in+ parameter can override
# the value set when the entry was created.
def expired?
- convert_version_4beta1_entry! if defined?(@v)
+ convert_version_4beta1_entry! if defined?(@value)
@expires_in && @created_at + @expires_in <= Time.now.to_f
end
def expires_at
@expires_in ? @created_at + @expires_in : nil
@@ -628,10 +659,11 @@
# Duplicate the value in a class. This is used by cache implementations that don't natively
# serialize entries to protect against accidental cache modifications.
def dup_value!
convert_version_4beta1_entry! if defined?(@v)
+
if @value && !compressed? && !(@value.is_a?(Numeric) || @value == true || @value == false)
if @value.is_a?(String)
@value = @value.dup
else
@value = Marshal.load(Marshal.dump(@value))
@@ -642,12 +674,14 @@
private
def should_compress?(value, options)
if value && options[:compress]
compress_threshold = options[:compress_threshold] || DEFAULT_COMPRESS_LIMIT
serialized_value_size = (value.is_a?(String) ? value : Marshal.dump(value)).bytesize
+
return true if serialized_value_size >= compress_threshold
end
+
false
end
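
Tracing should_compress? with the default 16K limit (the 20,000-character string is just an arbitrary oversized value):

    big   = ActiveSupport::Cache::Entry.new("x" * 20_000, compress: true)
    small = ActiveSupport::Cache::Entry.new("hi", compress: true)
    big.value    # => the original 20_000-byte string, inflated on read
    small.value  # => "hi", stored uncompressed (2 bytes is under the limit)
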
def compressed?
defined?(@compressed) ? @compressed : false
@@ -666,13 +700,15 @@
def convert_version_4beta1_entry!
if defined?(@v)
@value = @v
remove_instance_variable(:@v)
end
+
if defined?(@c)
@compressed = @c
remove_instance_variable(:@c)
end
+
if defined?(@x) && @x
@created_at ||= Time.now.to_f
@expires_in = @x - @created_at
remove_instance_variable(:@x)
end
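
To make the conversion concrete, the sketch below fakes a 4.0.0.beta1-era entry with instance_variable_set (purely illustrative; real legacy entries arrive via Marshal from an existing cache):

    legacy = ActiveSupport::Cache::Entry.allocate
    legacy.instance_variable_set(:@v, "cached body")       # old value slot
    legacy.instance_variable_set(:@c, false)               # old compressed flag
    legacy.instance_variable_set(:@x, Time.now.to_f + 60)  # old expires-at stamp

    legacy.send(:convert_version_4beta1_entry!)            # private helper
    legacy.instance_variable_get(:@value)       # => "cached body"
    legacy.instance_variable_get(:@expires_in)  # => roughly 60.0
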