lib/active_support/cache.rb in activesupport-5.0.0.beta1.1 vs lib/active_support/cache.rb in activesupport-5.0.0.beta2
- old
+ new
@@ -253,14 +253,15 @@
       #     val_2 = cache.fetch('foo', race_condition_ttl: 10) do
       #       'new value 2'
       #     end
       #   end
       #
-      #   # val_1 => "new value 1"
-      #   # val_2 => "original value"
-      #   # sleep 10 # First thread extend the life of cache by another 10 seconds
-      #   # cache.fetch('foo') => "new value 1"
+      #   cache.fetch('foo') # => "original value"
+      #   sleep 10 # First thread extended the life of cache by another 10 seconds
+      #   cache.fetch('foo') # => "new value 1"
+      #   val_1 # => "new value 1"
+      #   val_2 # => "original value"
       #
       # Other options will be handled by the specific cache store implementation.
       # Internally, #fetch calls #read_entry, and calls #write_entry on a cache
       # miss. +options+ will be passed to the #read and #write calls.
       #
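
The comment lines above describe #fetch's contract in prose. As a minimal sketch of the same hit, miss and :force paths (the store choice, key and values here are illustrative, not taken from the diff):

    require 'active_support'
    require 'active_support/cache'

    cache = ActiveSupport::Cache::MemoryStore.new

    cache.fetch('city')                           # => nil, a miss with no block behaves like #read
    cache.fetch('city') { 'Duckburgh' }           # => "Duckburgh", the block result is written on the miss
    cache.fetch('city')                           # => "Duckburgh", a plain hit returns the cached value
    cache.fetch('city', force: true) { 'Gotham' } # => "Gotham", :force skips the read and recomputes

The :force branch corresponds to the options[:force] guard visible in the hunk below.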
@@ -276,21 +277,21 @@
       def fetch(name, options = nil)
         if block_given?
           options = merged_options(options)
           key = normalize_key(name, options)
+          entry = nil
           instrument(:read, name, options) do |payload|
             cached_entry = read_entry(key, options) unless options[:force]
-            payload[:super_operation] = :fetch if payload
             entry = handle_expired_entry(cached_entry, key, options)
-            if entry
-              payload[:hit] = true if payload
-              get_entry_value(entry, name, options)
-            else
-              payload[:hit] = false if payload
-              save_block_result_to_cache(name, options) { |_name| yield _name }
-            end
+            payload[:super_operation] = :fetch if payload
+            payload[:hit] = !!entry if payload
+          end
+          if entry
+            get_entry_value(entry, name, options)
+          else
+            save_block_result_to_cache(name, options) { |_name| yield _name }
           end
         else
           read(name, options)
         end
       end
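
The net effect of this hunk is that the instrument block now only gathers the entry and fills in the payload (:super_operation, plus :hit derived as !!entry), while the hit/miss branch and the method's return value are computed afterwards, so #fetch no longer depends on the value returned through #instrument. A sketch of observing that payload follows; the event name "cache_read.active_support" comes from Store#instrument, while the store, key and output format are only illustrative:

    require 'active_support'
    require 'active_support/cache'
    require 'active_support/notifications'

    ActiveSupport::Notifications.subscribe('cache_read.active_support') do |_name, _start, _finish, _id, payload|
      # :hit and :super_operation are the keys assigned inside the new instrument block.
      puts "fetch #{payload[:key]}: hit=#{payload[:hit]} super_operation=#{payload[:super_operation]}"
    end

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.fetch('greeting') { 'hello' }   # miss: prints hit=false super_operation=fetch
    cache.fetch('greeting') { 'hello' }   # hit:  prints hit=true super_operation=fetch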