Commit 37dbcc61 authored by Akira Matsuda, committed by Jean Boussier

Introduce keyword arguments for some AS::Cache methods

Since RedisCacheStore#write_entry takes kwargs, we needed to kwargsify all of these methods
in order to eliminate Ruby 2.7 warnings.

It's a slightly bigger patch than I expected, but it doesn't warn on Ruby 3,
and it doesn't introduce any incompatibility on older Rubies, so it may not be a bad thing anyway.
Parent 7caea808
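For context, the Ruby 2.7 warning the commit message refers to is emitted when an options hash is passed as the last positional argument to a method that declares `**options`; on Ruby 3 the hash is no longer implicitly converted at all. The snippet below is a minimal, made-up sketch of that pattern and of the `**options` splat applied throughout this diff — `MyStore` is a hypothetical stand-in, not part of Active Support:

```ruby
# Minimal sketch of the problem this patch addresses (illustrative only;
# MyStore and its methods are made-up stand-ins, not Active Support code).
class MyStore
  # After the change, the provider interface takes keyword arguments.
  def write_entry(key, entry, **options)
    [key, entry, options]
  end

  def write(key, entry, options = {})
    # Passing the hash positionally, as in `write_entry(key, entry, options)`,
    # triggers "Using the last argument as keyword parameters is deprecated"
    # on Ruby 2.7 and is not converted to kwargs on Ruby 3.
    # Splatting it keeps both versions happy:
    write_entry(key, entry, **options)
  end
end

p MyStore.new.write("foo", "bar", expires_in: 60)
# => ["foo", "bar", {:expires_in=>60}]
```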
......@@ -318,7 +318,7 @@ def fetch(name, options = nil)
entry = nil
instrument(:read, name, options) do |payload|
cached_entry = read_entry(key, options) unless options[:force]
cached_entry = read_entry(key, **options) unless options[:force]
entry = handle_expired_entry(cached_entry, key, options)
entry = nil if entry && entry.mismatched?(normalize_version(name, options))
payload[:super_operation] = :fetch if payload
......@@ -326,9 +326,9 @@ def fetch(name, options = nil)
end
if entry
get_entry_value(entry, name, options)
get_entry_value(entry, name, **options)
else
save_block_result_to_cache(name, options) { |_name| yield _name }
save_block_result_to_cache(name, **options) { |_name| yield _name }
end
elsif options && options[:force]
raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block."
......@@ -352,11 +352,11 @@ def read(name, options = nil)
version = normalize_version(name, options)
instrument(:read, name, options) do |payload|
entry = read_entry(key, options)
entry = read_entry(key, **options)
if entry
if entry.expired?
delete_entry(key, options)
delete_entry(key, **options)
payload[:hit] = false if payload
nil
elsif entry.mismatched?(version)
......@@ -384,7 +384,7 @@ def read_multi(*names)
options = merged_options(options)
instrument :read_multi, names, options do |payload|
read_multi_entries(names, options).tap do |results|
read_multi_entries(names, **options).tap do |results|
payload[:hits] = results.keys
end
end
......@@ -396,10 +396,10 @@ def write_multi(hash, options = nil)
instrument :write_multi, hash, options do |payload|
entries = hash.each_with_object({}) do |(name, value), memo|
memo[normalize_key(name, options)] = Entry.new(value, options.merge(version: normalize_version(name, options)))
memo[normalize_key(name, options)] = Entry.new(value, **options.merge(version: normalize_version(name, options)))
end
write_multi_entries entries, options
write_multi_entries entries, **options
end
end
......@@ -438,7 +438,7 @@ def fetch_multi(*names)
options = merged_options(options)
instrument :read_multi, names, options do |payload|
reads = read_multi_entries(names, options)
reads = read_multi_entries(names, **options)
writes = {}
ordered = names.each_with_object({}) do |name, hash|
hash[name] = reads.fetch(name) { writes[name] = yield(name) }
......@@ -447,7 +447,7 @@ def fetch_multi(*names)
payload[:hits] = reads.keys
payload[:super_operation] = :fetch_multi
write_multi(writes, options)
write_multi(writes, **options)
ordered
end
......@@ -460,8 +460,8 @@ def write(name, value, options = nil)
options = merged_options(options)
instrument(:write, name, options) do
entry = Entry.new(value, options.merge(version: normalize_version(name, options)))
write_entry(normalize_key(name, options), entry, options)
entry = Entry.new(value, **options.merge(version: normalize_version(name, options)))
write_entry(normalize_key(name, options), entry, **options)
end
end
......@@ -472,7 +472,7 @@ def delete(name, options = nil)
options = merged_options(options)
instrument(:delete, name) do
delete_entry(normalize_key(name, options), options)
delete_entry(normalize_key(name, options), **options)
end
end
......@@ -483,7 +483,7 @@ def exist?(name, options = nil)
options = merged_options(options)
instrument(:exist?, name) do
entry = read_entry(normalize_key(name, options), options)
entry = read_entry(normalize_key(name, options), **options)
(entry && !entry.expired? && !entry.mismatched?(normalize_version(name, options))) || false
end
end
......@@ -556,28 +556,28 @@ def key_matcher(pattern, options) # :doc:
# Reads an entry from the cache implementation. Subclasses must implement
# this method.
def read_entry(key, options)
def read_entry(key, **options)
raise NotImplementedError.new
end
# Writes an entry to the cache implementation. Subclasses must implement
# this method.
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
raise NotImplementedError.new
end
# Reads multiple entries from the cache implementation. Subclasses MAY
# implement this method.
def read_multi_entries(names, options)
def read_multi_entries(names, **options)
results = {}
names.each do |name|
key = normalize_key(name, options)
version = normalize_version(name, options)
entry = read_entry(key, options)
entry = read_entry(key, **options)
if entry
if entry.expired?
delete_entry(key, options)
delete_entry(key, **options)
elsif entry.mismatched?(version)
# Skip mismatched versions
else
......@@ -590,15 +590,15 @@ def read_multi_entries(names, options)
# Writes multiple entries to the cache implementation. Subclasses MAY
# implement this method.
def write_multi_entries(hash, options)
def write_multi_entries(hash, **options)
hash.each do |key, entry|
write_entry key, entry, options
write_entry key, entry, **options
end
end
# Deletes an entry from the cache implementation. Subclasses must
# implement this method.
def delete_entry(key, options)
def delete_entry(key, **options)
raise NotImplementedError.new
end
......@@ -699,7 +699,7 @@ def handle_expired_entry(entry, key, options)
entry.expires_at = Time.now + race_ttl
write_entry(key, entry, expires_in: race_ttl * 2)
else
delete_entry(key, options)
delete_entry(key, **options)
end
entry = nil
end
......@@ -711,7 +711,7 @@ def get_entry_value(entry, name, options)
entry.value
end
def save_block_result_to_cache(name, options)
def save_block_result_to_cache(name, **options)
result = instrument(:generate, name, options) do
yield(name)
end
......
......@@ -43,8 +43,8 @@ def clear(options = nil)
def cleanup(options = nil)
options = merged_options(options)
search_dir(cache_path) do |fname|
entry = read_entry(fname, options)
delete_entry(fname, options) if entry && entry.expired?
entry = read_entry(fname, **options)
delete_entry(fname, **options) if entry && entry.expired?
end
end
......@@ -66,13 +66,13 @@ def delete_matched(matcher, options = nil)
matcher = key_matcher(matcher, options)
search_dir(cache_path) do |path|
key = file_path_key(path)
delete_entry(path, options) if key.match(matcher)
delete_entry(path, **options) if key.match(matcher)
end
end
end
private
def read_entry(key, options)
def read_entry(key, **options)
if File.exist?(key)
File.open(key) { |f| Marshal.load(f) }
end
......@@ -81,14 +81,14 @@ def read_entry(key, options)
nil
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
return false if options[:unless_exist] && File.exist?(key)
ensure_cache_path(File.dirname(key))
File.atomic_write(key, cache_path) { |f| Marshal.dump(entry, f) }
true
end
def delete_entry(key, options)
def delete_entry(key, **options)
if File.exist?(key)
begin
File.delete(key)
......
......@@ -28,7 +28,7 @@ class MemCacheStore < Store
# Provide support for raw values in the local cache strategy.
module LocalCacheWithRaw # :nodoc:
private
def read_entry(key, options)
def read_entry(key, **options)
entry = super
if options[:raw] && local_cache && entry
entry = deserialize_entry(entry.value)
......@@ -36,11 +36,11 @@ def read_entry(key, options)
entry
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
if options[:raw] && local_cache
raw_entry = Entry.new(entry.value.to_s)
raw_entry.expires_at = entry.expires_at
super(key, raw_entry, options)
super(key, raw_entry, **options)
else
super
end
......@@ -142,12 +142,12 @@ def stats
private
# Read an entry from the cache.
def read_entry(key, options)
def read_entry(key, **options)
rescue_error_with(nil) { deserialize_entry(@data.with { |c| c.get(key, options) }) }
end
# Write an entry to the cache.
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
method = options && options[:unless_exist] ? :add : :set
value = options[:raw] ? entry.value.to_s : entry
expires_in = options[:expires_in].to_i
......@@ -156,12 +156,12 @@ def write_entry(key, entry, options)
expires_in += 5.minutes
end
rescue_error_with false do
@data.with { |c| c.send(method, key, value, expires_in, options) }
@data.with { |c| c.send(method, key, value, expires_in, **options) }
end
end
# Reads multiple entries from the cache implementation.
def read_multi_entries(names, options)
def read_multi_entries(names, **options)
keys_to_names = Hash[names.map { |name| [normalize_key(name, options), name] }]
raw_values = @data.with { |c| c.get_multi(keys_to_names.keys) }
......@@ -179,7 +179,7 @@ def read_multi_entries(names, options)
end
# Delete an entry from the cache.
def delete_entry(key, options)
def delete_entry(key, **options)
rescue_error_with(false) { @data.with { |c| c.delete(key) } }
end
......
......@@ -51,7 +51,7 @@ def cleanup(options = nil)
keys = synchronize { @data.keys }
keys.each do |key|
entry = @data[key]
delete_entry(key, options) if entry && entry.expired?
delete_entry(key, **options) if entry && entry.expired?
end
end
end
......@@ -67,7 +67,7 @@ def prune(target_size, max_time = nil)
instrument(:prune, target_size, from: @cache_size) do
keys = synchronize { @key_access.keys.sort { |a, b| @key_access[a].to_f <=> @key_access[b].to_f } }
keys.each do |key|
delete_entry(key, options)
delete_entry(key, **options)
return if @cache_size <= target_size || (max_time && Concurrent.monotonic_time - start_time > max_time)
end
end
......@@ -98,7 +98,7 @@ def delete_matched(matcher, options = nil)
matcher = key_matcher(matcher, options)
keys = synchronize { @data.keys }
keys.each do |key|
delete_entry(key, options) if key.match(matcher)
delete_entry(key, **options) if key.match(matcher)
end
end
end
......@@ -120,7 +120,7 @@ def cached_size(key, entry)
key.to_s.bytesize + entry.size + PER_ENTRY_OVERHEAD
end
def read_entry(key, options)
def read_entry(key, **options)
entry = @data[key]
synchronize do
if entry
......@@ -134,7 +134,7 @@ def read_entry(key, options)
entry
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
entry.dup_value!
synchronize do
old_entry = @data[key]
......@@ -151,7 +151,7 @@ def write_entry(key, entry, options)
end
end
def delete_entry(key, options)
def delete_entry(key, **options)
synchronize do
@key_access.delete(key)
entry = @data.delete(key)
......
......@@ -33,14 +33,14 @@ def delete_matched(matcher, options = nil)
end
private
def read_entry(key, options)
def read_entry(key, **options)
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
true
end
def delete_entry(key, options)
def delete_entry(key, **options)
false
end
end
......
......@@ -74,7 +74,7 @@ def self.supports_cache_versioning?
# Support raw values in the local cache strategy.
module LocalCacheWithRaw # :nodoc:
private
def read_entry(key, options)
def read_entry(key, **options)
entry = super
if options[:raw] && local_cache && entry
entry = deserialize_entry(entry.value)
......@@ -82,24 +82,24 @@ def read_entry(key, options)
entry
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
if options[:raw] && local_cache
raw_entry = Entry.new(serialize_entry(entry, raw: true))
raw_entry.expires_at = entry.expires_at
super(key, raw_entry, options)
super(key, raw_entry, **options)
else
super
end
end
def write_multi_entries(entries, options)
def write_multi_entries(entries, **options)
if options[:raw] && local_cache
raw_entries = entries.map do |key, entry|
raw_entry = Entry.new(serialize_entry(entry, raw: true))
raw_entry.expires_at = entry.expires_at
end.to_h
super(raw_entries, options)
super(raw_entries, **options)
else
super
end
......@@ -346,13 +346,13 @@ def set_redis_capabilities
# Store provider interface:
# Read an entry from the cache.
def read_entry(key, options = nil)
def read_entry(key, **options)
failsafe :read_entry do
deserialize_entry redis.with { |c| c.get(key) }
end
end
def read_multi_entries(names, _options)
def read_multi_entries(names, **options)
if mget_capable?
read_multi_mget(*names)
else
......
......@@ -50,27 +50,27 @@ def clear(options = nil)
@data.clear
end
def read_entry(key, options)
def read_entry(key, **options)
@data[key]
end
def read_multi_entries(keys, options)
def read_multi_entries(keys, **options)
values = {}
keys.each do |name|
entry = read_entry(name, options)
entry = read_entry(name, **options)
values[name] = entry.value if entry
end
values
end
def write_entry(key, value, options)
def write_entry(key, value, **options)
@data[key] = value
true
end
def delete_entry(key, options)
def delete_entry(key, **options)
!!@data.delete(key)
end
......@@ -92,34 +92,34 @@ def middleware
local_cache_key)
end
def clear(options = nil) # :nodoc:
def clear(**options) # :nodoc:
return super unless cache = local_cache
cache.clear(options)
super
end
def cleanup(options = nil) # :nodoc:
def cleanup(**options) # :nodoc:
return super unless cache = local_cache
cache.clear
super
end
def increment(name, amount = 1, options = nil) # :nodoc:
def increment(name, amount = 1, **options) # :nodoc:
return super unless local_cache
value = bypass_local_cache { super }
write_cache_value(name, value, options)
write_cache_value(name, value, **options)
value
end
def decrement(name, amount = 1, options = nil) # :nodoc:
def decrement(name, amount = 1, **options) # :nodoc:
return super unless local_cache
value = bypass_local_cache { super }
write_cache_value(name, value, options)
write_cache_value(name, value, **options)
value
end
private
def read_entry(key, options)
def read_entry(key, **options)
if cache = local_cache
cache.fetch_entry(key) { super }
else
......@@ -127,42 +127,42 @@ def read_entry(key, options)
end
end
def read_multi_entries(keys, options)
def read_multi_entries(keys, **options)
return super unless local_cache
local_entries = local_cache.read_multi_entries(keys, options)
local_entries = local_cache.read_multi_entries(keys, **options)
missed_keys = keys - local_entries.keys
if missed_keys.any?
local_entries.merge!(super(missed_keys, options))
local_entries.merge!(super(missed_keys, **options))
else
local_entries
end
end
def write_entry(key, entry, options)
def write_entry(key, entry, **options)
if options[:unless_exist]
local_cache.delete_entry(key, options) if local_cache
local_cache.delete_entry(key, **options) if local_cache
else
local_cache.write_entry(key, entry, options) if local_cache
local_cache.write_entry(key, entry, **options) if local_cache
end
super
end
def delete_entry(key, options)
local_cache.delete_entry(key, options) if local_cache
def delete_entry(key, **options)
local_cache.delete_entry(key, **options) if local_cache
super
end
def write_cache_value(name, value, options)
def write_cache_value(name, value, **options)
name = normalize_key(name, options)
cache = local_cache
cache.mute do
if value
cache.write(name, value, options)
cache.write(name, value, **options)
else
cache.delete(name, options)
cache.delete(name, **options)
end
end
end
......
......@@ -400,7 +400,7 @@ def test_expires_in
def test_race_condition_protection_skipped_if_not_defined
@cache.write("foo", "bar")
time = @cache.send(:read_entry, @cache.send(:normalize_key, "foo", {}), {}).expires_at
time = @cache.send(:read_entry, @cache.send(:normalize_key, "foo", {}), **{}).expires_at
Time.stub(:now, Time.at(time)) do
result = @cache.fetch("foo") do
......@@ -529,8 +529,8 @@ def assert_compression(should_compress, value, **options)
assert_equal value, @cache.read("uncompressed")
end
actual_entry = @cache.send(:read_entry, @cache.send(:normalize_key, "actual", {}), {})
uncompressed_entry = @cache.send(:read_entry, @cache.send(:normalize_key, "uncompressed", {}), {})
actual_entry = @cache.send(:read_entry, @cache.send(:normalize_key, "actual", {}), **{})
uncompressed_entry = @cache.send(:read_entry, @cache.send(:normalize_key, "uncompressed", {}), **{})
actual_size = Marshal.dump(actual_entry).bytesize
uncompressed_size = Marshal.dump(uncompressed_entry).bytesize
......
......@@ -6,7 +6,7 @@
class CacheKeyTest < ActiveSupport::TestCase
def test_entry_legacy_optional_ivars
legacy = Class.new(ActiveSupport::Cache::Entry) do
def initialize(value, options = {})
def initialize(value, **options)
@value = value
@expires_in = nil
@created_at = nil
......
......@@ -106,7 +106,7 @@ def test_delete_does_not_delete_empty_parent_dir
def test_log_exception_when_cache_read_fails
File.stub(:exist?, -> { raise StandardError.new("failed") }) do
@cache.send(:read_entry, "winston", {})
@cache.send(:read_entry, "winston", **{})
assert_predicate @buffer.string, :present?
end
end
......
......@@ -90,7 +90,7 @@ def test_prune_size_on_write_based_on_key_length
end
def test_pruning_is_capped_at_a_max_time
def @cache.delete_entry(*args)
def @cache.delete_entry(*args, **options)
sleep(0.01)
super
end
......