Merge pull request #39747 from maxgurewitz/fix-memcached-double-compress

prevents redundant memcached compression
This commit is contained in:
Eugene Kenny 2020-07-25 21:28:00 +01:00 committed by GitHub
commit 9e9a0a501f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 43 additions and 1 deletions

View File

@ -1,3 +1,8 @@
* Fixed an issue in `ActiveSupport::Cache::MemCacheStore` that caused values to be compressed twice
and the provided `compress_threshold` option to be ignored.
*Max Gurewitz*
* Prevent `RedisCacheStore` and `MemCacheStore` from performing compression
when reading entries written with `raw: true`.

View File

@ -149,7 +149,8 @@ module ActiveSupport
expires_in += 5.minutes
end
rescue_error_with false do
@data.with { |c| c.send(method, key, value, expires_in, **options) }
# Passing `compress: false` prevents Dalli from applying its own compression on top of the compression already performed by the cache store.
@data.with { |c| c.send(method, key, value, expires_in, **options, compress: false) }
end
end

View File

@ -129,7 +129,43 @@ class MemCacheStoreTest < ActiveSupport::TestCase
assert_not_equal value, @cache.read("foo")
end
# Values smaller than the configured compress_threshold must be written
# uncompressed, so Zlib::Deflate.deflate should never be invoked.
def test_no_compress_when_below_threshold
  cache = lookup_store(compress: true, compress_threshold: 10.kilobytes)
  payload = random_string(2.kilobytes)
  deflated = Zlib::Deflate.deflate(payload)

  assert_called(Zlib::Deflate, :deflate,
    "Memcached writes should not compress when below compress threshold.",
    times: 0, returns: deflated) do
    cache.write("foo", payload)
  end
end
# A value above the threshold must be deflated exactly once — the store
# compresses it, and Dalli must not compress it again.
def test_no_multiple_compress
  cache = lookup_store(compress: true)
  payload = random_string(100.kilobytes)
  deflated = Zlib::Deflate.deflate(payload)

  assert_called(Zlib::Deflate, :deflate,
    "Memcached writes should not perform duplicate compression.",
    times: 1, returns: deflated) do
    cache.write("foo", payload)
  end
end
private
# Returns a string of `length` random uppercase ASCII letters (A-Z).
def random_string(length)
  Array.new(length) { rand(65..90).chr }.join
end
def store
[:mem_cache_store, ENV["MEMCACHE_SERVERS"] || "localhost:11211"]
end