DEV: Implement a faster Discourse.cache

This is a bottom-up rewrite of the Discourse cache to support faster performance
and a smaller surface area.

ActiveSupport::Cache::Store accepts many options we do not use; this partial
implementation only picks out the bits we do use and want to support.

Additionally, params are named, which avoids typos such as "expires_at" vs "expires_in".

This also moves a few spots in Discourse to use Discourse.cache over setex.
Performance of setex and Discourse.cache.write is similar.
Sam Saffron 2019-11-27 16:11:49 +11:00
parent 0fb497eb23
commit 88ecb650a9
9 changed files with 127 additions and 29 deletions
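
For illustration, a minimal before/after sketch of the change at a call site, using the values from the EmailController hunk below:

  # before: raw redis with a positional TTL argument
  key = "unsub_#{SecureRandom.hex}"
  $redis.setex key, 1.hour, user.email
  email = $redis.get(key)

  # after: Discourse.cache with a named expiry param
  key = "unsub_#{SecureRandom.hex}"
  Discourse.cache.write key, user.email, expires_in: 1.hour
  email = Discourse.cache.read(key)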


@@ -112,7 +112,7 @@ class EmailController < ApplicationController
       else
         key = "unsub_#{SecureRandom.hex}"
-        $redis.setex key, 1.hour, user.email
+        Discourse.cache.write key, user.email, expires_in: 1.hour
         url = path("/email/unsubscribed?key=#{key}")
         if topic
@@ -125,7 +125,7 @@ class EmailController < ApplicationController
   end

   def unsubscribed
-    @email = $redis.get(params[:key])
+    @email = Discourse.cache.read(params[:key])
     @topic_id = params[:topic_id]
     user = User.find_by_email(@email)
     raise Discourse::NotFound unless user


@@ -27,21 +27,21 @@ class DiscourseSingleSignOn < SingleSignOn
   def register_nonce(return_path)
     if nonce
-      $redis.setex(nonce_key, SingleSignOn.nonce_expiry_time, return_path)
+      Discourse.cache.write(nonce_key, return_path, expires_in: SingleSignOn.nonce_expiry_time)
     end
   end

   def nonce_valid?
-    nonce && $redis.get(nonce_key).present?
+    nonce && Discourse.cache.read(nonce_key).present?
   end

   def return_path
-    $redis.get(nonce_key) || "/"
+    Discourse.cache.read(nonce_key) || "/"
   end

   def expire_nonce!
     if nonce
-      $redis.del nonce_key
+      Discourse.cache.delete nonce_key
     end
   end


@@ -163,7 +163,7 @@ class Report
   end

   def self.cache(report, duration)
-    Discourse.cache.write(cache_key(report), report.as_json, force: true, expires_in: duration)
+    Discourse.cache.write(cache_key(report), report.as_json, expires_in: duration)
   end

   def self.find(type, opts = nil)


@@ -1,8 +1,20 @@
 # frozen_string_literal: true

-# Discourse specific cache, enforces 1 day expiry
-class Cache < ActiveSupport::Cache::Store
+# Discourse specific cache, enforces 1 day expiry by default
+# This is a bottom up implementation of ActiveSupport::Cache::Store
+# this allows us to cleanly implement without using cache entries and version
+# support which we do not use, in turn this makes the cache as fast as simply
+# using `$redis.setex` with a more convenient API
+#
+# It only implements a subset of ActiveSupport::Cache::Store as we make no use
+# of large parts of the interface.
+#
+# An additional advantage of this class is that all methods have named params
+# Rails tends to use options hash for lots of stuff due to legacy reasons
+# this makes it harder to reason about the API
+class Cache

   # nothing is cached for longer than 1 day EVER
   # there is no reason to have data older than this clogging redis
@@ -10,9 +22,14 @@ class Cache < ActiveSupport::Cache::Store
   # pointless data
   MAX_CACHE_AGE = 1.day unless defined? MAX_CACHE_AGE

-  def initialize(opts = {})
-    @namespace = opts[:namespace] || "_CACHE_"
-    super(opts)
+  # we don't need this feature, 1 day expiry is enough
+  # it makes lookups a tad cheaper
+  def self.supports_cache_versioning?
+    false
+  end
+
+  def initialize(namespace: "_CACHE")
+    @namespace = namespace
   end

   def redis
@@ -33,30 +50,82 @@ class Cache < ActiveSupport::Cache::Store
     end
   end

-  def normalize_key(key, opts = nil)
+  def normalize_key(key)
     "#{@namespace}:#{key}"
   end

+  def exist?(name)
+    key = normalize_key(name)
+    redis.exists(key)
+  end
+
+  # this removes a bunch of stuff we do not need like instrumentation and versioning
+  def read(name)
+    key = normalize_key(name)
+    read_entry(key)
+  end
+
+  def write(name, value, expires_in: nil)
+    write_entry(normalize_key(name), value, expires_in: expires_in)
+  end
+
+  def delete(name)
+    redis.del(normalize_key(name))
+  end
+
+  def fetch(name, expires_in: nil, force: nil, &blk)
+    if block_given?
+      key = normalize_key(name)
+      raw = nil
+
+      if !force
+        raw = redis.get(key)
+      end
+
+      if raw
+        begin
+          Marshal.load(raw)
+        rescue => e
+          log_first_exception(e, key)
+        end
+      else
+        val = blk.call
+        write_entry(key, val, expires_in: expires_in)
+        val
+      end
+    elsif force
+      raise ArgumentError, "Missing block: Calling `Cache#fetch` with `force: true` requires a block."
+    else
+      read(name)
+    end
+  end
+
   protected

-  def read_entry(key, options)
-    if data = redis.get(key)
-      data = Marshal.load(data)
-      ActiveSupport::Cache::Entry.new data
-    end
-  rescue
-    # corrupt cache, fail silently for now, remove rescue later
-  end
+  def log_first_exception(e, key)
+    if !defined? @logged_a_warning
+      @logged_a_warning = true
+      Discourse.warn_exception(e, "Corrupt cache... skipping entry for key #{key}")
+    end
+  end

-  def write_entry(key, entry, options)
-    dumped = Marshal.dump(entry.value)
-    expiry = options[:expires_in] || MAX_CACHE_AGE
+  def read_entry(key)
+    if data = redis.get(key)
+      Marshal.load(data)
+    end
+  rescue => e
+    # corrupt cache, this can happen if Marshal version
+    # changes. Log it once so we can tell it is happening.
+    # should not happen under any normal circumstances, but we
+    # do not want to flood logs
+    log_first_exception(e, key)
+  end
+
+  def write_entry(key, value, expires_in: nil)
+    dumped = Marshal.dump(value)
+    expiry = expires_in || MAX_CACHE_AGE
     redis.setex(key, expiry, dumped)
     true
   end
-
-  def delete_entry(key, options)
-    redis.del key
-  end
 end
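
Taken together, a minimal usage sketch of the rewritten class as it appears in the diff above (the key names and values are illustrative, not taken from this commit):

  cache = Cache.new(namespace: "_CACHE")

  cache.write("views", 42, expires_in: 5.minutes)
  cache.read("views")    # => 42
  cache.exist?("views")  # => true
  cache.delete("views")

  # fetch runs the block (and writes its result) only on a cache miss,
  # unless force: true is passed together with a block
  cache.fetch("views", expires_in: 1.hour) do
    42
  end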


@@ -336,7 +336,7 @@ module DiscourseTagging
   end

   def self.staff_tag_names
-    tag_names = Discourse.cache.read(TAGS_STAFF_CACHE_KEY, tag_names)
+    tag_names = Discourse.cache.read(TAGS_STAFF_CACHE_KEY)

     if !tag_names
       tag_names = Tag.joins(tag_groups: :tag_group_permissions)

View File

@@ -79,3 +79,17 @@ end
 # redis get string marshal: 12789.2 i/s - same-ish: difference falls within error
 # Rails read cache string: 10486.4 i/s - 1.25x slower
 # Discourse read cache string: 10457.1 i/s - 1.26x slower
+#
+# After Cache re-write
+#
+# Comparison:
+#   redis setex string: 13390.9 i/s
+#   redis setex marshal string: 13202.0 i/s - same-ish: difference falls within error
+#   Discourse cache string: 12406.5 i/s - same-ish: difference falls within error
+#   Rails cache string: 12289.2 i/s - same-ish: difference falls within error
+#
+# Comparison:
+#   redis get string: 13589.6 i/s
+#   redis get string marshal: 13118.3 i/s - same-ish: difference falls within error
+#   Rails read cache string: 12482.2 i/s - same-ish: difference falls within error
+#   Discourse read cache string: 12296.8 i/s - same-ish: difference falls within error
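
The figures above are benchmark-ips style output. For context, a rough sketch of how such a comparison could be reproduced from a Discourse console, assuming the benchmark-ips gem; this harness is illustrative and is not the benchmark script touched by this commit:

  require "benchmark/ips"

  cache = Cache.new
  key = "bench"

  Benchmark.ips do |b|
    # raw redis write with a positional TTL
    b.report("redis setex string") do
      $redis.setex(key, 60, "short string")
    end

    # equivalent write through the new cache
    b.report("Discourse cache string") do
      cache.write(key, "short string", expires_in: 60)
    end

    b.compare!
  end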


@@ -9,6 +9,12 @@ describe Cache do
     Cache.new
   end

+  it "supports exist?" do
+    cache.write("testing", 1.1)
+    expect(cache.exist?("testing")).to eq(true)
+    expect(cache.exist?(SecureRandom.hex)).to eq(false)
+  end
+
   it "supports float" do
     cache.write("float", 1.1)
     expect(cache.read("float")).to eq(1.1)
@@ -36,10 +42,14 @@ describe Cache do
   end

   it "can delete correctly" do
+    cache.delete("key")
+
     cache.fetch("key", expires_in: 1.minute) do
       "test"
     end

+    expect(cache.fetch("key")).to eq("test")
+
     cache.delete("key")
     expect(cache.fetch("key")).to eq(nil)
   end
@@ -69,6 +79,7 @@ describe Cache do
     r = cache.fetch "key" do
       "bob"
     end
     expect(r).to eq("bob")
   end


@@ -44,6 +44,9 @@ describe "Redis Store" do
   end

   it "can be cleared without clearing our cache" do
+    cache.clear
+    store.clear
+
     store.fetch "key" do
       "key in store"
     end
@@ -53,6 +56,7 @@ describe "Redis Store" do
     end

     store.clear
+
     expect(store.read("key")).to eq(nil)
     expect(cache.fetch("key")).to eq("key in cache")


@@ -151,7 +151,7 @@ RSpec.describe EmailController do
     describe 'when topic is public' do
       it 'should return the right response' do
         key = SecureRandom.hex
-        $redis.set(key, user.email)
+        Discourse.cache.write(key, user.email)
         get '/email/unsubscribed', params: { key: key, topic_id: topic.id }
         expect(response.status).to eq(200)
         expect(response.body).to include(topic.title)
@@ -161,7 +161,7 @@ RSpec.describe EmailController do
     describe 'when topic is private' do
      it 'should return the right response' do
         key = SecureRandom.hex
-        $redis.set(key, user.email)
+        Discourse.cache.write(key, user.email)
         get '/email/unsubscribed', params: { key: key, topic_id: private_topic.id }
         expect(response.status).to eq(200)
         expect(response.body).to_not include(private_topic.title)