Mirror of https://github.com/discourse/discourse.git (synced 2024-11-22 13:09:18 +08:00)
DEV: s/\$redis/Discourse\.redis (#8431)
This commit also adds a rubocop rule to prevent global variables.
parent 9eccfb7b52
commit 0d3d2c43a0
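The change is mechanical at every call site: code that read or wrote Redis through the $redis global now goes through a Discourse.redis accessor (added to lib/discourse.rb in the diff below), and a new Style/GlobalVars rubocop rule keeps further globals out. A minimal sketch of the idea, assuming nothing beyond what the diff itself shows: the accessor simply returns the instance that config/application.rb still assigns, so call sites need no other change.

# Sketch only (see the lib/discourse.rb hunk below for the real accessor):
# Discourse.redis wraps the existing DiscourseRedis instance so call sites
# stop referencing the global variable directly.
module Discourse
  def self.redis
    $redis # rubocop:disable Style/GlobalVars -- assigned once in config/application.rb
  end
end

# A call site then changes from:
#   $redis.setex(key, 10.minutes, value)
# to:
#   Discourse.redis.setex(key, 10.minutes, value)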
@@ -137,3 +137,12 @@ DiscourseCops/NoChdir:
   Exclude:
     - 'spec/**/*' # Specs are run sequentially, so chdir can be used
     - 'plugins/*/spec/**/*'
+
+Style/GlobalVars:
+  Enabled: true
+  Severity: warning
+  Exclude:
+    - 'lib/tasks/**/*'
+    - 'script/**/*'
+    - 'spec/**/*.rb'
+    - 'plugins/*/spec/**/*'

@@ -768,14 +768,14 @@ class ApplicationController < ActionController::Base
     if !SiteSetting.login_required? || (current_user rescue false)
       key = "page_not_found_topics"
-      if @topics_partial = $redis.get(key)
+      if @topics_partial = Discourse.redis.get(key)
         @topics_partial = @topics_partial.html_safe
       else
         category_topic_ids = Category.pluck(:topic_id).compact
         @top_viewed = TopicQuery.new(nil, except_topic_ids: category_topic_ids).list_top_for("monthly").topics.first(10)
         @recent = Topic.includes(:category).where.not(id: category_topic_ids).recent(10)
         @topics_partial = render_to_string partial: '/exceptions/not_found_topics', formats: [:html]
-        $redis.setex(key, 10.minutes, @topics_partial)
+        Discourse.redis.setex(key, 10.minutes, @topics_partial)
       end
     end
@@ -9,7 +9,7 @@ class ForumsController < ActionController::Base
   after_action :add_readonly_header

   def status
-    if $shutdown
+    if $shutdown # rubocop:disable Style/GlobalVars
       render plain: "shutting down", status: 500
     else
       render plain: "ok"

@@ -409,15 +409,15 @@ class SessionController < ApplicationController
   end

   def one_time_password
-    @otp_username = otp_username = $redis.get "otp_#{params[:token]}"
+    @otp_username = otp_username = Discourse.redis.get "otp_#{params[:token]}"

    if otp_username && user = User.find_by_username(otp_username)
      if current_user&.username == otp_username
-        $redis.del "otp_#{params[:token]}"
+        Discourse.redis.del "otp_#{params[:token]}"
        return redirect_to path("/")
      elsif request.post?
        log_on_user(user)
-        $redis.del "otp_#{params[:token]}"
+        Discourse.redis.del "otp_#{params[:token]}"
        return redirect_to path("/")
      else
        # Display the form
@@ -205,7 +205,7 @@ class UserApiKeysController < ApplicationController
     raise Discourse::InvalidAccess unless UserApiKey.allowed_scopes.superset?(Set.new(["one_time_password"]))

     otp = SecureRandom.hex
-    $redis.setex "otp_#{otp}", 10.minutes, username
+    Discourse.redis.setex "otp_#{otp}", 10.minutes, username

     Base64.encode64(public_key.public_encrypt(otp))
   end

@@ -21,7 +21,7 @@ class Users::AssociateAccountsController < ApplicationController
   # Presents a confirmation screen to the user. Accessed via GET, with no CSRF checks
   def connect
     auth = get_auth_hash
-    $redis.del "#{REDIS_PREFIX}_#{current_user&.id}_#{params[:token]}"
+    Discourse.redis.del "#{REDIS_PREFIX}_#{current_user&.id}_#{params[:token]}"

     provider_name = auth.provider
     authenticator = Discourse.enabled_authenticators.find { |a| a.name == provider_name }

@@ -37,7 +37,7 @@ class Users::AssociateAccountsController < ApplicationController

   def get_auth_hash
     token = params[:token]
-    json = $redis.get "#{REDIS_PREFIX}_#{current_user&.id}_#{token}"
+    json = Discourse.redis.get "#{REDIS_PREFIX}_#{current_user&.id}_#{token}"
     raise Discourse::NotFound if json.nil?

     OmniAuth::AuthHash.new(JSON.parse(json))

@@ -31,7 +31,7 @@ class Users::OmniauthCallbacksController < ApplicationController
     if session.delete(:auth_reconnect) && authenticator.can_connect_existing_user? && current_user
       # Save to redis, with a secret token, then redirect to confirmation screen
       token = SecureRandom.hex
-      $redis.setex "#{Users::AssociateAccountsController::REDIS_PREFIX}_#{current_user.id}_#{token}", 10.minutes, auth.to_json
+      Discourse.redis.setex "#{Users::AssociateAccountsController::REDIS_PREFIX}_#{current_user.id}_#{token}", 10.minutes, auth.to_json
       return redirect_to Discourse.base_uri("/associate/#{token}")
     else
       @auth_result = authenticator.after_authenticate(auth)
@@ -119,8 +119,8 @@ class WebhooksController < ActionController::Base

     # prevent replay attacks
     key = "mailgun_token_#{token}"
-    return false unless $redis.setnx(key, 1)
-    $redis.expire(key, 10.minutes)
+    return false unless Discourse.redis.setnx(key, 1)
+    Discourse.redis.expire(key, 10.minutes)

     # ensure timestamp isn't too far from current time
     return false if (Time.at(timestamp.to_i) - Time.now).abs > 12.hours.to_i

@@ -41,7 +41,7 @@ module ApplicationHelper
     return request.env[sk] if request.env[sk]

     request.env[sk] = key = (session[sk] ||= SecureRandom.hex)
-    $redis.setex "#{sk}_#{key}", 7.days, current_user.id.to_s
+    Discourse.redis.setex "#{sk}_#{key}", 7.days, current_user.id.to_s
     key
   end
 end

@@ -3,7 +3,7 @@
 module Jobs
   class CleanUpSidekiqStatistic < ::Jobs::Onceoff
     def execute_onceoff(args)
-      $redis.without_namespace.del('sidekiq:sidekiq:statistic')
+      Discourse.redis.without_namespace.del('sidekiq:sidekiq:statistic')
     end
   end
 end
@@ -16,7 +16,7 @@ class Jobs::Onceoff < ::Jobs::Base
   # Pass `force: true` to force it happen again
   def execute(args)
     job_name = self.class.name_for(self.class)
-    has_lock = $redis.setnx(running_key_name, Time.now.to_i)
+    has_lock = Discourse.redis.setnx(running_key_name, Time.now.to_i)

     # If we can't get a lock, just noop
     if args[:force] || has_lock

@@ -25,7 +25,7 @@ class Jobs::Onceoff < ::Jobs::Base
       execute_onceoff(args)
       OnceoffLog.create!(job_name: job_name)
     ensure
-      $redis.del(running_key_name) if has_lock
+      Discourse.redis.del(running_key_name) if has_lock
     end
   end

@@ -10,11 +10,11 @@ module Jobs
     end

     def execute(args)
-      $redis.set(self.class.heartbeat_key, Time.new.to_i.to_s)
+      Discourse.redis.set(self.class.heartbeat_key, Time.new.to_i.to_s)
     end

     def self.last_heartbeat
-      $redis.get(heartbeat_key).to_i
+      Discourse.redis.get(heartbeat_key).to_i
     end
   end
 end
@@ -96,16 +96,16 @@ module Jobs
     end

     def last_cleanup=(v)
-      $redis.setex(last_cleanup_key, 7.days.to_i, v.to_s)
+      Discourse.redis.setex(last_cleanup_key, 7.days.to_i, v.to_s)
     end

     def last_cleanup
-      v = $redis.get(last_cleanup_key)
+      v = Discourse.redis.get(last_cleanup_key)
       v ? v.to_i : v
     end

     def reset_last_cleanup!
-      $redis.del(last_cleanup_key)
+      Discourse.redis.del(last_cleanup_key)
     end

     protected

@@ -33,11 +33,11 @@ module Jobs
     end

     def last_notified_id
-      (i = $redis.get(self.class.last_notified_key)) && i.to_i
+      (i = Discourse.redis.get(self.class.last_notified_key)) && i.to_i
     end

     def last_notified_id=(arg)
-      $redis.set(self.class.last_notified_key, arg)
+      Discourse.redis.set(self.class.last_notified_key, arg)
     end

     def self.last_notified_key

@@ -37,11 +37,11 @@ module Jobs
     end

     def self.last_notified_id
-      $redis.get(last_notified_key).to_i
+      Discourse.redis.get(last_notified_key).to_i
     end

     def self.last_notified_id=(arg)
-      $redis.set(last_notified_key, arg)
+      Discourse.redis.set(last_notified_key, arg)
     end

     def self.last_notified_key

@@ -49,7 +49,7 @@ module Jobs
     end

     def self.clear_key
-      $redis.del(last_notified_key)
+      Discourse.redis.del(last_notified_key)
     end

     def active_moderator_usernames
@@ -50,11 +50,11 @@ module Jobs
     end

     def previous_newest_username
-      $redis.get previous_newest_username_cache_key
+      Discourse.redis.get previous_newest_username_cache_key
     end

     def previous_newest_username=(username)
-      $redis.setex previous_newest_username_cache_key, 7.days, username
+      Discourse.redis.setex previous_newest_username_cache_key, 7.days, username
     end

     def previous_newest_username_cache_key

@@ -43,15 +43,15 @@ module Jobs
         end
       end
     rescue Net::OpenTimeout => e
-      count = $redis.incr(POLL_MAILBOX_TIMEOUT_ERROR_KEY).to_i
+      count = Discourse.redis.incr(POLL_MAILBOX_TIMEOUT_ERROR_KEY).to_i

-      $redis.expire(
+      Discourse.redis.expire(
        POLL_MAILBOX_TIMEOUT_ERROR_KEY,
        SiteSetting.pop3_polling_period_mins.minutes * 3
      ) if count == 1

      if count > 3
-        $redis.del(POLL_MAILBOX_TIMEOUT_ERROR_KEY)
+        Discourse.redis.del(POLL_MAILBOX_TIMEOUT_ERROR_KEY)
        mark_as_errored!
        add_admin_dashboard_problem_message('dashboard.poll_pop3_timeout')
        Discourse.handle_job_exception(e, error_context(@args, "Connecting to '#{SiteSetting.pop3_polling_host}' for polling emails."))
@@ -65,13 +65,13 @@ module Jobs
     POLL_MAILBOX_ERRORS_KEY ||= "poll_mailbox_errors".freeze

     def self.errors_in_past_24_hours
-      $redis.zremrangebyscore(POLL_MAILBOX_ERRORS_KEY, 0, 24.hours.ago.to_i)
-      $redis.zcard(POLL_MAILBOX_ERRORS_KEY).to_i
+      Discourse.redis.zremrangebyscore(POLL_MAILBOX_ERRORS_KEY, 0, 24.hours.ago.to_i)
+      Discourse.redis.zcard(POLL_MAILBOX_ERRORS_KEY).to_i
     end

     def mark_as_errored!
       now = Time.now.to_i
-      $redis.zadd(POLL_MAILBOX_ERRORS_KEY, now, now.to_s)
+      Discourse.redis.zadd(POLL_MAILBOX_ERRORS_KEY, now, now.to_s)
     end

     def add_admin_dashboard_problem_message(i18n_key)

@@ -697,9 +697,9 @@ class UserNotifications < ActionMailer::Base
   def summary_new_users_count(min_date)
     min_date_str = min_date.is_a?(String) ? min_date : min_date.strftime('%Y-%m-%d')
     key = self.class.summary_new_users_count_key(min_date_str)
-    ((count = $redis.get(key)) && count.to_i) || begin
+    ((count = Discourse.redis.get(key)) && count.to_i) || begin
       count = User.real.where(active: true, staged: false).not_suspended.where("created_at > ?", min_date_str).count
-      $redis.setex(key, 1.day, count)
+      Discourse.redis.setex(key, 1.day, count)
       count
     end
   end
@@ -63,16 +63,16 @@ class AdminDashboardData
   end

   def self.set_problems_started
-    existing_time = $redis.get(problems_started_key)
-    $redis.setex(problems_started_key, 14.days.to_i, existing_time || Time.zone.now.to_s)
+    existing_time = Discourse.redis.get(problems_started_key)
+    Discourse.redis.setex(problems_started_key, 14.days.to_i, existing_time || Time.zone.now.to_s)
   end

   def self.clear_problems_started
-    $redis.del problems_started_key
+    Discourse.redis.del problems_started_key
   end

   def self.problems_started_at
-    s = $redis.get(problems_started_key)
+    s = Discourse.redis.get(problems_started_key)
     s ? Time.zone.parse(s) : nil
   end

@@ -109,19 +109,19 @@ class AdminDashboardData
   end

   def self.problem_message_check(i18n_key)
-    $redis.get(problem_message_key(i18n_key)) ? I18n.t(i18n_key, base_path: Discourse.base_path) : nil
+    Discourse.redis.get(problem_message_key(i18n_key)) ? I18n.t(i18n_key, base_path: Discourse.base_path) : nil
   end

   def self.add_problem_message(i18n_key, expire_seconds = nil)
     if expire_seconds.to_i > 0
-      $redis.setex problem_message_key(i18n_key), expire_seconds.to_i, 1
+      Discourse.redis.setex problem_message_key(i18n_key), expire_seconds.to_i, 1
     else
-      $redis.set problem_message_key(i18n_key), 1
+      Discourse.redis.set problem_message_key(i18n_key), 1
     end
   end

   def self.clear_problem_message(i18n_key)
-    $redis.del problem_message_key(i18n_key)
+    Discourse.redis.del problem_message_key(i18n_key)
   end

   def self.problem_message_key(i18n_key)
@@ -52,7 +52,7 @@ class ApplicationRequest < ActiveRecord::Base

     req_types.each do |req_type, _|
       key = redis_key(req_type, date)
-      $redis.del key
+      Discourse.redis.del key
     end
   end

@@ -16,7 +16,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base

     batch_size ||= DEFAULT_BATCH_SIZE

-    next_category_id = batched ? $redis.get(NEXT_CATEGORY_ID_KEY).to_i : 0
+    next_category_id = batched ? Discourse.redis.get(NEXT_CATEGORY_ID_KEY).to_i : 0

     categories = Category.select(:id, :topic_id, :num_featured_topics)
       .where('id >= ?', next_category_id)

@@ -27,7 +27,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base
     if batched
       if categories.length == batch_size
         next_id = Category.where('id > ?', categories.last.id).order('id asc').limit(1).pluck(:id)[0]
-        next_id ? $redis.setex(NEXT_CATEGORY_ID_KEY, 1.day, next_id) : clear_batch!
+        next_id ? Discourse.redis.setex(NEXT_CATEGORY_ID_KEY, 1.day, next_id) : clear_batch!
       else
         clear_batch!
       end

@@ -39,7 +39,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base
   end

   def self.clear_batch!
-    $redis.del(NEXT_CATEGORY_ID_KEY)
+    Discourse.redis.del(NEXT_CATEGORY_ID_KEY)
   end

   def self.feature_topics_for(c, existing = nil)
@@ -19,13 +19,13 @@ module CachedCounting

   class_methods do
     def perform_increment!(key, opts = nil)
-      val = $redis.incr(key).to_i
+      val = Discourse.redis.incr(key).to_i

       # readonly mode it is going to be 0, skip
       return if val == 0

       # 3.days, see: https://github.com/rails/rails/issues/21296
-      $redis.expire(key, 259200)
+      Discourse.redis.expire(key, 259200)

       autoflush = (opts && opts[:autoflush]) || self.autoflush
       if autoflush > 0 && val >= autoflush

@@ -51,9 +51,9 @@ module CachedCounting
     # this may seem a bit fancy but in so it allows
     # for concurrent calls without double counting
     def get_and_reset(key)
-      namespaced_key = $redis.namespace_key(key)
-      val = $redis.without_namespace.eval(GET_AND_RESET, keys: [namespaced_key]).to_i
-      $redis.expire(key, 259200) # SET removes expiry, so set it again
+      namespaced_key = Discourse.redis.namespace_key(key)
+      val = Discourse.redis.without_namespace.eval(GET_AND_RESET, keys: [namespaced_key]).to_i
+      Discourse.redis.expire(key, 259200) # SET removes expiry, so set it again
       val
     end
@@ -19,7 +19,7 @@ module StatsCacheable

     def fetch_cached_stats
       # The scheduled Stats job is responsible for generating and caching this.
-      stats = $redis.get(stats_cache_key)
+      stats = Discourse.redis.get(stats_cache_key)
       stats = refresh_stats if !stats
       JSON.parse(stats).with_indifferent_access
     end

@@ -35,7 +35,7 @@ module StatsCacheable
     def set_cache(stats)
       # Add some extra time to the expiry so that the next job run has plenty of time to
       # finish before previous cached value expires.
-      $redis.setex stats_cache_key, (recalculate_stats_interval + 5).minutes, stats
+      Discourse.redis.setex stats_cache_key, (recalculate_stats_interval + 5).minutes, stats
     end
   end
 end
@@ -26,9 +26,9 @@ class GlobalSetting

     if @safe_secret_key_base && @token_in_redis && (@token_last_validated + REDIS_VALIDATE_SECONDS) < Time.now
       @token_last_validated = Time.now
-      token = $redis.without_namespace.get(REDIS_SECRET_KEY)
+      token = Discourse.redis.without_namespace.get(REDIS_SECRET_KEY)
       if token.nil?
-        $redis.without_namespace.set(REDIS_SECRET_KEY, @safe_secret_key_base)
+        Discourse.redis.without_namespace.set(REDIS_SECRET_KEY, @safe_secret_key_base)
       end
     end

@@ -39,10 +39,10 @@ class GlobalSetting
       @token_in_redis = true
       @token_last_validated = Time.now

-      token = $redis.without_namespace.get(REDIS_SECRET_KEY)
+      token = Discourse.redis.without_namespace.get(REDIS_SECRET_KEY)
       unless token && token =~ VALID_SECRET_KEY
         token = SecureRandom.hex(64)
-        $redis.without_namespace.set(REDIS_SECRET_KEY, token)
+        Discourse.redis.without_namespace.set(REDIS_SECRET_KEY, token)
       end
     end
     if !secret_key_base.blank? && token != secret_key_base
@@ -247,12 +247,12 @@ class Post < ActiveRecord::Base

   def store_unique_post_key
     if SiteSetting.unique_posts_mins > 0
-      $redis.setex(unique_post_key, SiteSetting.unique_posts_mins.minutes.to_i, id)
+      Discourse.redis.setex(unique_post_key, SiteSetting.unique_posts_mins.minutes.to_i, id)
     end
   end

   def matches_recent_post?
-    post_id = $redis.get(unique_post_key)
+    post_id = Discourse.redis.get(unique_post_key)
     post_id != (nil) && post_id.to_i != (id)
   end

@@ -696,11 +696,11 @@ class Post < ActiveRecord::Base
   end

   def self.estimate_posts_per_day
-    val = $redis.get("estimated_posts_per_day")
+    val = Discourse.redis.get("estimated_posts_per_day")
     return val.to_i if val

     posts_per_day = Topic.listable_topics.secured.joins(:posts).merge(Post.created_since(30.days.ago)).count / 30
-    $redis.setex("estimated_posts_per_day", 1.day.to_i, posts_per_day.to_s)
+    Discourse.redis.setex("estimated_posts_per_day", 1.day.to_i, posts_per_day.to_s)
     posts_per_day

   end
@@ -39,8 +39,8 @@ class SearchLog < ActiveRecord::Base

   # for testing
   def self.clear_debounce_cache!
-    $redis.keys("__SEARCH__LOG_*").each do |k|
-      $redis.del(k)
+    Discourse.redis.keys("__SEARCH__LOG_*").each do |k|
+      Discourse.redis.del(k)
     end
   end

@@ -56,7 +56,7 @@ class SearchLog < ActiveRecord::Base

     result = nil

-    if existing = $redis.get(key)
+    if existing = Discourse.redis.get(key)
       id, old_term = existing.split(",", 2)

       if term.start_with?(old_term)

@@ -80,7 +80,7 @@ class SearchLog < ActiveRecord::Base
       result = [:created, log.id]
     end

-    $redis.setex(key, 5, "#{result[1]},#{term}")
+    Discourse.redis.setex(key, 5, "#{result[1]},#{term}")

     result
   end
@@ -102,7 +102,7 @@ class Site
     if guardian.anonymous?
       seq = MessageBus.last_id('/site_json')

-      cached_json, cached_seq, cached_version = $redis.mget('site_json', 'site_json_seq', 'site_json_version')
+      cached_json, cached_seq, cached_version = Discourse.redis.mget('site_json', 'site_json_seq', 'site_json_version')

       if cached_json && seq == cached_seq.to_i && Discourse.git_version == cached_version
         return cached_json

@@ -114,10 +114,10 @@ class Site
     json = MultiJson.dump(SiteSerializer.new(site, root: false, scope: guardian))

     if guardian.anonymous?
-      $redis.multi do
-        $redis.setex 'site_json', 1800, json
-        $redis.set 'site_json_seq', seq
-        $redis.set 'site_json_version', Discourse.git_version
+      Discourse.redis.multi do
+        Discourse.redis.setex 'site_json', 1800, json
+        Discourse.redis.set 'site_json_seq', seq
+        Discourse.redis.set 'site_json_version', Discourse.git_version
       end
     end
@@ -102,8 +102,8 @@ class TopicLinkClick < ActiveRecord::Base

     # Rate limit the click counts to once in 24 hours
     rate_key = "link-clicks:#{link.id}:#{args[:user_id] || args[:ip]}"
-    if $redis.setnx(rate_key, "1")
-      $redis.expire(rate_key, 1.day.to_i)
+    if Discourse.redis.setnx(rate_key, "1")
+      Discourse.redis.expire(rate_key, 1.day.to_i)
       args[:ip] = nil if args[:user_id]
       create!(topic_link_id: link.id, user_id: args[:user_id], ip_address: args[:ip])
     end

@@ -18,8 +18,8 @@ class TopicViewItem < ActiveRecord::Base
       redis_key << ":ip-#{ip}"
     end

-    if skip_redis || $redis.setnx(redis_key, "1")
-      skip_redis || $redis.expire(redis_key, SiteSetting.topic_view_duration_hours.hours)
+    if skip_redis || Discourse.redis.setnx(redis_key, "1")
+      skip_redis || Discourse.redis.expire(redis_key, SiteSetting.topic_view_duration_hours.hours)

       TopicViewItem.transaction do
         # this is called real frequently, working hard to avoid exceptions
@@ -243,8 +243,8 @@ class TrustLevel3Requirements
   end

   def self.clear_cache
-    $redis.del NUM_TOPICS_KEY
-    $redis.del NUM_POSTS_KEY
+    Discourse.redis.del NUM_TOPICS_KEY
+    Discourse.redis.del NUM_POSTS_KEY
   end

   CACHE_DURATION = 1.day.seconds - 60

@@ -252,17 +252,17 @@ class TrustLevel3Requirements
   NUM_POSTS_KEY = "tl3_num_posts"

   def self.num_topics_in_time_period
-    $redis.get(NUM_TOPICS_KEY) || begin
+    Discourse.redis.get(NUM_TOPICS_KEY) || begin
       count = Topic.listable_topics.visible.created_since(SiteSetting.tl3_time_period.days.ago).count
-      $redis.setex NUM_TOPICS_KEY, CACHE_DURATION, count
+      Discourse.redis.setex NUM_TOPICS_KEY, CACHE_DURATION, count
       count
     end
   end

   def self.num_posts_in_time_period
-    $redis.get(NUM_POSTS_KEY) || begin
+    Discourse.redis.get(NUM_POSTS_KEY) || begin
       count = Post.public_posts.visible.created_since(SiteSetting.tl3_time_period.days.ago).count
-      $redis.setex NUM_POSTS_KEY, CACHE_DURATION, count
+      Discourse.redis.setex NUM_POSTS_KEY, CACHE_DURATION, count
       count
     end
   end
@@ -711,9 +711,9 @@ class User < ActiveRecord::Base
     now_date = now.to_date
     # Only update last seen once every minute
     redis_key = "user:#{id}:#{now_date}"
-    return unless $redis.setnx(redis_key, "1")
+    return unless Discourse.redis.setnx(redis_key, "1")

-    $redis.expire(redis_key, SiteSetting.active_user_rate_limit_secs)
+    Discourse.redis.expire(redis_key, SiteSetting.active_user_rate_limit_secs)
     update_previous_visit(now)
     # using update_column to avoid the AR transaction
     update_column(:last_seen_at, now)

@@ -94,8 +94,8 @@ class UserOption < ActiveRecord::Base
     delay = SiteSetting.active_user_rate_limit_secs

     # only update last_redirected_to_top_at once every minute
-    return unless $redis.setnx(key, "1")
-    $redis.expire(key, delay)
+    return unless Discourse.redis.setnx(key, "1")
+    Discourse.redis.expire(key, delay)

     # delay the update
     Jobs.enqueue_in(delay / 2, :update_top_redirection, user_id: self.user_id, redirected_at: Time.zone.now)

@@ -16,8 +16,8 @@ class UserProfileView < ActiveRecord::Base
       redis_key << ":ip-#{ip}"
     end

-    if skip_redis || $redis.setnx(redis_key, '1')
-      skip_redis || $redis.expire(redis_key, SiteSetting.user_profile_view_duration_hours.hours)
+    if skip_redis || Discourse.redis.setnx(redis_key, '1')
+      skip_redis || Discourse.redis.expire(redis_key, SiteSetting.user_profile_view_duration_hours.hours)

       self.transaction do
         sql = "INSERT INTO user_profile_views (user_profile_id, ip_address, viewed_at, user_id)
@@ -163,11 +163,11 @@ class UserStat < ActiveRecord::Base
   end

   def self.last_seen_cached(id)
-    $redis.get(last_seen_key(id))
+    Discourse.redis.get(last_seen_key(id))
   end

   def self.cache_last_seen(id, val)
-    $redis.setex(last_seen_key(id), MAX_TIME_READ_DIFF, val)
+    Discourse.redis.setex(last_seen_key(id), MAX_TIME_READ_DIFF, val)
   end

   protected

@@ -16,8 +16,8 @@ class WebCrawlerRequest < ActiveRecord::Base

   def self.increment!(user_agent, opts = nil)
     ua_list_key = user_agent_list_key
-    $redis.sadd(ua_list_key, user_agent)
-    $redis.expire(ua_list_key, 259200) # 3.days
+    Discourse.redis.sadd(ua_list_key, user_agent)
+    Discourse.redis.expire(ua_list_key, 259200) # 3.days

     perform_increment!(redis_key(user_agent), opts)
   end

@@ -34,7 +34,7 @@ class WebCrawlerRequest < ActiveRecord::Base
     date = date.to_date
     ua_list_key = user_agent_list_key(date)

-    while user_agent = $redis.spop(ua_list_key)
+    while user_agent = Discourse.redis.spop(ua_list_key)
       val = get_and_reset(redis_key(user_agent, date))

       next if val == 0

@@ -55,11 +55,11 @@ class WebCrawlerRequest < ActiveRecord::Base

     ua_list_key = user_agent_list_key(date)

-    while user_agent = $redis.spop(ua_list_key)
-      $redis.del redis_key(user_agent, date)
+    while user_agent = Discourse.redis.spop(ua_list_key)
+      Discourse.redis.del redis_key(user_agent, date)
     end

-    $redis.del(ua_list_key)
+    Discourse.redis.del(ua_list_key)
   end

   protected
@@ -122,17 +122,17 @@ class BadgeGranter
       }
     end

-    $redis.lpush queue_key, payload.to_json if payload
+    Discourse.redis.lpush queue_key, payload.to_json if payload
   end

   def self.clear_queue!
-    $redis.del queue_key
+    Discourse.redis.del queue_key
   end

   def self.process_queue!
     limit = 1000
     items = []
-    while limit > 0 && item = $redis.lpop(queue_key)
+    while limit > 0 && item = Discourse.redis.lpop(queue_key)
       items << JSON.parse(item)
       limit -= 1
     end

@@ -54,12 +54,12 @@ class GroupMessage

   def sent_recently?
     return false if @opts[:limit_once_per] == false
-    $redis.get(sent_recently_key).present?
+    Discourse.redis.get(sent_recently_key).present?
   end

   # default is to send no more than once every 24 hours (24 * 60 * 60 = 86,400 seconds)
   def remember_message_sent
-    $redis.setex(sent_recently_key, @opts[:limit_once_per].try(:to_i) || 86_400, 1) unless @opts[:limit_once_per] == false
+    Discourse.redis.setex(sent_recently_key, @opts[:limit_once_per].try(:to_i) || 86_400, 1) unless @opts[:limit_once_per] == false
   end

   def sent_recently_key
@@ -40,9 +40,9 @@ class RandomTopicSelector
     key = cache_key(category)

     if results.present?
-      $redis.multi do
-        $redis.rpush(key, results)
-        $redis.expire(key, 2.days)
+      Discourse.redis.multi do
+        Discourse.redis.rpush(key, results)
+        Discourse.redis.expire(key, 2.days)
       end
     end

@@ -56,13 +56,13 @@ class RandomTopicSelector

     return results if count < 1

-    results = $redis.multi do
-      $redis.lrange(key, 0, count - 1)
-      $redis.ltrim(key, count, -1)
+    results = Discourse.redis.multi do
+      Discourse.redis.lrange(key, 0, count - 1)
+      Discourse.redis.ltrim(key, count, -1)
     end

     if !results.is_a?(Array) # Redis is in readonly mode
-      results = $redis.lrange(key, 0, count - 1)
+      results = Discourse.redis.lrange(key, 0, count - 1)
     else
       results = results[0]
     end

@@ -80,7 +80,7 @@ class RandomTopicSelector
       results = results[0...count]
     end

-    if !backfilled && $redis.llen(key) < BACKFILL_LOW_WATER_MARK
+    if !backfilled && Discourse.redis.llen(key) < BACKFILL_LOW_WATER_MARK
       Scheduler::Defer.later("backfill") do
         backfill(category)
       end

@@ -94,7 +94,7 @@ class RandomTopicSelector
   end

   def self.clear_cache!
-    $redis.delete_prefixed(cache_key)
+    Discourse.redis.delete_prefixed(cache_key)
   end

 end
@@ -35,7 +35,7 @@ class TopicTimestampChanger
     end

     # Burst the cache for stats
-    [AdminDashboardData, About].each { |klass| $redis.del klass.stats_cache_key }
+    [AdminDashboardData, About].each { |klass| Discourse.redis.del klass.stats_cache_key }
   end

   private

@@ -243,7 +243,7 @@ module Discourse
     require 'logster/redis_store'
     # Use redis for our cache
     config.cache_store = DiscourseRedis.new_redis_store
-    $redis = DiscourseRedis.new
+    $redis = DiscourseRedis.new # rubocop:disable Style/GlobalVars
     Logster.store = Logster::RedisStore.new(DiscourseRedis.new)

     # we configure rack cache on demand in an initializer

@@ -2,5 +2,5 @@

 if Rails.env.development? && ENV['DISCOURSE_FLUSH_REDIS']
   puts "Flushing redis (development mode)"
-  $redis.flushall
+  Discourse.redis.flushall
 end
@@ -24,7 +24,7 @@ end

 MiniScheduler.configure do |config|

-  config.redis = $redis
+  config.redis = Discourse.redis

   config.job_exception_handler do |ex, context|
     Discourse.handle_job_exception(ex, context)

@@ -29,7 +29,7 @@ Thread.new do

     if old_time != time
       Rails.logger.info "attempting to reload #{$$} #{$PROGRAM_NAME} in #{wait_seconds} seconds"
-      $shutdown = true
+      $shutdown = true # rubocop:disable Style/GlobalVars
       sleep wait_seconds
       Rails.logger.info "restarting #{$$}"
       Process.kill("USR2", $$)

@@ -155,7 +155,7 @@ before_fork do |server, worker|
       sleep 10
       force_kill_rogue_sidekiq
     end
-    $redis._client.disconnect
+    Discourse.redis._client.disconnect
   end
 end

@@ -170,7 +170,7 @@ before_fork do |server, worker|

   end

-  $redis._client.disconnect
+  Discourse.redis._client.disconnect

   # Throttle the master from forking too quickly by sleeping. Due
   # to the implementation of standard Unix signal handlers, this
@@ -18,18 +18,18 @@ class CreateDigestUnsubscribeKeys < ActiveRecord::Migration[4.2]
   def migrate_redis_keys
     return if Rails.env.test?

-    temp_keys = $redis.keys('temporary_key:*')
+    temp_keys = Discourse.redis.keys('temporary_key:*')
     if temp_keys.present?
       temp_keys.map! do |key|
-        user_id = $redis.get(key).to_i
-        ttl = $redis.ttl(key).to_i
+        user_id = Discourse.redis.get(key).to_i
+        ttl = Discourse.redis.ttl(key).to_i

         if ttl > 0
           ttl = "'#{ttl.seconds.ago.strftime('%Y-%m-%d %H:%M:%S')}'"
         else
           ttl = "CURRENT_TIMESTAMP"
         end
-        $redis.del(key)
+        Discourse.redis.del(key)
         key.gsub!('temporary_key:', '')
         user_id ? "('#{key}', #{user_id}, #{ttl}, #{ttl})" : nil
       end

@@ -4,6 +4,6 @@ require "common_passwords/common_passwords"

 class ClearCommonPasswordsCache < ActiveRecord::Migration[4.2]
   def change
-    $redis.without_namespace.del CommonPasswords::LIST_KEY
+    Discourse.redis.without_namespace.del CommonPasswords::LIST_KEY
   end
 end
@@ -15,13 +15,13 @@ class AdminConfirmation
     guardian.ensure_can_grant_admin!(@target_user)

     @token = SecureRandom.hex
-    $redis.setex("admin-confirmation:#{@target_user.id}", 3.hours.to_i, @token)
+    Discourse.redis.setex("admin-confirmation:#{@target_user.id}", 3.hours.to_i, @token)

     payload = {
       target_user_id: @target_user.id,
       performed_by: @performed_by.id
     }
-    $redis.setex("admin-confirmation-token:#{@token}", 3.hours.to_i, payload.to_json)
+    Discourse.redis.setex("admin-confirmation-token:#{@token}", 3.hours.to_i, payload.to_json)

     Jobs.enqueue(
       :admin_confirmation_email,

@@ -38,16 +38,16 @@ class AdminConfirmation

     @target_user.grant_admin!
     StaffActionLogger.new(@performed_by).log_grant_admin(@target_user)
-    $redis.del "admin-confirmation:#{@target_user.id}"
-    $redis.del "admin-confirmation-token:#{@token}"
+    Discourse.redis.del "admin-confirmation:#{@target_user.id}"
+    Discourse.redis.del "admin-confirmation-token:#{@token}"
   end

   def self.exists_for?(user_id)
-    $redis.exists "admin-confirmation:#{user_id}"
+    Discourse.redis.exists "admin-confirmation:#{user_id}"
   end

   def self.find_by_code(token)
-    json = $redis.get("admin-confirmation-token:#{token}")
+    json = Discourse.redis.get("admin-confirmation-token:#{token}")
     return nil unless json

     parsed = JSON.parse(json)

@@ -30,7 +30,7 @@ class Auth::DefaultCurrentUserProvider

     # bypass if we have the shared session header
     if shared_key = @env['HTTP_X_SHARED_SESSION_KEY']
-      uid = $redis.get("shared_session_key_#{shared_key}")
+      uid = Discourse.redis.get("shared_session_key_#{shared_key}")
       user = nil
       if uid
         user = User.find_by(id: uid.to_i)

@@ -87,7 +87,7 @@ class Auth::OpenIdAuthenticator < Auth::Authenticator
     omniauth.provider :open_id,
       setup: lambda { |env|
         strategy = env["omniauth.strategy"]
-        strategy.options[:store] = OpenID::Store::Redis.new($redis)
+        strategy.options[:store] = OpenID::Store::Redis.new(Discourse.redis)

         # Add CSRF protection in addition to OpenID Specification
         def strategy.query_string
@@ -36,21 +36,21 @@ module BackupRestore
   end

   def self.mark_as_running!
-    $redis.setex(running_key, 60, "1")
+    Discourse.redis.setex(running_key, 60, "1")
     save_start_logs_message_id
     keep_it_running
   end

   def self.is_operation_running?
-    !!$redis.get(running_key)
+    !!Discourse.redis.get(running_key)
   end

   def self.mark_as_not_running!
-    $redis.del(running_key)
+    Discourse.redis.del(running_key)
   end

   def self.should_shutdown?
-    !!$redis.get(shutdown_signal_key)
+    !!Discourse.redis.get(shutdown_signal_key)
   end

   def self.can_rollback?

@@ -128,7 +128,7 @@ module BackupRestore
     Thread.new do
       # this thread will be killed when the fork dies
       while true
-        $redis.expire(running_key, 1.minute)
+        Discourse.redis.expire(running_key, 1.minute)
         sleep 30.seconds
       end
     end

@@ -139,20 +139,20 @@ module BackupRestore
   end

   def self.set_shutdown_signal!
-    $redis.set(shutdown_signal_key, "1")
+    Discourse.redis.set(shutdown_signal_key, "1")
   end

   def self.clear_shutdown_signal!
-    $redis.del(shutdown_signal_key)
+    Discourse.redis.del(shutdown_signal_key)
   end

   def self.save_start_logs_message_id
     id = MessageBus.last_id(LOGS_CHANNEL)
-    $redis.set(start_logs_message_id_key, id)
+    Discourse.redis.set(start_logs_message_id_key, id)
   end

   def self.start_logs_message_id
-    $redis.get(start_logs_message_id_key).to_i
+    Discourse.redis.get(start_logs_message_id_key).to_i
   end

   def self.start_logs_message_id_key
@@ -5,7 +5,7 @@
 # This is a bottom up implementation of ActiveSupport::Cache::Store
 # this allows us to cleanly implement without using cache entries and version
 # support which we do not use, in tern this makes the cache as fast as simply
-# using `$redis.setex` with a more convenient API
+# using `Discourse.redis.setex` with a more convenient API
 #
 # It only implements a subset of ActiveSupport::Cache::Store as we make no use
 # of large parts of the interface.

@@ -33,7 +33,7 @@ class Cache
   end

   def redis
-    $redis
+    Discourse.redis
   end

   def reconnect

@@ -9,7 +9,7 @@
 # If the password file is changed, you need to add a migration that deletes the list from redis
 # so it gets re-populated:
 #
-#   $redis.without_namespace.del CommonPasswords::LIST_KEY
+#   Discourse.redis.without_namespace.del CommonPasswords::LIST_KEY

 class CommonPasswords

@@ -39,7 +39,7 @@ class CommonPasswords
   end

   def self.redis
-    $redis.without_namespace
+    Discourse.redis.without_namespace
   end

   def self.load_passwords
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+# rubocop:disable Style/GlobalVars

 require 'cache'
 require 'open3'

@@ -378,9 +379,9 @@ module Discourse

   def self.enable_readonly_mode(key = READONLY_MODE_KEY)
     if key == USER_READONLY_MODE_KEY
-      $redis.set(key, 1)
+      Discourse.redis.set(key, 1)
     else
-      $redis.setex(key, READONLY_MODE_KEY_TTL, 1)
+      Discourse.redis.setex(key, READONLY_MODE_KEY_TTL, 1)
       keep_readonly_mode(key) if !Rails.env.test?
     end

@@ -406,7 +407,7 @@ module Discourse
     @mutex.synchronize do
       @dbs.each do |db|
         RailsMultisite::ConnectionManagement.with_connection(db) do
-          if !$redis.expire(key, READONLY_MODE_KEY_TTL)
+          if !Discourse.redis.expire(key, READONLY_MODE_KEY_TTL)
             @dbs.delete(db)
           end
         end

@@ -419,18 +420,18 @@ module Discourse
   end

   def self.disable_readonly_mode(key = READONLY_MODE_KEY)
-    $redis.del(key)
+    Discourse.redis.del(key)
     MessageBus.publish(readonly_channel, false)
     Site.clear_anon_cache!
     true
   end

   def self.readonly_mode?(keys = READONLY_KEYS)
-    recently_readonly? || $redis.mget(*keys).compact.present?
+    recently_readonly? || Discourse.redis.mget(*keys).compact.present?
   end

   def self.pg_readonly_mode?
-    $redis.get(PG_READONLY_MODE_KEY).present?
+    Discourse.redis.get(PG_READONLY_MODE_KEY).present?
   end

   # Shared between processes
@@ -444,23 +445,23 @@ module Discourse
   end

   def self.recently_readonly?
-    postgres_read_only = postgres_last_read_only[$redis.namespace]
-    redis_read_only = redis_last_read_only[$redis.namespace]
+    postgres_read_only = postgres_last_read_only[Discourse.redis.namespace]
+    redis_read_only = redis_last_read_only[Discourse.redis.namespace]

     (redis_read_only.present? && redis_read_only > 15.seconds.ago) ||
       (postgres_read_only.present? && postgres_read_only > 15.seconds.ago)
   end

   def self.received_postgres_readonly!
-    postgres_last_read_only[$redis.namespace] = Time.zone.now
+    postgres_last_read_only[Discourse.redis.namespace] = Time.zone.now
   end

   def self.received_redis_readonly!
-    redis_last_read_only[$redis.namespace] = Time.zone.now
+    redis_last_read_only[Discourse.redis.namespace] = Time.zone.now
   end

   def self.clear_readonly!
-    postgres_last_read_only[$redis.namespace] = redis_last_read_only[$redis.namespace] = nil
+    postgres_last_read_only[Discourse.redis.namespace] = redis_last_read_only[Discourse.redis.namespace] = nil
     Site.clear_anon_cache!
     true
   end

@@ -491,7 +492,7 @@ module Discourse
       begin
         git_cmd = 'git rev-parse HEAD'
         self.try_git(git_cmd, Discourse::VERSION::STRING)
-      end
+      end # rubocop:disable Style/GlobalVars
   end

   def self.git_branch

@@ -589,7 +590,7 @@ module Discourse
     # note: some of this reconnecting may no longer be needed per https://github.com/redis/redis-rb/pull/414
     MessageBus.after_fork
     SiteSetting.after_fork
-    $redis._client.reconnect
+    Discourse.redis._client.reconnect
     Rails.cache.reconnect
     Discourse.cache.reconnect
     Logster.store.redis.reconnect
@@ -737,10 +738,10 @@ module Discourse
     digest = Digest::MD5.hexdigest(warning)
     redis_key = "deprecate-notice-#{digest}"

-    if !$redis.without_namespace.get(redis_key)
+    if !Discourse.redis.without_namespace.get(redis_key)
       Rails.logger.warn(warning)
       begin
-        $redis.without_namespace.setex(redis_key, 3600, "x")
+        Discourse.redis.without_namespace.setex(redis_key, 3600, "x")
       rescue Redis::CommandError => e
         raise unless e.message =~ /READONLY/
       end

@@ -832,4 +833,10 @@ module Discourse
   ensure
     @preloaded_rails = true
   end
+
+  def self.redis
+    $redis
+  end
 end
+
+# rubocop:enable Style/GlobalVars

@@ -14,7 +14,7 @@ module DiscourseHub
   end

   def self.stats_fetched_at=(time_with_zone)
-    $redis.set STATS_FETCHED_AT_KEY, time_with_zone.to_i
+    Discourse.redis.set STATS_FETCHED_AT_KEY, time_with_zone.to_i
   end

   def self.get_payload

@@ -102,7 +102,7 @@ module DiscourseHub
   end

   def self.stats_fetched_at
-    t = $redis.get(STATS_FETCHED_AT_KEY)
+    t = Discourse.redis.get(STATS_FETCHED_AT_KEY)
     t ? Time.zone.at(t.to_i) : 1.year.ago
   end

@@ -262,7 +262,7 @@ class DiscourseRedis

   def delete_prefixed(prefix)
     DiscourseRedis.ignore_readonly do
-      keys("#{prefix}*").each { |k| $redis.del(k) }
+      keys("#{prefix}*").each { |k| Discourse.redis.del(k) }
     end
   end
@@ -58,42 +58,42 @@ module DiscourseUpdates

     # last_installed_version is the installed version at the time of the last version check
     def last_installed_version
-      $redis.get last_installed_version_key
+      Discourse.redis.get last_installed_version_key
     end

     def latest_version
-      $redis.get latest_version_key
+      Discourse.redis.get latest_version_key
     end

     def missing_versions_count
-      $redis.get(missing_versions_count_key).try(:to_i)
+      Discourse.redis.get(missing_versions_count_key).try(:to_i)
     end

     def critical_updates_available?
-      ($redis.get(critical_updates_available_key) || false) == 'true'
+      (Discourse.redis.get(critical_updates_available_key) || false) == 'true'
     end

     def updated_at
-      t = $redis.get(updated_at_key)
+      t = Discourse.redis.get(updated_at_key)
       t ? Time.zone.parse(t) : nil
     end

     def updated_at=(time_with_zone)
-      $redis.set updated_at_key, time_with_zone.as_json
+      Discourse.redis.set updated_at_key, time_with_zone.as_json
     end

     ['last_installed_version', 'latest_version', 'missing_versions_count', 'critical_updates_available'].each do |name|
       eval "define_method :#{name}= do |arg|
-        $redis.set #{name}_key, arg
+        Discourse.redis.set #{name}_key, arg
       end"
     end

     def missing_versions=(versions)
       # delete previous list from redis
-      prev_keys = $redis.lrange(missing_versions_list_key, 0, 4)
+      prev_keys = Discourse.redis.lrange(missing_versions_list_key, 0, 4)
       if prev_keys
-        $redis.del prev_keys
-        $redis.del(missing_versions_list_key)
+        Discourse.redis.del prev_keys
+        Discourse.redis.del(missing_versions_list_key)
       end

       if versions.present?

@@ -101,18 +101,18 @@ module DiscourseUpdates
         version_keys = []
         versions[0, 5].each do |v|
           key = "#{missing_versions_key_prefix}:#{v['version']}"
-          $redis.mapped_hmset key, v
+          Discourse.redis.mapped_hmset key, v
           version_keys << key
         end
-        $redis.rpush missing_versions_list_key, version_keys
+        Discourse.redis.rpush missing_versions_list_key, version_keys
       end

       versions || []
     end

     def missing_versions
-      keys = $redis.lrange(missing_versions_list_key, 0, 4) # max of 5 versions
-      keys.present? ? keys.map { |k| $redis.hgetall(k) } : []
+      keys = Discourse.redis.lrange(missing_versions_list_key, 0, 4) # max of 5 versions
+      keys.present? ? keys.map { |k| Discourse.redis.hgetall(k) } : []
     end

     private
@@ -8,7 +8,7 @@ class DistributedMemoizer

   # memoize a key across processes and machines
   def self.memoize(key, duration = 60 * 60 * 24, redis = nil)
-    redis ||= $redis
+    redis ||= Discourse.redis

     redis_key = self.redis_key(key)

@@ -50,7 +50,7 @@ class DistributedMemoizer

   # Used for testing
   def self.flush!
-    $redis.scan_each(match: "memoize_*").each { |key| $redis.del(key) }
+    Discourse.redis.scan_each(match: "memoize_*").each { |key| Discourse.redis.del(key) }
   end

   protected

@@ -17,7 +17,7 @@ class DistributedMutex
   def initialize(key, redis: nil, validity: DEFAULT_VALIDITY)
     @key = key
     @using_global_redis = true if !redis
-    @redis = redis || $redis
+    @redis = redis || Discourse.redis
     @mutex = Mutex.new
     @validity = validity
   end

@@ -119,8 +119,8 @@ module Email

     key = "rejection_email:#{email}:#{type}:#{Date.today}"

-    if $redis.setnx(key, "1")
-      $redis.expire(key, 25.hours)
+    if Discourse.redis.setnx(key, "1")
+      Discourse.redis.expire(key, 25.hours)
       true
     else
       false
@@ -12,16 +12,16 @@ class EmailBackupToken

   def self.set(user_id)
     token = self.generate
-    $redis.setex self.key(user_id), 1.day.to_i, token
+    Discourse.redis.setex self.key(user_id), 1.day.to_i, token
     token
   end

   def self.get(user_id)
-    $redis.get self.key(user_id)
+    Discourse.redis.get self.key(user_id)
   end

   def self.del(user_id)
-    $redis.del self.key(user_id)
+    Discourse.redis.del self.key(user_id)
   end

   def self.compare(user_id, token)

@@ -11,17 +11,17 @@ class FinalDestination

   def self.clear_https_cache!(domain)
     key = redis_https_key(domain)
-    $redis.without_namespace.del(key)
+    Discourse.redis.without_namespace.del(key)
   end

   def self.cache_https_domain(domain)
     key = redis_https_key(domain)
-    $redis.without_namespace.setex(key, "1", 1.day.to_i).present?
+    Discourse.redis.without_namespace.setex(key, "1", 1.day.to_i).present?
   end

   def self.is_https_domain?(domain)
     key = redis_https_key(domain)
-    $redis.without_namespace.get(key).present?
+    Discourse.redis.without_namespace.get(key).present?
   end

   def self.redis_https_key(domain)
@@ -177,8 +177,8 @@ module Middleware
     end

     def cached(env = {})
-      if body = decompress($redis.get(cache_key_body))
-        if other = $redis.get(cache_key_other)
+      if body = decompress(Discourse.redis.get(cache_key_body))
+        if other = Discourse.redis.get(cache_key_other)
           other = JSON.parse(other)
           if req_params = other[1].delete(ADP)
             env[ADP] = req_params

@@ -203,7 +203,7 @@ module Middleware
       if status == 200 && cache_duration

         if GlobalSetting.anon_cache_store_threshold > 1
-          count = $redis.eval(<<~REDIS, [cache_key_count], [cache_duration])
+          count = Discourse.redis.eval(<<~REDIS, [cache_key_count], [cache_duration])
             local current = redis.call("incr", KEYS[1])
             redis.call("expire",KEYS[1],ARGV[1])
             return current

@@ -231,8 +231,8 @@ module Middleware
           }
         end

-        $redis.setex(cache_key_body, cache_duration, compress(parts.join))
-        $redis.setex(cache_key_other, cache_duration, [status, headers_stripped].to_json)
+        Discourse.redis.setex(cache_key_body, cache_duration, compress(parts.join))
+        Discourse.redis.setex(cache_key_other, cache_duration, [status, headers_stripped].to_json)

         headers["X-Discourse-Cached"] = "store"
       else

@@ -243,8 +243,8 @@ module Middleware
     end

     def clear_cache
-      $redis.del(cache_key_body)
-      $redis.del(cache_key_other)
+      Discourse.redis.del(cache_key_body)
+      Discourse.redis.del(cache_key_other)
     end

   end
@@ -122,15 +122,15 @@ module Oneboxer
   end

   def self.is_previewing?(user_id)
-    $redis.get(preview_key(user_id)) == "1"
+    Discourse.redis.get(preview_key(user_id)) == "1"
   end

   def self.preview_onebox!(user_id)
-    $redis.setex(preview_key(user_id), 1.minute, "1")
+    Discourse.redis.setex(preview_key(user_id), 1.minute, "1")
   end

   def self.onebox_previewed!(user_id)
-    $redis.del(preview_key(user_id))
+    Discourse.redis.del(preview_key(user_id))
   end

   def self.engine(url)

@@ -265,11 +265,11 @@ class PostRevisor
   end

   def cached_original_raw
-    @cached_original_raw ||= $redis.get(original_raw_key)
+    @cached_original_raw ||= Discourse.redis.get(original_raw_key)
   end

   def cached_original_cooked
-    @cached_original_cooked ||= $redis.get(original_cooked_key)
+    @cached_original_cooked ||= Discourse.redis.get(original_cooked_key)
   end

   def original_raw

@@ -278,12 +278,12 @@ class PostRevisor

   def original_raw=(val)
     @cached_original_raw = val
-    $redis.setex(original_raw_key, SiteSetting.editing_grace_period + 1, val)
+    Discourse.redis.setex(original_raw_key, SiteSetting.editing_grace_period + 1, val)
   end

   def original_cooked=(val)
     @cached_original_cooked = val
-    $redis.setex(original_cooked_key, SiteSetting.editing_grace_period + 1, val)
+    Discourse.redis.setex(original_cooked_key, SiteSetting.editing_grace_period + 1, val)
   end

   def diff_size(before, after)
@@ -24,12 +24,12 @@ class RateLimiter

   # Only used in test, only clears current namespace, does not clear globals
   def self.clear_all!
-    $redis.delete_prefixed(RateLimiter.key_prefix)
+    Discourse.redis.delete_prefixed(RateLimiter.key_prefix)
   end

   def self.clear_all_global!
-    $redis.without_namespace.keys("GLOBAL::#{key_prefix}*").each do |k|
-      $redis.without_namespace.del k
+    Discourse.redis.without_namespace.keys("GLOBAL::#{key_prefix}*").each do |k|
+      Discourse.redis.without_namespace.del k
     end
   end

@@ -119,12 +119,12 @@ class RateLimiter
     if @global
       "GLOBAL::#{key}"
     else
-      $redis.namespace_key(key)
+      Discourse.redis.namespace_key(key)
     end
   end

   def redis
-    $redis.without_namespace
+    Discourse.redis.without_namespace
   end

   def seconds_to_wait
@@ -16,23 +16,23 @@ class SecureSession

   def set(key, val, expires: nil)
     expires ||= SecureSession.expiry
-    $redis.setex(prefixed_key(key), expires.to_i, val.to_s)
+    Discourse.redis.setex(prefixed_key(key), expires.to_i, val.to_s)
     true
   end

   def ttl(key)
-    $redis.ttl(prefixed_key(key))
+    Discourse.redis.ttl(prefixed_key(key))
   end

   def [](key)
-    $redis.get(prefixed_key(key))
+    Discourse.redis.get(prefixed_key(key))
   end

   def []=(key, val)
     if val == nil
-      $redis.del(prefixed_key(key))
+      Discourse.redis.del(prefixed_key(key))
     else
-      $redis.setex(prefixed_key(key), SecureSession.expiry.to_i, val.to_s)
+      Discourse.redis.setex(prefixed_key(key), SecureSession.expiry.to_i, val.to_s)
     end
     val
   end
@@ -12,13 +12,13 @@ class SidekiqPauser
   end

   def pause!(value = "paused")
-    $redis.setex PAUSED_KEY, TTL, value
+    Discourse.redis.setex PAUSED_KEY, TTL, value
     extend_lease_thread
     true
   end

   def paused?
-    !!$redis.get(PAUSED_KEY)
+    !!Discourse.redis.get(PAUSED_KEY)
   end

   def unpause_all!

@@ -48,7 +48,7 @@ class SidekiqPauser
      stop_extend_lease_thread if @dbs.size == 0
    end

-    $redis.del(PAUSED_KEY)
+    Discourse.redis.del(PAUSED_KEY)
    true
  end

@@ -83,7 +83,7 @@ class SidekiqPauser
     @mutex.synchronize do
       @dbs.each do |db|
         RailsMultisite::ConnectionManagement.with_connection(db) do
-          if !$redis.expire(PAUSED_KEY, TTL)
+          if !Discourse.redis.expire(PAUSED_KEY, TTL)
             # if it was unpaused in another process we got to remove the
             # bad key
             @dbs.delete(db)
@@ -7,11 +7,11 @@ module Stylesheet
     REDIS_KEY = "dev_last_used_theme_id"

     def self.theme_id=(v)
-      $redis.set(REDIS_KEY, v)
+      Discourse.redis.set(REDIS_KEY, v)
     end

     def self.theme_id
-      ($redis.get(REDIS_KEY) || SiteSetting.default_theme_id).to_i
+      (Discourse.redis.get(REDIS_KEY) || SiteSetting.default_theme_id).to_i
     end

     def self.watch(paths = nil)

@@ -9,7 +9,7 @@ task 'redis:clean_up' => ['environment'] do
   regexp = /((\$(?<message_bus>\w+)$)|(^?(?<namespace>\w+):))/

   cursor = 0
-  redis = $redis.without_namespace
+  redis = Discourse.redis.without_namespace

   loop do
     cursor, keys = redis.scan(cursor)
@@ -161,11 +161,11 @@ class Typepad < Thor

       if options[:google_api] && comment[:author] =~ /plus.google.com\/(\d+)/
         gplus_id = Regexp.last_match[1]
-        from_redis = $redis.get("gplus:#{gplus_id}")
+        from_redis = Discourse.redis.get("gplus:#{gplus_id}")
         if from_redis.blank?
           json = ::JSON.parse(open("https://www.googleapis.com/plus/v1/people/#{gplus_id}?key=#{options[:google_api]}").read)
           from_redis = json['displayName']
-          $redis.set("gplus:#{gplus_id}", from_redis)
+          Discourse.redis.set("gplus:#{gplus_id}", from_redis)
         end
         comment[:author] = from_redis
       end

@@ -184,11 +184,11 @@ class Typepad < Thor

       if comment[:author] =~ /www.facebook.com\/profile.php\?id=(\d+)/
         fb_id = Regexp.last_match[1]
-        from_redis = $redis.get("fb:#{fb_id}")
+        from_redis = Discourse.redis.get("fb:#{fb_id}")
         if from_redis.blank?
           json = ::JSON.parse(open("http://graph.facebook.com/#{fb_id}").read)
           from_redis = json['username']
-          $redis.set("fb:#{fb_id}", from_redis)
+          Discourse.redis.set("fb:#{fb_id}", from_redis)
         end
         comment[:author] = from_redis
       end

@@ -24,8 +24,8 @@ class TopicRetriever

     # Throttle other users to once every 60 seconds
     retrieved_key = "retrieved_topic"
-    if $redis.setnx(retrieved_key, "1")
-      $redis.expire(retrieved_key, 60)
+    if Discourse.redis.setnx(retrieved_key, "1")
+      Discourse.redis.expire(retrieved_key, 60)
       return false
     end
@@ -51,15 +51,15 @@ module DiscourseNarrativeBot

       key = "#{DiscourseNarrativeBot::PLUGIN_NAME}:reset-rate-limit:#{post.topic_id}:#{data['state']}"

-      if !(count = $redis.get(key))
+      if !(count = Discourse.redis.get(key))
         count = 0
-        $redis.setex(key, duration, count)
+        Discourse.redis.setex(key, duration, count)
       end

       if count.to_i < 2
         post.default_rate_limiter.rollback!
         post.limit_posts_per_day&.rollback!
-        $redis.incr(key)
+        Discourse.redis.incr(key)
       end
     end

@@ -53,7 +53,7 @@ module DiscourseNarrativeBot
       if previous_status && data[:attempted] == previous_status && !data[:skip_attempted]
        generic_replies(klass.reset_trigger, state)
      else
-        $redis.del(generic_replies_key(@user))
+        Discourse.redis.del(generic_replies_key(@user))
      end

      Store.set(@user.id, data)

@@ -143,14 +143,14 @@ module DiscourseNarrativeBot

       if public_reply?
         key = "#{PUBLIC_DISPLAY_BOT_HELP_KEY}:#{@post.topic_id}"
-        last_bot_help_post_number = $redis.get(key)
+        last_bot_help_post_number = Discourse.redis.get(key)

         if !last_bot_help_post_number ||
           (last_bot_help_post_number &&
           @post.post_number - 10 > last_bot_help_post_number.to_i &&
-          (1.day.to_i - $redis.ttl(key)) > 6.hours.to_i)
+          (1.day.to_i - Discourse.redis.ttl(key)) > 6.hours.to_i)

-          $redis.setex(key, 1.day.to_i, @post.post_number)
+          Discourse.redis.setex(key, 1.day.to_i, @post.post_number)
           message
         end
       else

@@ -190,7 +190,7 @@ module DiscourseNarrativeBot
     def generic_replies(track_reset_trigger, state = nil)
       reset_trigger = "#{self.class.reset_trigger} #{track_reset_trigger}"
       key = generic_replies_key(@user)
-      count = ($redis.get(key) || $redis.setex(key, 900, 0)).to_i
+      count = (Discourse.redis.get(key) || Discourse.redis.setex(key, 900, 0)).to_i

       case count
       when 0

@@ -210,7 +210,7 @@ module DiscourseNarrativeBot
         # Stay out of the user's way
       end

-      $redis.incr(key)
+      Discourse.redis.incr(key)
     end

     def self.i18n_key(key)
@@ -243,7 +243,7 @@ describe DiscourseNarrativeBot::TrackSelector do

 context 'generic replies' do
 after do
-$redis.del("#{described_class::GENERIC_REPLIES_COUNT_PREFIX}#{user.id}")
+Discourse.redis.del("#{described_class::GENERIC_REPLIES_COUNT_PREFIX}#{user.id}")
 end

 it 'should create the right generic do not understand responses' do

@@ -472,17 +472,17 @@ describe DiscourseNarrativeBot::TrackSelector do
 let(:post) { Fabricate(:post, topic: topic) }

 after do
-$redis.flushall
+Discourse.redis.flushall
 end

 describe 'when random reply massage has been displayed in the last 6 hours' do
 it 'should not do anything' do
-$redis.set(
+Discourse.redis.set(
 "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}",
 post.post_number - 11
 )

-$redis.class.any_instance.expects(:ttl).returns(19.hours.to_i)
+Discourse.redis.class.any_instance.expects(:ttl).returns(19.hours.to_i)

 user
 post.update!(raw: "Show me what you can do @discobot")

@@ -494,12 +494,12 @@ describe DiscourseNarrativeBot::TrackSelector do

 describe 'when random reply message has not been displayed in the last 6 hours' do
 it 'should create the right reply' do
-$redis.set(
+Discourse.redis.set(
 "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}",
 post.post_number - 11
 )

-$redis.class.any_instance.expects(:ttl).returns(7.hours.to_i)
+Discourse.redis.class.any_instance.expects(:ttl).returns(7.hours.to_i)

 user
 post.update!(raw: "Show me what you can do @discobot")

@@ -515,7 +515,7 @@ describe DiscourseNarrativeBot::TrackSelector do
 described_class.new(:reply, user, post_id: other_post.id).select
 expect(Post.last.raw).to eq(random_mention_reply)

-expect($redis.get(
+expect(Discourse.redis.get(
 "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}"
 ).to_i).to eq(other_post.post_number.to_i)

@@ -36,20 +36,20 @@ after_initialize do
 # return true if a key was added
 def self.add(type, id, user_id)
 key = get_redis_key(type, id)
-result = $redis.hset(key, user_id, Time.zone.now)
-$redis.expire(key, MAX_BACKLOG_AGE)
+result = Discourse.redis.hset(key, user_id, Time.zone.now)
+Discourse.redis.expire(key, MAX_BACKLOG_AGE)
 result
 end

 # return true if a key was deleted
 def self.remove(type, id, user_id)
 key = get_redis_key(type, id)
-$redis.expire(key, MAX_BACKLOG_AGE)
-$redis.hdel(key, user_id) > 0
+Discourse.redis.expire(key, MAX_BACKLOG_AGE)
+Discourse.redis.hdel(key, user_id) > 0
 end

 def self.get_users(type, id)
-user_ids = $redis.hkeys(get_redis_key(type, id)).map(&:to_i)
+user_ids = Discourse.redis.hkeys(get_redis_key(type, id)).map(&:to_i)
 User.where(id: user_ids)
 end

@@ -88,7 +88,7 @@ after_initialize do
 has_changed = false

 # Delete entries older than 20 seconds
-hash = $redis.hgetall(get_redis_key(type, id))
+hash = Discourse.redis.hgetall(get_redis_key(type, id))
 hash.each do |user_id, time|
 if Time.zone.now - Time.parse(time) >= 20
 has_changed |= remove(type, id, user_id)

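The presence plugin above keeps "who is currently here" in a Redis hash per resource and lets the whole hash expire; a self-contained sketch of that idea follows (the key layout, 60-second TTL, and 20-second cutoff are assumptions for illustration, not the plugin's exact values):

# Sketch: per-resource presence stored in a Redis hash with a rolling TTL.
require "redis"

PRESENCE_TTL = 60      # seconds the hash survives without updates (assumed)
PRESENCE_CUTOFF = 20   # seconds after which an entry counts as gone (assumed)

def touch_presence(redis, resource_key, user_id)
  redis.hset(resource_key, user_id, Time.now.to_f)
  redis.expire(resource_key, PRESENCE_TTL)
end

def present_user_ids(redis, resource_key)
  now = Time.now.to_f
  redis.hgetall(resource_key)
       .select { |_id, seen_at| now - seen_at.to_f < PRESENCE_CUTOFF }
       .keys
       .map(&:to_i)
end
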
@@ -13,10 +13,10 @@ describe ::Presence::PresenceManager do
 let(:post2) { Fabricate(:post) }

 after(:each) do
-$redis.del("presence:topic:#{post1.topic.id}")
-$redis.del("presence:topic:#{post2.topic.id}")
-$redis.del("presence:post:#{post1.id}")
-$redis.del("presence:post:#{post2.id}")
+Discourse.redis.del("presence:topic:#{post1.topic.id}")
+Discourse.redis.del("presence:topic:#{post2.topic.id}")
+Discourse.redis.del("presence:post:#{post1.id}")
+Discourse.redis.del("presence:post:#{post2.id}")
 end

 it 'adds, removes and lists users correctly' do

@@ -17,10 +17,10 @@ describe ::Presence::PresencesController do
 let(:manager) { ::Presence::PresenceManager }

 after do
-$redis.del("presence:topic:#{post1.topic.id}")
-$redis.del("presence:topic:#{post2.topic.id}")
-$redis.del("presence:post:#{post1.id}")
-$redis.del("presence:post:#{post2.id}")
+Discourse.redis.del("presence:topic:#{post1.topic.id}")
+Discourse.redis.del("presence:topic:#{post2.topic.id}")
+Discourse.redis.del("presence:post:#{post1.id}")
+Discourse.redis.del("presence:post:#{post2.id}")
 end

 context 'when not logged in' do

script/benchmarks/cache/bench.rb

@@ -7,14 +7,14 @@ Benchmark.ips do |x|

 x.report("redis setex string") do |times|
 while times > 0
-$redis.setex("test_key", 60, "test")
+Discourse.redis.setex("test_key", 60, "test")
 times -= 1
 end
 end

 x.report("redis setex marshal string") do |times|
 while times > 0
-$redis.setex("test_keym", 60, Marshal.dump("test"))
+Discourse.redis.setex("test_keym", 60, Marshal.dump("test"))
 times -= 1
 end
 end

@@ -39,14 +39,14 @@ end
 Benchmark.ips do |x|
 x.report("redis get string") do |times|
 while times > 0
-$redis.get("test_key")
+Discourse.redis.get("test_key")
 times -= 1
 end
 end

 x.report("redis get string marshal") do |times|
 while times > 0
-Marshal.load($redis.get("test_keym"))
+Marshal.load(Discourse.redis.get("test_keym"))
 times -= 1
 end
 end

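The benchmark script above uses the benchmark-ips gem's manual-timing form, where each report block receives an iteration count and loops itself; the simpler block form is often enough, as in this hedged sketch with placeholder workloads:

# Sketch of a benchmark-ips comparison; the two reports are placeholders.
require "benchmark/ips"

Benchmark.ips do |x|
  x.report("plain string")       { "test".dup }
  x.report("marshal round trip") { Marshal.load(Marshal.dump("test")) }
  x.compare!
end
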
@@ -2,7 +2,7 @@

 require File.expand_path("../../config/environment", __FILE__)

-@redis = $redis.without_namespace
+@redis = Discourse.redis.without_namespace

 stats = {}

@@ -41,7 +41,7 @@ describe ActiveRecord::ConnectionHandling do
 ActiveRecord::Base.clear_all_connections!
 ActiveRecord::Base.establish_connection

-$redis.flushall
+Discourse.redis.flushall
 end

 describe "#postgresql_fallback_connection" do

@@ -399,7 +399,7 @@ describe Auth::DefaultCurrentUserProvider do
 end

 after do
-$redis.flushall
+Discourse.redis.flushall
 end

 it "should not update last seen for suspended users" do

@@ -416,7 +416,7 @@ describe Auth::DefaultCurrentUserProvider do
 u.suspended_till = 1.year.from_now
 u.save!

-$redis.del("user:#{user.id}:#{Time.now.to_date}")
+Discourse.redis.del("user:#{user.id}:#{Time.now.to_date}")
 provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}")
 expect(provider2.current_user).to eq(nil)

@@ -32,12 +32,12 @@ describe Cache do
 end

 it "can be cleared" do
-$redis.set("boo", "boo")
+Discourse.redis.set("boo", "boo")
 cache.write("hello0", "world")
 cache.write("hello1", "world")
 cache.clear

-expect($redis.get("boo")).to eq("boo")
+expect(Discourse.redis.get("boo")).to eq("boo")
 expect(cache.read("hello0")).to eq(nil)
 end

@@ -64,13 +64,13 @@ describe Cache do
 "bob"
 end

-expect($redis.ttl(key)).to be_within(2.seconds).of(1.minute)
+expect(Discourse.redis.ttl(key)).to be_within(2.seconds).of(1.minute)

 # we always expire withing a day
 cache.fetch("bla") { "hi" }

 key = cache.normalize_key("bla")
-expect($redis.ttl(key)).to be_within(2.seconds).of(1.day)
+expect(Discourse.redis.ttl(key)).to be_within(2.seconds).of(1.day)
 end

 it "can store and fetch correctly" do

@@ -101,12 +101,12 @@ describe DiscourseRedis do
 it 'should check the status of the master server' do
 begin
 fallback_handler.master = false
-$redis.without_namespace.expects(:set).raises(Redis::CommandError.new("READONLY"))
+Discourse.redis.without_namespace.expects(:set).raises(Redis::CommandError.new("READONLY"))
 fallback_handler.expects(:verify_master).once
-$redis.set('test', '1')
+Discourse.redis.set('test', '1')
 ensure
 fallback_handler.master = true
-$redis.del('test')
+Discourse.redis.del('test')
 end
 end
 end

@@ -185,21 +185,21 @@ describe Discourse do
 let(:user_readonly_mode_key) { Discourse::USER_READONLY_MODE_KEY }

 after do
-$redis.del(readonly_mode_key)
-$redis.del(user_readonly_mode_key)
+Discourse.redis.del(readonly_mode_key)
+Discourse.redis.del(user_readonly_mode_key)
 end

 def assert_readonly_mode(message, key, ttl = -1)
 expect(message.channel).to eq(Discourse.readonly_channel)
 expect(message.data).to eq(true)
-expect($redis.get(key)).to eq("1")
-expect($redis.ttl(key)).to eq(ttl)
+expect(Discourse.redis.get(key)).to eq("1")
+expect(Discourse.redis.ttl(key)).to eq(ttl)
 end

 def assert_readonly_mode_disabled(message, key)
 expect(message.channel).to eq(Discourse.readonly_channel)
 expect(message.data).to eq(false)
-expect($redis.get(key)).to eq(nil)
+expect(Discourse.redis.get(key)).to eq(nil)
 end

 def get_readonly_message

@@ -217,14 +217,14 @@ describe Discourse do

 describe ".enable_readonly_mode" do
 it "adds a key in redis and publish a message through the message bus" do
-expect($redis.get(readonly_mode_key)).to eq(nil)
+expect(Discourse.redis.get(readonly_mode_key)).to eq(nil)
 message = get_readonly_message { Discourse.enable_readonly_mode }
 assert_readonly_mode(message, readonly_mode_key, readonly_mode_ttl)
 end

 context 'user enabled readonly mode' do
 it "adds a key in redis and publish a message through the message bus" do
-expect($redis.get(user_readonly_mode_key)).to eq(nil)
+expect(Discourse.redis.get(user_readonly_mode_key)).to eq(nil)
 message = get_readonly_message { Discourse.enable_readonly_mode(user_readonly_mode_key) }
 assert_readonly_mode(message, user_readonly_mode_key)
 end

@@ -252,7 +252,7 @@ describe Discourse do
 end

 it "returns true when the key is present in redis" do
-$redis.set(readonly_mode_key, 1)
+Discourse.redis.set(readonly_mode_key, 1)
 expect(Discourse.readonly_mode?).to eq(true)
 end

@@ -5,9 +5,9 @@ require 'rails_helper'
 describe DistributedMemoizer do

 before do
-$redis.del(DistributedMemoizer.redis_key("hello"))
-$redis.del(DistributedMemoizer.redis_lock_key("hello"))
-$redis.unwatch
+Discourse.redis.del(DistributedMemoizer.redis_key("hello"))
+Discourse.redis.del(DistributedMemoizer.redis_lock_key("hello"))
+Discourse.redis.unwatch
 end

 # NOTE we could use a mock redis here, but I think it makes sense to test the real thing

@@ -6,7 +6,7 @@ describe DistributedMutex do
 let(:key) { "test_mutex_key" }

 after do
-$redis.del(key)
+Discourse.redis.del(key)
 end

 it "allows only one mutex object to have the lock at a time" do

@@ -31,7 +31,7 @@ describe DistributedMutex do
 it "handles auto cleanup correctly" do
 m = DistributedMutex.new(key)

-$redis.setnx key, Time.now.to_i - 1
+Discourse.redis.setnx key, Time.now.to_i - 1

 start = Time.now.to_i
 m.synchronize do

@@ -54,16 +54,16 @@ describe DistributedMutex do
 mutex = DistributedMutex.new(key, validity: 2)

 mutex.synchronize do
-expect($redis.ttl(key)).to eq(2)
-expect($redis.get(key).to_i).to eq(Time.now.to_i + 2)
+expect(Discourse.redis.ttl(key)).to eq(2)
+expect(Discourse.redis.get(key).to_i).to eq(Time.now.to_i + 2)
 end

 mutex = DistributedMutex.new(key)

 mutex.synchronize do
-expect($redis.ttl(key)).to eq(DistributedMutex::DEFAULT_VALIDITY)
+expect(Discourse.redis.ttl(key)).to eq(DistributedMutex::DEFAULT_VALIDITY)

-expect($redis.get(key).to_i)
+expect(Discourse.redis.get(key).to_i)
 .to eq(Time.now.to_i + DistributedMutex::DEFAULT_VALIDITY)
 end
 end

@@ -80,11 +80,11 @@ describe DistributedMutex do

 context "readonly redis" do
 before do
-$redis.slaveof "127.0.0.1", "99991"
+Discourse.redis.slaveof "127.0.0.1", "99991"
 end

 after do
-$redis.slaveof "no", "one"
+Discourse.redis.slaveof "no", "one"
 end

 it "works even if redis is in readonly" do

@@ -111,7 +111,7 @@ describe DistributedMutex do
 Concurrency::Scenario.new do |execution|
 locked = false

-$redis.del('mutex_key')
+Discourse.redis.del('mutex_key')

 connections.each do |connection|
 connection.unwatch

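The DistributedMutex specs above assert on the lock key's TTL and payload; for orientation, here is a hedged sketch of a bare-bones Redis lock built on SET with NX and EX (the key name, validity, and release check are invented, and this is deliberately simpler than Discourse's DistributedMutex):

# Sketch: naive acquire/release lock via SET NX EX. Not the DistributedMutex
# implementation, just the core idea the specs revolve around.
require "redis"
require "securerandom"

def with_naive_lock(redis, key = "demo_lock_key", validity = 60)
  token = SecureRandom.hex
  return false unless redis.set(key, token, nx: true, ex: validity)

  begin
    yield
  ensure
    # Best-effort release: only delete the key if we still appear to own it.
    redis.del(key) if redis.get(key) == token
  end
  true
end
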
@@ -5,7 +5,7 @@ require "email/processor"

 describe Email::Processor do
 after do
-$redis.flushall
+Discourse.redis.flushall
 end

 let(:from) { "foo@bar.com" }

@@ -78,7 +78,7 @@ describe Email::Processor do

 it "only sends one rejection email per day" do
 key = "rejection_email:#{[from]}:email_reject_empty:#{Date.today}"
-$redis.expire(key, 0)
+Discourse.redis.expire(key, 0)

 expect {
 Email::Processor.process!(mail)

@@ -91,7 +91,7 @@ describe Email::Processor do
 freeze_time(Date.today + 1)

 key = "rejection_email:#{[from]}:email_reject_empty:#{Date.today}"
-$redis.expire(key, 0)
+Discourse.redis.expire(key, 0)

 expect {
 Email::Processor.process!(mail3)

@@ -131,7 +131,7 @@ describe Email::Processor do
 it "sends more than one rejection email per day" do
 Email::Receiver.any_instance.stubs(:process_internal).raises("boom")
 key = "rejection_email:#{[from]}:email_reject_unrecognized_error:#{Date.today}"
-$redis.expire(key, 0)
+Discourse.redis.expire(key, 0)

 expect {
 Email::Processor.process!(mail)

@@ -143,7 +143,7 @@ describe Middleware::AnonymousCache::Helper do

 # depends on i7z implementation, but lets assume it is stable unless we discover
 # otherwise
-expect($redis.get(helper.cache_key_body).length).to eq(16)
+expect(Discourse.redis.get(helper.cache_key_body).length).to eq(16)
 end

 it "handles brotli switching" do

@@ -264,7 +264,7 @@ describe Middleware::RequestTracker do
 User.where(id: -100).pluck(:id)
 end
 redis_calls.times do
-$redis.get("x")
+Discourse.redis.get("x")
 end
 result
 end

@@ -91,7 +91,7 @@ describe PostRevisor do
 before do
 # There used to be a bug where wiki changes were considered posting "too similar"
 # so this is enabled and checked
-$redis.delete_prefixed('unique-post')
+Discourse.redis.delete_prefixed('unique-post')
 SiteSetting.unique_posts_mins = 10
 end

@@ -456,7 +456,7 @@ describe PrettyText do
 ['apple', 'banana'].each { |w| Fabricate(:watched_word, word: w, action: WatchedWord.actions[:censor]) }
 expect(PrettyText.cook("# banana")).not_to include('banana')
 ensure
-$redis.flushall
+Discourse.redis.flushall
 end
 end
 end

@@ -1091,7 +1091,7 @@ HTML
 end

 describe "censoring" do
-after(:all) { $redis.flushall }
+after(:all) { Discourse.redis.flushall }

 def expect_cooked_match(raw, expected_cooked)
 expect(PrettyText.cook(raw)).to eq(expected_cooked)

@@ -63,11 +63,11 @@ describe RateLimiter do

 context 'handles readonly' do
 before do
-$redis.without_namespace.slaveof '10.0.0.1', '99999'
+Discourse.redis.without_namespace.slaveof '10.0.0.1', '99999'
 end

 after do
-$redis.without_namespace.slaveof 'no', 'one'
+Discourse.redis.without_namespace.slaveof 'no', 'one'
 end

 it 'does not explode' do

@@ -767,7 +767,7 @@ describe TopicQuery do

 context 'suggested_for' do
 def clear_cache!
-$redis.keys('random_topic_cache*').each { |k| $redis.del k }
+Discourse.redis.keys('random_topic_cache*').each { |k| Discourse.redis.del k }
 end

 before do

@@ -198,7 +198,7 @@ describe PostValidator do
 end

 after do
-$redis.del(@key)
+Discourse.redis.del(@key)
 end

 context "post is unique" do

@@ -7,12 +7,12 @@ describe Jobs::AboutStats do
 begin
 stats = About.fetch_stats.to_json
 cache_key = About.stats_cache_key
-$redis.del(cache_key)
+Discourse.redis.del(cache_key)

 expect(described_class.new.execute({})).to eq(stats)
-expect($redis.get(cache_key)).to eq(stats)
+expect(Discourse.redis.get(cache_key)).to eq(stats)
 ensure
-$redis.del(cache_key)
+Discourse.redis.del(cache_key)
 end
 end
 end

@@ -30,7 +30,7 @@ describe Jobs::PollMailbox do
 end

 after do
-$redis.del(Jobs::PollMailbox::POLL_MAILBOX_TIMEOUT_ERROR_KEY)
+Discourse.redis.del(Jobs::PollMailbox::POLL_MAILBOX_TIMEOUT_ERROR_KEY)
 end

 it "add an admin dashboard message on pop authentication error" do

@@ -108,7 +108,7 @@ describe UserNotifications do
 subject { UserNotifications.digest(user) }

 after do
-$redis.keys('summary-new-users:*').each { |key| $redis.del(key) }
+Discourse.redis.keys('summary-new-users:*').each { |key| Discourse.redis.del(key) }
 end

 context "without new topics" do

@@ -5,7 +5,7 @@ require 'rails_helper'
 describe ApplicationRequest do
 before do
 ApplicationRequest.last_flush = Time.now.utc
-$redis.flushall
+Discourse.redis.flushall
 end

 after do

@@ -28,15 +28,15 @@ describe ApplicationRequest do
 inc(:http_total)
 inc(:http_total)

-$redis.without_namespace.stubs(:incr).raises(Redis::CommandError.new("READONLY"))
-$redis.without_namespace.stubs(:eval).raises(Redis::CommandError.new("READONLY"))
+Discourse.redis.without_namespace.stubs(:incr).raises(Redis::CommandError.new("READONLY"))
+Discourse.redis.without_namespace.stubs(:eval).raises(Redis::CommandError.new("READONLY"))

 # flush will be deferred no error raised
 inc(:http_total, autoflush: 3)
 ApplicationRequest.write_cache!

-$redis.without_namespace.unstub(:incr)
-$redis.without_namespace.unstub(:eval)
+Discourse.redis.without_namespace.unstub(:incr)
+Discourse.redis.without_namespace.unstub(:eval)

 inc(:http_total, autoflush: 3)
 expect(ApplicationRequest.http_total.first.count).to eq(3)

@@ -35,18 +35,18 @@ describe GlobalSetting do
 freeze_time Time.now

 token = GlobalSetting.safe_secret_key_base
-$redis.without_namespace.del(GlobalSetting::REDIS_SECRET_KEY)
+Discourse.redis.without_namespace.del(GlobalSetting::REDIS_SECRET_KEY)
 freeze_time Time.now + 20

 GlobalSetting.safe_secret_key_base
-new_token = $redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
+new_token = Discourse.redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
 expect(new_token).to eq(nil)

 freeze_time Time.now + 11

 GlobalSetting.safe_secret_key_base

-new_token = $redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
+new_token = Discourse.redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
 expect(new_token).to eq(token)

 end

@@ -128,7 +128,7 @@ RSpec.describe SearchLog, type: :model do
 expect(action).to eq(:created)

 freeze_time(10.minutes.from_now)
-$redis.del(SearchLog.redis_key(ip_address: '192.168.0.1', user_id: user.id))
+Discourse.redis.del(SearchLog.redis_key(ip_address: '192.168.0.1', user_id: user.id))

 action, _ = SearchLog.log(
 term: 'hello',

Some files were not shown because too many files have changed in this diff.