DEV: s/\$redis/Discourse\.redis (#8431)
This commit also adds a rubocop rule to prevent global variables.
parent 9eccfb7b52, commit 0d3d2c43a0

.rubocop.yml | 17
@@ -132,8 +132,17 @@ Style/Semicolon:
 Style/RedundantReturn:
   Enabled: true

 DiscourseCops/NoChdir:
   Enabled: true
   Exclude:
     - 'spec/**/*' # Specs are run sequentially, so chdir can be used
+    - 'plugins/*/spec/**/*'
+
+Style/GlobalVars:
+  Enabled: true
+  Severity: warning
+  Exclude:
+    - 'lib/tasks/**/*'
+    - 'script/**/*'
+    - 'spec/**/*.rb'
     - 'plugins/*/spec/**/*'
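
The new Style/GlobalVars block enables the cop at warning severity everywhere except rake tasks, scripts, and specs. The rest of the commit either removes the globals ($redis becomes Discourse.redis) or annotates the few that have to remain ($shutdown, and the $redis assignment itself). A minimal sketch of how the cop and the inline disables interact; these snippets are illustrative, not lines from this diff:

# Flagged by the cop (reported at warning severity per the config above):
$shutdown = true                 # offense: Style/GlobalVars (global variable use)

# A single legitimate use is silenced inline:
$shutdown = true # rubocop:disable Style/GlobalVars

# A whole region or file can opt out, bracketed by disable/enable comments:
# rubocop:disable Style/GlobalVars
$redis = DiscourseRedis.new
# rubocop:enable Style/GlobalVars
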
@@ -768,14 +768,14 @@ class ApplicationController < ActionController::Base
- if @topics_partial = $redis.get(key)
+ if @topics_partial = Discourse.redis.get(key)
- $redis.setex(key, 10.minutes, @topics_partial)
+ Discourse.redis.setex(key, 10.minutes, @topics_partial)

@@ -9,7 +9,7 @@ class ForumsController < ActionController::Base
- if $shutdown
+ if $shutdown # rubocop:disable Style/GlobalVars

@@ -409,15 +409,15 @@ class SessionController < ApplicationController
- @otp_username = otp_username = $redis.get "otp_#{params[:token]}"
+ @otp_username = otp_username = Discourse.redis.get "otp_#{params[:token]}"
- $redis.del "otp_#{params[:token]}"
+ Discourse.redis.del "otp_#{params[:token]}"
- $redis.del "otp_#{params[:token]}"
+ Discourse.redis.del "otp_#{params[:token]}"

@@ -205,7 +205,7 @@ class UserApiKeysController < ApplicationController
- $redis.setex "otp_#{otp}", 10.minutes, username
+ Discourse.redis.setex "otp_#{otp}", 10.minutes, username

@@ -21,7 +21,7 @@ class Users::AssociateAccountsController < ApplicationController
- $redis.del "#{REDIS_PREFIX}_#{current_user&.id}_#{params[:token]}"
+ Discourse.redis.del "#{REDIS_PREFIX}_#{current_user&.id}_#{params[:token]}"

@@ -37,7 +37,7 @@ class Users::AssociateAccountsController < ApplicationController
- json = $redis.get "#{REDIS_PREFIX}_#{current_user&.id}_#{token}"
+ json = Discourse.redis.get "#{REDIS_PREFIX}_#{current_user&.id}_#{token}"

@@ -31,7 +31,7 @@ class Users::OmniauthCallbacksController < ApplicationController
- $redis.setex "#{Users::AssociateAccountsController::REDIS_PREFIX}_#{current_user.id}_#{token}", 10.minutes, auth.to_json
+ Discourse.redis.setex "#{Users::AssociateAccountsController::REDIS_PREFIX}_#{current_user.id}_#{token}", 10.minutes, auth.to_json

@@ -119,8 +119,8 @@ class WebhooksController < ActionController::Base
- return false unless $redis.setnx(key, 1)
+ return false unless Discourse.redis.setnx(key, 1)
- $redis.expire(key, 10.minutes)
+ Discourse.redis.expire(key, 10.minutes)
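
The Mailgun webhook hunk above uses SETNX plus EXPIRE as a one-shot deduplication to prevent replay attacks: the first request to claim a token wins, later replays find the key and are rejected, and the key expires after ten minutes so Redis does not accumulate tokens. A sketch of the same pattern in isolation; the helper name is illustrative, not a method from this diff:

# Returns true only for the first caller presenting this token within the window.
def first_time_seen?(token)
  key = "mailgun_token_#{token}"
  return false unless Discourse.redis.setnx(key, 1) # atomically claim the token
  Discourse.redis.expire(key, 10.minutes)           # forget it after the window
  true
end

As in the original code, a crash between the two calls would leave a key without a TTL; SET with NX and EX options can close that gap, but the diff keeps the existing two-step form.
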
@@ -41,7 +41,7 @@ module ApplicationHelper
- $redis.setex "#{sk}_#{key}", 7.days, current_user.id.to_s
+ Discourse.redis.setex "#{sk}_#{key}", 7.days, current_user.id.to_s

@@ -3,7 +3,7 @@
 module Jobs
   class CleanUpSidekiqStatistic < ::Jobs::Onceoff
     def execute_onceoff(args)
-      $redis.without_namespace.del('sidekiq:sidekiq:statistic')
+      Discourse.redis.without_namespace.del('sidekiq:sidekiq:statistic')
     end
   end
 end

@@ -16,7 +16,7 @@ class Jobs::Onceoff < ::Jobs::Base
- has_lock = $redis.setnx(running_key_name, Time.now.to_i)
+ has_lock = Discourse.redis.setnx(running_key_name, Time.now.to_i)

@@ -25,7 +25,7 @@ class Jobs::Onceoff < ::Jobs::Base
- $redis.del(running_key_name) if has_lock
+ Discourse.redis.del(running_key_name) if has_lock
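
The Jobs::Onceoff hunks rely on the same SETNX idea as a cross-process lock: whichever worker sets running_key_name first runs the once-off job, and the ensure block deletes the key only when that worker actually held the lock. A condensed sketch of the flow, assembled from the lines shown in the two hunks above (the skipped body lines are omitted, not invented):

def execute(args)
  job_name = self.class.name_for(self.class)
  has_lock = Discourse.redis.setnx(running_key_name, Time.now.to_i) # exactly one worker wins

  if args[:force] || has_lock
    execute_onceoff(args)                  # the subclass hook, e.g. CleanUpSidekiqStatistic
    OnceoffLog.create!(job_name: job_name)
  end
ensure
  Discourse.redis.del(running_key_name) if has_lock # release only if we acquired it
end
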
@@ -10,11 +10,11 @@ module Jobs
- $redis.set(self.class.heartbeat_key, Time.new.to_i.to_s)
+ Discourse.redis.set(self.class.heartbeat_key, Time.new.to_i.to_s)
- $redis.get(heartbeat_key).to_i
+ Discourse.redis.get(heartbeat_key).to_i

@@ -96,16 +96,16 @@ module Jobs
- $redis.setex(last_cleanup_key, 7.days.to_i, v.to_s)
+ Discourse.redis.setex(last_cleanup_key, 7.days.to_i, v.to_s)
- v = $redis.get(last_cleanup_key)
+ v = Discourse.redis.get(last_cleanup_key)
- $redis.del(last_cleanup_key)
+ Discourse.redis.del(last_cleanup_key)

@@ -33,11 +33,11 @@ module Jobs
- (i = $redis.get(self.class.last_notified_key)) && i.to_i
+ (i = Discourse.redis.get(self.class.last_notified_key)) && i.to_i
- $redis.set(self.class.last_notified_key, arg)
+ Discourse.redis.set(self.class.last_notified_key, arg)

@@ -37,11 +37,11 @@ module Jobs
- $redis.get(last_notified_key).to_i
+ Discourse.redis.get(last_notified_key).to_i
- $redis.set(last_notified_key, arg)
+ Discourse.redis.set(last_notified_key, arg)

@@ -49,7 +49,7 @@ module Jobs
- $redis.del(last_notified_key)
+ Discourse.redis.del(last_notified_key)

@@ -50,11 +50,11 @@ module Jobs
- $redis.get previous_newest_username_cache_key
+ Discourse.redis.get previous_newest_username_cache_key
- $redis.setex previous_newest_username_cache_key, 7.days, username
+ Discourse.redis.setex previous_newest_username_cache_key, 7.days, username

@@ -43,15 +43,15 @@ module Jobs
- count = $redis.incr(POLL_MAILBOX_TIMEOUT_ERROR_KEY).to_i
+ count = Discourse.redis.incr(POLL_MAILBOX_TIMEOUT_ERROR_KEY).to_i
- $redis.expire(
+ Discourse.redis.expire(
    POLL_MAILBOX_TIMEOUT_ERROR_KEY,
    SiteSetting.pop3_polling_period_mins.minutes * 3
  ) if count == 1
- $redis.del(POLL_MAILBOX_TIMEOUT_ERROR_KEY)
+ Discourse.redis.del(POLL_MAILBOX_TIMEOUT_ERROR_KEY)

@@ -65,13 +65,13 @@ module Jobs
- $redis.zremrangebyscore(POLL_MAILBOX_ERRORS_KEY, 0, 24.hours.ago.to_i)
+ Discourse.redis.zremrangebyscore(POLL_MAILBOX_ERRORS_KEY, 0, 24.hours.ago.to_i)
- $redis.zcard(POLL_MAILBOX_ERRORS_KEY).to_i
+ Discourse.redis.zcard(POLL_MAILBOX_ERRORS_KEY).to_i
- $redis.zadd(POLL_MAILBOX_ERRORS_KEY, now, now.to_s)
+ Discourse.redis.zadd(POLL_MAILBOX_ERRORS_KEY, now, now.to_s)

@@ -697,9 +697,9 @@ class UserNotifications < ActionMailer::Base
- ((count = $redis.get(key)) && count.to_i) || begin
+ ((count = Discourse.redis.get(key)) && count.to_i) || begin
- $redis.setex(key, 1.day, count)
+ Discourse.redis.setex(key, 1.day, count)
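
Several hunks in this commit (the page_not_found topics partial earlier, summary_new_users_count just above, and the admin dashboard problem messages below) share the same cache-aside shape: read a key, fall back to computing the value, then setex it with a TTL. A generic sketch of that shape; the helper and key below are illustrative, not methods from the diff:

# Read-through cache with a TTL, as used for summary_new_users_count.
def cached_count(key, ttl: 1.day)
  if (cached = Discourse.redis.get(key))
    cached.to_i
  else
    count = yield                         # the expensive query
    Discourse.redis.setex(key, ttl, count)
    count
  end
end

# Usage sketch (key name is an assumption for illustration):
# cached_count("summary-new-users:#{min_date_str}") do
#   User.real.where(active: true, staged: false).count
# end
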
@@ -63,16 +63,16 @@ class AdminDashboardData
- existing_time = $redis.get(problems_started_key)
+ existing_time = Discourse.redis.get(problems_started_key)
- $redis.setex(problems_started_key, 14.days.to_i, existing_time || Time.zone.now.to_s)
+ Discourse.redis.setex(problems_started_key, 14.days.to_i, existing_time || Time.zone.now.to_s)
- $redis.del problems_started_key
+ Discourse.redis.del problems_started_key
- s = $redis.get(problems_started_key)
+ s = Discourse.redis.get(problems_started_key)

@@ -109,19 +109,19 @@ class AdminDashboardData
- $redis.get(problem_message_key(i18n_key)) ? I18n.t(i18n_key, base_path: Discourse.base_path) : nil
+ Discourse.redis.get(problem_message_key(i18n_key)) ? I18n.t(i18n_key, base_path: Discourse.base_path) : nil
- $redis.setex problem_message_key(i18n_key), expire_seconds.to_i, 1
+ Discourse.redis.setex problem_message_key(i18n_key), expire_seconds.to_i, 1
- $redis.set problem_message_key(i18n_key), 1
+ Discourse.redis.set problem_message_key(i18n_key), 1
- $redis.del problem_message_key(i18n_key)
+ Discourse.redis.del problem_message_key(i18n_key)

@@ -52,7 +52,7 @@ class ApplicationRequest < ActiveRecord::Base
- $redis.del key
+ Discourse.redis.del key

@@ -16,7 +16,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base
- next_category_id = batched ? $redis.get(NEXT_CATEGORY_ID_KEY).to_i : 0
+ next_category_id = batched ? Discourse.redis.get(NEXT_CATEGORY_ID_KEY).to_i : 0

@@ -27,7 +27,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base
- next_id ? $redis.setex(NEXT_CATEGORY_ID_KEY, 1.day, next_id) : clear_batch!
+ next_id ? Discourse.redis.setex(NEXT_CATEGORY_ID_KEY, 1.day, next_id) : clear_batch!

@@ -39,7 +39,7 @@ class CategoryFeaturedTopic < ActiveRecord::Base
- $redis.del(NEXT_CATEGORY_ID_KEY)
+ Discourse.redis.del(NEXT_CATEGORY_ID_KEY)

@@ -19,13 +19,13 @@ module CachedCounting
- val = $redis.incr(key).to_i
+ val = Discourse.redis.incr(key).to_i
- $redis.expire(key, 259200)
+ Discourse.redis.expire(key, 259200)

@@ -51,9 +51,9 @@ module CachedCounting
- namespaced_key = $redis.namespace_key(key)
+ namespaced_key = Discourse.redis.namespace_key(key)
- val = $redis.without_namespace.eval(GET_AND_RESET, keys: [namespaced_key]).to_i
+ val = Discourse.redis.without_namespace.eval(GET_AND_RESET, keys: [namespaced_key]).to_i
- $redis.expire(key, 259200) # SET removes expiry, so set it again
+ Discourse.redis.expire(key, 259200) # SET removes expiry, so set it again
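
get_and_reset in CachedCounting evaluates a Lua script (GET_AND_RESET, defined earlier in that file and not shown in this diff) against the namespaced key so that reading the counter and zeroing it happen as one atomic server-side step; concurrent increments are never double-counted or lost between the read and the reset. A sketch of the idea with an explicit script; the Lua text below is an assumption about the script's shape, not a quote from the source:

# Read the current value and reset it to 0 atomically on the Redis server.
GET_AND_RESET_SKETCH = <<~LUA
  local val = redis.call('GET', KEYS[1])
  redis.call('SET', KEYS[1], '0')
  return val
LUA

namespaced_key = Discourse.redis.namespace_key(key)
val = Discourse.redis.without_namespace.eval(GET_AND_RESET_SKETCH, keys: [namespaced_key]).to_i
Discourse.redis.expire(key, 259_200) # SET clears the TTL, so re-apply roughly 3 days
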
@@ -19,7 +19,7 @@ module StatsCacheable
- stats = $redis.get(stats_cache_key)
+ stats = Discourse.redis.get(stats_cache_key)

@@ -35,7 +35,7 @@ module StatsCacheable
- $redis.setex stats_cache_key, (recalculate_stats_interval + 5).minutes, stats
+ Discourse.redis.setex stats_cache_key, (recalculate_stats_interval + 5).minutes, stats

@@ -26,9 +26,9 @@ class GlobalSetting
- token = $redis.without_namespace.get(REDIS_SECRET_KEY)
+ token = Discourse.redis.without_namespace.get(REDIS_SECRET_KEY)
- $redis.without_namespace.set(REDIS_SECRET_KEY, @safe_secret_key_base)
+ Discourse.redis.without_namespace.set(REDIS_SECRET_KEY, @safe_secret_key_base)

@@ -39,10 +39,10 @@ class GlobalSetting
- token = $redis.without_namespace.get(REDIS_SECRET_KEY)
+ token = Discourse.redis.without_namespace.get(REDIS_SECRET_KEY)
- $redis.without_namespace.set(REDIS_SECRET_KEY, token)
+ Discourse.redis.without_namespace.set(REDIS_SECRET_KEY, token)

@@ -247,12 +247,12 @@ class Post < ActiveRecord::Base
- $redis.setex(unique_post_key, SiteSetting.unique_posts_mins.minutes.to_i, id)
+ Discourse.redis.setex(unique_post_key, SiteSetting.unique_posts_mins.minutes.to_i, id)
- post_id = $redis.get(unique_post_key)
+ post_id = Discourse.redis.get(unique_post_key)

@@ -696,11 +696,11 @@ class Post < ActiveRecord::Base
- val = $redis.get("estimated_posts_per_day")
+ val = Discourse.redis.get("estimated_posts_per_day")
- $redis.setex("estimated_posts_per_day", 1.day.to_i, posts_per_day.to_s)
+ Discourse.redis.setex("estimated_posts_per_day", 1.day.to_i, posts_per_day.to_s)

@@ -39,8 +39,8 @@ class SearchLog < ActiveRecord::Base
- $redis.keys("__SEARCH__LOG_*").each do |k|
+ Discourse.redis.keys("__SEARCH__LOG_*").each do |k|
- $redis.del(k)
+ Discourse.redis.del(k)

@@ -56,7 +56,7 @@ class SearchLog < ActiveRecord::Base
- if existing = $redis.get(key)
+ if existing = Discourse.redis.get(key)

@@ -80,7 +80,7 @@ class SearchLog < ActiveRecord::Base
- $redis.setex(key, 5, "#{result[1]},#{term}")
+ Discourse.redis.setex(key, 5, "#{result[1]},#{term}")

@@ -102,7 +102,7 @@ class Site
- cached_json, cached_seq, cached_version = $redis.mget('site_json', 'site_json_seq', 'site_json_version')
+ cached_json, cached_seq, cached_version = Discourse.redis.mget('site_json', 'site_json_seq', 'site_json_version')

@@ -114,10 +114,10 @@ class Site
- $redis.multi do
+ Discourse.redis.multi do
- $redis.setex 'site_json', 1800, json
+ Discourse.redis.setex 'site_json', 1800, json
- $redis.set 'site_json_seq', seq
+ Discourse.redis.set 'site_json_seq', seq
- $redis.set 'site_json_version', Discourse.git_version
+ Discourse.redis.set 'site_json_version', Discourse.git_version
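
The anonymous site_json cache in the two Site hunks above writes three keys (the serialized payload, the message-bus sequence, and the git version) inside a multi block so they are queued and committed as one MULTI/EXEC transaction; the mget on the read side can then never observe a payload paired with a stale sequence from a half-finished write. A sketch of both sides together, using the seq and json variables from the surrounding code; the TTL of 1800 seconds mirrors the hunk:

# Write side: all three keys land atomically.
Discourse.redis.multi do
  Discourse.redis.setex 'site_json', 1800, json
  Discourse.redis.set   'site_json_seq', seq
  Discourse.redis.set   'site_json_version', Discourse.git_version
end

# Read side: one round trip, then validate before serving the cached payload.
cached_json, cached_seq, cached_version =
  Discourse.redis.mget('site_json', 'site_json_seq', 'site_json_version')
return cached_json if cached_json && seq == cached_seq.to_i && Discourse.git_version == cached_version
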
@@ -102,8 +102,8 @@ class TopicLinkClick < ActiveRecord::Base
- if $redis.setnx(rate_key, "1")
+ if Discourse.redis.setnx(rate_key, "1")
- $redis.expire(rate_key, 1.day.to_i)
+ Discourse.redis.expire(rate_key, 1.day.to_i)

@@ -18,8 +18,8 @@ class TopicViewItem < ActiveRecord::Base
- if skip_redis || $redis.setnx(redis_key, "1")
+ if skip_redis || Discourse.redis.setnx(redis_key, "1")
- skip_redis || $redis.expire(redis_key, SiteSetting.topic_view_duration_hours.hours)
+ skip_redis || Discourse.redis.expire(redis_key, SiteSetting.topic_view_duration_hours.hours)

@@ -243,8 +243,8 @@ class TrustLevel3Requirements
- $redis.del NUM_TOPICS_KEY
+ Discourse.redis.del NUM_TOPICS_KEY
- $redis.del NUM_POSTS_KEY
+ Discourse.redis.del NUM_POSTS_KEY

@@ -252,17 +252,17 @@ class TrustLevel3Requirements
- $redis.get(NUM_TOPICS_KEY) || begin
+ Discourse.redis.get(NUM_TOPICS_KEY) || begin
- $redis.setex NUM_TOPICS_KEY, CACHE_DURATION, count
+ Discourse.redis.setex NUM_TOPICS_KEY, CACHE_DURATION, count
- $redis.get(NUM_POSTS_KEY) || begin
+ Discourse.redis.get(NUM_POSTS_KEY) || begin
- $redis.setex NUM_POSTS_KEY, CACHE_DURATION, count
+ Discourse.redis.setex NUM_POSTS_KEY, CACHE_DURATION, count

@@ -711,9 +711,9 @@ class User < ActiveRecord::Base
- return unless $redis.setnx(redis_key, "1")
+ return unless Discourse.redis.setnx(redis_key, "1")
- $redis.expire(redis_key, SiteSetting.active_user_rate_limit_secs)
+ Discourse.redis.expire(redis_key, SiteSetting.active_user_rate_limit_secs)

@@ -94,8 +94,8 @@ class UserOption < ActiveRecord::Base
- return unless $redis.setnx(key, "1")
+ return unless Discourse.redis.setnx(key, "1")
- $redis.expire(key, delay)
+ Discourse.redis.expire(key, delay)

@@ -16,8 +16,8 @@ class UserProfileView < ActiveRecord::Base
- if skip_redis || $redis.setnx(redis_key, '1')
+ if skip_redis || Discourse.redis.setnx(redis_key, '1')
- skip_redis || $redis.expire(redis_key, SiteSetting.user_profile_view_duration_hours.hours)
+ skip_redis || Discourse.redis.expire(redis_key, SiteSetting.user_profile_view_duration_hours.hours)

@@ -163,11 +163,11 @@ class UserStat < ActiveRecord::Base
- $redis.get(last_seen_key(id))
+ Discourse.redis.get(last_seen_key(id))
- $redis.setex(last_seen_key(id), MAX_TIME_READ_DIFF, val)
+ Discourse.redis.setex(last_seen_key(id), MAX_TIME_READ_DIFF, val)

@@ -16,8 +16,8 @@ class WebCrawlerRequest < ActiveRecord::Base
- $redis.sadd(ua_list_key, user_agent)
+ Discourse.redis.sadd(ua_list_key, user_agent)
- $redis.expire(ua_list_key, 259200) # 3.days
+ Discourse.redis.expire(ua_list_key, 259200) # 3.days

@@ -34,7 +34,7 @@ class WebCrawlerRequest < ActiveRecord::Base
- while user_agent = $redis.spop(ua_list_key)
+ while user_agent = Discourse.redis.spop(ua_list_key)

@@ -55,11 +55,11 @@ class WebCrawlerRequest < ActiveRecord::Base
- while user_agent = $redis.spop(ua_list_key)
+ while user_agent = Discourse.redis.spop(ua_list_key)
- $redis.del redis_key(user_agent, date)
+ Discourse.redis.del redis_key(user_agent, date)
- $redis.del(ua_list_key)
+ Discourse.redis.del(ua_list_key)

@@ -122,17 +122,17 @@ class BadgeGranter
- $redis.lpush queue_key, payload.to_json if payload
+ Discourse.redis.lpush queue_key, payload.to_json if payload
- $redis.del queue_key
+ Discourse.redis.del queue_key
- while limit > 0 && item = $redis.lpop(queue_key)
+ while limit > 0 && item = Discourse.redis.lpop(queue_key)

@@ -54,12 +54,12 @@ class GroupMessage
- $redis.get(sent_recently_key).present?
+ Discourse.redis.get(sent_recently_key).present?
- $redis.setex(sent_recently_key, @opts[:limit_once_per].try(:to_i) || 86_400, 1) unless @opts[:limit_once_per] == false
+ Discourse.redis.setex(sent_recently_key, @opts[:limit_once_per].try(:to_i) || 86_400, 1) unless @opts[:limit_once_per] == false

@@ -40,9 +40,9 @@ class RandomTopicSelector
- $redis.multi do
+ Discourse.redis.multi do
- $redis.rpush(key, results)
+ Discourse.redis.rpush(key, results)
- $redis.expire(key, 2.days)
+ Discourse.redis.expire(key, 2.days)

@@ -56,13 +56,13 @@ class RandomTopicSelector
- results = $redis.multi do
+ results = Discourse.redis.multi do
- $redis.lrange(key, 0, count - 1)
+ Discourse.redis.lrange(key, 0, count - 1)
- $redis.ltrim(key, count, -1)
+ Discourse.redis.ltrim(key, count, -1)
- results = $redis.lrange(key, 0, count - 1)
+ results = Discourse.redis.lrange(key, 0, count - 1)
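
The RandomTopicSelector hunk above pairs LRANGE and LTRIM inside a multi block so that popping a batch of cached topic ids off the list is atomic; when Redis is in readonly mode DiscourseRedis swallows the writes, so the code checks whether it actually got an array of replies back and falls back to a plain, non-destructive lrange. A sketch of that fallback shape, with variable names following the hunk:

results = Discourse.redis.multi do
  Discourse.redis.lrange(key, 0, count - 1) # take the first `count` ids
  Discourse.redis.ltrim(key, count, -1)     # and drop them from the list
end

if !results.is_a?(Array) # Redis is in readonly mode, the MULTI did not run
  results = Discourse.redis.lrange(key, 0, count - 1) # read without trimming
else
  results = results[0] # first reply is the LRANGE result
end
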
@@ -80,7 +80,7 @@ class RandomTopicSelector
- if !backfilled && $redis.llen(key) < BACKFILL_LOW_WATER_MARK
+ if !backfilled && Discourse.redis.llen(key) < BACKFILL_LOW_WATER_MARK

@@ -94,7 +94,7 @@ class RandomTopicSelector
- $redis.delete_prefixed(cache_key)
+ Discourse.redis.delete_prefixed(cache_key)

@@ -35,7 +35,7 @@ class TopicTimestampChanger
- [AdminDashboardData, About].each { |klass| $redis.del klass.stats_cache_key }
+ [AdminDashboardData, About].each { |klass| Discourse.redis.del klass.stats_cache_key }

@@ -243,7 +243,7 @@ module Discourse
     config.cache_store = DiscourseRedis.new_redis_store
-    $redis = DiscourseRedis.new
+    $redis = DiscourseRedis.new # rubocop:disable Style/GlobalVars
     Logster.store = Logster::RedisStore.new(DiscourseRedis.new)

@@ -2,5 +2,5 @@

 if Rails.env.development? && ENV['DISCOURSE_FLUSH_REDIS']
   puts "Flushing redis (development mode)"
-  $redis.flushall
+  Discourse.redis.flushall
 end

@@ -24,7 +24,7 @@ end
 MiniScheduler.configure do |config|
-  config.redis = $redis
+  config.redis = Discourse.redis
   config.job_exception_handler do |ex, context|
     Discourse.handle_job_exception(ex, context)

@@ -29,7 +29,7 @@ Thread.new do
- $shutdown = true
+ $shutdown = true # rubocop:disable Style/GlobalVars

@@ -155,7 +155,7 @@ before_fork do |server, worker|
- $redis._client.disconnect
+ Discourse.redis._client.disconnect

@@ -170,7 +170,7 @@ before_fork do |server, worker|
- $redis._client.disconnect
+ Discourse.redis._client.disconnect
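
The two before_fork hunks (unicorn configuration) disconnect the Redis client in the master so its socket is not shared with forked workers; the workers reconnect after the fork, which is what the Discourse.after_fork hunk further down takes care of. A sketch of the before_fork/after_fork split using the unicorn API; everything beyond the Redis calls here is an assumption about the surrounding config, not lines from this diff:

before_fork do |server, worker|
  Discourse.redis._client.disconnect # don't share the master's Redis socket with children
end

after_fork do |server, worker|
  Discourse.after_fork # reconnects Redis, MessageBus, and the Rails cache (see the lib hunk below)
end
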
@@ -18,18 +18,18 @@ class CreateDigestUnsubscribeKeys < ActiveRecord::Migration[4.2]
- temp_keys = $redis.keys('temporary_key:*')
+ temp_keys = Discourse.redis.keys('temporary_key:*')
- user_id = $redis.get(key).to_i
+ user_id = Discourse.redis.get(key).to_i
- ttl = $redis.ttl(key).to_i
+ ttl = Discourse.redis.ttl(key).to_i
- $redis.del(key)
+ Discourse.redis.del(key)

@@ -4,6 +4,6 @@ require "common_passwords/common_passwords"
 class ClearCommonPasswordsCache < ActiveRecord::Migration[4.2]
   def change
-    $redis.without_namespace.del CommonPasswords::LIST_KEY
+    Discourse.redis.without_namespace.del CommonPasswords::LIST_KEY
   end
 end

@@ -15,13 +15,13 @@ class AdminConfirmation
- $redis.setex("admin-confirmation:#{@target_user.id}", 3.hours.to_i, @token)
+ Discourse.redis.setex("admin-confirmation:#{@target_user.id}", 3.hours.to_i, @token)
- $redis.setex("admin-confirmation-token:#{@token}", 3.hours.to_i, payload.to_json)
+ Discourse.redis.setex("admin-confirmation-token:#{@token}", 3.hours.to_i, payload.to_json)

@@ -38,16 +38,16 @@ class AdminConfirmation
- $redis.del "admin-confirmation:#{@target_user.id}"
+ Discourse.redis.del "admin-confirmation:#{@target_user.id}"
- $redis.del "admin-confirmation-token:#{@token}"
+ Discourse.redis.del "admin-confirmation-token:#{@token}"
- $redis.exists "admin-confirmation:#{user_id}"
+ Discourse.redis.exists "admin-confirmation:#{user_id}"
- json = $redis.get("admin-confirmation-token:#{token}")
+ json = Discourse.redis.get("admin-confirmation-token:#{token}")

@@ -30,7 +30,7 @@ class Auth::DefaultCurrentUserProvider
- uid = $redis.get("shared_session_key_#{shared_key}")
+ uid = Discourse.redis.get("shared_session_key_#{shared_key}")

@@ -87,7 +87,7 @@ class Auth::OpenIdAuthenticator < Auth::Authenticator
- strategy.options[:store] = OpenID::Store::Redis.new($redis)
+ strategy.options[:store] = OpenID::Store::Redis.new(Discourse.redis)

@@ -36,21 +36,21 @@ module BackupRestore
- $redis.setex(running_key, 60, "1")
+ Discourse.redis.setex(running_key, 60, "1")
- !!$redis.get(running_key)
+ !!Discourse.redis.get(running_key)
- $redis.del(running_key)
+ Discourse.redis.del(running_key)
- !!$redis.get(shutdown_signal_key)
+ !!Discourse.redis.get(shutdown_signal_key)

@@ -128,7 +128,7 @@ module BackupRestore
- $redis.expire(running_key, 1.minute)
+ Discourse.redis.expire(running_key, 1.minute)

@@ -139,20 +139,20 @@ module BackupRestore
- $redis.set(shutdown_signal_key, "1")
+ Discourse.redis.set(shutdown_signal_key, "1")
- $redis.del(shutdown_signal_key)
+ Discourse.redis.del(shutdown_signal_key)
- $redis.set(start_logs_message_id_key, id)
+ Discourse.redis.set(start_logs_message_id_key, id)
- $redis.get(start_logs_message_id_key).to_i
+ Discourse.redis.get(start_logs_message_id_key).to_i

@@ -5,7 +5,7 @@
 # This is a bottom up implementation of ActiveSupport::Cache::Store
 # this allows us to cleanly implement without using cache entries and version
 # support which we do not use, in tern this makes the cache as fast as simply
-# using `$redis.setex` with a more convenient API
+# using `Discourse.redis.setex` with a more convenient API
 #
 # It only implements a subset of ActiveSupport::Cache::Store as we make no use
 # of large parts of the interface.

@@ -33,7 +33,7 @@ class Cache
   def redis
-    $redis
+    Discourse.redis
   end

@@ -9,7 +9,7 @@
 # If the password file is changed, you need to add a migration that deletes the list from redis
 # so it gets re-populated:
 #
-#    $redis.without_namespace.del CommonPasswords::LIST_KEY
+#    Discourse.redis.without_namespace.del CommonPasswords::LIST_KEY

 class CommonPasswords

@@ -39,7 +39,7 @@ class CommonPasswords
   def self.redis
-    $redis.without_namespace
+    Discourse.redis.without_namespace
   end

@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+# rubocop:disable Style/GlobalVars

 require 'cache'
 require 'open3'

@@ -378,9 +379,9 @@ module Discourse
- $redis.set(key, 1)
+ Discourse.redis.set(key, 1)
- $redis.setex(key, READONLY_MODE_KEY_TTL, 1)
+ Discourse.redis.setex(key, READONLY_MODE_KEY_TTL, 1)

@@ -406,7 +407,7 @@ module Discourse
- if !$redis.expire(key, READONLY_MODE_KEY_TTL)
+ if !Discourse.redis.expire(key, READONLY_MODE_KEY_TTL)

@@ -419,18 +420,18 @@ module Discourse
- $redis.del(key)
+ Discourse.redis.del(key)
- recently_readonly? || $redis.mget(*keys).compact.present?
+ recently_readonly? || Discourse.redis.mget(*keys).compact.present?
- $redis.get(PG_READONLY_MODE_KEY).present?
+ Discourse.redis.get(PG_READONLY_MODE_KEY).present?
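
The readonly-mode hunks in module Discourse store the mode as Redis keys: the user-triggered key is set without an expiry, while the automatic key gets READONLY_MODE_KEY_TTL and a background refresher (keep_readonly_mode) keeps re-extending it, so readonly mode dies on its own if the process that enabled it goes away. A condensed sketch of that lifecycle, assembled from the lines in the hunks above; the constants are taken as given since their values are not shown in this diff:

# Enable: user-requested readonly sticks, automatic readonly self-expires.
def self.enable_readonly_mode(key = READONLY_MODE_KEY)
  if key == USER_READONLY_MODE_KEY
    Discourse.redis.set(key, 1)
  else
    Discourse.redis.setex(key, READONLY_MODE_KEY_TTL, 1)
    keep_readonly_mode(key) if !Rails.env.test? # periodically re-runs EXPIRE per database
  end
end

# Query: any readonly key present (or a recent readonly signal) means readonly.
def self.readonly_mode?(keys = READONLY_KEYS)
  recently_readonly? || Discourse.redis.mget(*keys).compact.present?
end
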
@ -444,23 +445,23 @@ module Discourse
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.recently_readonly?
|
def self.recently_readonly?
|
||||||
postgres_read_only = postgres_last_read_only[$redis.namespace]
|
postgres_read_only = postgres_last_read_only[Discourse.redis.namespace]
|
||||||
redis_read_only = redis_last_read_only[$redis.namespace]
|
redis_read_only = redis_last_read_only[Discourse.redis.namespace]
|
||||||
|
|
||||||
(redis_read_only.present? && redis_read_only > 15.seconds.ago) ||
|
(redis_read_only.present? && redis_read_only > 15.seconds.ago) ||
|
||||||
(postgres_read_only.present? && postgres_read_only > 15.seconds.ago)
|
(postgres_read_only.present? && postgres_read_only > 15.seconds.ago)
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.received_postgres_readonly!
|
def self.received_postgres_readonly!
|
||||||
postgres_last_read_only[$redis.namespace] = Time.zone.now
|
postgres_last_read_only[Discourse.redis.namespace] = Time.zone.now
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.received_redis_readonly!
|
def self.received_redis_readonly!
|
||||||
redis_last_read_only[$redis.namespace] = Time.zone.now
|
redis_last_read_only[Discourse.redis.namespace] = Time.zone.now
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.clear_readonly!
|
def self.clear_readonly!
|
||||||
postgres_last_read_only[$redis.namespace] = redis_last_read_only[$redis.namespace] = nil
|
postgres_last_read_only[Discourse.redis.namespace] = redis_last_read_only[Discourse.redis.namespace] = nil
|
||||||
Site.clear_anon_cache!
|
Site.clear_anon_cache!
|
||||||
true
|
true
|
||||||
end
|
end
|
||||||
|
@ -491,7 +492,7 @@ module Discourse
|
||||||
begin
|
begin
|
||||||
git_cmd = 'git rev-parse HEAD'
|
git_cmd = 'git rev-parse HEAD'
|
||||||
self.try_git(git_cmd, Discourse::VERSION::STRING)
|
self.try_git(git_cmd, Discourse::VERSION::STRING)
|
||||||
end
|
end # rubocop:disable Style/GlobalVars
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.git_branch
|
def self.git_branch
|
||||||
|
@ -589,7 +590,7 @@ module Discourse
|
||||||
# note: some of this reconnecting may no longer be needed per https://github.com/redis/redis-rb/pull/414
|
# note: some of this reconnecting may no longer be needed per https://github.com/redis/redis-rb/pull/414
|
||||||
MessageBus.after_fork
|
MessageBus.after_fork
|
||||||
SiteSetting.after_fork
|
SiteSetting.after_fork
|
||||||
$redis._client.reconnect
|
Discourse.redis._client.reconnect
|
||||||
Rails.cache.reconnect
|
Rails.cache.reconnect
|
||||||
Discourse.cache.reconnect
|
Discourse.cache.reconnect
|
||||||
Logster.store.redis.reconnect
|
Logster.store.redis.reconnect
|
||||||
|
@@ -737,10 +738,10 @@ module Discourse
  digest = Digest::MD5.hexdigest(warning)
  redis_key = "deprecate-notice-#{digest}"

- if !$redis.without_namespace.get(redis_key)
+ if !Discourse.redis.without_namespace.get(redis_key)
  Rails.logger.warn(warning)
  begin
- $redis.without_namespace.setex(redis_key, 3600, "x")
+ Discourse.redis.without_namespace.setex(redis_key, 3600, "x")
  rescue Redis::CommandError => e
  raise unless e.message =~ /READONLY/
  end
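The hunk above limits the deprecation warning to one log entry per hour: the warning's MD5 digest becomes a short-lived Redis key outside the site namespace, and the READONLY rescue keeps a read-only replica from raising. A minimal sketch of that once-per-hour pattern, using the plain redis gem and a hypothetical logger argument rather than the Discourse wrapper:

    require "digest"
    require "redis"

    # Emit each distinct warning at most once per hour by leaving a one-hour marker in Redis.
    def warn_once_per_hour(redis, logger, warning)
      key = "deprecate-notice-#{Digest::MD5.hexdigest(warning)}"
      return if redis.get(key)        # already warned within the last hour
      logger.warn(warning)
      redis.setex(key, 3600, "x")     # marker expires after an hour
    rescue Redis::CommandError => e
      raise unless e.message =~ /READONLY/ # tolerate a read-only replica, as the hunk does
    end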
@@ -832,4 +833,10 @@ module Discourse
  ensure
  @preloaded_rails = true
  end
+
+ def self.redis
+ $redis
+ end
  end
+
+ # rubocop:enable Style/GlobalVars
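This is the hunk that makes the rest of the diff possible: a module-level accessor that hands back the shared connection, so call sites depend on a method instead of a global variable. A rough sketch of the same accessor pattern under assumed names (a bare ExampleApp module and a redis gem client, not the actual DiscourseRedis wrapper):

    require "redis"

    module ExampleApp
      # Single owner of the Redis handle; callers never touch a global.
      def self.redis
        @redis ||= Redis.new
      end
    end

    ExampleApp.redis.setex("page_not_found_topics", 600, "<cached partial>")
    ExampleApp.redis.get("page_not_found_topics")

Swapping the global for an accessor also gives one seam where namespacing, read-only handling, or a test double can be wired in later.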
@@ -14,7 +14,7 @@ module DiscourseHub
  end

  def self.stats_fetched_at=(time_with_zone)
- $redis.set STATS_FETCHED_AT_KEY, time_with_zone.to_i
+ Discourse.redis.set STATS_FETCHED_AT_KEY, time_with_zone.to_i
  end

  def self.get_payload
@@ -102,7 +102,7 @@ module DiscourseHub
  end

  def self.stats_fetched_at
- t = $redis.get(STATS_FETCHED_AT_KEY)
+ t = Discourse.redis.get(STATS_FETCHED_AT_KEY)
  t ? Time.zone.at(t.to_i) : 1.year.ago
  end
@@ -262,7 +262,7 @@ class DiscourseRedis

  def delete_prefixed(prefix)
  DiscourseRedis.ignore_readonly do
- keys("#{prefix}*").each { |k| $redis.del(k) }
+ keys("#{prefix}*").each { |k| Discourse.redis.del(k) }
  end
  end
@@ -58,42 +58,42 @@ module DiscourseUpdates

  # last_installed_version is the installed version at the time of the last version check
  def last_installed_version
- $redis.get last_installed_version_key
+ Discourse.redis.get last_installed_version_key
  end

  def latest_version
- $redis.get latest_version_key
+ Discourse.redis.get latest_version_key
  end

  def missing_versions_count
- $redis.get(missing_versions_count_key).try(:to_i)
+ Discourse.redis.get(missing_versions_count_key).try(:to_i)
  end

  def critical_updates_available?
- ($redis.get(critical_updates_available_key) || false) == 'true'
+ (Discourse.redis.get(critical_updates_available_key) || false) == 'true'
  end

  def updated_at
- t = $redis.get(updated_at_key)
+ t = Discourse.redis.get(updated_at_key)
  t ? Time.zone.parse(t) : nil
  end

  def updated_at=(time_with_zone)
- $redis.set updated_at_key, time_with_zone.as_json
+ Discourse.redis.set updated_at_key, time_with_zone.as_json
  end

  ['last_installed_version', 'latest_version', 'missing_versions_count', 'critical_updates_available'].each do |name|
  eval "define_method :#{name}= do |arg|
- $redis.set #{name}_key, arg
+ Discourse.redis.set #{name}_key, arg
  end"
  end

  def missing_versions=(versions)
  # delete previous list from redis
- prev_keys = $redis.lrange(missing_versions_list_key, 0, 4)
+ prev_keys = Discourse.redis.lrange(missing_versions_list_key, 0, 4)
  if prev_keys
- $redis.del prev_keys
- $redis.del(missing_versions_list_key)
+ Discourse.redis.del prev_keys
+ Discourse.redis.del(missing_versions_list_key)
  end

  if versions.present?
@@ -101,18 +101,18 @@ module DiscourseUpdates
  version_keys = []
  versions[0, 5].each do |v|
  key = "#{missing_versions_key_prefix}:#{v['version']}"
- $redis.mapped_hmset key, v
+ Discourse.redis.mapped_hmset key, v
  version_keys << key
  end
- $redis.rpush missing_versions_list_key, version_keys
+ Discourse.redis.rpush missing_versions_list_key, version_keys
  end

  versions || []
  end

  def missing_versions
- keys = $redis.lrange(missing_versions_list_key, 0, 4) # max of 5 versions
- keys.present? ? keys.map { |k| $redis.hgetall(k) } : []
+ keys = Discourse.redis.lrange(missing_versions_list_key, 0, 4) # max of 5 versions
+ keys.present? ? keys.map { |k| Discourse.redis.hgetall(k) } : []
  end

  private
@@ -8,7 +8,7 @@ class DistributedMemoizer

  # memoize a key across processes and machines
  def self.memoize(key, duration = 60 * 60 * 24, redis = nil)
- redis ||= $redis
+ redis ||= Discourse.redis

  redis_key = self.redis_key(key)
@@ -50,7 +50,7 @@ class DistributedMemoizer

  # Used for testing
  def self.flush!
- $redis.scan_each(match: "memoize_*").each { |key| $redis.del(key) }
+ Discourse.redis.scan_each(match: "memoize_*").each { |key| Discourse.redis.del(key) }
  end

  protected
@@ -17,7 +17,7 @@ class DistributedMutex
  def initialize(key, redis: nil, validity: DEFAULT_VALIDITY)
  @key = key
  @using_global_redis = true if !redis
- @redis = redis || $redis
+ @redis = redis || Discourse.redis
  @mutex = Mutex.new
  @validity = validity
  end
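The constructor keeps accepting an injected connection and only falls back to the shared one, which is how the specs later in this diff pass their own clients. A small sketch of that default-injection idea with assumed names (plain redis gem; the fallback stands in for the app-wide accessor):

    require "redis"

    class ExampleLock
      DEFAULT_VALIDITY = 60

      # Accept an explicit connection (tests, other databases); default to a shared one.
      def initialize(key, redis: nil, validity: DEFAULT_VALIDITY)
        @key = key
        @redis = redis || Redis.new   # stand-in for the shared accessor
        @validity = validity
      end
    end

    ExampleLock.new("test_mutex_key")                          # shared connection
    ExampleLock.new("test_mutex_key", redis: Redis.new(db: 2)) # injected client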
@@ -119,8 +119,8 @@ module Email

  key = "rejection_email:#{email}:#{type}:#{Date.today}"

- if $redis.setnx(key, "1")
- $redis.expire(key, 25.hours)
+ if Discourse.redis.setnx(key, "1")
+ Discourse.redis.expire(key, 25.hours)
  true
  else
  false
@@ -12,16 +12,16 @@ class EmailBackupToken

  def self.set(user_id)
  token = self.generate
- $redis.setex self.key(user_id), 1.day.to_i, token
+ Discourse.redis.setex self.key(user_id), 1.day.to_i, token
  token
  end

  def self.get(user_id)
- $redis.get self.key(user_id)
+ Discourse.redis.get self.key(user_id)
  end

  def self.del(user_id)
- $redis.del self.key(user_id)
+ Discourse.redis.del self.key(user_id)
  end

  def self.compare(user_id, token)
@@ -11,17 +11,17 @@ class FinalDestination

  def self.clear_https_cache!(domain)
  key = redis_https_key(domain)
- $redis.without_namespace.del(key)
+ Discourse.redis.without_namespace.del(key)
  end

  def self.cache_https_domain(domain)
  key = redis_https_key(domain)
- $redis.without_namespace.setex(key, "1", 1.day.to_i).present?
+ Discourse.redis.without_namespace.setex(key, "1", 1.day.to_i).present?
  end

  def self.is_https_domain?(domain)
  key = redis_https_key(domain)
- $redis.without_namespace.get(key).present?
+ Discourse.redis.without_namespace.get(key).present?
  end

  def self.redis_https_key(domain)
@@ -177,8 +177,8 @@ module Middleware
  end

  def cached(env = {})
- if body = decompress($redis.get(cache_key_body))
- if other = $redis.get(cache_key_other)
+ if body = decompress(Discourse.redis.get(cache_key_body))
+ if other = Discourse.redis.get(cache_key_other)
  other = JSON.parse(other)
  if req_params = other[1].delete(ADP)
  env[ADP] = req_params
@@ -203,7 +203,7 @@ module Middleware
  if status == 200 && cache_duration

  if GlobalSetting.anon_cache_store_threshold > 1
- count = $redis.eval(<<~REDIS, [cache_key_count], [cache_duration])
+ count = Discourse.redis.eval(<<~REDIS, [cache_key_count], [cache_duration])
  local current = redis.call("incr", KEYS[1])
  redis.call("expire",KEYS[1],ARGV[1])
  return current
@@ -231,8 +231,8 @@ module Middleware
  }
  end

- $redis.setex(cache_key_body, cache_duration, compress(parts.join))
- $redis.setex(cache_key_other, cache_duration, [status, headers_stripped].to_json)
+ Discourse.redis.setex(cache_key_body, cache_duration, compress(parts.join))
+ Discourse.redis.setex(cache_key_other, cache_duration, [status, headers_stripped].to_json)

  headers["X-Discourse-Cached"] = "store"
  else
@@ -243,8 +243,8 @@ module Middleware
  end

  def clear_cache
- $redis.del(cache_key_body)
- $redis.del(cache_key_other)
+ Discourse.redis.del(cache_key_body)
+ Discourse.redis.del(cache_key_other)
  end

  end
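Taken together, these middleware hunks cache an anonymous response as two sibling keys with the same TTL, one for the compressed body and one for the status and headers, and clearing drops both. A condensed sketch of that two-key layout, using the plain redis gem, Zlib in place of the middleware's own compress/decompress helpers, and a hypothetical key prefix:

    require "json"
    require "redis"
    require "zlib"

    redis = Redis.new
    cache_key = "ANON_example_path"

    # store: body and metadata live under sibling keys with one TTL
    redis.setex("#{cache_key}_body", 60, Zlib::Deflate.deflate("<html>...</html>"))
    redis.setex("#{cache_key}_other", 60, [200, { "Content-Type" => "text/html" }].to_json)

    # read: a hit needs both keys
    if (body = redis.get("#{cache_key}_body"))
      status, headers = JSON.parse(redis.get("#{cache_key}_other"))
      puts "#{status} #{headers.inspect}: #{Zlib::Inflate.inflate(body).bytesize} bytes"
    end

    # clear: drop both
    redis.del("#{cache_key}_body", "#{cache_key}_other")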
@@ -122,15 +122,15 @@ module Oneboxer
  end

  def self.is_previewing?(user_id)
- $redis.get(preview_key(user_id)) == "1"
+ Discourse.redis.get(preview_key(user_id)) == "1"
  end

  def self.preview_onebox!(user_id)
- $redis.setex(preview_key(user_id), 1.minute, "1")
+ Discourse.redis.setex(preview_key(user_id), 1.minute, "1")
  end

  def self.onebox_previewed!(user_id)
- $redis.del(preview_key(user_id))
+ Discourse.redis.del(preview_key(user_id))
  end

  def self.engine(url)
@@ -265,11 +265,11 @@ class PostRevisor
  end

  def cached_original_raw
- @cached_original_raw ||= $redis.get(original_raw_key)
+ @cached_original_raw ||= Discourse.redis.get(original_raw_key)
  end

  def cached_original_cooked
- @cached_original_cooked ||= $redis.get(original_cooked_key)
+ @cached_original_cooked ||= Discourse.redis.get(original_cooked_key)
  end

  def original_raw
@@ -278,12 +278,12 @@ class PostRevisor

  def original_raw=(val)
  @cached_original_raw = val
- $redis.setex(original_raw_key, SiteSetting.editing_grace_period + 1, val)
+ Discourse.redis.setex(original_raw_key, SiteSetting.editing_grace_period + 1, val)
  end

  def original_cooked=(val)
  @cached_original_cooked = val
- $redis.setex(original_cooked_key, SiteSetting.editing_grace_period + 1, val)
+ Discourse.redis.setex(original_cooked_key, SiteSetting.editing_grace_period + 1, val)
  end

  def diff_size(before, after)
@@ -24,12 +24,12 @@ class RateLimiter

  # Only used in test, only clears current namespace, does not clear globals
  def self.clear_all!
- $redis.delete_prefixed(RateLimiter.key_prefix)
+ Discourse.redis.delete_prefixed(RateLimiter.key_prefix)
  end

  def self.clear_all_global!
- $redis.without_namespace.keys("GLOBAL::#{key_prefix}*").each do |k|
- $redis.without_namespace.del k
+ Discourse.redis.without_namespace.keys("GLOBAL::#{key_prefix}*").each do |k|
+ Discourse.redis.without_namespace.del k
  end
  end
@@ -119,12 +119,12 @@ class RateLimiter
  if @global
  "GLOBAL::#{key}"
  else
- $redis.namespace_key(key)
+ Discourse.redis.namespace_key(key)
  end
  end

  def redis
- $redis.without_namespace
+ Discourse.redis.without_namespace
  end

  def seconds_to_wait
@@ -16,23 +16,23 @@ class SecureSession

  def set(key, val, expires: nil)
  expires ||= SecureSession.expiry
- $redis.setex(prefixed_key(key), expires.to_i, val.to_s)
+ Discourse.redis.setex(prefixed_key(key), expires.to_i, val.to_s)
  true
  end

  def ttl(key)
- $redis.ttl(prefixed_key(key))
+ Discourse.redis.ttl(prefixed_key(key))
  end

  def [](key)
- $redis.get(prefixed_key(key))
+ Discourse.redis.get(prefixed_key(key))
  end

  def []=(key, val)
  if val == nil
- $redis.del(prefixed_key(key))
+ Discourse.redis.del(prefixed_key(key))
  else
- $redis.setex(prefixed_key(key), SecureSession.expiry.to_i, val.to_s)
+ Discourse.redis.setex(prefixed_key(key), SecureSession.expiry.to_i, val.to_s)
  end
  val
  end
@@ -12,13 +12,13 @@ class SidekiqPauser
  end

  def pause!(value = "paused")
- $redis.setex PAUSED_KEY, TTL, value
+ Discourse.redis.setex PAUSED_KEY, TTL, value
  extend_lease_thread
  true
  end

  def paused?
- !!$redis.get(PAUSED_KEY)
+ !!Discourse.redis.get(PAUSED_KEY)
  end

  def unpause_all!
@@ -48,7 +48,7 @@ class SidekiqPauser
  stop_extend_lease_thread if @dbs.size == 0
  end

- $redis.del(PAUSED_KEY)
+ Discourse.redis.del(PAUSED_KEY)
  true
  end
@@ -83,7 +83,7 @@ class SidekiqPauser
  @mutex.synchronize do
  @dbs.each do |db|
  RailsMultisite::ConnectionManagement.with_connection(db) do
- if !$redis.expire(PAUSED_KEY, TTL)
+ if !Discourse.redis.expire(PAUSED_KEY, TTL)
  # if it was unpaused in another process we've got to remove the
  # bad key
  @dbs.delete(db)
@@ -7,11 +7,11 @@ module Stylesheet
  REDIS_KEY = "dev_last_used_theme_id"

  def self.theme_id=(v)
- $redis.set(REDIS_KEY, v)
+ Discourse.redis.set(REDIS_KEY, v)
  end

  def self.theme_id
- ($redis.get(REDIS_KEY) || SiteSetting.default_theme_id).to_i
+ (Discourse.redis.get(REDIS_KEY) || SiteSetting.default_theme_id).to_i
  end

  def self.watch(paths = nil)
@@ -9,7 +9,7 @@ task 'redis:clean_up' => ['environment'] do
  regexp = /((\$(?<message_bus>\w+)$)|(^?(?<namespace>\w+):))/

  cursor = 0
- redis = $redis.without_namespace
+ redis = Discourse.redis.without_namespace

  loop do
  cursor, keys = redis.scan(cursor)
@@ -161,11 +161,11 @@ class Typepad < Thor

  if options[:google_api] && comment[:author] =~ /plus.google.com\/(\d+)/
  gplus_id = Regexp.last_match[1]
- from_redis = $redis.get("gplus:#{gplus_id}")
+ from_redis = Discourse.redis.get("gplus:#{gplus_id}")
  if from_redis.blank?
  json = ::JSON.parse(open("https://www.googleapis.com/plus/v1/people/#{gplus_id}?key=#{options[:google_api]}").read)
  from_redis = json['displayName']
- $redis.set("gplus:#{gplus_id}", from_redis)
+ Discourse.redis.set("gplus:#{gplus_id}", from_redis)
  end
  comment[:author] = from_redis
  end
@@ -184,11 +184,11 @@ class Typepad < Thor

  if comment[:author] =~ /www.facebook.com\/profile.php\?id=(\d+)/
  fb_id = Regexp.last_match[1]
- from_redis = $redis.get("fb:#{fb_id}")
+ from_redis = Discourse.redis.get("fb:#{fb_id}")
  if from_redis.blank?
  json = ::JSON.parse(open("http://graph.facebook.com/#{fb_id}").read)
  from_redis = json['username']
- $redis.set("fb:#{fb_id}", from_redis)
+ Discourse.redis.set("fb:#{fb_id}", from_redis)
  end
  comment[:author] = from_redis
  end
@@ -24,8 +24,8 @@ class TopicRetriever

  # Throttle other users to once every 60 seconds
  retrieved_key = "retrieved_topic"
- if $redis.setnx(retrieved_key, "1")
- $redis.expire(retrieved_key, 60)
+ if Discourse.redis.setnx(retrieved_key, "1")
+ Discourse.redis.expire(retrieved_key, 60)
  return false
  end
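The throttle above is the standard SETNX-plus-EXPIRE window: only the first caller manages to set the key, and the TTL reopens the window a minute later. A minimal sketch of that idiom with the plain redis gem and a generic key:

    require "redis"

    # Returns true for the first caller in each `window`-second interval, false otherwise.
    def first_in_window?(redis, key, window = 60)
      if redis.setnx(key, "1")
        redis.expire(key, window)
        true
      else
        false
      end
    end

    redis = Redis.new
    first_in_window?(redis, "retrieved_topic") # => true, then false until the key expires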
@@ -51,15 +51,15 @@ module DiscourseNarrativeBot

  key = "#{DiscourseNarrativeBot::PLUGIN_NAME}:reset-rate-limit:#{post.topic_id}:#{data['state']}"

- if !(count = $redis.get(key))
+ if !(count = Discourse.redis.get(key))
  count = 0
- $redis.setex(key, duration, count)
+ Discourse.redis.setex(key, duration, count)
  end

  if count.to_i < 2
  post.default_rate_limiter.rollback!
  post.limit_posts_per_day&.rollback!
- $redis.incr(key)
+ Discourse.redis.incr(key)
  end
  end
@@ -53,7 +53,7 @@ module DiscourseNarrativeBot
  if previous_status && data[:attempted] == previous_status && !data[:skip_attempted]
  generic_replies(klass.reset_trigger, state)
  else
- $redis.del(generic_replies_key(@user))
+ Discourse.redis.del(generic_replies_key(@user))
  end

  Store.set(@user.id, data)
@@ -143,14 +143,14 @@ module DiscourseNarrativeBot

  if public_reply?
  key = "#{PUBLIC_DISPLAY_BOT_HELP_KEY}:#{@post.topic_id}"
- last_bot_help_post_number = $redis.get(key)
+ last_bot_help_post_number = Discourse.redis.get(key)

  if !last_bot_help_post_number ||
  (last_bot_help_post_number &&
  @post.post_number - 10 > last_bot_help_post_number.to_i &&
- (1.day.to_i - $redis.ttl(key)) > 6.hours.to_i)
+ (1.day.to_i - Discourse.redis.ttl(key)) > 6.hours.to_i)

- $redis.setex(key, 1.day.to_i, @post.post_number)
+ Discourse.redis.setex(key, 1.day.to_i, @post.post_number)
  message
  end
  else
@@ -190,7 +190,7 @@ module DiscourseNarrativeBot
  def generic_replies(track_reset_trigger, state = nil)
  reset_trigger = "#{self.class.reset_trigger} #{track_reset_trigger}"
  key = generic_replies_key(@user)
- count = ($redis.get(key) || $redis.setex(key, 900, 0)).to_i
+ count = (Discourse.redis.get(key) || Discourse.redis.setex(key, 900, 0)).to_i

  case count
  when 0
@@ -210,7 +210,7 @@ module DiscourseNarrativeBot
  # Stay out of the user's way
  end

- $redis.incr(key)
+ Discourse.redis.incr(key)
  end

  def self.i18n_key(key)
@@ -243,7 +243,7 @@ describe DiscourseNarrativeBot::TrackSelector do

  context 'generic replies' do
  after do
- $redis.del("#{described_class::GENERIC_REPLIES_COUNT_PREFIX}#{user.id}")
+ Discourse.redis.del("#{described_class::GENERIC_REPLIES_COUNT_PREFIX}#{user.id}")
  end

  it 'should create the right generic do not understand responses' do
@@ -472,17 +472,17 @@ describe DiscourseNarrativeBot::TrackSelector do
  let(:post) { Fabricate(:post, topic: topic) }

  after do
- $redis.flushall
+ Discourse.redis.flushall
  end

  describe 'when random reply message has been displayed in the last 6 hours' do
  it 'should not do anything' do
- $redis.set(
+ Discourse.redis.set(
  "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}",
  post.post_number - 11
  )

- $redis.class.any_instance.expects(:ttl).returns(19.hours.to_i)
+ Discourse.redis.class.any_instance.expects(:ttl).returns(19.hours.to_i)

  user
  post.update!(raw: "Show me what you can do @discobot")
@@ -494,12 +494,12 @@ describe DiscourseNarrativeBot::TrackSelector do

  describe 'when random reply message has not been displayed in the last 6 hours' do
  it 'should create the right reply' do
- $redis.set(
+ Discourse.redis.set(
  "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}",
  post.post_number - 11
  )

- $redis.class.any_instance.expects(:ttl).returns(7.hours.to_i)
+ Discourse.redis.class.any_instance.expects(:ttl).returns(7.hours.to_i)

  user
  post.update!(raw: "Show me what you can do @discobot")
@@ -515,7 +515,7 @@ describe DiscourseNarrativeBot::TrackSelector do
  described_class.new(:reply, user, post_id: other_post.id).select
  expect(Post.last.raw).to eq(random_mention_reply)

- expect($redis.get(
+ expect(Discourse.redis.get(
  "#{described_class::PUBLIC_DISPLAY_BOT_HELP_KEY}:#{other_post.topic_id}"
  ).to_i).to eq(other_post.post_number.to_i)
@@ -36,20 +36,20 @@ after_initialize do
  # return true if a key was added
  def self.add(type, id, user_id)
  key = get_redis_key(type, id)
- result = $redis.hset(key, user_id, Time.zone.now)
- $redis.expire(key, MAX_BACKLOG_AGE)
+ result = Discourse.redis.hset(key, user_id, Time.zone.now)
+ Discourse.redis.expire(key, MAX_BACKLOG_AGE)
  result
  end

  # return true if a key was deleted
  def self.remove(type, id, user_id)
  key = get_redis_key(type, id)
- $redis.expire(key, MAX_BACKLOG_AGE)
- $redis.hdel(key, user_id) > 0
+ Discourse.redis.expire(key, MAX_BACKLOG_AGE)
+ Discourse.redis.hdel(key, user_id) > 0
  end

  def self.get_users(type, id)
- user_ids = $redis.hkeys(get_redis_key(type, id)).map(&:to_i)
+ user_ids = Discourse.redis.hkeys(get_redis_key(type, id)).map(&:to_i)
  User.where(id: user_ids)
  end
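The presence hunks keep one Redis hash per topic or post, with user ids as fields, timestamps as values, and a rolling TTL refreshed on every touch. A minimal sketch of that hash-per-resource layout with the plain redis gem and an assumed key shape:

    require "redis"

    redis = Redis.new
    key = "presence:topic:42"   # one hash per resource (assumed key shape)
    max_age = 60                # rolling expiry for the whole hash

    redis.hset(key, "7", Time.now.to_s) # user 7 is present
    redis.expire(key, max_age)          # refresh the TTL on every touch

    user_ids = redis.hkeys(key).map(&:to_i) # users currently listed
    redis.hdel(key, "7")                    # user 7 left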
@@ -88,7 +88,7 @@ after_initialize do
  has_changed = false

  # Delete entries older than 20 seconds
- hash = $redis.hgetall(get_redis_key(type, id))
+ hash = Discourse.redis.hgetall(get_redis_key(type, id))
  hash.each do |user_id, time|
  if Time.zone.now - Time.parse(time) >= 20
  has_changed |= remove(type, id, user_id)
@@ -13,10 +13,10 @@ describe ::Presence::PresenceManager do
  let(:post2) { Fabricate(:post) }

  after(:each) do
- $redis.del("presence:topic:#{post1.topic.id}")
- $redis.del("presence:topic:#{post2.topic.id}")
- $redis.del("presence:post:#{post1.id}")
- $redis.del("presence:post:#{post2.id}")
+ Discourse.redis.del("presence:topic:#{post1.topic.id}")
+ Discourse.redis.del("presence:topic:#{post2.topic.id}")
+ Discourse.redis.del("presence:post:#{post1.id}")
+ Discourse.redis.del("presence:post:#{post2.id}")
  end

  it 'adds, removes and lists users correctly' do
@@ -17,10 +17,10 @@ describe ::Presence::PresencesController do
  let(:manager) { ::Presence::PresenceManager }

  after do
- $redis.del("presence:topic:#{post1.topic.id}")
- $redis.del("presence:topic:#{post2.topic.id}")
- $redis.del("presence:post:#{post1.id}")
- $redis.del("presence:post:#{post2.id}")
+ Discourse.redis.del("presence:topic:#{post1.topic.id}")
+ Discourse.redis.del("presence:topic:#{post2.topic.id}")
+ Discourse.redis.del("presence:post:#{post1.id}")
+ Discourse.redis.del("presence:post:#{post2.id}")
  end

  context 'when not logged in' do
script/benchmarks/cache/bench.rb (vendored, 8 changes)
@@ -7,14 +7,14 @@ Benchmark.ips do |x|

  x.report("redis setex string") do |times|
  while times > 0
- $redis.setex("test_key", 60, "test")
+ Discourse.redis.setex("test_key", 60, "test")
  times -= 1
  end
  end

  x.report("redis setex marshal string") do |times|
  while times > 0
- $redis.setex("test_keym", 60, Marshal.dump("test"))
+ Discourse.redis.setex("test_keym", 60, Marshal.dump("test"))
  times -= 1
  end
  end
@@ -39,14 +39,14 @@ end
 Benchmark.ips do |x|
  x.report("redis get string") do |times|
  while times > 0
- $redis.get("test_key")
+ Discourse.redis.get("test_key")
  times -= 1
  end
  end

  x.report("redis get string marshal") do |times|
  while times > 0
- Marshal.load($redis.get("test_keym"))
+ Marshal.load(Discourse.redis.get("test_keym"))
  times -= 1
  end
  end
@@ -2,7 +2,7 @@

  require File.expand_path("../../config/environment", __FILE__)

- @redis = $redis.without_namespace
+ @redis = Discourse.redis.without_namespace

  stats = {}
@@ -41,7 +41,7 @@ describe ActiveRecord::ConnectionHandling
  ActiveRecord::Base.clear_all_connections!
  ActiveRecord::Base.establish_connection

- $redis.flushall
+ Discourse.redis.flushall
  end

  describe "#postgresql_fallback_connection" do
@@ -399,7 +399,7 @@ describe Auth::DefaultCurrentUserProvider
  end

  after do
- $redis.flushall
+ Discourse.redis.flushall
  end

  it "should not update last seen for suspended users" do
@@ -416,7 +416,7 @@ describe Auth::DefaultCurrentUserProvider
  u.suspended_till = 1.year.from_now
  u.save!

- $redis.del("user:#{user.id}:#{Time.now.to_date}")
+ Discourse.redis.del("user:#{user.id}:#{Time.now.to_date}")
  provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}")
  expect(provider2.current_user).to eq(nil)
@@ -32,12 +32,12 @@ describe Cache
  end

  it "can be cleared" do
- $redis.set("boo", "boo")
+ Discourse.redis.set("boo", "boo")
  cache.write("hello0", "world")
  cache.write("hello1", "world")
  cache.clear

- expect($redis.get("boo")).to eq("boo")
+ expect(Discourse.redis.get("boo")).to eq("boo")
  expect(cache.read("hello0")).to eq(nil)
  end
@@ -64,13 +64,13 @@ describe Cache
  "bob"
  end

- expect($redis.ttl(key)).to be_within(2.seconds).of(1.minute)
+ expect(Discourse.redis.ttl(key)).to be_within(2.seconds).of(1.minute)

  # we always expire within a day
  cache.fetch("bla") { "hi" }

  key = cache.normalize_key("bla")
- expect($redis.ttl(key)).to be_within(2.seconds).of(1.day)
+ expect(Discourse.redis.ttl(key)).to be_within(2.seconds).of(1.day)
  end

  it "can store and fetch correctly" do
@@ -101,12 +101,12 @@ describe DiscourseRedis
  it 'should check the status of the master server' do
  begin
  fallback_handler.master = false
- $redis.without_namespace.expects(:set).raises(Redis::CommandError.new("READONLY"))
+ Discourse.redis.without_namespace.expects(:set).raises(Redis::CommandError.new("READONLY"))
  fallback_handler.expects(:verify_master).once
- $redis.set('test', '1')
+ Discourse.redis.set('test', '1')
  ensure
  fallback_handler.master = true
- $redis.del('test')
+ Discourse.redis.del('test')
  end
  end
  end
@@ -185,21 +185,21 @@ describe Discourse
  let(:user_readonly_mode_key) { Discourse::USER_READONLY_MODE_KEY }

  after do
- $redis.del(readonly_mode_key)
- $redis.del(user_readonly_mode_key)
+ Discourse.redis.del(readonly_mode_key)
+ Discourse.redis.del(user_readonly_mode_key)
  end

  def assert_readonly_mode(message, key, ttl = -1)
  expect(message.channel).to eq(Discourse.readonly_channel)
  expect(message.data).to eq(true)
- expect($redis.get(key)).to eq("1")
- expect($redis.ttl(key)).to eq(ttl)
+ expect(Discourse.redis.get(key)).to eq("1")
+ expect(Discourse.redis.ttl(key)).to eq(ttl)
  end

  def assert_readonly_mode_disabled(message, key)
  expect(message.channel).to eq(Discourse.readonly_channel)
  expect(message.data).to eq(false)
- expect($redis.get(key)).to eq(nil)
+ expect(Discourse.redis.get(key)).to eq(nil)
  end

  def get_readonly_message
@@ -217,14 +217,14 @@ describe Discourse

  describe ".enable_readonly_mode" do
  it "adds a key in redis and publish a message through the message bus" do
- expect($redis.get(readonly_mode_key)).to eq(nil)
+ expect(Discourse.redis.get(readonly_mode_key)).to eq(nil)
  message = get_readonly_message { Discourse.enable_readonly_mode }
  assert_readonly_mode(message, readonly_mode_key, readonly_mode_ttl)
  end

  context 'user enabled readonly mode' do
  it "adds a key in redis and publish a message through the message bus" do
- expect($redis.get(user_readonly_mode_key)).to eq(nil)
+ expect(Discourse.redis.get(user_readonly_mode_key)).to eq(nil)
  message = get_readonly_message { Discourse.enable_readonly_mode(user_readonly_mode_key) }
  assert_readonly_mode(message, user_readonly_mode_key)
  end
@@ -252,7 +252,7 @@ describe Discourse
  end

  it "returns true when the key is present in redis" do
- $redis.set(readonly_mode_key, 1)
+ Discourse.redis.set(readonly_mode_key, 1)
  expect(Discourse.readonly_mode?).to eq(true)
  end
@@ -5,9 +5,9 @@ require 'rails_helper'
 describe DistributedMemoizer do

  before do
- $redis.del(DistributedMemoizer.redis_key("hello"))
- $redis.del(DistributedMemoizer.redis_lock_key("hello"))
- $redis.unwatch
+ Discourse.redis.del(DistributedMemoizer.redis_key("hello"))
+ Discourse.redis.del(DistributedMemoizer.redis_lock_key("hello"))
+ Discourse.redis.unwatch
  end

  # NOTE we could use a mock redis here, but I think it makes sense to test the real thing
@@ -6,7 +6,7 @@ describe DistributedMutex
  let(:key) { "test_mutex_key" }

  after do
- $redis.del(key)
+ Discourse.redis.del(key)
  end

  it "allows only one mutex object to have the lock at a time" do
@@ -31,7 +31,7 @@ describe DistributedMutex
  it "handles auto cleanup correctly" do
  m = DistributedMutex.new(key)

- $redis.setnx key, Time.now.to_i - 1
+ Discourse.redis.setnx key, Time.now.to_i - 1

  start = Time.now.to_i
  m.synchronize do
@@ -54,16 +54,16 @@ describe DistributedMutex
  mutex = DistributedMutex.new(key, validity: 2)

  mutex.synchronize do
- expect($redis.ttl(key)).to eq(2)
- expect($redis.get(key).to_i).to eq(Time.now.to_i + 2)
+ expect(Discourse.redis.ttl(key)).to eq(2)
+ expect(Discourse.redis.get(key).to_i).to eq(Time.now.to_i + 2)
  end

  mutex = DistributedMutex.new(key)

  mutex.synchronize do
- expect($redis.ttl(key)).to eq(DistributedMutex::DEFAULT_VALIDITY)
+ expect(Discourse.redis.ttl(key)).to eq(DistributedMutex::DEFAULT_VALIDITY)

- expect($redis.get(key).to_i)
+ expect(Discourse.redis.get(key).to_i)
  .to eq(Time.now.to_i + DistributedMutex::DEFAULT_VALIDITY)
  end
  end
@@ -80,11 +80,11 @@ describe DistributedMutex

  context "readonly redis" do
  before do
- $redis.slaveof "127.0.0.1", "99991"
+ Discourse.redis.slaveof "127.0.0.1", "99991"
  end

  after do
- $redis.slaveof "no", "one"
+ Discourse.redis.slaveof "no", "one"
  end

  it "works even if redis is in readonly" do
@@ -111,7 +111,7 @@ describe DistributedMutex
  Concurrency::Scenario.new do |execution|
  locked = false

- $redis.del('mutex_key')
+ Discourse.redis.del('mutex_key')

  connections.each do |connection|
  connection.unwatch
@@ -5,7 +5,7 @@ require "email/processor"

 describe Email::Processor do
  after do
- $redis.flushall
+ Discourse.redis.flushall
  end

  let(:from) { "foo@bar.com" }
@@ -78,7 +78,7 @@ describe Email::Processor

  it "only sends one rejection email per day" do
  key = "rejection_email:#{[from]}:email_reject_empty:#{Date.today}"
- $redis.expire(key, 0)
+ Discourse.redis.expire(key, 0)

  expect {
  Email::Processor.process!(mail)
@@ -91,7 +91,7 @@ describe Email::Processor
  freeze_time(Date.today + 1)

  key = "rejection_email:#{[from]}:email_reject_empty:#{Date.today}"
- $redis.expire(key, 0)
+ Discourse.redis.expire(key, 0)

  expect {
  Email::Processor.process!(mail3)
@@ -131,7 +131,7 @@ describe Email::Processor
  it "sends more than one rejection email per day" do
  Email::Receiver.any_instance.stubs(:process_internal).raises("boom")
  key = "rejection_email:#{[from]}:email_reject_unrecognized_error:#{Date.today}"
- $redis.expire(key, 0)
+ Discourse.redis.expire(key, 0)

  expect {
  Email::Processor.process!(mail)
@@ -143,7 +143,7 @@ describe Middleware::AnonymousCache::Helper

  # depends on i7z implementation, but let's assume it is stable unless we discover
  # otherwise
- expect($redis.get(helper.cache_key_body).length).to eq(16)
+ expect(Discourse.redis.get(helper.cache_key_body).length).to eq(16)
  end

  it "handles brotli switching" do
@@ -264,7 +264,7 @@ describe Middleware::RequestTracker
  User.where(id: -100).pluck(:id)
  end
  redis_calls.times do
- $redis.get("x")
+ Discourse.redis.get("x")
  end
  result
  end
@@ -91,7 +91,7 @@ describe PostRevisor
  before do
  # There used to be a bug where wiki changes were considered posting "too similar"
  # so this is enabled and checked
- $redis.delete_prefixed('unique-post')
+ Discourse.redis.delete_prefixed('unique-post')
  SiteSetting.unique_posts_mins = 10
  end
@@ -456,7 +456,7 @@ describe PrettyText
  ['apple', 'banana'].each { |w| Fabricate(:watched_word, word: w, action: WatchedWord.actions[:censor]) }
  expect(PrettyText.cook("# banana")).not_to include('banana')
  ensure
- $redis.flushall
+ Discourse.redis.flushall
  end
  end
  end
@@ -1091,7 +1091,7 @@ HTML
  end

  describe "censoring" do
- after(:all) { $redis.flushall }
+ after(:all) { Discourse.redis.flushall }

  def expect_cooked_match(raw, expected_cooked)
  expect(PrettyText.cook(raw)).to eq(expected_cooked)
@@ -63,11 +63,11 @@ describe RateLimiter

  context 'handles readonly' do
  before do
- $redis.without_namespace.slaveof '10.0.0.1', '99999'
+ Discourse.redis.without_namespace.slaveof '10.0.0.1', '99999'
  end

  after do
- $redis.without_namespace.slaveof 'no', 'one'
+ Discourse.redis.without_namespace.slaveof 'no', 'one'
  end

  it 'does not explode' do
@@ -767,7 +767,7 @@ describe TopicQuery

  context 'suggested_for' do
  def clear_cache!
- $redis.keys('random_topic_cache*').each { |k| $redis.del k }
+ Discourse.redis.keys('random_topic_cache*').each { |k| Discourse.redis.del k }
  end

  before do
@@ -198,7 +198,7 @@ describe PostValidator
  end

  after do
- $redis.del(@key)
+ Discourse.redis.del(@key)
  end

  context "post is unique" do
@@ -7,12 +7,12 @@ describe Jobs::AboutStats
  begin
  stats = About.fetch_stats.to_json
  cache_key = About.stats_cache_key
- $redis.del(cache_key)
+ Discourse.redis.del(cache_key)

  expect(described_class.new.execute({})).to eq(stats)
- expect($redis.get(cache_key)).to eq(stats)
+ expect(Discourse.redis.get(cache_key)).to eq(stats)
  ensure
- $redis.del(cache_key)
+ Discourse.redis.del(cache_key)
  end
  end
  end
@@ -30,7 +30,7 @@ describe Jobs::PollMailbox
  end

  after do
- $redis.del(Jobs::PollMailbox::POLL_MAILBOX_TIMEOUT_ERROR_KEY)
+ Discourse.redis.del(Jobs::PollMailbox::POLL_MAILBOX_TIMEOUT_ERROR_KEY)
  end

  it "add an admin dashboard message on pop authentication error" do
@@ -108,7 +108,7 @@ describe UserNotifications
  subject { UserNotifications.digest(user) }

  after do
- $redis.keys('summary-new-users:*').each { |key| $redis.del(key) }
+ Discourse.redis.keys('summary-new-users:*').each { |key| Discourse.redis.del(key) }
  end

  context "without new topics" do
@@ -5,7 +5,7 @@ require 'rails_helper'
 describe ApplicationRequest do
  before do
  ApplicationRequest.last_flush = Time.now.utc
- $redis.flushall
+ Discourse.redis.flushall
  end

  after do
@@ -28,15 +28,15 @@ describe ApplicationRequest
  inc(:http_total)
  inc(:http_total)

- $redis.without_namespace.stubs(:incr).raises(Redis::CommandError.new("READONLY"))
- $redis.without_namespace.stubs(:eval).raises(Redis::CommandError.new("READONLY"))
+ Discourse.redis.without_namespace.stubs(:incr).raises(Redis::CommandError.new("READONLY"))
+ Discourse.redis.without_namespace.stubs(:eval).raises(Redis::CommandError.new("READONLY"))

  # flush will be deferred no error raised
  inc(:http_total, autoflush: 3)
  ApplicationRequest.write_cache!

- $redis.without_namespace.unstub(:incr)
- $redis.without_namespace.unstub(:eval)
+ Discourse.redis.without_namespace.unstub(:incr)
+ Discourse.redis.without_namespace.unstub(:eval)

  inc(:http_total, autoflush: 3)
  expect(ApplicationRequest.http_total.first.count).to eq(3)
@@ -35,18 +35,18 @@ describe GlobalSetting
  freeze_time Time.now

  token = GlobalSetting.safe_secret_key_base
- $redis.without_namespace.del(GlobalSetting::REDIS_SECRET_KEY)
+ Discourse.redis.without_namespace.del(GlobalSetting::REDIS_SECRET_KEY)
  freeze_time Time.now + 20

  GlobalSetting.safe_secret_key_base
- new_token = $redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
+ new_token = Discourse.redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
  expect(new_token).to eq(nil)

  freeze_time Time.now + 11

  GlobalSetting.safe_secret_key_base

- new_token = $redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
+ new_token = Discourse.redis.without_namespace.get(GlobalSetting::REDIS_SECRET_KEY)
  expect(new_token).to eq(token)

  end
@@ -128,7 +128,7 @@ RSpec.describe SearchLog, type: :model do
  expect(action).to eq(:created)

  freeze_time(10.minutes.from_now)
- $redis.del(SearchLog.redis_key(ip_address: '192.168.0.1', user_id: user.id))
+ Discourse.redis.del(SearchLog.redis_key(ip_address: '192.168.0.1', user_id: user.id))

  action, _ = SearchLog.log(
  term: 'hello',
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user