2018-01-17 13:32:52 +08:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2013-10-09 14:22:41 +08:00
|
|
|
require "current_user"
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
class ApplicationController < ActionController::Base
|
|
|
|
include CurrentUser
|
2013-02-13 19:04:43 +08:00
|
|
|
include CanonicalURL::ControllerExtensions
|
2014-04-03 01:22:10 +08:00
|
|
|
include JsonError
|
2015-03-10 03:24:16 +08:00
|
|
|
include GlobalPath
|
2017-11-24 12:31:23 +08:00
|
|
|
include Hijack
|
2022-05-18 02:06:08 +08:00
|
|
|
include ReadOnlyMixin
|
2021-10-25 19:53:50 +08:00
|
|
|
include VaryHeader
|
2013-02-06 03:16:51 +08:00
|
|
|
|
2021-06-15 14:57:17 +08:00
|
|
|
attr_reader :theme_id
|
2017-05-13 00:41:26 +08:00
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
serialization_scope :guardian
|
|
|
|
|
|
|
|
protect_from_forgery
|
|
|
|
|
2013-07-29 13:13:13 +08:00
|
|
|
# Rails' default behaviour lets an unverified request through with a blank
# session. We are stricter here: null the session and current user (via
# super + clear_current_user) and answer with an explicit CSRF failure.
def handle_unverified_request
  # NOTE: API keys are secret, so their presence removes the need for a CSRF token.
  return if is_api? || is_user_api?

  super
  clear_current_user
  render plain: "[\"BAD CSRF\"]", status: 403
end
|
|
|
|
|
FEATURE: Replace `Crawl-delay` directive with proper rate limiting (#15131)
We have a couple of site setting, `slow_down_crawler_user_agents` and `slow_down_crawler_rate`, that are meant to allow site owners to signal to specific crawlers that they're crawling the site too aggressively and that they should slow down.
When a crawler is added to the `slow_down_crawler_user_agents` setting, Discourse currently adds a `Crawl-delay` directive for that crawler in `/robots.txt`. Unfortunately, many crawlers don't support the `Crawl-delay` directive in `/robots.txt` which leaves the site owners no options if a crawler is crawling the site too aggressively.
This PR replaces the `Crawl-delay` directive with proper rate limiting for crawlers added to the `slow_down_crawler_user_agents` list. On every request made by a non-logged in user, Discourse will check the User Agent string and if it contains one of the values of the `slow_down_crawler_user_agents` list, Discourse will only allow 1 request every N seconds for that User Agent (N is the value of the `slow_down_crawler_rate` setting) and the rest of requests made within the same interval will get a 429 response.
The `slow_down_crawler_user_agents` setting becomes quite dangerous with this PR since it could rate limit lots if not all of anonymous traffic if the setting is not used appropriately. So to protect against this scenario, we've added a couple of new validations to the setting when it's changed:
1) each value added to setting must 3 characters or longer
2) each value cannot be a substring of tokens found in popular browser User Agent. The current list of prohibited values is: apple, windows, linux, ubuntu, gecko, firefox, chrome, safari, applewebkit, webkit, mozilla, macintosh, khtml, intel, osx, os x, iphone, ipad and mac.
2021-11-30 17:55:25 +08:00
|
|
|
before_action :rate_limit_crawlers
|
2017-09-07 13:29:30 +08:00
|
|
|
before_action :check_readonly_mode
|
2017-08-31 12:06:56 +08:00
|
|
|
before_action :handle_theme
|
|
|
|
before_action :set_current_user_for_logs
|
2020-09-17 23:18:35 +08:00
|
|
|
before_action :set_mp_snapshot_fields
|
2017-08-31 12:06:56 +08:00
|
|
|
before_action :clear_notifications
|
2020-07-23 00:30:26 +08:00
|
|
|
around_action :with_resolved_locale
|
2017-08-31 12:06:56 +08:00
|
|
|
before_action :set_mobile_view
|
|
|
|
before_action :block_if_readonly_mode
|
|
|
|
before_action :authorize_mini_profiler
|
|
|
|
before_action :redirect_to_login_if_required
|
2018-02-01 12:17:59 +08:00
|
|
|
before_action :block_if_requires_login
|
|
|
|
before_action :preload_json
|
2017-08-31 12:06:56 +08:00
|
|
|
before_action :check_xhr
|
|
|
|
after_action :add_readonly_header
|
|
|
|
after_action :perform_refresh_session
|
|
|
|
after_action :dont_cache_page
|
2019-12-06 20:55:32 +08:00
|
|
|
after_action :conditionally_allow_site_embedding
|
2021-10-25 19:53:50 +08:00
|
|
|
after_action :ensure_vary_header
|
2021-11-26 03:58:39 +08:00
|
|
|
after_action :add_noindex_header,
|
|
|
|
if: -> { is_feed_request? || !SiteSetting.allow_index_in_robots_txt }
|
2022-10-12 07:11:44 +08:00
|
|
|
after_action :add_noindex_header_to_non_canonical, if: :spa_boot_request?
|
|
|
|
around_action :link_preload, if: -> { spa_boot_request? && GlobalSetting.preload_link_header }
|
2013-02-07 23:45:24 +08:00
|
|
|
|
2020-10-02 07:01:40 +08:00
|
|
|
HONEYPOT_KEY ||= "HONEYPOT_KEY"
|
|
|
|
CHALLENGE_KEY ||= "CHALLENGE_KEY"
|
|
|
|
|
2014-02-15 06:10:08 +08:00
|
|
|
layout :set_layout
|
|
|
|
|
2014-02-21 06:02:26 +08:00
|
|
|
# True when escaped-fragment crawling is enabled site-wide AND this request
# carries the legacy `_escaped_fragment_` query parameter.
def has_escaped_fragment?
  return false unless SiteSetting.enable_escaped_fragments?
  params.key?("_escaped_fragment_")
end
|
|
|
|
|
2021-03-23 01:41:42 +08:00
|
|
|
# Memoized (truthy results only, matching ||= semantics): should the
# "please update your browser" notice be shown for this user agent?
def show_browser_update?
  return @show_browser_update if @show_browser_update
  @show_browser_update = CrawlerDetection.show_browser_update?(request.user_agent)
end
|
|
|
|
helper_method :show_browser_update?
|
|
|
|
|
2014-10-31 02:26:35 +08:00
|
|
|
# Memoized decision: serve the stripped-down "crawler" layout?
# Requires a UA string, an HTML-ish (or blank) media type and a non-json/rss
# format param, plus one of: escaped fragment, print mode, browser-update
# notice, or a detected crawler. Evaluation order (and short-circuiting)
# is deliberately preserved — show_browser_update? memoizes as a side effect.
def use_crawler_layout?
  return @use_crawler_layout if @use_crawler_layout

  @use_crawler_layout =
    request.user_agent &&
      (request.media_type.blank? || request.media_type.include?("html")) &&
      !%w[json rss].include?(params[:format]) &&
      (
        has_escaped_fragment? || params.key?("print") || show_browser_update? ||
          CrawlerDetection.crawler?(request.user_agent, request.headers["HTTP_VIA"])
      )
end
|
|
|
|
|
2016-07-25 10:07:31 +08:00
|
|
|
# After-action hook: roll the session/auth token forward on each response,
# except while the site is in readonly mode (no writes should happen then).
def perform_refresh_session
  return if @readonly_mode
  refresh_session(current_user)
end
|
|
|
|
|
2017-02-24 02:05:00 +08:00
|
|
|
# Mark the response as publicly cacheable and immutable for +duration+
# (anything responding to #to_i, e.g. 1.year).
def immutable_for(duration)
  response.cache_control.merge!(
    max_age: duration.to_i,
    public: true,
    extras: ["immutable"],
  )
end
|
|
|
|
|
2016-11-15 14:00:28 +08:00
|
|
|
# Unless earlier code already set caching directives, force
# no-cache/no-store on this response. Login-required sites additionally get
# a marker header telling Discourse instances not to onebox these pages.
def dont_cache_page
  unless response.headers["Cache-Control"] || response.cache_control.present?
    response.cache_control[:no_cache] = true
    response.cache_control[:extras] = ["no-store"]
  end

  response.headers["Discourse-No-Onebox"] = "1" if SiteSetting.login_required
end
|
|
|
|
|
2019-12-06 20:55:32 +08:00
|
|
|
# Strip X-Frame-Options when the site explicitly allows being iframed.
def conditionally_allow_site_embedding
  return unless SiteSetting.allow_embedding_site_in_an_iframe
  response.headers.delete("X-Frame-Options")
end
|
|
|
|
|
2021-04-30 02:13:36 +08:00
|
|
|
# In development we expect requests to arrive through the Ember CLI proxy,
# unless explicitly bypassed via env var or marked by the proxy's header.
def ember_cli_required?
  return false unless Rails.env.development?
  return false if ENV["ALLOW_EMBER_CLI_PROXY_BYPASS"] == "1"
  request.headers["X-Discourse-Ember-CLI"] != "true"
end
|
|
|
|
|
|
|
|
# Layout for regular (non-crawler) rendering; in dev a special layout
# nudges requests through the Ember CLI proxy when that is required.
def application_layout
  if ember_cli_required?
    "ember_cli"
  else
    "application"
  end
end
|
|
|
|
|
2014-02-15 06:10:08 +08:00
|
|
|
# Choose the response layout. An explicit Discourse-Render request header
# wins; otherwise fall back to crawler-detection heuristics.
def set_layout
  forced = request.headers["Discourse-Render"]
  return application_layout if forced == "desktop"
  return "crawler" if forced == "crawler"

  use_crawler_layout? ? "crawler" : application_layout
end
|
|
|
|
|
2015-03-23 09:20:50 +08:00
|
|
|
# Raised to short-circuit rendering with the empty default template.
class RenderEmpty < StandardError; end

# Raised by `requires_plugin` filters while the needed plugin is disabled.
class PluginDisabled < StandardError; end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
# RenderEmpty short-circuits the action and renders the bare default
# template (used by the SPA bootstrap), in the resolved locale.
rescue_from RenderEmpty do
  with_resolved_locale { render "default/empty" }
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# Null bytes in parameters surface as ArgumentErrors deep in the stack;
# translate exactly that case into InvalidParameters (a client error) and
# re-raise everything else untouched.
rescue_from ArgumentError do |e|
  raise e unless e.message == "string contains null byte"
  raise Discourse::InvalidParameters, e.message
end
|
|
|
|
|
2015-04-29 23:49:58 +08:00
|
|
|
# The database flipped to readonly mid-request: record the fact, log the
# original error, then route through the Discourse::ReadOnly handler
# (re-raising if no handler takes it).
rescue_from PG::ReadOnlySqlTransaction do |e|
  Discourse.received_postgres_readonly!
  Rails.logger.error("#{e.class} #{e.message}: #{e.backtrace.join("\n")}")
  rescue_with_handler(Discourse::ReadOnly.new) || raise
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# A missing required parameter is a client error: 400 with a JSON body.
rescue_from ActionController::ParameterMissing do |e|
  render_json_error e.message, status: 400
end
|
|
|
|
|
2021-05-11 16:36:57 +08:00
|
|
|
# A referenced site setting does not exist — a server-side configuration
# bug, so respond 500. e.message carries the setting name.
rescue_from Discourse::SiteSettingMissing do |e|
  render_json_error I18n.t("site_setting_missing", name: e.message), status: 500
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# Unroutable paths and endpoints backed by disabled plugins both render the
# standard 404 flow.
rescue_from ActionController::RoutingError, PluginDisabled do
  rescue_discourse_actions(:not_found, 404)
end
|
|
|
|
|
|
|
|
# Handles requests for giant IDs that overflow the integer column type and
# throw pg exceptions: treat those as plain 404s, re-raise anything else.
rescue_from ActiveModel::RangeError do |e|
  raise e unless e.message =~ /ActiveModel::Type::Integer/
  rescue_discourse_actions(:not_found, 404)
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# Model validation failures on JSON requests become structured 422
# responses; for any other format the exception propagates unchanged.
rescue_from ActiveRecord::RecordInvalid do |e|
  raise e unless request.format && request.format.json?
  render_json_error e, type: :record_invalid, status: 422
end
|
|
|
|
|
2016-12-05 09:11:46 +08:00
|
|
|
# Not a handler per se: give Discourse a chance to reset its prepared-
# statement/schema caches when the SQL error indicates staleness, then
# always re-raise so normal error handling still applies.
rescue_from ActiveRecord::StatementInvalid do |e|
  Discourse.reset_active_record_cache_if_needed(e)
  raise e
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# If they hit the rate limiter: respond 429 with Retry-After and, when the
# limiter supplies one, a machine-readable error code header. The JSON body
# exposes wait_seconds/time_left so clients can back off sensibly.
rescue_from RateLimiter::LimitExceeded do |e|
  retry_time_in_seconds = e&.available_in

  response_headers = { "Retry-After": retry_time_in_seconds.to_s }

  response_headers["Discourse-Rate-Limit-Error-Code"] = e.error_code if e&.error_code

  with_resolved_locale do
    render_json_error(
      e.description,
      type: :rate_limit,
      status: 429,
      extras: {
        wait_seconds: retry_time_in_seconds,
        time_left: e&.time_left,
      },
      headers: response_headers,
    )
  end
end
|
|
|
|
|
|
|
|
# Anonymous users hitting auth-required endpoints: API-ish requests (JSON,
# XHR, or any non-GET) get an explicit 403; regular page loads pretend the
# page does not exist (404) to avoid leaking its presence.
rescue_from Discourse::NotLoggedIn do |e|
  api_like = (request.format && request.format.json?) || request.xhr? || !request.get?

  if api_like
    rescue_discourse_actions(:not_logged_in, 403, include_ember: true)
  else
    rescue_discourse_actions(:not_found, 404)
  end
end
|
|
|
|
|
|
|
|
# Bad parameter values: API-ish requests (JSON, XHR, non-GET) get a 400
# with a specific message; plain page loads get the 400-status not-found
# treatment with the same custom message.
rescue_from Discourse::InvalidParameters do |e|
  opts = { custom_message: "invalid_params", custom_message_params: { message: e.message } }

  if (request.format && request.format.json?) || request.xhr? || !request.get?
    rescue_discourse_actions(:invalid_parameters, 400, opts.merge(include_ember: true))
  else
    rescue_discourse_actions(:not_found, 400, opts)
  end
end
|
|
|
|
|
2018-08-09 13:05:12 +08:00
|
|
|
# NotFound carries its own status, permalink-check flag, original path and
# optional custom message — forward all of it to the central handler.
rescue_from Discourse::NotFound do |e|
  rescue_discourse_actions(
    :not_found,
    e.status,
    check_permalinks: e.check_permalinks,
    original_path: e.original_path,
    custom_message: e.custom_message,
  )
end
|
|
|
|
|
2017-09-23 22:39:58 +08:00
|
|
|
# Authorization failures: optionally drop a cookie the raiser flagged
# (e.g. a stale auth artifact), then render the 403 flow with any custom
# message/group context the exception provides.
rescue_from Discourse::InvalidAccess do |e|
  cookies.delete(e.opts[:delete_cookie]) if e.opts[:delete_cookie].present?

  rescue_discourse_actions(
    :invalid_access,
    403,
    include_ember: true,
    custom_message: e.custom_message,
    custom_message_params: e.custom_message_params,
    group: e.group,
  )
end
|
|
|
|
|
2014-02-13 12:37:28 +08:00
|
|
|
# Site is in readonly mode: answer 503 in the caller's format — unless a
# body has already been produced (this handler can be reached via
# rescue_with_handler from the PG readonly handler after partial render).
rescue_from Discourse::ReadOnly do
  unless response_body
    respond_to do |format|
      format.json do
        render_json_error I18n.t("read_only_mode_enabled"), type: :read_only, status: 503
      end
      format.html { render status: 503, layout: "no_ember", template: "exceptions/read_only" }
    end
  end
end
|
|
|
|
|
FEATURE: Centralized 2FA page (#15377)
2FA support in Discourse was added and grown gradually over the years: we first
added support for TOTP for logins, then we implemented backup codes, and last
but not least, security keys. 2FA usage was initially limited to logging in,
but it has been expanded and we now require 2FA for risky actions such as
adding a new admin to the site.
As a result of this gradual growth of the 2FA system, technical debt has
accumulated to the point where it has become difficult to require 2FA for more
actions. We now have 5 different 2FA UI implementations and each one has to
support all 3 2FA methods (TOTP, backup codes, and security keys) which makes
it difficult to maintain a consistent UX for these different implementations.
Moreover, there is a lot of repeated logic in the server-side code behind these
5 UI implementations which hinders maintainability even more.
This commit is the first step towards repaying the technical debt: it builds a
system that centralizes as much as possible of the 2FA server-side logic and
UI. The 2 main components of this system are:
1. A dedicated page for 2FA with support for all 3 methods.
2. A reusable server-side class that centralizes the 2FA logic (the
`SecondFactor::AuthManager` class).
From a top-level view, the 2FA flow in this new system looks like this:
1. User initiates an action that requires 2FA;
2. Server is aware that 2FA is required for this action, so it redirects the
user to the 2FA page if the user has a 2FA method, otherwise the action is
performed.
3. User submits the 2FA form on the page;
4. Server validates the 2FA and if it's successful, the action is performed and
the user is redirected to the previous page.
A more technically-detailed explanation/documentation of the new system is
available as a comment at the top of the `lib/second_factor/auth_manager.rb`
file. Please note that the details are not set in stone and will likely change
in the future, so please don't use the system in your plugins yet.
Since this is a new system that needs to be tested, we've decided to migrate
only the 2FA for adding a new admin to the new system at this time (in this
commit). Our plan is to gradually migrate the remaining 2FA implementations to
the new system.
For screenshots of the 2FA page, see PR #15377 on GitHub.
2022-02-17 17:12:59 +08:00
|
|
|
# The attempted action requires 2FA: XHR callers receive the challenge
# nonce to handle client-side; normal navigation is redirected to the
# centralized 2FA page carrying the same nonce.
rescue_from SecondFactor::AuthManager::SecondFactorRequired do |e|
  if request.xhr?
    render json: { second_factor_challenge_nonce: e.nonce }, status: 403
  else
    redirect_to session_2fa_path(nonce: e.nonce)
  end
end
|
|
|
|
|
|
|
|
# A failed/expired 2FA challenge: the exception supplies both the
# translation key and the HTTP status to use.
rescue_from SecondFactor::BadChallenge do |e|
  render json: { error: I18n.t(e.error_translation_key) }, status: e.status_code
end
|
|
|
|
|
2022-03-24 20:50:44 +08:00
|
|
|
# XHR callers cannot usefully follow a real HTTP redirect, so hand them the
# destination URL in the body plus a marker header and let the client
# navigate itself; everyone else gets a normal redirect_to.
def redirect_with_client_support(url, options = {})
  unless request.xhr?
    redirect_to url, options
    return
  end

  response.headers["Discourse-Xhr-Redirect"] = "true"
  render plain: url
end
|
|
|
|
|
2017-09-23 22:39:58 +08:00
|
|
|
# Central error renderer for Discourse-level failures (:not_found,
# :invalid_access, :not_logged_in, ...). Picks between a JSON error body
# and a full HTML error page, honouring permalink redirects for 404s.
#
# type        - Symbol naming the failure; doubles as an I18n key
# status_code - HTTP status for the response
# opts        - optional Hash: :check_permalinks, :original_path,
#               :custom_message, :custom_message_params, :group,
#               :include_ember
def rescue_discourse_actions(type, status_code, opts = nil)
  opts ||= {}

  # JSON is wanted for explicit .json formats, XHR, or external ids that
  # end in ".json".
  show_json_errors =
    (request.format && request.format.json?) || (request.xhr?) ||
      ((params[:external_id] || "").ends_with? ".json")

  # 404s may actually be old URLs with a permalink pointing elsewhere —
  # if so, 301 to the new home instead of erroring.
  if type == :not_found && opts[:check_permalinks]
    url = opts[:original_path] || request.fullpath
    permalink = Permalink.find_by_url(url)

    # there are some cases where we have a permalink but no url
    # cause category / topic was deleted
    if permalink.present? && permalink.target_url
      # permalink present, redirect to that URL
      redirect_with_client_support permalink.target_url,
                                   status: :moved_permanently,
                                   allow_other_host: true
      return
    end
  end

  # Resolve the user-facing message/title without consulting current_user
  # (it may be the thing that failed).
  message = title = nil
  with_resolved_locale(check_current_user: false) do
    if opts[:custom_message]
      title = message = I18n.t(opts[:custom_message], opts[:custom_message_params] || {})
    else
      message = I18n.t(type)
      if status_code == 403
        title = I18n.t("page_forbidden.title")
      else
        title = I18n.t("page_not_found.title")
      end
    end
  end

  error_page_opts = { title: title, status: status_code, group: opts[:group] }

  if show_json_errors
    # NOTE: opts is deliberately reassigned here — from this point it is
    # the render options, not the caller-supplied options.
    opts = { type: type, status: status_code }

    with_resolved_locale(check_current_user: false) do
      # Include error in HTML format for topics#show.
      if (request.params[:controller] == "topics" && request.params[:action] == "show") ||
           (
             request.params[:controller] == "categories" &&
               request.params[:action] == "find_by_slug"
           )
        opts[:extras] = {
          title: I18n.t("page_not_found.page_title"),
          html: build_not_found_page(error_page_opts),
          group: error_page_opts[:group],
        }
      end
    end

    render_json_error message, opts
  else
    begin
      # 404 pages won't have the session and theme_keys without these:
      current_user
      handle_theme
    rescue Discourse::InvalidAccess
      return render plain: message, status: status_code
    end
    with_resolved_locale do
      # Full ember layout only when requested AND preload data exists.
      error_page_opts[:layout] = (opts[:include_ember] && @preloaded) ? "application" : "no_ember"
      render html: build_not_found_page(error_page_opts)
    end
  end
end
|
|
|
|
|
2015-02-05 05:23:39 +08:00
|
|
|
# Class-level macro: controllers that depend on a plugin declare it here,
# and every action raises PluginDisabled (rendered as 404) while that
# plugin is disabled. This allows plugins to be switched off at runtime.
def self.requires_plugin(plugin_name)
  before_action do
    disabled = Discourse.disabled_plugin_names.include?(plugin_name)
    raise PluginDisabled.new if disabled
  end
end
|
|
|
|
|
2014-05-12 13:27:58 +08:00
|
|
|
# Tag Logster entries and response headers with the acting username and
# the controller/action route — invaluable when spelunking production logs.
def set_current_user_for_logs
  if (user = current_user)
    Logster.add_to_env(request.env, "username", user.username)
    response.headers["X-Discourse-Username"] = user.username
  end

  response.headers["X-Discourse-Route"] = "#{controller_name}/#{action_name}"
end
|
|
|
|
|
2020-09-17 23:18:35 +08:00
|
|
|
# Attach build and site metadata to Rack::MiniProfiler snapshots.
# A no-op when the profiler is not loaded in this process.
def set_mp_snapshot_fields
  return unless defined?(Rack::MiniProfiler)

  Rack::MiniProfiler.add_snapshot_custom_field("Application version", Discourse.git_version)
  if Rack::MiniProfiler.snapshots_transporter?
    Rack::MiniProfiler.add_snapshot_custom_field("Site", Discourse.current_hostname)
  end
end
|
|
|
|
|
2016-02-15 16:29:35 +08:00
|
|
|
# Mark notifications as read for the current user. Ids arrive as
# comma-separated lists via the Discourse-Clear-Notifications header and/or
# the "cn" cookie (the cookie is consumed and deleted). Skipped for
# anonymous users and in readonly mode.
def clear_notifications
  return if current_user.nil? || @readonly_mode

  from_cookie = cookies["cn"]
  ids_csv = request.headers["Discourse-Clear-Notifications"]

  if from_cookie
    ids_csv = ids_csv.present? ? "#{ids_csv},#{from_cookie}" : from_cookie
  end

  return if ids_csv.blank?

  Notification.read(current_user, ids_csv.split(",").map(&:to_i))
  current_user.reload
  current_user.publish_notifications_state

  cookie_args = {}
  cookie_args[:path] = Discourse.base_path if Discourse.base_path.present?
  cookies.delete("cn", cookie_args)
end
|
|
|
|
|
2020-07-23 00:30:26 +08:00
|
|
|
# Run the block under the locale resolved for this request: the current
# user's effective locale when one is available (and checking the user is
# allowed), otherwise any anonymous locale on the request, falling back to
# the site default. Unknown locales degrade to the provider default.
# I18n.with_locale restores the previous locale afterwards.
def with_resolved_locale(check_current_user: true)
  user = nil
  if check_current_user
    user =
      begin
        current_user
      rescue StandardError
        # current_user can raise (e.g. bad auth cookie) — treat as anonymous
        nil
      end
  end

  locale =
    if user
      user.effective_locale
    else
      Discourse.anonymous_locale(request) || SiteSetting.default_locale
    end

  locale = SiteSettings::DefaultsProvider::DEFAULT_LOCALE if !I18n.locale_available?(locale)

  I18n.ensure_all_loaded!
  I18n.with_locale(locale) { yield }
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Stash a JSON payload under +key+ for embedding in the page's preload
# store. "</" is escaped to "<\/" so the payload can never close the
# surrounding <script> tag (XSS hardening). A non-destructive gsub is used
# on purpose: the caller's string must not be mutated.
def store_preloaded(key, json)
  @preloaded = {} if @preloaded.nil?
  @preloaded[key] = json.gsub("</", "<\\/")
end
|
|
|
|
|
|
|
|
# If we are rendering HTML, preload the session data for the client app.
def preload_json
  # XHR and JSON responses never embed preloaded data.
  return if request.xhr? || request.format.json?

  # Preloading makes no sense for non-GET requests either.
  return if request.method != "GET"

  # TODO should not be invoked on redirection so this should be further deferred
  preload_anonymous_data

  return if !current_user
  current_user.sync_notification_channel_position
  preload_current_user_data
end
|
|
|
|
|
2013-12-19 03:47:22 +08:00
|
|
|
# Persist an explicit ?mobile_view= override into the session.
def set_mobile_view
  return unless params.has_key?(:mobile_view)
  session[:mobile_view] = params[:mobile_view]
end
|
|
|
|
|
2022-08-15 22:15:15 +08:00
|
|
|
# Flags accepted in the ?safe_mode= query parameter (comma separated),
# consumed by resolve_safe_mode below.
NO_THEMES = "no_themes"
NO_PLUGINS = "no_plugins"
NO_UNOFFICIAL_PLUGINS = "no_unofficial_plugins"
SAFE_MODE = "safe_mode"

# Older flag names, still honored for backwards compatibility.
LEGACY_NO_THEMES = "no_custom"
LEGACY_NO_UNOFFICIAL_PLUGINS = "only_official"
|
|
|
|
|
2017-04-15 01:35:12 +08:00
|
|
|
# Parses the ?safe_mode= query parameter (comma-separated flags, see the
# SAFE_MODE constants above) and records the requested exclusions in
# request.env for downstream consumers (theme/plugin loading).
# Only users the guardian permits may enable safe mode.
def resolve_safe_mode
  return unless guardian.can_enable_safe_mode?

  safe_mode = params[SAFE_MODE]
  # NOTE: previously `safe_mode&.is_a?(String)` — the safe navigation was
  # redundant since `nil.is_a?(String)` is already false.
  if safe_mode.is_a?(String)
    safe_mode = safe_mode.split(",")
    request.env[NO_THEMES] = safe_mode.include?(NO_THEMES) || safe_mode.include?(LEGACY_NO_THEMES)
    request.env[NO_PLUGINS] = safe_mode.include?(NO_PLUGINS)
    request.env[NO_UNOFFICIAL_PLUGINS] = safe_mode.include?(NO_UNOFFICIAL_PLUGINS) ||
      safe_mode.include?(LEGACY_NO_UNOFFICIAL_PLUGINS)
  end
end
|
2015-01-06 14:39:08 +08:00
|
|
|
|
2017-04-15 01:35:12 +08:00
|
|
|
# Resolves which theme to render for this request, in priority order:
#   1. ?preview_theme_id= (if the guardian allows previewing it)
#   2. the theme_ids cookie (only when its sequence number still matches
#      the user's theme_key_seq, i.e. the cookie is not stale)
#   3. the user's saved theme preference
#   4. the site default theme
# The result is stored in @theme_id and request.env[:resolved_theme_id].
def handle_theme
  return if request.format == "js"

  resolve_safe_mode
  # Safe mode with no_themes disables theme resolution entirely.
  return if request.env[NO_THEMES]

  theme_id = nil

  if (preview_theme_id = request[:preview_theme_id]&.to_i) &&
       guardian.allow_themes?([preview_theme_id], include_preview: true)
    theme_id = preview_theme_id
  end

  user_option = current_user&.user_option

  if theme_id.blank?
    # Cookie format: "<id>,<id>,...|<seq>" — seq invalidates stale cookies.
    ids, seq = cookies[:theme_ids]&.split("|")
    id = ids&.split(",")&.map(&:to_i)&.first
    if id.present? && seq && seq.to_i == user_option&.theme_key_seq.to_i
      theme_id = id if guardian.allow_themes?([id])
    end
  end

  if theme_id.blank?
    ids = user_option&.theme_ids || []
    theme_id = ids.first if guardian.allow_themes?(ids)
  end

  # -1 means "no default theme configured".
  if theme_id.blank? && SiteSetting.default_theme_id != -1 &&
       guardian.allow_themes?([SiteSetting.default_theme_id])
    theme_id = SiteSetting.default_theme_id
  end

  @theme_id = request.env[:resolved_theme_id] = theme_id
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Memoized Guardian for the current user/request.
def guardian
  # A user may be logged in partway through a request; discard a cached
  # anonymous guardian so the fresh current_user is picked up.
  @guardian = nil if (@guardian&.user).blank? && current_user.present?
  @guardian ||= Guardian.new(current_user, request)
end
|
|
|
|
|
2015-06-09 00:07:35 +08:00
|
|
|
# The homepage route for this visitor: their saved preference, or the
# site-wide anonymous homepage.
def current_homepage
  preferred = current_user&.user_option&.homepage
  preferred || SiteSetting.anonymous_homepage
end
|
|
|
|
|
2015-04-01 00:58:56 +08:00
|
|
|
# Serialize `obj` with `serializer`, scoped to the current guardian.
# Array-like objects are serialized element-wise via ArraySerializer.
def serialize_data(obj, serializer, opts = nil)
  serializer_opts = { scope: guardian }.merge!(opts || {})

  unless obj.respond_to?(:to_ary)
    return serializer.new(obj, serializer_opts).as_json
  end

  serializer_opts[:each_serializer] = serializer
  ActiveModel::ArraySerializer.new(obj.to_ary, serializer_opts).as_json
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
2013-05-30 04:49:34 +08:00
|
|
|
# This is odd, but it seems that in Rails `render json: obj` is about
|
|
|
|
# 20% slower than calling MultiJSON.dump ourselves. I'm not sure why
|
|
|
|
# Rails doesn't call MultiJson.dump when you pass it json: obj but
|
|
|
|
# it seems we don't need whatever Rails is doing.
|
2015-04-01 00:58:56 +08:00
|
|
|
# Serialize `obj` and render it as a JSON response in one step.
# (We dump via MultiJson ourselves: measurably faster than `render json: obj`.)
def render_serialized(obj, serializer, opts = nil)
  payload = serialize_data(obj, serializer, opts)
  render_json_dump(payload, opts)
end
|
|
|
|
|
2015-04-01 00:58:56 +08:00
|
|
|
# Dump `obj` with MultiJson and render it. When opts[:rest_serializer] is
# set, REST-serializer bookkeeping keys (refresh_*, extras, meta) are
# copied into the payload.
def render_json_dump(obj, opts = nil)
  opts ||= {}

  if opts[:rest_serializer]
    obj["__rest_serializer"] = "1"
    opts.each do |key, value|
      obj[key] = value if key.to_s.start_with?("refresh_")
    end

    obj["extras"] = opts[:extras] if opts[:extras]
    obj["meta"] = opts[:meta] if opts[:meta]
  end

  render json: MultiJson.dump(obj), status: opts[:status] || 200
end
|
|
|
|
|
2013-02-07 00:55:54 +08:00
|
|
|
# Only fully-anonymous traffic is cacheable — no logged-in user and no
# half-completed authentication cookie.
def can_cache_content?
  return false if current_user.present?
  cookies[:authentication_data].blank?
end
|
|
|
|
|
|
|
|
# Our custom cache method
|
|
|
|
# Our custom cache method: mark this response as cacheable by the
# anonymous-cache middleware for `time_length` (anonymous requests only).
def discourse_expires_in(time_length)
  Middleware::AnonymousCache.anon_cache(request.env, time_length) if can_cache_content?
end
|
|
|
|
|
2016-06-28 10:01:00 +08:00
|
|
|
# Looks up the user addressed by the request, either by params[:username]
# or params[:external_id] (+ optional params[:external_provider]).
# Raises Discourse::NotFound when no user matches, and delegates
# visibility checks to guardian.ensure_can_see!.
#
# opts[:include_inactive] — also match inactive users (staff always can).
# eager_load — associations to `includes` on the username lookup.
def fetch_user_from_params(opts = nil, eager_load = [])
  opts ||= {}
  user =
    if params[:username]
      # ".json" suffix may leak into the route param — strip it.
      username_lower = params[:username].downcase.chomp(".json")

      if current_user && current_user.username_lower == username_lower
        current_user
      else
        find_opts = { username_lower: username_lower }
        # Hide inactive users unless explicitly requested or staff.
        find_opts[:active] = true unless opts[:include_inactive] || current_user.try(:staff?)
        result = User
        (result = result.includes(*eager_load)) if !eager_load.empty?
        result.find_by(find_opts)
      end
    elsif params[:external_id]
      external_id = params[:external_id].chomp(".json")
      if provider_name = params[:external_provider]
        raise Discourse::InvalidAccess unless guardian.is_admin? # external_id might be something sensitive
        provider = Discourse.enabled_authenticators.find { |a| a.name == provider_name }
        raise Discourse::NotFound if !provider&.is_managed? # Only managed authenticators use UserAssociatedAccount
        UserAssociatedAccount.find_by(
          provider_name: provider_name,
          provider_uid: external_id,
        )&.user
      else
        # No provider given: fall back to the DiscourseConnect record.
        SingleSignOnRecord.find_by(external_id: external_id).try(:user)
      end
    end
  raise Discourse::NotFound if user.blank?

  guardian.ensure_can_see!(user)
  user
end
|
|
|
|
|
2013-09-04 23:53:00 +08:00
|
|
|
# The post ids named in params[:post_ids], unioned with the replies of
# params[:reply_post_ids] when present.
def post_ids_including_replies
  ids = params[:post_ids].map(&:to_i)
  if params[:reply_post_ids]
    ids |= PostReply.where(post_id: params[:reply_post_ids]).pluck(:reply_post_id)
  end
  ids
end
|
|
|
|
|
2015-05-22 14:15:46 +08:00
|
|
|
# Do your best to ensure the response carries no cookies: drop anything
# already queued and skip session persistence. Longer term we may want to
# push this into middleware.
def no_cookies
  headers.delete("Set-Cookie")
  request.session_options[:skip] = true
end
|
|
|
|
|
2016-12-19 15:00:22 +08:00
|
|
|
# Server-side secure session keyed by a random per-session id.
def secure_session
  session["secure_session_id"] ||= SecureRandom.hex
  SecureSession.new(session["secure_session_id"])
end
|
|
|
|
|
2019-03-18 22:24:46 +08:00
|
|
|
# 301-redirect to a configured permalink target for `path`, if one exists.
def handle_permalink(path)
  target = Permalink.find_by_url(path)&.target_url
  redirect_to(target, status: :moved_permanently) if target
end
|
|
|
|
|
2021-02-04 07:03:30 +08:00
|
|
|
# Throttle 2FA token attempts: 6/minute per source IP and, when a user is
# known, 6/minute per username as well.
def rate_limit_second_factor!(user)
  return if params[:second_factor_token].blank?

  RateLimiter.new(nil, "second-factor-min-#{request.remote_ip}", 6, 1.minute).performed!

  return if user.nil?
  RateLimiter.new(nil, "second-factor-min-#{user.username}", 6, 1.minute).performed!
end
|
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
private
|
|
|
|
|
2013-12-18 01:49:22 +08:00
|
|
|
# Preload the payloads every visitor (logged in or not) needs on boot.
def preload_anonymous_data
  {
    "site" => Site.json_for(guardian),
    "siteSettings" => SiteSetting.client_settings_json,
    "customHTML" => custom_html_json,
    "banner" => banner_json,
    "customEmoji" => custom_emoji,
    "isReadOnly" => @readonly_mode.to_s,
    "isStaffWritesOnly" => @staff_writes_only_mode.to_s,
    "activatedThemes" => activated_themes_json,
  }.each { |key, json| store_preloaded(key, json) }
end
|
2017-07-28 09:20:09 +08:00
|
|
|
|
2017-04-15 01:35:12 +08:00
|
|
|
# Preload the payloads only a logged-in user needs: the serialized current
# user and their topic tracking state.
def preload_current_user_data
  current_user_json =
    MultiJson.dump(
      CurrentUserSerializer.new(
        current_user,
        scope: guardian,
        root: false,
        enable_sidebar_param: params[:enable_sidebar],
      ),
    )
  store_preloaded("currentUser", current_user_json)

  # Tracking-state date math is done serializer-side for performance.
  report = TopicTrackingState.report(current_user)
  tracking_states =
    ActiveModel::ArraySerializer.new(
      report,
      each_serializer: TopicTrackingStateSerializer,
      scope: guardian,
    )
  store_preloaded("topicTrackingStates", MultiJson.dump(tracking_states))
end
|
2014-06-05 09:39:33 +08:00
|
|
|
|
|
|
|
# JSON blob of custom HTML fragments (theme header/footer plus any
# "client:"-prefixed plugin html builders) for the current device class.
def custom_html_json
  target = view_context.mobile_view? ? :mobile : :desktop

  data = {}
  if @theme_id.present?
    data[:top] = Theme.lookup_field(@theme_id, target, "after_header")
    data[:footer] = Theme.lookup_field(@theme_id, target, "footer")
  end

  data.merge! DiscoursePluginRegistry.custom_html if DiscoursePluginRegistry.custom_html

  DiscoursePluginRegistry.html_builders.each do |name, _|
    next unless name.start_with?("client:")
    data[name.sub(/^client:/, "")] = DiscoursePluginRegistry.build_html(name, self)
  end

  MultiJson.dump(data)
end
|
2014-11-14 12:39:17 +08:00
|
|
|
|
2017-02-24 19:56:13 +08:00
|
|
|
# Class-level memoized distributed cache for the banner payload.
def self.banner_json_cache
  @banner_json_cache = DistributedCache.new("banner_json") if @banner_json_cache.nil?
  @banner_json_cache
end
|
|
|
|
|
2014-06-19 02:04:10 +08:00
|
|
|
# JSON payload describing the banner topic, if one exists.
# Anonymous visitors on login-required sites always get an empty payload.
def banner_json
  # Guard first: previously the distributed cache was consulted before this
  # early return, doing wasted cache work for anonymous login-required hits.
  return "{}" if !current_user && SiteSetting.login_required?

  json = ApplicationController.banner_json_cache["json"]

  unless json
    topic = Topic.where(archetype: Archetype.banner).first
    banner = topic.present? ? topic.banner : {}
    ApplicationController.banner_json_cache["json"] = json = MultiJson.dump(banner)
  end

  json
end
|
|
|
|
|
|
|
|
# JSON list of this site's custom emoji.
def custom_emoji
  MultiJson.dump(
    ActiveModel::ArraySerializer.new(Emoji.custom, each_serializer: EmojiSerializer),
  )
end
|
|
|
|
|
|
|
|
# Render action for a JSON error.
|
2018-06-07 13:28:18 +08:00
|
|
|
#
|
2013-02-06 03:16:51 +08:00
|
|
|
# obj - a translated string, an ActiveRecord model, or an array of translated strings
|
2018-06-07 13:28:18 +08:00
|
|
|
# opts:
|
2013-02-06 03:16:51 +08:00
|
|
|
# type - a machine-readable description of the error
|
2018-03-13 23:12:41 +08:00
|
|
|
# status - HTTP status code to return
|
|
|
|
# headers - extra headers for the response
|
2013-02-06 03:16:51 +08:00
|
|
|
# Render action for a JSON error.
#
# obj  - a translated string, an ActiveRecord model, or an array of translated strings
# opts - :type (machine-readable error kind), :status (HTTP status,
#        inferred from obj when omitted), :headers (extra response headers).
#        A bare Integer is treated as the status.
def render_json_error(obj, opts = {})
  opts = { status: opts } if opts.is_a?(Integer)
  opts.fetch(:headers, {}).each { |name, value| headers[name.to_s] = value }

  body = MultiJson.dump(create_errors_json(obj, opts))
  render json: body, status: opts[:status] || status_code(obj)
end
|
|
|
|
|
|
|
|
# Infer an HTTP status from an error object; defaults to 422.
def status_code(obj)
  if obj.try(:forbidden)
    403
  elsif obj.try(:not_found)
    404
  else
    422
  end
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
# Canonical payload for a successful JSON action.
def success_json
  { success: "OK" }
end
|
|
|
|
|
|
|
|
# Canonical payload for a failed JSON action.
def failed_json
  { failed: "FAILED" }
end
|
|
|
|
|
2015-02-23 13:28:50 +08:00
|
|
|
# Run the block on `obj`; render a success payload when it returns truthy,
# otherwise render the object's (or an associated object's) errors.
#
# opts[:serializer]        — include the serialized object under its
#                            underscored class name on success.
# opts[:additional_errors] — associations to check for errors first.
def json_result(obj, opts = {})
  unless yield(obj)
    error_obj = nil
    if opts[:additional_errors]
      error_target =
        opts[:additional_errors].find do |assoc|
          associated = obj.public_send(assoc)
          associated && associated.errors.present?
        end
      error_obj = obj.public_send(error_target) if error_target
    end
    return render_json_error(error_obj || obj)
  end

  json = success_json

  # If we were given a serializer, add the class to the json that comes back
  if opts[:serializer].present?
    json[obj.class.name.underscore] = opts[:serializer].new(
      obj,
      scope: guardian,
    ).serializable_hash
  end

  render json: MultiJson.dump(json)
end
|
2013-02-06 03:16:51 +08:00
|
|
|
|
|
|
|
# MiniProfiler is shown to developers, or to everyone in development mode.
def mini_profiler_enabled?
  return false unless defined?(Rack::MiniProfiler)
  guardian.is_developer? || Rails.env.development?
end
|
|
|
|
|
|
|
|
# Flag this request as profilable when MiniProfiler is enabled.
def authorize_mini_profiler
  Rack::MiniProfiler.authorize_request if mini_profiler_enabled?
end
|
|
|
|
|
2013-02-07 23:45:24 +08:00
|
|
|
# Require XHR/JSON for non-API traffic; renders empty otherwise.
def check_xhr
  # bypass xhr check on PUT / POST / DELETE provided api key is there,
  # otherwise calling the api is annoying
  return if !request.get? && (is_api? || is_user_api?)
  return if request.format&.json? || request.xhr?

  raise ApplicationController::RenderEmpty.new
end
|
|
|
|
|
2021-01-29 10:14:49 +08:00
|
|
|
# Apply CDN-friendly response headers when the request came via the CDN.
def apply_cdn_headers
  return unless Discourse.is_cdn_request?(request.env, request.method)
  Discourse.apply_cdn_headers(response.headers)
end
|
|
|
|
|
2018-02-01 12:17:59 +08:00
|
|
|
# Class-level macro: mark this controller's actions as requiring login.
# Accepts :only / :except lists of action symbols.
def self.requires_login(arg = {})
  @requires_login_arg = arg
end
|
|
|
|
|
|
|
|
# The options previously stored by requires_login (nil if never called).
def self.requires_login_arg
  @requires_login_arg
end
|
|
|
|
|
|
|
|
# Before-filter companion to the requires_login macro: enforce login for
# the current action, honoring the macro's :only / :except lists.
def block_if_requires_login
  arg = self.class.requires_login_arg
  return unless arg

  action = action_name.to_sym
  needs_login =
    if except = arg[:except]
      !except.include?(action)
    elsif only = arg[:only]
      only.include?(action)
    else
      true
    end
  ensure_logged_in if needs_login
end
|
2018-02-01 12:17:59 +08:00
|
|
|
|
2013-02-06 03:16:51 +08:00
|
|
|
# Raise NotLoggedIn unless a user is signed in.
def ensure_logged_in
  raise Discourse::NotLoggedIn.new if current_user.blank?
end
|
2013-02-07 23:45:24 +08:00
|
|
|
|
2015-04-11 05:00:50 +08:00
|
|
|
# Raise InvalidAccess unless the current user is staff.
def ensure_staff
  raise Discourse::InvalidAccess.new unless current_user&.staff?
end
|
|
|
|
|
2016-09-22 23:12:34 +08:00
|
|
|
# Raise InvalidAccess unless the current user is an admin.
def ensure_admin
  raise Discourse::InvalidAccess.new unless current_user&.admin?
end
|
|
|
|
|
2016-09-15 04:36:08 +08:00
|
|
|
# Raise InvalidAccess when the setup wizard is disabled site-wide.
def ensure_wizard_enabled
  raise Discourse::InvalidAccess.new if !SiteSetting.wizard_enabled?
end
|
|
|
|
|
2015-08-11 23:27:56 +08:00
|
|
|
# URL to return to after login; upload URLs are never used as destinations.
def destination_url
  url = request.original_url
  url unless url.match?(/uploads/)
end
|
|
|
|
|
2019-11-20 15:31:25 +08:00
|
|
|
# Send the visitor to the appropriate login flow, remembering where they
# were headed so they can be returned there afterwards.
def redirect_to_login
  dont_cache_page

  auth_immediately = SiteSetting.auth_immediately
  if auth_immediately && SiteSetting.enable_discourse_connect?
    # save original URL in a session so we can redirect after login
    session[:destination_url] = destination_url
    redirect_to path("/session/sso")
  elsif auth_immediately && !SiteSetting.enable_local_logins &&
        Discourse.enabled_authenticators.length == 1 && !cookies[:authentication_data]
    # Only one authentication provider, direct straight to it.
    # If authentication_data is present, then we are halfway though registration. Don't redirect offsite
    cookies[:destination_url] = destination_url
    redirect_to path("/auth/#{Discourse.enabled_authenticators.first.name}")
  else
    # save original URL in a cookie (javascript redirects after login in this case)
    cookies[:destination_url] = destination_url
    redirect_to path("/login")
  end
end
|
|
|
|
|
2013-06-05 06:32:36 +08:00
|
|
|
# Enforces the login-required site setting and 2FA-setup enforcement.
# Ordering matters: API traffic is exempted first, then the user-API OTP
# handshake is handled, then anonymous visitors are redirected, and only
# then (for logged-in users) is 2FA setup enforced.
def redirect_to_login_if_required
  return if request.format.json? && is_api?

  # Used by clients authenticated via user API.
  # Redirects to provided URL scheme if
  # - request uses a valid public key and auth_redirect scheme
  # - one_time_password scope is allowed
  if !current_user && params.has_key?(:user_api_public_key) && params.has_key?(:auth_redirect)
    begin
      OpenSSL::PKey::RSA.new(params[:user_api_public_key])
    rescue OpenSSL::PKey::RSAError
      return render plain: I18n.t("user_api_key.invalid_public_key")
    end

    if UserApiKey.invalid_auth_redirect?(params[:auth_redirect])
      return render plain: I18n.t("user_api_key.invalid_auth_redirect")
    end

    if UserApiKey.allowed_scopes.superset?(Set.new(["one_time_password"]))
      # Hand back an OTP to the requesting client app.
      redirect_to("#{params[:auth_redirect]}?otp=true", allow_other_host: true)
      return
    end
  end

  if !current_user && SiteSetting.login_required?
    flash.keep
    # XHR/JSON/non-GET cannot follow a redirect usefully — raise instead.
    if (request.format && request.format.json?) || request.xhr? || !request.get?
      ensure_logged_in
    else
      redirect_to_login
    end
    return
  end

  return if !current_user
  return if !should_enforce_2fa?

  # Force the user to their 2FA preferences page until 2FA is configured.
  redirect_path = path("/u/#{current_user.encoded_username}/preferences/second-factor")
  if !request.fullpath.start_with?(redirect_path)
    redirect_to path(redirect_path)
    nil
  end
end
|
2013-06-05 06:32:36 +08:00
|
|
|
|
2020-01-15 18:27:12 +08:00
|
|
|
# Whether the current user must be pushed into 2FA setup: the site enforces
# 2FA for them, they have none configured, and this isn't JSON/API traffic
# or an anonymous (shadow) user.
def should_enforce_2fa?
  return false if request.format.json? || is_api? || current_user.anonymous?

  enforcing =
    SiteSetting.enforce_second_factor == "all" ||
      (SiteSetting.enforce_second_factor == "staff" && current_user.staff?)
  enforcing && !current_user.has_any_second_factor_methods_enabled?
end
|
|
|
|
|
2019-10-08 19:15:08 +08:00
|
|
|
# Renders the 404 page to a string, including a cached "popular/recent
# topics" partial for visitors who are allowed to see content.
# opts: :status, :layout, :title, :group.
def build_not_found_page(opts = {})
  if SiteSetting.bootstrap_error_pages?
    preload_json
    opts[:layout] = "application" if opts[:layout] == "no_ember"
  end

  # current_user can raise (e.g. bad auth cookie) — treat that as anonymous.
  @current_user =
    begin
      current_user
    rescue StandardError
      nil
    end

  if !SiteSetting.login_required? || @current_user
    # Topic suggestions are expensive; cache the rendered partial per locale.
    key = "page_not_found_topics:#{I18n.locale}"
    @topics_partial =
      Discourse
        .cache
        .fetch(key, expires_in: 10.minutes) do
          category_topic_ids = Category.pluck(:topic_id).compact
          @top_viewed =
            TopicQuery
              .new(nil, except_topic_ids: category_topic_ids)
              .list_top_for("monthly")
              .topics
              .first(10)
          @recent = Topic.includes(:category).where.not(id: category_topic_ids).recent(10)
          render_to_string partial: "/exceptions/not_found_topics", formats: [:html]
        end
        .html_safe
  end

  @container_class = "wrap not-found-container"
  @page_title = I18n.t("page_not_found.page_title")
  @title = opts[:title] || I18n.t("page_not_found.title")
  @group = opts[:group]
  @hide_search = true if SiteSetting.login_required

  # Route params may arrive as arrays from odd URLs — normalize to scalars.
  params[:slug] = params[:slug].first if params[:slug].kind_of?(Array)
  params[:id] = params[:id].first if params[:id].kind_of?(Array)
  @slug = (params[:slug].presence || params[:id].presence || "").to_s.tr("-", " ")

  render_to_string status: opts[:status],
                   layout: opts[:layout],
                   formats: [:html],
                   template: "/exceptions/not_found"
end
|
|
|
|
|
2018-03-06 12:20:39 +08:00
|
|
|
# Flags the current request as an asset request in the Rack env so that
# downstream middleware can treat it differently — presumably to skip
# app-level processing for static assets (TODO: confirm against readers
# of DISCOURSE_IS_ASSET_PATH).
def is_asset_path
  request.env["DISCOURSE_IS_ASSET_PATH"] = 1
end
|
|
|
|
|
2020-01-24 12:00:27 +08:00
|
|
|
# True when the client requested a syndication feed format (RSS or Atom).
def is_feed_request?
  request.format.rss? || request.format.atom?
end
|
|
|
|
|
|
|
|
def add_noindex_header
|
2022-03-09 15:25:20 +08:00
|
|
|
if request.get? && !response.headers["X-Robots-Tag"]
|
2020-05-11 10:14:21 +08:00
|
|
|
if SiteSetting.allow_index_in_robots_txt
|
|
|
|
response.headers["X-Robots-Tag"] = "noindex"
|
|
|
|
else
|
|
|
|
response.headers["X-Robots-Tag"] = "noindex, nofollow"
|
|
|
|
end
|
|
|
|
end
|
2020-01-24 12:00:27 +08:00
|
|
|
end
|
|
|
|
|
2021-11-26 03:58:39 +08:00
|
|
|
# When the current URL differs from its canonical URL and the site does not
# allow indexing of non-canonical URLs, ask crawlers not to index this page.
# An existing X-Robots-Tag header is left untouched.
def add_noindex_header_to_non_canonical
  url = @canonical_url || @default_canonical
  return if url.blank?
  return if url == request.url
  return if SiteSetting.allow_indexing_non_canonical_urls

  response.headers["X-Robots-Tag"] ||= "noindex"
end
|
|
|
|
|
2013-06-20 10:11:14 +08:00
|
|
|
protected
|
2013-06-17 14:09:59 +08:00
|
|
|
|
2020-10-02 07:01:40 +08:00
|
|
|
# Lazily generates — and memoizes in the secure session under HONEYPOT_KEY —
# the random honeypot token used by the bot-detection check.
def honeypot_value
  secure_session[HONEYPOT_KEY] ||= SecureRandom.hex
end
|
|
|
|
|
|
|
|
# Lazily generates — and memoizes in the secure session under CHALLENGE_KEY —
# the random challenge token that pairs with the honeypot check.
def challenge_value
  secure_session[CHALLENGE_KEY] ||= SecureRandom.hex
end
|
|
|
|
|
2019-01-04 01:03:01 +08:00
|
|
|
# Renders a single post as JSON, optionally including its raw markdown,
# along with the current user's post-action counts for that post.
def render_post_json(post, add_raw: true)
  serializer = PostSerializer.new(post, scope: guardian, root: false)
  serializer.add_raw = add_raw

  all_counts = PostAction.counts_for([post], current_user)
  # Only attach action counts when there is an entry for this post.
  post_counts = all_counts && all_counts[post.id]
  serializer.post_actions = post_counts if post_counts

  render_json_dump(serializer)
end
|
2014-09-03 05:37:19 +08:00
|
|
|
|
2013-06-20 10:11:14 +08:00
|
|
|
# Converts the given request param into an array of integers.
# Accepts either a delimited String ("1,2,3") or an Array of values;
# returns nil when the param is missing or of any other type.
def param_to_integer_list(key, delimiter = ",")
  value = params[key]
  if value.is_a?(String)
    value.split(delimiter).map(&:to_i)
  elsif value.is_a?(Array)
    value.map(&:to_i)
  end
end
|
2013-06-20 10:11:14 +08:00
|
|
|
|
2021-04-12 20:02:58 +08:00
|
|
|
# Returns a JSON object mapping theme id => theme name for the active theme
# (after Theme.transform_ids resolution), or "{}" when no theme is set.
def activated_themes_json
  return "{}" if @theme_id.blank?

  resolved_ids = Theme.transform_ids(@theme_id)
  Theme.where(id: resolved_ids).pluck(:id, :name).to_h.to_json
end
|
FEATURE: Replace `Crawl-delay` directive with proper rate limiting (#15131)
We have a couple of site setting, `slow_down_crawler_user_agents` and `slow_down_crawler_rate`, that are meant to allow site owners to signal to specific crawlers that they're crawling the site too aggressively and that they should slow down.
When a crawler is added to the `slow_down_crawler_user_agents` setting, Discourse currently adds a `Crawl-delay` directive for that crawler in `/robots.txt`. Unfortunately, many crawlers don't support the `Crawl-delay` directive in `/robots.txt` which leaves the site owners no options if a crawler is crawling the site too aggressively.
This PR replaces the `Crawl-delay` directive with proper rate limiting for crawlers added to the `slow_down_crawler_user_agents` list. On every request made by a non-logged in user, Discourse will check the User Agent string and if it contains one of the values of the `slow_down_crawler_user_agents` list, Discourse will only allow 1 request every N seconds for that User Agent (N is the value of the `slow_down_crawler_rate` setting) and the rest of requests made within the same interval will get a 429 response.
The `slow_down_crawler_user_agents` setting becomes quite dangerous with this PR since it could rate limit lots if not all of anonymous traffic if the setting is not used appropriately. So to protect against this scenario, we've added a couple of new validations to the setting when it's changed:
1) each value added to setting must 3 characters or longer
2) each value cannot be a substring of tokens found in popular browser User Agent. The current list of prohibited values is: apple, windows, linux, ubuntu, gecko, firefox, chrome, safari, applewebkit, webkit, mozilla, macintosh, khtml, intel, osx, os x, iphone, ipad and mac.
2021-11-30 17:55:25 +08:00
|
|
|
|
|
|
|
# Rate limits anonymous requests whose User-Agent matches one of the
# crawler substrings configured in slow_down_crawler_user_agents: each
# matched crawler is allowed 1 request per slow_down_crawler_rate seconds.
# Logged-in users are never limited.
def rate_limit_crawlers
  return if current_user.present?

  configured_agents = SiteSetting.slow_down_crawler_user_agents
  return if configured_agents.blank?

  agent = request.user_agent&.downcase
  return if agent.blank?

  # Match on the first configured crawler substring contained in the UA.
  matched = configured_agents.downcase.split("|").find { |crawler| agent.include?(crawler) }
  return if matched.nil?

  key = "#{matched}_crawler_rate_limit"
  RateLimiter.new(nil, key, 1, SiteSetting.slow_down_crawler_rate, error_code: key).performed!
end
|
FEATURE: Centralized 2FA page (#15377)
2FA support in Discourse was added and grown gradually over the years: we first
added support for TOTP for logins, then we implemented backup codes, and last
but not least, security keys. 2FA usage was initially limited to logging in,
but it has been expanded and we now require 2FA for risky actions such as
adding a new admin to the site.
As a result of this gradual growth of the 2FA system, technical debt has
accumulated to the point where it has become difficult to require 2FA for more
actions. We now have 5 different 2FA UI implementations and each one has to
support all 3 2FA methods (TOTP, backup codes, and security keys) which makes
it difficult to maintain a consistent UX for these different implementations.
Moreover, there is a lot of repeated logic in the server-side code behind these
5 UI implementations which hinders maintainability even more.
This commit is the first step towards repaying the technical debt: it builds a
system that centralizes as much as possible of the 2FA server-side logic and
UI. The 2 main components of this system are:
1. A dedicated page for 2FA with support for all 3 methods.
2. A reusable server-side class that centralizes the 2FA logic (the
`SecondFactor::AuthManager` class).
From a top-level view, the 2FA flow in this new system looks like this:
1. User initiates an action that requires 2FA;
2. Server is aware that 2FA is required for this action, so it redirects the
user to the 2FA page if the user has a 2FA method, otherwise the action is
performed.
3. User submits the 2FA form on the page;
4. Server validates the 2FA and if it's successful, the action is performed and
the user is redirected to the previous page.
A more technically-detailed explanation/documentation of the new system is
available as a comment at the top of the `lib/second_factor/auth_manager.rb`
file. Please note that the details are not set in stone and will likely change
in the future, so please don't use the system in your plugins yet.
Since this is a new system that needs to be tested, we've decided to migrate
only the 2FA for adding a new admin to the new system at this time (in this
commit). Our plan is to gradually migrate the remaining 2FA implementations to
the new system.
For screenshots of the 2FA page, see PR #15377 on GitHub.
2022-02-17 17:12:59 +08:00
|
|
|
|
2022-04-13 20:04:09 +08:00
|
|
|
# Runs the given SecondFactor action through SecondFactor::AuthManager,
# optionally yielding the manager for configuration before it runs.
# Returns the manager's result; sanity-checks that the 2FA flow ended in
# one of the three expected terminal states.
def run_second_factor!(action_class, action_data = nil)
  action = action_class.new(guardian, request, action_data)
  manager = SecondFactor::AuthManager.new(guardian, action)
  yield(manager) if block_given?
  result = manager.run!(request, params, secure_session)

  terminal_state =
    result.no_second_factors_enabled? || result.second_factor_auth_completed? ||
      result.second_factor_auth_skipped?
  # should never happen, but I want to know if somehow it does! (osama)
  raise "2fa process ended up in a bad state!" unless terminal_state

  result
end
|
2022-10-12 07:11:44 +08:00
|
|
|
|
|
|
|
# Collects preload targets pushed onto @links_to_preload while the block
# runs, then emits them as a single comma-separated "Link" response header
# (omitted when nothing was collected).
def link_preload
  @links_to_preload = []
  yield
  collected = @links_to_preload
  response.headers["Link"] = collected.join(", ") unless collected.empty?
end
|
|
|
|
|
|
|
|
# True for full-page GET navigations that boot the single-page app —
# i.e. not JSON requests and not XHR calls.
def spa_boot_request?
  return false unless request.get?
  return false if request.xhr?

  format = request.format
  !(format && format.json?)
end
|
2023-07-28 19:56:35 +08:00
|
|
|
|
|
|
|
# Reads an integer :limit from the params, falling back to +default+ when
# absent. Raises Discourse::InvalidParameters when the value is not an
# integer, is negative, or exceeds +max+.
#
# params:  params-like object responding to has_key?/[] (defaults to the
#          controller's params)
# default: value returned when :limit is absent (must not exceed max)
# max:     inclusive upper bound for the requested limit
def fetch_limit_from_params(params: self.params, default:, max:)
  raise "default limit cannot be greater than max limit" if default.present? && default > max

  if params.has_key?(:limit)
    limit =
      begin
        Integer(params[:limit])
      rescue ArgumentError, TypeError
        # TypeError covers non-String values such as nil or an Array param
        # (e.g. ?limit[]=1); without it these surfaced as a 500 instead of
        # an InvalidParameters (400) error.
        raise Discourse::InvalidParameters.new(:limit)
      end

    raise Discourse::InvalidParameters.new(:limit) if limit < 0 || limit > max
    limit
  else
    default
  end
end
|
2013-02-06 03:16:51 +08:00
|
|
|
end
|