# frozen_string_literal: true

module SiteSettings
end

module SiteSettings::Validations
  # Tokens found in popular browser User Agent strings. Values added to the
  # `slow_down_crawler_user_agents` setting must not be substrings of these,
  # since rate limiting a regular browser would throttle much, if not all,
  # ordinary anonymous traffic (see the validations for that setting below).
  PROHIBITED_USER_AGENT_STRINGS = %w[
    apple
    windows
    linux
    ubuntu
    gecko
    firefox
    chrome
    safari
    applewebkit
    webkit
    mozilla
    macintosh
    khtml
    intel
    osx
    os\ x
    iphone
    ipad
    mac
  ]
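
  # Raises Discourse::InvalidParameters with a translated message for the given
  # key under `errors.site_settings` in the locale files.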
  def validate_error(key, opts = {})
    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}", **opts))
  end
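
  # Parses a pipe-delimited list of category ids, raising if any id does not
  # exist, and returns the ids as a Set of integers.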
  def validate_category_ids(category_ids)
    category_ids = category_ids.split("|").map(&:to_i).to_set

    if Category.where(id: category_ids).count != category_ids.size
      validate_error :invalid_category_id
    end

    category_ids
  end
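
  # A category may only appear in one of the default_categories_* settings at
  # a time; raise when the new value collides with an already-selected one.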
  def validate_default_categories(category_ids, default_categories_selected)
    if (category_ids & default_categories_selected).size > 0
      validate_error :default_categories_already_selected
    end
  end
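
  # Each of the following validators checks the incoming list against the
  # categories already claimed by the other default notification levels.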
  def validate_default_categories_watching(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_tracking(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_muted(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_watching_first_post(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_normal.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end

  def validate_default_categories_normal(new_val)
    category_ids = validate_category_ids(new_val)

    default_categories_selected = [
      SiteSetting.default_categories_watching.split("|"),
      SiteSetting.default_categories_tracking.split("|"),
      SiteSetting.default_categories_muted.split("|"),
      SiteSetting.default_categories_watching_first_post.split("|"),
    ].flatten.map(&:to_i).to_set

    validate_default_categories(category_ids, default_categories_selected)
  end
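
  # Tags mirror the category logic above: a tag may only appear in one of the
  # default_tags_* settings at a time.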
  def validate_default_tags(tag_names, default_tags_selected)
    validate_error :default_tags_already_selected if (tag_names & default_tags_selected).size > 0
  end

  def validate_default_tags_watching(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_muted.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_default_tags_tracking(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_muted.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_default_tags_muted(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_watching_first_post.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end

  def validate_default_tags_watching_first_post(new_val)
    tag_names = new_val.split("|").to_set

    default_tags_selected = [
      SiteSetting.default_tags_watching.split("|"),
      SiteSetting.default_tags_tracking.split("|"),
      SiteSetting.default_tags_muted.split("|"),
    ].flatten.to_set

    validate_default_tags(tag_names, default_tags_selected)
  end
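
  # S3 uploads can be configured globally or per site, but not both, and the
  # site setting is unusable without an upload bucket.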
  def validate_enable_s3_uploads(new_val)
    return if new_val == "f"

    validate_error :cannot_enable_s3_uploads_when_s3_enabled_globally if GlobalSetting.use_s3?
    validate_error :s3_upload_bucket_is_required if SiteSetting.s3_upload_bucket.blank?
  end
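
  # Secure uploads only work when uploads are stored on S3.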
  def validate_secure_uploads(new_val)
    if new_val == "t" && !SiteSetting.Upload.enable_s3_uploads
      validate_error :secure_uploads_requirements
    end
  end
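
  # Published pages are publicly accessible, which conflicts with secure
  # uploads, so the two cannot be enabled together.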
  def validate_enable_page_publishing(new_val)
    validate_error :page_publishing_requirements if new_val == "t" && SiteSetting.secure_uploads?
  end
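
  # The Facebook share button requires a configured facebook_app_id.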
  def validate_share_quote_buttons(new_val)
    if new_val.include?("facebook") && SiteSetting.facebook_app_id.blank?
      validate_error :share_quote_facebook_requirements
    end
  end
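
  # S3 inventory is only meaningful when uploads live on S3.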
  def validate_enable_s3_inventory(new_val)
    if new_val == "t" && !SiteSetting.Upload.enable_s3_uploads
      validate_error :enable_s3_uploads_is_required
    end
  end
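
  # Backing up to S3 needs a bucket and, unless an IAM profile is used,
  # explicit access credentials.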
  def validate_backup_location(new_val)
    return unless new_val == BackupLocationSiteSetting::S3

    if SiteSetting.s3_backup_bucket.blank?
      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_backup_bucket")
    end

    unless SiteSetting.s3_use_iam_profile
      if SiteSetting.s3_access_key_id.blank?
        validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_access_key_id")
      end
      if SiteSetting.s3_secret_access_key.blank?
        validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_secret_access_key")
      end
    end
  end
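
  # The upload and backup buckets must not overlap; see validate_bucket_setting
  # below for the prefix rules.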
  def validate_s3_upload_bucket(new_val)
    validate_bucket_setting("s3_upload_bucket", new_val, SiteSetting.s3_backup_bucket)

    if new_val.blank? && SiteSetting.enable_s3_uploads?
      validate_error(:s3_upload_bucket_is_required, setting_name: "s3_upload_bucket")
    end
  end

  def validate_s3_backup_bucket(new_val)
    validate_bucket_setting("s3_backup_bucket", SiteSetting.s3_upload_bucket, new_val)
  end
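
  # Enforcing 2FA is incompatible with DiscourseConnect, with disabled local
  # logins, and (when enforced for all users) with any enabled external auth
  # provider.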
  def validate_enforce_second_factor(new_val)
    if new_val != "no" && SiteSetting.enable_discourse_connect?
      return validate_error :second_factor_cannot_be_enforced_with_discourse_connect_enabled
    end

    if new_val == "all" && Discourse.enabled_auth_providers.count > 0
      auth_provider_names = Discourse.enabled_auth_providers.map(&:name).join(", ")
      return(
        validate_error(
          :second_factor_cannot_enforce_with_socials,
          auth_provider_names: auth_provider_names,
        )
      )
    end

    return if SiteSetting.enable_local_logins
    return if new_val == "no"
    validate_error :second_factor_cannot_be_enforced_with_disabled_local_login
  end

  def validate_enable_local_logins(new_val)
    return if new_val == "t"
    return if SiteSetting.enforce_second_factor == "no"
    validate_error :local_login_cannot_be_disabled_if_second_factor_enforced
  end
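
  # A serialized Origin header never ends with a slash, so a configured origin
  # with a trailing slash would never match.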
  def validate_cors_origins(new_val)
    return if new_val.blank?
    return if new_val.split("|").none?(%r{/\z})
    validate_error :cors_origins_should_not_have_trailing_slash
  end

  # Crawlers listed in `slow_down_crawler_user_agents` get proper rate limiting
  # (one request every `slow_down_crawler_rate` seconds, 429 for the rest)
  # instead of a `Crawl-delay` robots.txt directive, which many crawlers ignore
  # (#15131). Because this can throttle much, if not all, anonymous traffic if
  # misused, each value must be at least 3 characters long and must not be a
  # substring of tokens found in popular browser User Agents.
  def validate_slow_down_crawler_user_agents(new_val)
    return if new_val.blank?

    new_val
      .downcase
      .split("|")
      .each do |crawler|
        if crawler.size < 3
          validate_error(:slow_down_crawler_user_agent_must_be_at_least_3_characters)
        end

        if PROHIBITED_USER_AGENT_STRINGS.any? { |c| c.include?(crawler) }
          validate_error(
            :slow_down_crawler_user_agent_cannot_be_popular_browsers,
            values: PROHIBITED_USER_AGENT_STRINGS.join(I18n.t("word_connector.comma")),
          )
        end
      end
  end
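
  # Stripping image metadata cannot be turned off while composer media
  # optimization is enabled.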
  def validate_strip_image_metadata(new_val)
    return if new_val == "t"
    return if SiteSetting.composer_media_optimization_image_enabled == false
    validate_error :strip_image_metadata_cannot_be_disabled_if_composer_media_optimization_image_enabled
  end
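
  # Twitter does not render SVG card images, so reject SVG uploads here.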
  def validate_twitter_summary_large_image(new_val)
    return if new_val.blank?
    return if !Upload.exists?(id: new_val, extension: "svg")
    validate_error :twitter_summary_large_image_no_svg
  end

  private
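
  # The two settings may share a bucket only when the backup prefix is
  # present, different from the upload prefix, and not a parent of it.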
  def validate_bucket_setting(setting_name, upload_bucket, backup_bucket)
    return if upload_bucket.blank? || backup_bucket.blank?

    backup_bucket_name, backup_prefix = split_s3_bucket(backup_bucket)
    upload_bucket_name, upload_prefix = split_s3_bucket(upload_bucket)

    return if backup_bucket_name != upload_bucket_name

    if backup_prefix == upload_prefix || backup_prefix.blank? ||
         upload_prefix&.start_with?(backup_prefix)
      validate_error(:s3_bucket_reused, setting_name: setting_name)
    end
  end
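
  # Splits a "bucket/prefix" value into its bucket name and optional prefix,
  # e.g. split_s3_bucket("Bucket/Backups/") => ["bucket", "backups"].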
  def split_s3_bucket(s3_bucket)
    bucket_name, prefix = s3_bucket.downcase.split("/", 2)
    prefix&.chomp!("/")
    [bucket_name, prefix]
  end
end