SECURITY: Expand and improve SSRF Protections (#18815)

See https://github.com/discourse/discourse/security/advisories/GHSA-rcc5-28r3-23rr

Co-authored-by: OsamaSayegh <asooomaasoooma90@gmail.com>
Co-authored-by: Daniel Waterworth <me@danielwaterworth.com>
David Taylor 2022-11-01 16:33:17 +00:00 committed by GitHub
parent 695b44269b
commit 68b4fe4cf8
42 changed files with 1164 additions and 443 deletions
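The heart of the change: server-initiated HTTP requests now resolve the destination hostname first, filter the resolved addresses against private and blocked ranges, and only then open a connection. A minimal sketch of that pattern using the classes introduced in the diff below (illustrative only, not part of the commit):

# Resolve and filter first; raises FinalDestination::SSRFDetector::DisallowedIpError
# if every resolved address is private or blocked.
ips = FinalDestination::SSRFDetector.lookup_and_filter_ips("example.com")

# FinalDestination::HTTP is a Net::HTTP subclass whose connect step only dials
# addresses that survived the filter above.
response = FinalDestination::HTTP.get(URI("https://example.com/"))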

@@ -244,7 +244,7 @@ GEM
     multi_json (1.15.0)
     multi_xml (0.6.0)
     mustache (1.1.1)
-    net-http (0.3.0)
+    net-http (0.2.2)
       uri
     net-imap (0.3.1)
       net-protocol

@@ -3,8 +3,6 @@ import EmberObject from "@ember/object";
 import I18n from "I18n";
 import { alias } from "@ember/object/computed";
 import discourseComputed from "discourse-common/utils/decorators";
-import { extractDomainFromUrl } from "discourse/lib/utilities";
-import { isAbsoluteURL } from "discourse-common/lib/get-url";
 import { isEmpty } from "@ember/utils";
 import { popupAjaxError } from "discourse/lib/ajax-error";
 import { inject as service } from "@ember/service";
@@ -89,38 +87,20 @@ export default Controller.extend({
   actions: {
     save() {
       this.set("saved", false);
-      const url = this.get("model.payload_url");
-      const domain = extractDomainFromUrl(url);
       const model = this.model;
       const isNew = model.get("isNew");
-      const saveWebHook = () => {
-        return model
-          .save()
-          .then(() => {
-            this.set("saved", true);
-            this.adminWebHooks.get("model").addObject(model);
-            if (isNew) {
-              this.transitionToRoute("adminWebHooks.show", model.get("id"));
-            }
-          })
-          .catch(popupAjaxError);
-      };
-
-      if (
-        domain === "localhost" ||
-        domain.match(/192\.168\.\d+\.\d+/) ||
-        domain.match(/127\.\d+\.\d+\.\d+/) ||
-        isAbsoluteURL(url)
-      ) {
-        return this.dialog.yesNoConfirm({
-          message: I18n.t("admin.web_hooks.warn_local_payload_url"),
-          didConfirm: () => saveWebHook(),
-        });
-      }
-
-      return saveWebHook();
+      return model
+        .save()
+        .then(() => {
+          this.set("saved", true);
+          this.adminWebHooks.get("model").addObject(model);
+          if (isNew) {
+            this.transitionToRoute("adminWebHooks.show", model.get("id"));
+          }
+        })
+        .catch(popupAjaxError);
     },

     destroy() {

@@ -31,6 +31,7 @@ export default Controller.extend(ModalFunctionality, {
   advancedVisible: false,
   selectedType: alias("themesController.currentTab"),
   component: equal("selectedType", COMPONENTS),
+  urlPlaceholder: "https://github.com/discourse/sample_theme",

   init() {
     this._super(...arguments);
@@ -79,29 +80,6 @@ export default Controller.extend(ModalFunctionality, {
     );
   },

-  @discourseComputed("privateChecked")
-  urlPlaceholder(privateChecked) {
-    return privateChecked
-      ? "git@github.com:discourse/sample_theme.git"
-      : "https://github.com/discourse/sample_theme";
-  },
-
-  @observes("privateChecked")
-  privateWasChecked() {
-    const checked = this.privateChecked;
-    if (checked && !this._keyLoading) {
-      this._keyLoading = true;
-      ajax(this.keyGenUrl, { type: "POST" })
-        .then((pair) => {
-          this.set("publicKey", pair.public_key);
-        })
-        .catch(popupAjaxError)
-        .finally(() => {
-          this._keyLoading = false;
-        });
-    }
-  },
-
   @discourseComputed("name")
   nameTooShort(name) {
     return !name || name.length < MIN_NAME_LENGTH;
@@ -116,6 +94,22 @@ export default Controller.extend(ModalFunctionality, {
     }
   },

+  @observes("checkPrivate")
+  privateWasChecked() {
+    const checked = this.checkPrivate;
+    if (checked && !this._keyLoading && !this.publicKey) {
+      this._keyLoading = true;
+      ajax(this.keyGenUrl, { type: "POST" })
+        .then((pair) => {
+          this.set("publicKey", pair.public_key);
+        })
+        .catch(popupAjaxError)
+        .finally(() => {
+          this._keyLoading = false;
+        });
+    }
+  },
+
   @discourseComputed("selection", "themeCannotBeInstalled")
   submitLabel(selection, themeCannotBeInstalled) {
     if (themeCannotBeInstalled) {
@@ -127,15 +121,14 @@ export default Controller.extend(ModalFunctionality, {
     }`;
   },

-  @discourseComputed("privateChecked", "checkPrivate", "publicKey")
-  showPublicKey(privateChecked, checkPrivate, publicKey) {
-    return privateChecked && checkPrivate && publicKey;
+  @discourseComputed("checkPrivate", "publicKey")
+  showPublicKey(checkPrivate, publicKey) {
+    return checkPrivate && publicKey;
   },

   onClose() {
     this.setProperties({
       duplicateRemoteThemeWarning: null,
-      privateChecked: false,
       localFile: null,
       uploadUrl: null,
       publicKey: null,
@@ -209,11 +202,8 @@ export default Controller.extend(ModalFunctionality, {
       options.data = {
         remote: this.uploadUrl,
         branch: this.branch,
+        public_key: this.publicKey,
       };
-
-      if (this.privateChecked) {
-        options.data.public_key = this.publicKey;
-      }
     }

     // User knows that theme cannot be installed, but they want to continue

@@ -61,25 +61,15 @@
         <div class="label">{{i18n "admin.customize.theme.remote_branch"}}</div>
         <Input @value={{this.branch}} placeholder="main" />
       </div>
+    {{/if}}

-      <div class="check-private">
-        <label>
-          <Input @type="checkbox" @checked={{this.privateChecked}} />
-          {{i18n "admin.customize.theme.is_private"}}
-        </label>
-      </div>
-
-      {{#if this.showPublicKey}}
-        <div class="public-key">
-          <div class="label">{{i18n "admin.customize.theme.public_key"}}</div>
-          <div class="public-key-text-wrapper">
-            <Textarea class="public-key-value" readonly={{true}} @value={{this.publicKey}} /> <CopyButton @selector="textarea.public-key-value" />
-          </div>
-        </div>
-      {{else}}
-        {{#if this.privateChecked}}
-          <div class="public-key-note">{{i18n "admin.customize.theme.public_key_note"}}</div>
-        {{/if}}
-      {{/if}}
+    {{#if this.showPublicKey}}
+      <div class="public-key">
+        <div class="label">{{i18n "admin.customize.theme.public_key"}}</div>
+        <div class="public-key-text-wrapper">
+          <Textarea class="public-key-value" readonly={{true}} @value={{this.publicKey}} /> <CopyButton @selector="textarea.public-key-value" />
+        </div>
+      </div>
     {{/if}}
   </div>
 {{/if}}

@@ -8,7 +8,6 @@ acceptance("Admin - Themes - Install modal", function (needs) {
   test("closing the modal resets the modal inputs", async function (assert) {
     const urlInput = ".install-theme-content .repo input";
     const branchInput = ".install-theme-content .branch input";
-    const privateRepoCheckbox = ".install-theme-content .check-private input";
     const publicKey = ".install-theme-content .public-key textarea";

     const themeUrl = "git@github.com:discourse/discourse.git";
@@ -19,17 +18,12 @@ acceptance("Admin - Themes - Install modal", function (needs) {
     await fillIn(urlInput, themeUrl);
     await click(".install-theme-content .inputs .advanced-repo");
     await fillIn(branchInput, "tests-passed");
-    await click(privateRepoCheckbox);
     assert.strictEqual(query(urlInput).value, themeUrl, "url input is filled");
     assert.strictEqual(
       query(branchInput).value,
       "tests-passed",
       "branch input is filled"
     );
-    assert.ok(
-      query(privateRepoCheckbox).checked,
-      "private repo checkbox is checked"
-    );
     assert.ok(query(publicKey), "shows public key");

     await click(".modal-footer .d-modal-cancel");
@@ -38,16 +32,11 @@ acceptance("Admin - Themes - Install modal", function (needs) {
     await click("#remote");
     assert.strictEqual(query(urlInput).value, "", "url input is reset");
     assert.strictEqual(query(branchInput).value, "", "branch input is reset");
-    assert.ok(
-      !query(privateRepoCheckbox).checked,
-      "private repo checkbox unchecked"
-    );
     assert.notOk(query(publicKey), "hide public key");
   });

   test("show public key for valid ssh theme urls", async function (assert) {
     const urlInput = ".install-theme-content .repo input";
-    const privateRepoCheckbox = ".install-theme-content .check-private input";
     const publicKey = ".install-theme-content .public-key textarea";

     // Supports backlog repo ssh url format
@@ -59,12 +48,7 @@ acceptance("Admin - Themes - Install modal", function (needs) {
     await click("#remote");
     await fillIn(urlInput, themeUrl);
     await click(".install-theme-content .inputs .advanced-repo");
-    await click(privateRepoCheckbox);
     assert.strictEqual(query(urlInput).value, themeUrl, "url input is filled");
-    assert.ok(
-      query(privateRepoCheckbox).checked,
-      "private repo checkbox is checked"
-    );
     assert.ok(query(publicKey), "shows public key");

     // Supports AWS CodeCommit style repo URLs

@@ -188,7 +188,6 @@ acceptance("Theme", function (needs) {
       "git@github.com:discourse/discourse-inexistent-theme.git"
     );
     await click(".install-theme-content button.advanced-repo");
-    await click(".install-theme-content .check-private input");

     assert.notOk(
       exists(".admin-install-theme-modal .modal-footer .install-theme-warning"),

@@ -102,8 +102,10 @@ class Admin::ThemesController < Admin::AdminController
       private_key = params[:public_key] ? Discourse.redis.get("ssh_key_#{params[:public_key]}") : nil
       return render_json_error I18n.t("themes.import_error.ssh_key_gone") if params[:public_key].present? && private_key.blank?

-      @theme = RemoteTheme.import_theme(remote, theme_user, private_key: private_key, branch: branch)
-      render json: @theme, status: :created
+      hijack do
+        @theme = RemoteTheme.import_theme(remote, theme_user, private_key: private_key, branch: branch)
+        render json: @theme, status: :created
+      end
     rescue RemoteTheme::ImportError => e
       if params[:force]
         theme_name = params[:remote].gsub(/.git$/, "").split("/").last

@@ -84,22 +84,12 @@ class Admin::WebHooksController < Admin::AdminController
   end

   def redeliver_event
-    web_hook_event = WebHookEvent.find(params[:event_id])
+    web_hook_event = WebHookEvent.find_by(id: params[:event_id])

     if web_hook_event
       web_hook = web_hook_event.web_hook
-      conn = Excon.new(URI(web_hook.payload_url).to_s,
-                       ssl_verify_peer: web_hook.verify_certificate,
-                       retry_limit: 0)
-
-      now = Time.zone.now
-      response = conn.post(headers: MultiJson.load(web_hook_event.headers), body: web_hook_event.payload)
-
-      web_hook_event.update!(
-        status: response.status,
-        response_headers: MultiJson.dump(response.headers),
-        response_body: response.body,
-        duration: ((Time.zone.now - now) * 1000).to_i
-      )
+      emitter = WebHookEmitter.new(web_hook, web_hook_event)
+      emitter.emit!(headers: MultiJson.load(web_hook_event.headers), body: web_hook_event.payload)

       render_serialized(web_hook_event, AdminWebHookEventSerializer, root: 'web_hook_event')
     else
       render json: failed_json

@@ -9,7 +9,6 @@ module Jobs
     PING_EVENT = 'ping'
     MAX_RETRY_COUNT = 4
     RETRY_BACKOFF = 5
-    REQUEST_TIMEOUT = 20

     def execute(args)
       @arguments = args
@@ -43,39 +42,13 @@
     end

     def send_webhook!
-      uri = URI(@web_hook.payload_url.strip)
-
-      conn = Excon.new(
-        uri.to_s,
-        ssl_verify_peer: @web_hook.verify_certificate,
-        retry_limit: 0,
-        write_timeout: REQUEST_TIMEOUT,
-        read_timeout: REQUEST_TIMEOUT,
-        connect_timeout: REQUEST_TIMEOUT
-      )
-
       web_hook_body = build_webhook_body
       web_hook_event = create_webhook_event(web_hook_body)
+      uri = URI(@web_hook.payload_url.strip)
       web_hook_headers = build_webhook_headers(uri, web_hook_body, web_hook_event)
-      web_hook_response = nil

-      begin
-        now = Time.zone.now
-        web_hook_response = conn.post(headers: web_hook_headers, body: web_hook_body)
-        web_hook_event.update!(
-          headers: MultiJson.dump(web_hook_headers),
-          status: web_hook_response.status,
-          response_headers: MultiJson.dump(web_hook_response.headers),
-          response_body: web_hook_response.body,
-          duration: ((Time.zone.now - now) * 1000).to_i
-        )
-      rescue => e
-        web_hook_event.update!(
-          headers: MultiJson.dump(web_hook_headers),
-          status: -1,
-          response_headers: MultiJson.dump(error: e),
-          duration: ((Time.zone.now - now) * 1000).to_i
-        )
-      end
+      emitter = WebHookEmitter.new(@web_hook, web_hook_event)
+      web_hook_response = emitter.emit!(headers: web_hook_headers, body: web_hook_body)

       publish_webhook_event(web_hook_event)
       process_webhook_response(web_hook_response)
@@ -151,12 +124,12 @@ module Jobs
       headers = {
         'Accept' => '*/*',
         'Connection' => 'close',
-        'Content-Length' => web_hook_body.bytesize,
+        'Content-Length' => web_hook_body.bytesize.to_s,
         'Content-Type' => content_type,
         'Host' => uri.host,
         'User-Agent' => "Discourse/#{Discourse::VERSION::STRING}",
         'X-Discourse-Instance' => Discourse.base_url,
-        'X-Discourse-Event-Id' => web_hook_event.id,
+        'X-Discourse-Event-Id' => web_hook_event.id.to_s,
         'X-Discourse-Event-Type' => @arguments[:event_type]
       }

@@ -15,7 +15,7 @@ class RemoteTheme < ActiveRecord::Base
   ALLOWED_FIELDS = %w{scss embedded_scss head_tag header after_header body_tag footer}

   GITHUB_REGEXP = /^https?:\/\/github\.com\//
-  GITHUB_SSH_REGEXP = /^git@github\.com:/
+  GITHUB_SSH_REGEXP = /^ssh:\/\/git@github\.com:/

   has_one :theme, autosave: false
   scope :joined_remotes, -> {
@@ -25,8 +25,10 @@ class RemoteTheme < ActiveRecord::Base
   validates_format_of :minimum_discourse_version, :maximum_discourse_version, with: Discourse::VERSION_REGEXP, allow_nil: true

   def self.extract_theme_info(importer)
-    JSON.parse(importer["about.json"])
-  rescue TypeError, JSON::ParserError
+    json = JSON.parse(importer["about.json"])
+    json.fetch("name")
+    json
+  rescue TypeError, JSON::ParserError, KeyError
     raise ImportError.new I18n.t("themes.import_error.about_json")
   end

@@ -80,6 +82,7 @@ class RemoteTheme < ActiveRecord::Base
     importer.import!

     theme_info = RemoteTheme.extract_theme_info(importer)
     component = [true, "true"].include?(theme_info["component"])
+
     theme = Theme.new(user_id: user&.id || -1, name: theme_info["name"], component: component)
     theme.child_components = theme_info["components"].presence || []

@@ -15,6 +15,7 @@ class WebHook < ActiveRecord::Base
   validates_presence_of :content_type
   validates_presence_of :last_delivery_status
   validates_presence_of :web_hook_event_types, unless: :wildcard_web_hook?
+  validate :ensure_payload_url_allowed, if: :payload_url_changed?

   before_save :strip_url

@@ -113,6 +114,23 @@ class WebHook < ActiveRecord::Base
   def self.guardian
     Guardian.new(Discourse.system_user)
   end
+
+  # This check is to improve UX
+  # IPs are re-checked at request time
+  def ensure_payload_url_allowed
+    return if payload_url.blank?
+    uri = URI(payload_url.strip)
+    allowed = begin
+      FinalDestination::SSRFDetector.lookup_and_filter_ips(uri.hostname).present?
+    rescue FinalDestination::SSRFDetector::DisallowedIpError
+      false
+    end
+
+    if !allowed
+      self.errors.add(:base, I18n.t("webhooks.payload_url.blocked_or_internal"))
+    end
+  end
 end

 # == Schema Information
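A hypothetical console session showing the new validation in action (the URL below is an arbitrary private address, not taken from the commit):

# 10.0.0.1 falls inside the standard private range 10.0.0.0/8, so every
# resolved IP is rejected and the record gains a validation error.
hook = WebHook.new(payload_url: "http://10.0.0.1/hook")
hook.valid?        # => false
hook.errors[:base] # includes I18n.t("webhooks.payload_url.blocked_or_internal")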

@@ -0,0 +1,57 @@
# frozen_string_literal: true
class WebHookEmitter
REQUEST_TIMEOUT = 20
def initialize(webhook, webhook_event)
@webhook = webhook
@webhook_event = webhook_event
end
def emit!(headers:, body:)
uri = URI(@webhook.payload_url.strip)
connection_opts = {
request: {
write_timeout: REQUEST_TIMEOUT,
read_timeout: REQUEST_TIMEOUT,
open_timeout: REQUEST_TIMEOUT
},
}
if !@webhook.verify_certificate
connection_opts[:ssl] = { verify: false }
end
conn = Faraday.new(nil, connection_opts) do |f|
f.adapter FinalDestination::FaradayAdapter
end
start = Process.clock_gettime(Process::CLOCK_MONOTONIC, :millisecond)
error = nil
response = nil
begin
response = conn.post(uri.to_s, body, headers)
rescue => e
error = e
end
duration = Process.clock_gettime(Process::CLOCK_MONOTONIC, :millisecond) - start
event_update_args = {
headers: MultiJson.dump(headers),
duration: duration,
}
if response
event_update_args[:response_headers] = MultiJson.dump(response.headers)
event_update_args[:response_body] = response.body
event_update_args[:status] = response.status
else
event_update_args[:status] = -1
if error.is_a?(Faraday::Error) && error.wrapped_exception.is_a?(FinalDestination::SSRFDetector::DisallowedIpError)
error = I18n.t("webhooks.payload_url.blocked_or_internal")
end
event_update_args[:response_headers] = MultiJson.dump(error: error)
end
@webhook_event.update!(**event_update_args)
response
end
end
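A short sketch of how the emitter is driven (mirroring the controller and job changes above); `web_hook` and `web_hook_event` are assumed to be existing records:

emitter = WebHookEmitter.new(web_hook, web_hook_event)
response = emitter.emit!(headers: { "Content-Type" => "application/json" }, body: "{}")
# When the destination is blocked or unreachable, `response` is nil and the
# event is recorded with status -1; a DisallowedIpError is reported as the
# webhooks.payload_url.blocked_or_internal message.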

@@ -40,7 +40,9 @@ Rails.autoloaders.each do |autoloader|
     'omniauth_strategies' => 'OmniAuthStrategies',
     'csrf_token_verifier' => 'CSRFTokenVerifier',
     'html' => 'HTML',
-    'json' => 'JSON'
+    'json' => 'JSON',
+    'ssrf_detector' => 'SSRFDetector',
+    'http' => 'HTTP',
   )
 end

 Rails.autoloaders.main.ignore("lib/tasks",

@@ -4539,7 +4539,6 @@ en:
         go_back: "Back to list"
         payload_url: "Payload URL"
         payload_url_placeholder: "https://example.com/postreceive"
-        warn_local_payload_url: "It seems you are trying to set up the webhook to a local url. Event delivered to a local address may cause side-effect or unexpected behaviors. Continue?"
         secret_invalid: "Secret must not have any blank characters."
         secret_too_short: "Secret should be at least 12 characters."
         secret_placeholder: "An optional string, used for generating signature"
@@ -4833,7 +4832,6 @@ en:
         last_attempt: "Installation process did not finish, last attempted:"
         remote_branch: "Branch name (optional)"
         public_key: "Grant the following public key access to the repo:"
-        public_key_note: "After entering a valid private repository URL above, an SSH key will be generated and displayed here."
         install: "Install"
         installed: "Installed"
         install_popular: "Popular"

@@ -76,6 +76,7 @@ en:
       modifier_values: "about.json modifiers contain invalid values: %{errors}"
       git: "Error cloning git repository, access is denied or repository is not found"
       git_ref_not_found: "Unable to checkout git reference: %{ref}"
+      git_unsupported_scheme: "Unable to clone git repo: scheme unsupported"
       unpack_failed: "Failed to unpack file"
       file_too_big: "The uncompressed file is too big."
       unknown_file_type: "The file you uploaded does not appear to be a valid Discourse theme."
@@ -5209,3 +5210,7 @@ en:
   user_status:
     errors:
       ends_at_should_be_greater_than_set_at: "ends_at should be greater than set_at"
+  webhooks:
+    payload_url:
+      blocked_or_internal: "Payload URL cannot be used because it resolves to a blocked or internal IP"
+      unsafe: "Payload URL cannot be used because it's unsafe"

@@ -44,7 +44,6 @@ class FinalDestination
     @force_custom_user_agent_hosts = @opts[:force_custom_user_agent_hosts] || []
     @default_user_agent = @opts[:default_user_agent] || DEFAULT_USER_AGENT
     @opts[:max_redirects] ||= 5
-    @opts[:lookup_ip] ||= lambda { |host| FinalDestination.lookup_ip(host) }

     @https_redirect_ignore_limit = @opts[:initial_https_redirect_ignore_limit]
     @max_redirects = @opts[:max_redirects]
@@ -83,6 +82,10 @@
     20
   end

+  def self.resolve(url)
+    new(url).resolve
+  end
+
   def http_verb(force_get_hosts, follow_canonical)
     if follow_canonical || force_get_hosts.any? { |host| hostname_matches?(host) }
       :get
@@ -116,7 +119,7 @@
     status_code, response_headers = nil

     catch(:done) do
-      Net::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.is_a?(URI::HTTPS), open_timeout: timeout) do |http|
+      FinalDestination::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.is_a?(URI::HTTPS), open_timeout: timeout) do |http|
         http.read_timeout = timeout
         http.request_get(@uri.request_uri, request_headers) do |resp|
           status_code = resp.code.to_i
@@ -226,13 +229,25 @@
       raise Excon::Errors::ExpectationFailed.new("connect timeout reached: #{@uri.to_s}") if Time.now - request_start_time > MAX_REQUEST_TIME_SECONDS
     end

+    # This technique will only use the first resolved IP
+    # TODO: Can we standardise this by using FinalDestination::HTTP?
+    begin
+      resolved_ip = SSRFDetector.lookup_and_filter_ips(@uri.hostname).first
+    rescue SSRFDetector::DisallowedIpError, SocketError, Timeout::Error
+      @status = :invalid_address
+      return
+    end
+
+    request_uri = @uri.dup
+    request_uri.hostname = resolved_ip unless Rails.env.test? # WebMock doesn't understand the IP-based requests
+
     response = Excon.public_send(@http_verb,
-      @uri.to_s,
+      request_uri.to_s,
       read_timeout: timeout,
       connect_timeout: timeout,
-      headers: headers,
+      headers: { "Host" => @uri.hostname }.merge(headers),
       middlewares: middlewares,
-      response_block: request_validator
+      response_block: request_validator,
+      ssl_verify_peer_host: @uri.hostname
     )

     if @stop_at_blocked_pages
@@ -351,12 +366,16 @@
     nil
   end

+  def skip_validations?
+    !@validate_uri
+  end
+
   def validate_uri
-    !@validate_uri || (validate_uri_format && is_dest_valid?)
+    skip_validations? || validate_uri_format
   end

   def validate_uri_format
-    return false unless @uri
+    return false unless @uri && @uri.host
     return false unless ['https', 'http'].include?(@uri.scheme)
     return false if @uri.scheme == 'http' && @uri.port != 80
     return false if @uri.scheme == 'https' && @uri.port != 443
@@ -384,48 +403,10 @@
     end
   end

-  def is_dest_valid?
-    return false unless @uri && @uri.host
-
-    # Allowlisted hosts
-    return true if hostname_matches?(SiteSetting.Upload.s3_cdn_url) ||
-      hostname_matches?(GlobalSetting.try(:cdn_url)) ||
-      hostname_matches?(Discourse.base_url_no_prefix)
-
-    if SiteSetting.allowed_internal_hosts.present?
-      return true if SiteSetting.allowed_internal_hosts.split("|").any? { |h| h.downcase == @uri.hostname.downcase }
-    end
-
-    address_s = @opts[:lookup_ip].call(@uri.hostname)
-    return false unless address_s
-
-    address = IPAddr.new(address_s)
-
-    if private_ranges.any? { |r| r === address }
-      @status = :invalid_address
-      return false
-    end
-
-    # Rate limit how often this IP can be crawled
-    if !@opts[:skip_rate_limit] && !@limited_ips.include?(address)
-      @limited_ips << address
-      RateLimiter.new(nil, "crawl-destination-ip:#{address_s}", 1000, 1.hour).performed!
-    end
-
-    true
-  rescue RateLimiter::LimitExceeded
-    false
-  end
-
   def normalized_url
     UrlHelper.normalized_encode(@url)
   end

-  def private_ranges
-    FinalDestination.standard_private_ranges +
-      SiteSetting.blocked_ip_blocks.split('|').map { |r| IPAddr.new(r) rescue nil }.compact
-  end
-
   def log(log_level, message)
     return unless @verbose
     return if @status_code == 404
@@ -436,27 +417,6 @@ class FinalDestination
     )
   end

-  def self.standard_private_ranges
-    @private_ranges ||= [
-      IPAddr.new('0.0.0.0/8'),
-      IPAddr.new('127.0.0.1'),
-      IPAddr.new('172.16.0.0/12'),
-      IPAddr.new('192.168.0.0/16'),
-      IPAddr.new('10.0.0.0/8'),
-      IPAddr.new('fc00::/7')
-    ]
-  end
-
-  def self.lookup_ip(host)
-    if Rails.env.test?
-      "1.1.1.1"
-    else
-      IPSocket::getaddress(host)
-    end
-  rescue SocketError
-    nil
-  end
-
   protected

   def safe_get(uri)
@@ -470,7 +430,7 @@
       'Host' => uri.host
     )

-    req = Net::HTTP::Get.new(uri.request_uri, headers)
+    req = FinalDestination::HTTP::Get.new(uri.request_uri, headers)

     http.request(req) do |resp|
       headers_subset.set_cookie = resp['Set-Cookie']
@@ -530,7 +490,7 @@
   end

   def safe_session(uri)
-    Net::HTTP.start(uri.host, uri.port, use_ssl: (uri.scheme == "https"), open_timeout: timeout) do |http|
+    FinalDestination::HTTP.start(uri.host, uri.port, use_ssl: (uri.scheme == "https"), open_timeout: timeout) do |http|
       http.read_timeout = timeout
       yield http
     end
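The small class-level helper added above (`FinalDestination.resolve`) is what ThemeStore::GitImporter uses further down; a sketch of its behaviour:

# Follows redirects with the usual FinalDestination checks applied.
final_uri = FinalDestination.resolve("https://github.com/discourse/sample_theme")
# => the resolved URI, or nil when the lookup fails or the destination
#    resolves only to disallowed addresses (status becomes :invalid_address)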

@@ -0,0 +1,22 @@
# frozen_string_literal: true
class FinalDestination
class FaradayAdapter < Faraday::Adapter::NetHttp
def net_http_connection(env)
proxy = env[:request][:proxy]
port = env[:url].port || (env[:url].scheme == "https" ? 443 : 80)
if proxy
FinalDestination::HTTP.new(
env[:url].hostname,
port,
proxy[:uri].hostname,
proxy[:uri].port,
proxy[:user],
proxy[:password],
)
else
FinalDestination::HTTP.new(env[:url].hostname, port, nil)
end
end
end
end
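A sketch of plugging the adapter into an arbitrary Faraday client (the same wiring WebHookEmitter uses above):

conn = Faraday.new(nil) do |f|
  f.adapter FinalDestination::FaradayAdapter # swap Net::HTTP for the SSRF-safe subclass
end
conn.get("https://example.com/")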

@@ -0,0 +1,40 @@
# frozen_string_literal: true
class FinalDestination::HTTP < Net::HTTP
def connect
original_open_timeout = @open_timeout
return super if @ipaddr
timeout_at = current_time + @open_timeout
# This iteration through addresses would normally happen in Socket#tcp
# We do it here because we're tightly controlling addresses rather than
# handing Socket#tcp a hostname
ips = FinalDestination::SSRFDetector.lookup_and_filter_ips(@address, timeout: @connect_timeout)
ips.each_with_index do |ip, index|
debug "[FinalDestination] Attempting connection to #{ip}..."
self.ipaddr = ip
remaining_time = timeout_at - current_time
if remaining_time <= 0
raise Net::OpenTimeout.new("Operation timed out - FinalDestination::HTTP")
end
@open_timeout = remaining_time
return super
rescue SystemCallError, Net::OpenTimeout => e
debug "[FinalDestination] Error connecting to #{ip}... #{e.message}"
was_last_attempt = index == ips.length - 1
raise if was_last_attempt
end
ensure
@open_timeout = original_open_timeout
end
private
def current_time
Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
end
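Because FinalDestination::HTTP subclasses Net::HTTP, existing call sites only need the constant swapped (as the FinalDestination and Onebox changes elsewhere in this commit show); a sketch:

FinalDestination::HTTP.start("example.com", 443, use_ssl: true) do |http|
  http.get("/") # the connection was opened to a filtered, publicly routable IP
end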

@@ -0,0 +1,54 @@
# frozen_string_literal: true
class FinalDestination::Resolver
@mutex = Mutex.new
def self.lookup(addr, timeout: nil)
timeout ||= 2
@mutex.synchronize do
@result = nil
@queue ||= Queue.new
@queue << ""
ensure_lookup_thread
@lookup = addr
@parent = Thread.current
# This sleep will be interrupted by the lookup thread
# if completed within timeout
sleep timeout
if !@result
@thread.kill
@thread.join
@thread = nil
if @error
@error.backtrace.push(*caller)
raise @error
else
raise Timeout::Error
end
end
@result
end
end
private
def self.ensure_lookup_thread
return if @thread&.alive?
@thread = Thread.new do
while true
@queue.deq
@error = nil
begin
@result = Addrinfo.getaddrinfo(@lookup, 80, nil, :STREAM).map(&:ip_address)
rescue => e
@error = e
end
@parent.wakeup
end
end
@thread.name = "final-destination_resolver_thread"
end
end
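A sketch of the resolver on its own; the timeout bounds the DNS lookup rather than the whole request:

FinalDestination::Resolver.lookup("example.com", timeout: 2)
# => an array of IP address strings, or raises Timeout::Error if getaddrinfo
#    does not return in time (the stuck lookup thread is killed and replaced)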

@@ -0,0 +1,88 @@
# frozen_string_literal: true
class FinalDestination
module SSRFDetector
class DisallowedIpError < SocketError
end
def self.standard_private_ranges
@private_ranges ||= [
IPAddr.new("0.0.0.0/8"),
IPAddr.new("127.0.0.1"),
IPAddr.new("172.16.0.0/12"),
IPAddr.new("192.168.0.0/16"),
IPAddr.new("10.0.0.0/8"),
IPAddr.new("::1"),
IPAddr.new("fc00::/7"),
IPAddr.new("fe80::/10"),
]
end
def self.blocked_ip_blocks
SiteSetting
.blocked_ip_blocks
.split(/[|\n]/)
.filter_map do |r|
IPAddr.new(r.strip)
rescue IPAddr::InvalidAddressError
nil
end
end
def self.allowed_internal_hosts
hosts =
[
SiteSetting.Upload.s3_cdn_url,
GlobalSetting.try(:cdn_url),
Discourse.base_url_no_prefix,
].filter_map do |url|
URI.parse(url).hostname if url
rescue URI::Error
nil
end
hosts += SiteSetting.allowed_internal_hosts.split(/[|\n]/).filter_map { |h| h.strip.presence }
hosts
end
def self.host_bypasses_checks?(hostname)
allowed_internal_hosts.any? { |h| h.downcase == hostname.downcase }
end
def self.ip_allowed?(ip)
ip = ip.is_a?(IPAddr) ? ip : IPAddr.new(ip)
if ip_in_ranges?(ip, blocked_ip_blocks) || ip_in_ranges?(ip, standard_private_ranges)
return false
end
true
end
def self.lookup_and_filter_ips(name, timeout: nil)
ips = lookup_ips(name, timeout: timeout)
return ips if host_bypasses_checks?(name)
ips.filter! { |ip| FinalDestination::SSRFDetector.ip_allowed?(ip) }
raise DisallowedIpError, "FinalDestination: all resolved IPs were disallowed" if ips.empty?
ips
end
private
def self.ip_in_ranges?(ip, ranges)
ranges.any? { |r| r === ip }
end
def self.lookup_ips(name, timeout: nil)
if Rails.env.test?
["1.2.3.4"]
else
FinalDestination::Resolver.lookup(name, timeout: timeout)
end
end
end
end
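The detector's API as exercised throughout the diff, in sketch form:

FinalDestination::SSRFDetector.ip_allowed?("10.0.0.1") # => false (standard private range)
FinalDestination::SSRFDetector.ip_allowed?("1.1.1.1")  # => true, unless covered by blocked_ip_blocks
FinalDestination::SSRFDetector.lookup_and_filter_ips("example.com")
# => the filtered address list, or raises DisallowedIpError when nothing safe remains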

lib/git_url.rb (new file)

@@ -0,0 +1,20 @@
# frozen_string_literal: true
module GitUrl
class << self
SSH_REGEXP = /(\w+@(\w+\.)*\w+):(.*)/
def normalize(url)
if m = SSH_REGEXP.match(url)
url = "ssh://#{m[1]}/#{m[3]}"
end
if url.start_with?("https://github.com/") && !url.end_with?(".git")
url = url.gsub(/\/$/, '')
url += ".git"
end
url
end
end
end
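Given the regular expression above, normalization behaves like this (worked examples, not from the commit):

GitUrl.normalize("git@github.com:discourse/sample_theme.git")
# => "ssh://git@github.com/discourse/sample_theme.git"
GitUrl.normalize("https://github.com/discourse/sample_theme")
# => "https://github.com/discourse/sample_theme.git"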

@@ -162,7 +162,7 @@ module Onebox
     def follow_redirect!
       begin
-        http = Net::HTTP.start(
+        http = FinalDestination::HTTP.start(
           uri.host,
           uri.port,
           use_ssl: uri.scheme == 'https',

@@ -66,7 +66,7 @@ module Onebox
     end

     result = StringIO.new
-    Net::HTTP.start(uri.host, uri.port, open_timeout: Onebox.options.connect_timeout, use_ssl: uri.normalized_scheme == 'https') do |http|
+    FinalDestination::HTTP.start(uri.host, uri.port, open_timeout: Onebox.options.connect_timeout, use_ssl: uri.normalized_scheme == 'https') do |http|
       http.read_timeout = Onebox.options.timeout
       http.verify_mode = OpenSSL::SSL::VERIFY_NONE # Work around path building bugs
@@ -120,7 +120,7 @@ module Onebox
   def self.fetch_content_length(location)
     uri = URI(location)

-    Net::HTTP.start(uri.host, uri.port, open_timeout: Onebox.options.connect_timeout, use_ssl: uri.is_a?(URI::HTTPS)) do |http|
+    FinalDestination::HTTP.start(uri.host, uri.port, open_timeout: Onebox.options.connect_timeout, use_ssl: uri.is_a?(URI::HTTPS)) do |http|
       http.read_timeout = Onebox.options.timeout
       if uri.is_a?(URI::HTTPS)
         http.use_ssl = true

@@ -35,11 +35,9 @@ module Onebox
     private

     def check
-      res = URI.parse(@url).open(read_timeout: (@options.timeout || Onebox.options.timeout))
-      @status = res.status.first.to_i
-    rescue OpenURI::HTTPError => e
-      @status = e.io.status.first.to_i
-    rescue Timeout::Error, Errno::ECONNREFUSED, Net::HTTPError
+      status, headers = FinalDestination.new(@url).small_get({})
+      @status = status
+    rescue Timeout::Error, Errno::ECONNREFUSED, Net::HTTPError, SocketError
       @status = 0
     end
   end

@@ -8,22 +8,15 @@ class ThemeStore::GitImporter
   attr_reader :url

   def initialize(url, private_key: nil, branch: nil)
-    @url = url
-    if @url.start_with?("https://github.com") && !@url.end_with?(".git")
-      @url = @url.gsub(/\/$/, '')
-      @url += ".git"
-    end
+    @url = GitUrl.normalize(url)
     @temp_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_#{SecureRandom.hex}"
     @private_key = private_key
     @branch = branch
   end

   def import!
-    if @private_key
-      import_private!
-    else
-      import_public!
-    end
+    clone!

     if version = Discourse.find_compatible_git_resource(@temp_folder)
       begin
         execute "git", "cat-file", "-e", version
@@ -84,35 +77,104 @@

   protected

-  def import_public!
-    begin
-      if @branch.present?
-        Discourse::Utils.execute_command({ "GIT_TERMINAL_PROMPT" => "0" }, "git", "clone", "--single-branch", "-b", @branch, @url, @temp_folder)
-      else
-        Discourse::Utils.execute_command({ "GIT_TERMINAL_PROMPT" => "0" }, "git", "clone", @url, @temp_folder)
-      end
-    rescue RuntimeError
-      raise RemoteTheme::ImportError.new(I18n.t("themes.import_error.git"))
+  def raise_import_error!
+    raise RemoteTheme::ImportError.new(I18n.t("themes.import_error.git"))
+  end
+
+  def clone!
+    begin
+      @uri = URI.parse(@url)
+    rescue URI::Error
+      raise_import_error!
+    end
+
+    case @uri&.scheme
+    when "http", "https"
+      clone_http!
+    when "ssh"
+      clone_ssh!
+    else
+      raise RemoteTheme::ImportError.new(I18n.t("themes.import_error.git_unsupported_scheme"))
     end
   end

-  def import_private!
+  def clone_args(config = {})
+    args = ["git"]
+
+    config.each do |key, value|
+      args.concat(['-c', "#{key}=#{value}"])
+    end
+
+    args << "clone"
+
+    if @branch.present?
+      args.concat(["-b", @branch])
+    end
+
+    args.concat([@url, @temp_folder])
+
+    args
+  end
+
+  def clone_http!
+    begin
+      @uri = FinalDestination.resolve(@uri.to_s)
+    rescue
+      raise_import_error!
+    end
+
+    @url = @uri.to_s
+
+    unless ["http", "https"].include?(@uri.scheme)
+      raise_import_error!
+    end
+
+    addresses = FinalDestination::SSRFDetector.lookup_and_filter_ips(@uri.host)
+
+    if addresses.empty?
+      raise_import_error!
+    end
+
+    env = { "GIT_TERMINAL_PROMPT" => "0" }
+
+    args = clone_args(
+      "http.followRedirects" => "false",
+      "http.curloptResolve" => "#{@uri.host}:#{@uri.port}:#{addresses.join(',')}",
+    )
+
+    begin
+      Discourse::Utils.execute_command(env, *args, timeout: COMMAND_TIMEOUT_SECONDS)
+    rescue RuntimeError
+      raise_import_error!
+    end
+  end
+
+  def clone_ssh!
+    unless @private_key.present?
+      raise_import_error!
+    end
+
+    with_ssh_private_key do |ssh_folder|
+      # Use only the specified SSH key
+      env = { 'GIT_SSH_COMMAND' => "ssh -i #{ssh_folder}/id_rsa -o IdentitiesOnly=yes -o IdentityFile=#{ssh_folder}/id_rsa -o StrictHostKeyChecking=no" }
+      args = clone_args
+
+      begin
+        Discourse::Utils.execute_command(env, *args, timeout: COMMAND_TIMEOUT_SECONDS)
+      rescue RuntimeError
+        raise_import_error!
+      end
+    end
+  end
+
+  def with_ssh_private_key
     ssh_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_ssh_#{SecureRandom.hex}"
     FileUtils.mkdir_p ssh_folder

     File.write("#{ssh_folder}/id_rsa", @private_key)
     FileUtils.chmod(0600, "#{ssh_folder}/id_rsa")

-    begin
-      git_ssh_command = { 'GIT_SSH_COMMAND' => "ssh -i #{ssh_folder}/id_rsa -o StrictHostKeyChecking=no" }
-      if @branch.present?
-        Discourse::Utils.execute_command(git_ssh_command, "git", "clone", "--single-branch", "-b", @branch, @url, @temp_folder)
-      else
-        Discourse::Utils.execute_command(git_ssh_command, "git", "clone", @url, @temp_folder)
-      end
-    rescue RuntimeError
-      raise RemoteTheme::ImportError.new(I18n.t("themes.import_error.git"))
-    end
+    yield ssh_folder
   ensure
     FileUtils.rm_rf ssh_folder
   end
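A sketch of the resulting import flow; only http(s) and ssh:// remotes are accepted, anything else raises themes.import_error.git_unsupported_scheme:

importer = ThemeStore::GitImporter.new("https://github.com/discourse/sample_theme", branch: "main")
importer.import! # resolves the host, pins git to the filtered IPs via http.curloptResolve, then clones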

@@ -168,6 +168,20 @@ RSpec.describe Jobs::EmitWebHookEvent do
       )
     end

+    it "doesn't emit if the payload URL resolves to a disallowed IP" do
+      FinalDestination::TestHelper.stub_to_fail do
+        subject.execute(
+          web_hook_id: post_hook.id,
+          event_type: 'post',
+          payload: { test: "some payload" }.to_json
+        )
+      end
+      event = post_hook.web_hook_events.last
+      expect(event.response_headers).to eq({ error: I18n.t("webhooks.payload_url.blocked_or_internal") }.to_json)
+      expect(event.response_body).to eq(nil)
+      expect(event.status).to eq(-1)
+    end
+
     context 'with category filters' do
       fab!(:category) { Fabricate(:category) }
       fab!(:topic) { Fabricate(:topic) }
@@ -299,20 +313,20 @@ RSpec.describe Jobs::EmitWebHookEvent do
      event = WebHookEvent.last
      headers = MultiJson.load(event.headers)

-     expect(headers['Content-Length']).to eq(13)
+     expect(headers['Content-Length']).to eq("13")
      expect(headers['Host']).to eq("meta.discourse.org")
-     expect(headers['X-Discourse-Event-Id']).to eq(event.id)
+     expect(headers['X-Discourse-Event-Id']).to eq(event.id.to_s)
      expect(headers['X-Discourse-Event-Type']).to eq(described_class::PING_EVENT)
      expect(headers['X-Discourse-Event']).to eq(described_class::PING_EVENT)
      expect(headers['X-Discourse-Event-Signature']).to eq('sha256=162f107f6b5022353274eb1a7197885cfd35744d8d08e5bcea025d309386b7d6')
      expect(event.payload).to eq(MultiJson.dump(ping: 'OK'))
      expect(event.status).to eq(200)
-     expect(MultiJson.load(event.response_headers)['Test']).to eq('string')
+     expect(MultiJson.load(event.response_headers)['test']).to eq('string')
      expect(event.response_body).to eq('OK')
    end

    it 'sets up proper request headers when an error raised' do
-     Excon::Connection.any_instance.expects(:post).raises("error")
+     stub_request(:post, post_hook.payload_url).to_raise("error")

      subject.execute(
        web_hook_id: post_hook.id,
@@ -323,9 +337,9 @@ RSpec.describe Jobs::EmitWebHookEvent do
      event = WebHookEvent.last
      headers = MultiJson.load(event.headers)

-     expect(headers['Content-Length']).to eq(13)
+     expect(headers['Content-Length']).to eq("13")
      expect(headers['Host']).to eq("meta.discourse.org")
-     expect(headers['X-Discourse-Event-Id']).to eq(event.id)
+     expect(headers['X-Discourse-Event-Id']).to eq(event.id.to_s)
      expect(headers['X-Discourse-Event-Type']).to eq(described_class::PING_EVENT)
      expect(headers['X-Discourse-Event']).to eq(described_class::PING_EVENT)
      expect(headers['X-Discourse-Event-Signature']).to eq('sha256=162f107f6b5022353274eb1a7197885cfd35744d8d08e5bcea025d309386b7d6')

@@ -1295,7 +1295,6 @@ RSpec.describe CookedPostProcessor do
       stub_request(:head, url)
       stub_request(:get , url).to_return(body: body)
-      FinalDestination.stubs(:lookup_ip).returns('1.2.3.4')

       # not an ideal stub but shipping the whole image to fast image can add
       # a lot of cost to this test

@@ -0,0 +1,110 @@
# frozen_string_literal: true
describe FinalDestination::HTTP do
before do
# We need to test low-level stuff, switch off WebMock for FinalDestination::HTTP
WebMock.enable!(except: [:final_destination])
Socket.stubs(:tcp).never
Addrinfo.stubs(:getaddrinfo).never
end
after do
WebMock.enable!
end
def expect_tcp_and_abort(stub_addr, &blk)
success = Class.new(StandardError)
Socket.stubs(:tcp).with { |addr| stub_addr == addr }.once.raises(success)
begin
yield
rescue success
end
end
def stub_ip_lookup(stub_addr, ips)
FinalDestination::SSRFDetector.stubs(:lookup_ips).with { |addr| stub_addr == addr }.returns(ips)
end
def stub_tcp_to_raise(stub_addr, exception)
Socket.stubs(:tcp).with { |addr| addr == stub_addr }.once.raises(exception)
end
it "works through each IP address until success" do
stub_ip_lookup("example.com", %w[1.1.1.1 2.2.2.2 3.3.3.3])
stub_tcp_to_raise("1.1.1.1", Errno::ETIMEDOUT)
stub_tcp_to_raise("2.2.2.2", Errno::EPIPE)
expect_tcp_and_abort("3.3.3.3") { FinalDestination::HTTP.get(URI("https://example.com")) }
end
it "handles nxdomain with SocketError" do
FinalDestination::SSRFDetector
.stubs(:lookup_ips)
.with { |addr| addr == "example.com" }
.raises(SocketError)
expect { FinalDestination::HTTP.get(URI("https://example.com")) }.to raise_error(SocketError)
end
it "raises the normal error when all IPs fail" do
stub_ip_lookup("example.com", %w[1.1.1.1 2.2.2.2])
stub_tcp_to_raise("1.1.1.1", Errno::ETIMEDOUT)
stub_tcp_to_raise("2.2.2.2", Errno::EPIPE)
expect { FinalDestination::HTTP.get(URI("https://example.com")) }.to raise_error(Errno::EPIPE)
end
it "ignores private IPs" do
stub_ip_lookup("example.com", %w[0.0.0.0 2.2.2.2])
expect_tcp_and_abort("2.2.2.2") { FinalDestination::HTTP.get(URI("https://example.com")) }
end
it "raises DisallowedIpError if all IPs are private" do
stub_ip_lookup("example.com", %w[0.0.0.0 127.0.0.1])
expect { FinalDestination::HTTP.get(URI("https://example.com")) }.to raise_error(
FinalDestination::SSRFDetector::DisallowedIpError,
)
expect(FinalDestination::SSRFDetector::DisallowedIpError.new).to be_a(SocketError)
end
it "handles short IPs" do
stub_ip_lookup("0", %w[0.0.0.0])
expect { FinalDestination::HTTP.get(URI("https://0/path")) }.to raise_error(
FinalDestination::SSRFDetector::DisallowedIpError,
)
expect(FinalDestination::SSRFDetector::DisallowedIpError.new).to be_a(SocketError)
end
it "raises DisallowedIpError if all IPs are blocked" do
SiteSetting.blocked_ip_blocks = "98.0.0.0/8|78.13.47.0/24|9001:82f3::/32"
stub_ip_lookup("ip6.example.com", %w[9001:82f3:8873::3])
stub_ip_lookup("ip4.example.com", %w[98.23.19.111])
expect { FinalDestination::HTTP.get(URI("https://ip4.example.com")) }.to raise_error(
FinalDestination::SSRFDetector::DisallowedIpError,
)
expect { FinalDestination::HTTP.get(URI("https://ip6.example.com")) }.to raise_error(
FinalDestination::SSRFDetector::DisallowedIpError,
)
end
it "allows specified hosts to bypass IP checks" do
SiteSetting.blocked_ip_blocks = "98.0.0.0/8|78.13.47.0/24|9001:82f3::/32"
SiteSetting.allowed_internal_hosts = "internal.example.com|blocked-ip.example.com"
stub_ip_lookup("internal.example.com", %w[0.0.0.0 127.0.0.1])
stub_ip_lookup("blocked-ip.example.com", %w[98.23.19.111])
expect_tcp_and_abort("0.0.0.0") do
FinalDestination::HTTP.get(URI("https://internal.example.com"))
end
expect_tcp_and_abort("98.23.19.111") do
FinalDestination::HTTP.get(URI("https://blocked-ip.example.com"))
end
end
it "stops iterating over DNS records once timeout reached" do
stub_ip_lookup("example.com", %w[1.1.1.1 2.2.2.2 3.3.3.3 4.4.4.4])
Socket.stubs(:tcp).with { |addr| addr == "1.1.1.1" }.raises(Errno::ECONNREFUSED)
Socket.stubs(:tcp).with { |addr| addr == "2.2.2.2" }.raises(Errno::ECONNREFUSED)
Socket.stubs(:tcp).with { |*args, **kwargs| kwargs[:open_timeout] == 0 }.raises(Errno::ETIMEDOUT)
FinalDestination::HTTP.any_instance.stubs(:current_time).returns(0, 1, 5)
expect do
FinalDestination::HTTP.start("example.com", 80, open_timeout: 5) {}
end.to raise_error(Net::OpenTimeout)
end
end

@@ -0,0 +1,44 @@
# frozen_string_literal: true
describe FinalDestination::Resolver do
let(:mock_response) { [Addrinfo.ip("1.1.1.1"), Addrinfo.ip("2.2.2.2")] }
before do
# No DNS lookups in tests
Addrinfo.stubs(:getaddrinfo).never
end
def alive_thread_count
Thread.list.filter(&:alive?).count
end
it "handles timeouts correctly" do
Addrinfo.stubs(:getaddrinfo).with { |addr| sleep if addr == "sleep.example.com" } # timeout
Addrinfo.stubs(:getaddrinfo).with { |addr| addr == "example.com" }.returns(mock_response)
expect {
FinalDestination::Resolver.lookup("sleep.example.com", timeout: 0.001)
}.to raise_error(Timeout::Error)
start_thread_count = alive_thread_count
expect {
FinalDestination::Resolver.lookup("sleep.example.com", timeout: 0.001)
}.to raise_error(Timeout::Error)
expect(alive_thread_count).to eq(start_thread_count)
expect(FinalDestination::Resolver.lookup("example.com")).to eq(
%w[1.1.1.1 2.2.2.2],
)
# Thread available for reuse after successful lookup
expect(alive_thread_count).to eq(start_thread_count + 1)
end
it "can lookup correctly" do
Addrinfo.stubs(:getaddrinfo).with { |addr| addr == "example.com" }.returns(mock_response)
expect(FinalDestination::Resolver.lookup("example.com")).to eq(%w[1.1.1.1 2.2.2.2])
end
end

@@ -0,0 +1,104 @@
# frozen_string_literal: true
describe FinalDestination::SSRFDetector do
describe "site setting parsing" do
it "can parse the blocked_ip_blocks and allowed_internal_hosts settings when the delimiter is pipe" do
SiteSetting.blocked_ip_blocks = "13.134.89.0/24|73.234.19.0/30\n"
SiteSetting.allowed_internal_hosts = "awesomesauce.com\n|goodbye.net"
expect(subject.blocked_ip_blocks).to eq(%w[13.134.89.0/24 73.234.19.0/30])
expect(subject.allowed_internal_hosts).to eq(
[
"test.localhost", # Discourse.base_url
"awesomesauce.com",
"goodbye.net",
],
)
end
it "can parse the blocked_ip_blocks and allowed_internal_hosts settings when the delimiter is newline" do
SiteSetting.blocked_ip_blocks = "13.134.89.0/24\n73.234.19.0/30\n\n"
SiteSetting.allowed_internal_hosts = "awesomesauce.com\n\ngoodbye.net\n\n"
expect(subject.blocked_ip_blocks).to eq(%w[13.134.89.0/24 73.234.19.0/30])
expect(subject.allowed_internal_hosts).to eq(
[
"test.localhost", # Discourse.base_url
"awesomesauce.com",
"goodbye.net",
],
)
end
it "ignores invalid IP blocks" do
SiteSetting.blocked_ip_blocks = "2001:abc:de::/48|notanip"
expect(subject.blocked_ip_blocks).to eq(%w[2001:abc:de::/48])
end
end
describe ".ip_allowed?" do
it "returns false for blocked IPs" do
SiteSetting.blocked_ip_blocks = "98.0.0.0/8|78.13.47.0/24|9001:82f3::/32"
expect(subject.ip_allowed?("98.23.19.111")).to eq(false)
expect(subject.ip_allowed?("9001:82f3:8873::3")).to eq(false)
end
it "returns false for standard internal IPs" do
expect(subject.ip_allowed?("172.31.100.31")).to eq(false)
expect(subject.ip_allowed?("fd02:77fa:ffea::f")).to eq(false)
end
end
describe ".host_bypasses_checks?" do
it "returns true for URLs when allowed_internal_hosts allows the host" do
SiteSetting.allowed_internal_hosts = "allowedhost1.com|allowedhost2.com"
expect(subject.host_bypasses_checks?("allowedhost1.com")).to eq(true)
expect(subject.host_bypasses_checks?("allowedhost2.com")).to eq(true)
end
it "returns false for other hosts" do
expect(subject.host_bypasses_checks?("otherhost.com")).to eq(false)
end
it "returns true for the base uri" do
SiteSetting.force_hostname = "final-test.example.com"
expect(subject.host_bypasses_checks?("final-test.example.com")).to eq(true)
end
it "returns true for the S3 CDN url" do
SiteSetting.enable_s3_uploads = true
SiteSetting.s3_cdn_url = "https://s3.example.com"
expect(subject.host_bypasses_checks?("s3.example.com")).to eq(true)
end
it "returns true for the CDN url" do
GlobalSetting.stubs(:cdn_url).returns("https://cdn.example.com/discourse")
expect(subject.host_bypasses_checks?("cdn.example.com")).to eq(true)
end
end
describe ".lookup_and_filter_ips" do
it "returns a fake response in tests" do
expect(subject.lookup_and_filter_ips("example.com")).to eq(["1.2.3.4"])
end
it "correctly filters private and blocked IPs" do
SiteSetting.blocked_ip_blocks = "9.10.11.12/24"
subject.stubs(:lookup_ips).returns(%w[127.0.0.1 5.6.7.8 9.10.11.12])
expect(subject.lookup_and_filter_ips("example.com")).to eq(["5.6.7.8"])
end
it "raises an exception if all IPs are blocked" do
subject.stubs(:lookup_ips).returns(["127.0.0.1"])
expect { subject.lookup_and_filter_ips("example.com") }.to raise_error(
subject::DisallowedIpError,
)
end
it "bypasses filtering for allowlisted hosts" do
SiteSetting.allowed_internal_hosts = "example.com"
subject.stubs(:lookup_ips).returns(["127.0.0.1"])
expect(subject.lookup_and_filter_ips("example.com")).to eq(["127.0.0.1"])
end
end
end

@@ -10,26 +10,6 @@ RSpec.describe FinalDestination do
       force_get_hosts: ['https://force.get.com', 'https://*.ihaveawildcard.com/'],
       preserve_fragment_url_hosts: ['https://eviltrout.com'],
-
-      # avoid IP lookups in test
-      lookup_ip: lambda do |host|
-        case host
-        when 'eviltrout.com' then '52.84.143.152'
-        when 'particularly.eviltrout.com' then '52.84.143.152'
-        when 'codinghorror.com' then '91.146.108.148'
-        when 'discourse.org' then '104.25.152.10'
-        when 'some_thing.example.com' then '104.25.152.10'
-        when 'private-host.com' then '192.168.10.1'
-        when 'internal-ipv6.com' then '2001:abc:de:01:3:3d0:6a65:c2bf'
-        when 'ignore-me.com' then '53.84.143.152'
-        when 'force.get.com' then '22.102.29.40'
-        when 'any-subdomain.ihaveawildcard.com' then '104.25.152.11'
-        when 'wikipedia.com' then '1.2.3.4'
-        else
-          _as_ip = IPAddr.new(host)
-          host
-        end
-      end
     }
   end
@@ -54,15 +34,32 @@ RSpec.describe FinalDestination do
     }
   end

+  def fd_stub_request(method, url)
+    uri = URI.parse(url)
+    host = uri.hostname
+    ip = "1.2.3.4"
+
+    # In Excon we pass the IP in the URL, so we need to stub
+    # that version as well
+    uri.hostname = "HOSTNAME_PLACEHOLDER"
+    matcher = Regexp.escape(uri.to_s).sub(
+      "HOSTNAME_PLACEHOLDER",
+      "(#{Regexp.escape(host)}|#{Regexp.escape(ip)})"
+    )
+
+    stub_request(method, /\A#{matcher}\z/).with(headers: { "Host" => host })
+  end
+
   def canonical_follow(from, dest)
-    stub_request(:get, from).to_return(
+    fd_stub_request(:get, from).to_return(
       status: 200,
       body: "<head><link rel=\"canonical\" href=\"#{dest}\"></head>"
     )
   end

   def redirect_response(from, dest)
-    stub_request(:head, from).to_return(
+    fd_stub_request(:head, from).to_return(
       status: 302,
       headers: { "Location" => dest }
     )
@@ -90,6 +87,16 @@ RSpec.describe FinalDestination do
     expect(fd('asdf').resolve).to be_nil
   end

+  it "returns nil for unresolvable url" do
+    FinalDestination::SSRFDetector.stubs(:lookup_ips).raises(SocketError)
+    expect(fd("https://example.com").resolve).to eq(nil)
+  end
+
+  it "returns nil for url timeout" do
+    FinalDestination::SSRFDetector.stubs(:lookup_ips).raises(Timeout::Error)
+    expect(fd("https://example.com").resolve).to eq(nil)
+  end
+
   it "returns nil when read timeouts" do
     Excon.expects(:public_send).raises(Excon::Errors::Timeout)
@@ -98,7 +105,7 @@ RSpec.describe FinalDestination do
   context "without redirects" do
     before do
-      stub_request(:head, "https://eviltrout.com").to_return(doc_response)
+      fd_stub_request(:head, "https://eviltrout.com/").to_return(doc_response)
     end

     it "returns the final url" do
@@ -118,7 +125,7 @@ RSpec.describe FinalDestination do
   context "with underscores in URLs" do
     before do
-      stub_request(:head, 'https://some_thing.example.com').to_return(doc_response)
+      fd_stub_request(:head, 'https://some_thing.example.com').to_return(doc_response)
     end

     it "doesn't raise errors with underscores in urls" do
@@ -133,7 +140,7 @@ RSpec.describe FinalDestination do
     before do
       redirect_response("https://eviltrout.com", "https://codinghorror.com/blog")
       redirect_response("https://codinghorror.com/blog", "https://discourse.org")
-      stub_request(:head, "https://discourse.org").to_return(doc_response)
+      fd_stub_request(:head, "https://discourse.org").to_return(doc_response)
     end

     it "returns the final url" do
@@ -148,7 +155,7 @@ RSpec.describe FinalDestination do
     before do
       redirect_response("https://eviltrout.com", "https://codinghorror.com/blog")
       redirect_response("https://codinghorror.com/blog", "https://discourse.org")
-      stub_request(:head, "https://discourse.org").to_return(doc_response)
+      fd_stub_request(:head, "https://discourse.org").to_return(doc_response)
     end

     it "returns the final url" do
@@ -162,7 +169,8 @@ RSpec.describe FinalDestination do
   context "with a redirect to an internal IP" do
     before do
       redirect_response("https://eviltrout.com", "https://private-host.com")
-      stub_request(:head, "https://private-host.com").to_return(doc_response)
+      FinalDestination::SSRFDetector.stubs(:lookup_and_filter_ips).with("eviltrout.com").returns(["1.2.3.4"])
+      FinalDestination::SSRFDetector.stubs(:lookup_and_filter_ips).with("private-host.com").raises(FinalDestination::SSRFDetector::DisallowedIpError)
     end

     it "returns the final url" do
@@ -188,14 +196,14 @@ RSpec.describe FinalDestination do
   it 'raises error when response is too big' do
     stub_const(described_class, "MAX_REQUEST_SIZE_BYTES", 1) do
-      stub_request(:get, "https://codinghorror.com/blog").to_return(body_response)
+      fd_stub_request(:get, "https://codinghorror.com/blog").to_return(body_response)
       final = FinalDestination.new('https://codinghorror.com/blog', opts.merge(follow_canonical: true))
       expect { final.resolve }.to raise_error(Excon::Errors::ExpectationFailed, "response size too big: https://codinghorror.com/blog")
     end
   end

   it 'raises error when response is too slow' do
-    stub_request(:get, "https://codinghorror.com/blog").to_return(lambda { |request| freeze_time(11.seconds.from_now) ; body_response })
+    fd_stub_request(:get, "https://codinghorror.com/blog").to_return(lambda { |request| freeze_time(11.seconds.from_now) ; body_response })
     final = FinalDestination.new('https://codinghorror.com/blog', opts.merge(follow_canonical: true))
     expect { final.resolve }.to raise_error(Excon::Errors::ExpectationFailed, "connect timeout reached: https://codinghorror.com/blog")
   end
@@ -203,7 +211,7 @@ RSpec.describe FinalDestination do
   context 'when following canonical links' do
     it 'resolves the canonical link as the final destination' do
       canonical_follow("https://eviltrout.com", "https://codinghorror.com/blog")
stub_request(:head, "https://codinghorror.com/blog").to_return(doc_response) fd_stub_request(:head, "https://codinghorror.com/blog").to_return(doc_response)
final = FinalDestination.new('https://eviltrout.com', opts.merge(follow_canonical: true)) final = FinalDestination.new('https://eviltrout.com', opts.merge(follow_canonical: true))
@ -216,7 +224,7 @@ RSpec.describe FinalDestination do
host = "https://codinghorror.com" host = "https://codinghorror.com"
canonical_follow("#{host}/blog", "/blog/canonical") canonical_follow("#{host}/blog", "/blog/canonical")
stub_request(:head, "#{host}/blog/canonical").to_return(doc_response) fd_stub_request(:head, "#{host}/blog/canonical").to_return(doc_response)
final = FinalDestination.new("#{host}/blog", opts.merge(follow_canonical: true)) final = FinalDestination.new("#{host}/blog", opts.merge(follow_canonical: true))
@ -228,7 +236,7 @@ RSpec.describe FinalDestination do
it 'resolves the canonical link when the URL is relative and does not start with the / symbol' do it 'resolves the canonical link when the URL is relative and does not start with the / symbol' do
host = "https://codinghorror.com" host = "https://codinghorror.com"
canonical_follow("#{host}/blog", "blog/canonical") canonical_follow("#{host}/blog", "blog/canonical")
stub_request(:head, "#{host}/blog/canonical").to_return(doc_response) fd_stub_request(:head, "#{host}/blog/canonical").to_return(doc_response)
final = FinalDestination.new("#{host}/blog", opts.merge(follow_canonical: true)) final = FinalDestination.new("#{host}/blog", opts.merge(follow_canonical: true))
@ -259,65 +267,71 @@ RSpec.describe FinalDestination do
end end
context "when forcing GET" do context "when forcing GET" do
before do
stub_request(:head, 'https://force.get.com/posts?page=4')
stub_request(:get, 'https://force.get.com/posts?page=4')
stub_request(:get, 'https://any-subdomain.ihaveawildcard.com/some/other/content')
stub_request(:head, 'https://eviltrout.com/posts?page=2')
stub_request(:get, 'https://eviltrout.com/posts?page=2')
stub_request(:head, 'https://particularly.eviltrout.com/has/a/secret/plan')
stub_request(:get, 'https://particularly.eviltrout.com/has/a/secret/plan')
end
it "will do a GET when forced" do it "will do a GET when forced" do
final = FinalDestination.new('https://force.get.com/posts?page=4', opts) url = 'https://force.get.com/posts?page=4'
expect(final.resolve.to_s).to eq('https://force.get.com/posts?page=4') get_stub = fd_stub_request(:get, url)
head_stub = fd_stub_request(:head, url)
final = FinalDestination.new(url, opts)
expect(final.resolve.to_s).to eq(url)
expect(final.status).to eq(:resolved) expect(final.status).to eq(:resolved)
expect(WebMock).to have_requested(:get, 'https://force.get.com/posts?page=4') expect(get_stub).to have_been_requested
expect(WebMock).to_not have_requested(:head, 'https://force.get.com/posts?page=4') expect(head_stub).to_not have_been_requested
end end
it "will do a HEAD if not forced" do it "will do a HEAD if not forced" do
final = FinalDestination.new('https://eviltrout.com/posts?page=2', opts) url = 'https://eviltrout.com/posts?page=2'
expect(final.resolve.to_s).to eq('https://eviltrout.com/posts?page=2') get_stub = fd_stub_request(:get, url)
head_stub = fd_stub_request(:head, url)
final = FinalDestination.new(url, opts)
expect(final.resolve.to_s).to eq(url)
expect(final.status).to eq(:resolved) expect(final.status).to eq(:resolved)
expect(WebMock).to_not have_requested(:get, 'https://eviltrout.com/posts?page=2') expect(get_stub).to_not have_been_requested
expect(WebMock).to have_requested(:head, 'https://eviltrout.com/posts?page=2') expect(head_stub).to have_been_requested
end end
it "will do a GET when forced on a wildcard subdomain" do it "will do a GET when forced on a wildcard subdomain" do
final = FinalDestination.new('https://any-subdomain.ihaveawildcard.com/some/other/content', opts) url = 'https://any-subdomain.ihaveawildcard.com/some/other/content'
expect(final.resolve.to_s).to eq('https://any-subdomain.ihaveawildcard.com/some/other/content') get_stub = fd_stub_request(:get, url)
head_stub = fd_stub_request(:head, url)
final = FinalDestination.new(url, opts)
expect(final.resolve.to_s).to eq(url)
expect(final.status).to eq(:resolved) expect(final.status).to eq(:resolved)
expect(WebMock).to have_requested(:get, 'https://any-subdomain.ihaveawildcard.com/some/other/content') expect(get_stub).to have_been_requested
expect(WebMock).to_not have_requested(:head, 'https://any-subdomain.ihaveawildcard.com/some/other/content') expect(head_stub).to_not have_been_requested
end end
it "will do a HEAD if on a subdomain of a forced get domain without a wildcard" do it "will do a HEAD if on a subdomain of a forced get domain without a wildcard" do
final = FinalDestination.new('https://particularly.eviltrout.com/has/a/secret/plan', opts) url = 'https://particularly.eviltrout.com/has/a/secret/plan'
expect(final.resolve.to_s).to eq('https://particularly.eviltrout.com/has/a/secret/plan') get_stub = fd_stub_request(:get, url)
head_stub = fd_stub_request(:head, url)
final = FinalDestination.new(url, opts)
expect(final.resolve.to_s).to eq(url)
expect(final.status).to eq(:resolved) expect(final.status).to eq(:resolved)
expect(WebMock).to_not have_requested(:get, 'https://particularly.eviltrout.com/has/a/secret/plan') expect(get_stub).to_not have_been_requested
expect(WebMock).to have_requested(:head, 'https://particularly.eviltrout.com/has/a/secret/plan') expect(head_stub).to have_been_requested
end end
end end
context "when HEAD not supported" do context "when HEAD not supported" do
before do before do
stub_request(:get, 'https://eviltrout.com').to_return( fd_stub_request(:get, 'https://eviltrout.com').to_return(
status: 301, status: 301,
headers: { headers: {
"Location" => 'https://discourse.org', "Location" => 'https://discourse.org',
'Set-Cookie' => 'evil=trout' 'Set-Cookie' => 'evil=trout'
} }
) )
stub_request(:head, 'https://discourse.org') fd_stub_request(:head, 'https://discourse.org')
end end
context "when the status code is 405" do context "when the status code is 405" do
before do before do
stub_request(:head, 'https://eviltrout.com').to_return(status: 405) fd_stub_request(:head, 'https://eviltrout.com').to_return(status: 405)
end end
it "will try a GET" do it "will try a GET" do
@ -330,7 +344,7 @@ RSpec.describe FinalDestination do
context "when the status code is 501" do context "when the status code is 501" do
before do before do
stub_request(:head, 'https://eviltrout.com').to_return(status: 501) fd_stub_request(:head, 'https://eviltrout.com').to_return(status: 501)
end end
it "will try a GET" do it "will try a GET" do
@ -342,9 +356,9 @@ RSpec.describe FinalDestination do
end end
it "correctly extracts cookies during GET" do it "correctly extracts cookies during GET" do
stub_request(:head, "https://eviltrout.com").to_return(status: 405) fd_stub_request(:head, "https://eviltrout.com").to_return(status: 405)
stub_request(:get, "https://eviltrout.com") fd_stub_request(:get, "https://eviltrout.com")
.to_return(status: 302, body: "" , headers: { .to_return(status: 302, body: "" , headers: {
"Location" => "https://eviltrout.com", "Location" => "https://eviltrout.com",
"Set-Cookie" => ["foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com", "Set-Cookie" => ["foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com",
@ -352,7 +366,7 @@ RSpec.describe FinalDestination do
"baz=2; expires=Tue, 19-Feb-2019 10:14:24 GMT; path=/; domain=eviltrout.com"] "baz=2; expires=Tue, 19-Feb-2019 10:14:24 GMT; path=/; domain=eviltrout.com"]
}) })
stub_request(:head, "https://eviltrout.com") fd_stub_request(:head, "https://eviltrout.com")
.with(headers: { "Cookie" => "bar=1; baz=2; foo=219ffwef9w0f" }) .with(headers: { "Cookie" => "bar=1; baz=2; foo=219ffwef9w0f" })
final = FinalDestination.new("https://eviltrout.com", opts) final = FinalDestination.new("https://eviltrout.com", opts)
@ -363,13 +377,13 @@ RSpec.describe FinalDestination do
end end
it "should use the correct format for cookies when there is only one cookie" do it "should use the correct format for cookies when there is only one cookie" do
stub_request(:head, "https://eviltrout.com") fd_stub_request(:head, "https://eviltrout.com")
.to_return(status: 302, headers: { .to_return(status: 302, headers: {
"Location" => "https://eviltrout.com", "Location" => "https://eviltrout.com",
"Set-Cookie" => "foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com" "Set-Cookie" => "foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com"
}) })
stub_request(:head, "https://eviltrout.com") fd_stub_request(:head, "https://eviltrout.com")
.with(headers: { "Cookie" => "foo=219ffwef9w0f" }) .with(headers: { "Cookie" => "foo=219ffwef9w0f" })
final = FinalDestination.new("https://eviltrout.com", opts) final = FinalDestination.new("https://eviltrout.com", opts)
@ -379,7 +393,7 @@ RSpec.describe FinalDestination do
end end
it "should use the correct format for cookies when there are multiple cookies" do it "should use the correct format for cookies when there are multiple cookies" do
stub_request(:head, "https://eviltrout.com") fd_stub_request(:head, "https://eviltrout.com")
.to_return(status: 302, headers: { .to_return(status: 302, headers: {
"Location" => "https://eviltrout.com", "Location" => "https://eviltrout.com",
"Set-Cookie" => ["foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com", "Set-Cookie" => ["foo=219ffwef9w0f; expires=Mon, 19-Feb-2018 10:44:24 GMT; path=/; domain=eviltrout.com",
@ -387,7 +401,7 @@ RSpec.describe FinalDestination do
"baz=2; expires=Tue, 19-Feb-2019 10:14:24 GMT; path=/; domain=eviltrout.com"] "baz=2; expires=Tue, 19-Feb-2019 10:14:24 GMT; path=/; domain=eviltrout.com"]
}) })
stub_request(:head, "https://eviltrout.com") fd_stub_request(:head, "https://eviltrout.com")
.with(headers: { "Cookie" => "bar=1; baz=2; foo=219ffwef9w0f" }) .with(headers: { "Cookie" => "bar=1; baz=2; foo=219ffwef9w0f" })
final = FinalDestination.new("https://eviltrout.com", opts) final = FinalDestination.new("https://eviltrout.com", opts)
@ -401,7 +415,7 @@ RSpec.describe FinalDestination do
upstream_url = "https://eviltrout.com/upstream/lib/code/foobar.rb" upstream_url = "https://eviltrout.com/upstream/lib/code/foobar.rb"
redirect_response(origin_url, upstream_url) redirect_response(origin_url, upstream_url)
stub_request(:head, upstream_url).to_return(doc_response) fd_stub_request(:head, upstream_url).to_return(doc_response)
final = FinalDestination.new("#{origin_url}#L154-L205", opts) final = FinalDestination.new("#{origin_url}#L154-L205", opts)
expect(final.resolve.to_s).to eq("#{upstream_url}#L154-L205") expect(final.resolve.to_s).to eq("#{upstream_url}#L154-L205")
@ -410,7 +424,7 @@ RSpec.describe FinalDestination do
context "with content_type" do context "with content_type" do
before do before do
stub_request(:head, "https://eviltrout.com/this/is/an/image").to_return(image_response) fd_stub_request(:head, "https://eviltrout.com/this/is/an/image").to_return(image_response)
end end
it "returns a content_type" do it "returns a content_type" do
@ -489,15 +503,6 @@ RSpec.describe FinalDestination do
end end
end end
describe '.validate_uri' do
context "with host lookups" do
it "works for various hosts" do
expect(fd('https://private-host.com').validate_uri).to eq(false)
expect(fd('https://eviltrout.com:443').validate_uri).to eq(true)
end
end
end
describe ".validate_url_format" do describe ".validate_url_format" do
it "supports http urls" do it "supports http urls" do
expect(fd('http://eviltrout.com').validate_uri_format).to eq(true) expect(fd('http://eviltrout.com').validate_uri_format).to eq(true)
@ -535,89 +540,19 @@ RSpec.describe FinalDestination do
end end
end end
describe ".is_dest_valid" do
it "returns false for a valid ipv4" do
expect(fd("https://52.84.143.67").is_dest_valid?).to eq(true)
expect(fd("https://104.25.153.10").is_dest_valid?).to eq(true)
end
it "returns false for short ip" do
lookup = lambda do |host|
# How IPs are looked up for single digits
if host == "0"
"0.0.0.0"
elsif host == "1"
"0.0.0.1"
end
end
expect(FinalDestination.new('https://0/logo.png', lookup_ip: lookup).is_dest_valid?).to eq(false)
expect(FinalDestination.new('https://1/logo.png', lookup_ip: lookup).is_dest_valid?).to eq(false)
end
it "returns false for private ipv4" do
expect(fd("https://127.0.0.1").is_dest_valid?).to eq(false)
expect(fd("https://192.168.1.3").is_dest_valid?).to eq(false)
expect(fd("https://10.0.0.5").is_dest_valid?).to eq(false)
expect(fd("https://172.16.0.1").is_dest_valid?).to eq(false)
end
it "returns false for IPV6 via site settings" do
SiteSetting.blocked_ip_blocks = '2001:abc:de::/48|2002:abc:de::/48'
expect(fd('https://[2001:abc:de:01:0:3f0:6a65:c2bf]').is_dest_valid?).to eq(false)
expect(fd('https://[2002:abc:de:01:0:3f0:6a65:c2bf]').is_dest_valid?).to eq(false)
expect(fd('https://internal-ipv6.com').is_dest_valid?).to eq(false)
expect(fd('https://[2003:abc:de:01:0:3f0:6a65:c2bf]').is_dest_valid?).to eq(true)
end
it "ignores invalid ranges" do
SiteSetting.blocked_ip_blocks = '2001:abc:de::/48|eviltrout'
expect(fd('https://[2001:abc:de:01:0:3f0:6a65:c2bf]').is_dest_valid?).to eq(false)
end
it "returns true for public ipv6" do
expect(fd("https://[2001:470:1:3a8::251]").is_dest_valid?).to eq(true)
end
it "returns false for private ipv6" do
expect(fd("https://[fdd7:b450:d4d1:6b44::1]").is_dest_valid?).to eq(false)
end
it "returns true for the base uri" do
SiteSetting.force_hostname = "final-test.example.com"
expect(fd("https://final-test.example.com/onebox").is_dest_valid?).to eq(true)
end
it "returns true for the S3 CDN url" do
SiteSetting.enable_s3_uploads = true
SiteSetting.s3_cdn_url = "https://s3.example.com"
expect(fd("https://s3.example.com/some/thing").is_dest_valid?).to eq(true)
end
it "returns true for the CDN url" do
GlobalSetting.stubs(:cdn_url).returns("https://cdn.example.com/discourse")
expect(fd("https://cdn.example.com/some/asset").is_dest_valid?).to eq(true)
end
it 'supports allowlisting via a site setting' do
SiteSetting.allowed_internal_hosts = 'private-host.com'
expect(fd("https://private-host.com/some/url").is_dest_valid?).to eq(true)
end
end
describe "https cache" do describe "https cache" do
it 'will cache https lookups' do it 'will cache https lookups' do
FinalDestination.clear_https_cache!("wikipedia.com") FinalDestination.clear_https_cache!("wikipedia.com")
stub_request(:head, "http://wikipedia.com/image.png") fd_stub_request(:head, "http://wikipedia.com/image.png")
.to_return(status: 302, body: "", headers: { location: 'https://wikipedia.com/image.png' }) .to_return(status: 302, body: "", headers: { location: 'https://wikipedia.com/image.png' })
stub_request(:head, "https://wikipedia.com/image.png") fd_stub_request(:head, "https://wikipedia.com/image.png")
fd('http://wikipedia.com/image.png').resolve fd('http://wikipedia.com/image.png').resolve
stub_request(:head, "https://wikipedia.com/image2.png") fd_stub_request(:head, "https://wikipedia.com/image2.png")
fd('http://wikipedia.com/image2.png').resolve fd('http://wikipedia.com/image2.png').resolve
end end
spec/lib/git_url_spec.rb Normal file
@ -0,0 +1,15 @@
# frozen_string_literal: true
RSpec.describe GitUrl do
it "handles the discourse github repo by ssh" do
expect(GitUrl.normalize("git@github.com:discourse/discourse.git")).to eq(
"ssh://git@github.com/discourse/discourse.git"
)
end
it "handles the discourse github repo by https" do
expect(GitUrl.normalize("https://github.com/discourse/discourse.git")).to eq(
"https://github.com/discourse/discourse.git"
)
end
end
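The GitUrl class itself is defined elsewhere in this commit; purely as a hedged sketch, a normalizer satisfying these two examples could look like the following (module and constant names are illustrative, not the real implementation):

module GitUrlSketch
  # scp-style shorthand: "user@host:path"
  SCP_STYLE = %r{\A([\w\-\.]+@[\w\-\.]+):(.+)\z}

  # Rewrite the shorthand as an explicit ssh:// URL; anything that already
  # carries a scheme is returned unchanged.
  def self.normalize(url)
    if (match = SCP_STYLE.match(url))
      "ssh://#{match[1]}/#{match[2]}"
    else
      url
    end
  end
end

GitUrlSketch.normalize("git@github.com:discourse/discourse.git")
# => "ssh://git@github.com/discourse/discourse.git"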
@ -41,6 +41,14 @@ RSpec.describe Onebox::Helpers do
described_class.fetch_response('http://example.com/large-file') described_class.fetch_response('http://example.com/large-file')
}.to raise_error(Onebox::Helpers::DownloadTooLarge) }.to raise_error(Onebox::Helpers::DownloadTooLarge)
end end
it "raises an exception when private url requested" do
FinalDestination::TestHelper.stub_to_fail do
expect {
described_class.fetch_response('http://example.com/large-file')
}.to raise_error(FinalDestination::SSRFDetector::DisallowedIpError)
end
end
end end
describe "fetch_html_doc" do describe "fetch_html_doc" do
@ -63,13 +71,25 @@ RSpec.describe Onebox::Helpers do
it "does not follow canonical link pointing at localhost" do it "does not follow canonical link pointing at localhost" do
uri = 'https://www.example.com' uri = 'https://www.example.com'
stub_request(:get, uri).to_return(status: 200, body: "<!DOCTYPE html><link rel='canonical' href='http://localhost:3000/'/><p>success</p>") FinalDestination::SSRFDetector.stubs(:lookup_ips).with { |h| h == "localhost" }.returns(["127.0.0.1"])
stub_request(:get, uri).to_return(status: 200, body: "<!DOCTYPE html><link rel='canonical' href='http://localhost/test'/><p>success</p>")
expect(described_class.fetch_html_doc(uri).to_s).to match("success") expect(described_class.fetch_html_doc(uri).to_s).to match("success")
end end
end end
end end
describe ".fetch_content_length" do
it "does not connect to private IP" do
uri = 'https://www.example.com'
FinalDestination::TestHelper.stub_to_fail do
expect {
described_class.fetch_content_length(uri)
}.to raise_error(FinalDestination::SSRFDetector::DisallowedIpError)
end
end
end
describe "redirects" do describe "redirects" do
describe "redirect limit" do describe "redirect limit" do
before do before do
@ -54,6 +54,12 @@ RSpec.describe Onebox::StatusCheck do
it 'returns :connection_error if there is a general HTTP error' do it 'returns :connection_error if there is a general HTTP error' do
expect(described_class.new("http://www.amazon.com/http-error").human_status).to eq(:connection_error) expect(described_class.new("http://www.amazon.com/http-error").human_status).to eq(:connection_error)
end end
it 'returns :connection_error for private ips' do
FinalDestination::TestHelper.stub_to_fail do
expect(described_class.new("http://www.amazon.com/http-error").human_status).to eq(:connection_error)
end
end
end end
describe '#ok?' do describe '#ok?' do
@ -16,44 +16,67 @@ RSpec.describe ThemeStore::GitImporter do
before do before do
hex = "xxx" hex = "xxx"
SecureRandom.stubs(:hex).returns(hex) SecureRandom.stubs(:hex).returns(hex)
FinalDestination.stubs(:resolve).with(url).returns(URI.parse(url))
FinalDestination::SSRFDetector.stubs(:lookup_and_filter_ips).with("github.com").returns(["192.0.2.100"])
@temp_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_#{hex}" @temp_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_#{hex}"
@ssh_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_ssh_#{hex}" @ssh_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_ssh_#{hex}"
end end
it "should import from http url" do it "imports http urls" do
Discourse::Utils.expects(:execute_command).with({ "GIT_TERMINAL_PROMPT" => "0" }, "git", "clone", url, @temp_folder) Discourse::Utils
.expects(:execute_command)
.with(
{ "GIT_TERMINAL_PROMPT" => "0" },
"git", "-c", "http.followRedirects=false", "-c", "http.curloptResolve=github.com:443:192.0.2.100", "clone", "https://github.com/example/example.git", @temp_folder, { timeout: 20 }
)
importer = ThemeStore::GitImporter.new(url) importer = ThemeStore::GitImporter.new(url)
importer.import! importer.import!
end end
it "should work with trailing slash url" do it "imports when the url has a trailing slash" do
Discourse::Utils.expects(:execute_command).with({ "GIT_TERMINAL_PROMPT" => "0" }, "git", "clone", url, @temp_folder) Discourse::Utils
.expects(:execute_command)
.with(
{ "GIT_TERMINAL_PROMPT" => "0" },
"git", "-c", "http.followRedirects=false", "-c", "http.curloptResolve=github.com:443:192.0.2.100", "clone", "https://github.com/example/example.git", @temp_folder, { timeout: 20 }
)
importer = ThemeStore::GitImporter.new(trailing_slash_url) importer = ThemeStore::GitImporter.new(trailing_slash_url)
importer.import! importer.import!
end end
it "should import from ssh url" do it "imports ssh urls" do
Discourse::Utils.expects(:execute_command).with({ Discourse::Utils
'GIT_SSH_COMMAND' => "ssh -i #{@ssh_folder}/id_rsa -o StrictHostKeyChecking=no" .expects(:execute_command)
}, "git", "clone", ssh_url, @temp_folder) .with(
{ "GIT_SSH_COMMAND" => "ssh -i #{@ssh_folder}/id_rsa -o IdentitiesOnly=yes -o IdentityFile=#{@ssh_folder}/id_rsa -o StrictHostKeyChecking=no" },
"git", "clone", "ssh://git@github.com/example/example.git", @temp_folder, { timeout: 20 }
)
importer = ThemeStore::GitImporter.new(ssh_url, private_key: "private_key") importer = ThemeStore::GitImporter.new(ssh_url, private_key: "private_key")
importer.import! importer.import!
end end
it "should import branch from http url" do it "imports http urls with a particular branch" do
Discourse::Utils.expects(:execute_command).with({ "GIT_TERMINAL_PROMPT" => "0" }, "git", "clone", "--single-branch", "-b", branch, url, @temp_folder) Discourse::Utils
.expects(:execute_command)
.with(
{ "GIT_TERMINAL_PROMPT" => "0" },
"git", "-c", "http.followRedirects=false", "-c", "http.curloptResolve=github.com:443:192.0.2.100", "clone", "-b", branch, "https://github.com/example/example.git", @temp_folder, { timeout: 20 }
)
importer = ThemeStore::GitImporter.new(url, branch: branch) importer = ThemeStore::GitImporter.new(url, branch: branch)
importer.import! importer.import!
end end
it "should import branch from ssh url" do it "imports ssh urls with a particular branch" do
Discourse::Utils.expects(:execute_command).with({ Discourse::Utils
'GIT_SSH_COMMAND' => "ssh -i #{@ssh_folder}/id_rsa -o StrictHostKeyChecking=no" .expects(:execute_command)
}, "git", "clone", "--single-branch", "-b", branch, ssh_url, @temp_folder) .with(
{ "GIT_SSH_COMMAND" => "ssh -i #{@ssh_folder}/id_rsa -o IdentitiesOnly=yes -o IdentityFile=#{@ssh_folder}/id_rsa -o StrictHostKeyChecking=no" },
"git", "clone", "-b", branch, "ssh://git@github.com/example/example.git", @temp_folder, { timeout: 20 }
)
importer = ThemeStore::GitImporter.new(ssh_url, private_key: "private_key", branch: branch) importer = ThemeStore::GitImporter.new(ssh_url, private_key: "private_key", branch: branch)
importer.import! importer.import!
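Restated outside the mock, the importer is expected to shell out like this (arguments copied from the expectation above, with the SSRF-relevant flags annotated):

Discourse::Utils.execute_command(
  { "GIT_TERMINAL_PROMPT" => "0" },
  "git",
  "-c", "http.followRedirects=false",                     # never follow an HTTP redirect to a different host
  "-c", "http.curloptResolve=github.com:443:192.0.2.100", # pin github.com:443 to the pre-validated IP
  "clone", "https://github.com/example/example.git", @temp_folder,
  { timeout: 20 }
)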
@ -49,20 +49,29 @@ RSpec.describe RemoteTheme do
) )
end end
let :initial_repo_url do
MockGitImporter.register("https://example.com/initial_repo.git", initial_repo)
end
after do after do
`rm -fr #{initial_repo}` `rm -fr #{initial_repo}`
end end
it 'can correctly import a remote theme' do around(:each) do |group|
MockGitImporter.with_mock do
group.run
end
end
it 'can correctly import a remote theme' do
time = Time.new('2000') time = Time.new('2000')
freeze_time time freeze_time time
@theme = RemoteTheme.import_theme(initial_repo) @theme = RemoteTheme.import_theme(initial_repo_url)
remote = @theme.remote_theme remote = @theme.remote_theme
expect(@theme.name).to eq('awesome theme') expect(@theme.name).to eq('awesome theme')
expect(remote.remote_url).to eq(initial_repo) expect(remote.remote_url).to eq(initial_repo_url)
expect(remote.remote_version).to eq(`cd #{initial_repo} && git rev-parse HEAD`.strip) expect(remote.remote_version).to eq(`cd #{initial_repo} && git rev-parse HEAD`.strip)
expect(remote.local_version).to eq(`cd #{initial_repo} && git rev-parse HEAD`.strip) expect(remote.local_version).to eq(`cd #{initial_repo} && git rev-parse HEAD`.strip)
@ -160,12 +169,12 @@ RSpec.describe RemoteTheme do
end end
it "can update themes with overwritten history" do it "can update themes with overwritten history" do
theme = RemoteTheme.import_theme(initial_repo) theme = RemoteTheme.import_theme(initial_repo_url)
remote = theme.remote_theme remote = theme.remote_theme
old_version = `cd #{initial_repo} && git rev-parse HEAD`.strip old_version = `cd #{initial_repo} && git rev-parse HEAD`.strip
expect(theme.name).to eq('awesome theme') expect(theme.name).to eq('awesome theme')
expect(remote.remote_url).to eq(initial_repo) expect(remote.remote_url).to eq(initial_repo_url)
expect(remote.local_version).to eq(old_version) expect(remote.local_version).to eq(old_version)
expect(remote.remote_version).to eq(old_version) expect(remote.remote_version).to eq(old_version)
@ -658,4 +658,33 @@ RSpec.describe WebHook do
end end
end end
end end
describe '#payload_url_safety' do
fab!(:post_hook) { Fabricate(:web_hook, payload_url: "https://example.com") }
it 'errors if payload_url resolves to a blocked IP' do
SiteSetting.blocked_ip_blocks = "92.110.0.0/16"
FinalDestination::SSRFDetector.stubs(:lookup_ips).with { |h| h == "badhostname.com" }.returns(["92.110.44.17"])
post_hook.payload_url = "https://badhostname.com"
post_hook.save
expect(post_hook.errors.full_messages).to contain_exactly(
I18n.t("webhooks.payload_url.blocked_or_internal")
)
end
it 'errors if payload_url resolves to an internal IP' do
FinalDestination::SSRFDetector.stubs(:lookup_ips).with { |h| h == "badhostname.com" }.returns(["172.18.11.39"])
post_hook.payload_url = "https://badhostname.com"
post_hook.save
expect(post_hook.errors.full_messages).to contain_exactly(
I18n.t("webhooks.payload_url.blocked_or_internal")
)
end
it "doesn't error if payload_url resolves to an allowed IP" do
FinalDestination::SSRFDetector.stubs(:lookup_ips).with { |h| h == "goodhostname.com" }.returns(["172.32.11.39"])
post_hook.payload_url = "https://goodhostname.com"
post_hook.save!
end
end
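These examples drive a model-level validation; purely as a hedged sketch (the WebHook model itself is not part of this excerpt and may be wired up differently), the behaviour asserted here corresponds to something like:

# Hypothetical sketch, not the shipped model code; assumed to be registered
# with `validate :payload_url_safety` on WebHook.
def payload_url_safety
  return if payload_url.blank?
  FinalDestination::SSRFDetector.lookup_and_filter_ips(URI.parse(payload_url).hostname)
rescue FinalDestination::SSRFDetector::DisallowedIpError
  errors.add(:base, I18n.t("webhooks.payload_url.blocked_or_internal"))
end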
end end
@ -11,6 +11,22 @@ RSpec.describe Admin::ThemesController do
sign_in(admin) sign_in(admin)
end end
let! :repo do
setup_git_repo(
"about.json" => { name: "discourse-branch-header" }.to_json,
)
end
let! :repo_url do
MockGitImporter.register('https://github.com/discourse/discourse-brand-header.git', repo)
end
around(:each) do |group|
MockGitImporter.with_mock do
group.run
end
end
describe '#generate_key_pair' do describe '#generate_key_pair' do
it 'can generate key pairs' do it 'can generate key pairs' do
post "/admin/themes/generate_key_pair.json" post "/admin/themes/generate_key_pair.json"
@ -111,8 +127,8 @@ RSpec.describe Admin::ThemesController do
remote: ' https://github.com/discourse/discourse-brand-header.git ' remote: ' https://github.com/discourse/discourse-brand-header.git '
} }
expect(Theme.allowed_remote_theme_ids.length).to eq(1)
expect(response.status).to eq(201) expect(response.status).to eq(201)
expect(Theme.allowed_remote_theme_ids.length).to eq(1)
end end
it "prevents adding disallowed themes" do it "prevents adding disallowed themes" do
@ -94,5 +94,63 @@ RSpec.describe Admin::WebHooksController do
expect(job_args["event_type"]).to eq("ping") expect(job_args["event_type"]).to eq("ping")
end end
end end
describe '#redeliver_event' do
let!(:web_hook_event) do
WebHookEvent.create!(
web_hook: web_hook,
payload: "abc",
headers: JSON.dump(aa: "1", bb: "2"),
)
end
it 'emits the web hook and updates the response headers and body' do
stub_request(:post, web_hook.payload_url)
.with(body: "abc", headers: { "aa" => 1, "bb" => 2 })
.to_return(
status: 402,
body: "efg",
headers: { "Content-Type" => "application/json", "yoo" => "man" }
)
post "/admin/api/web_hooks/#{web_hook.id}/events/#{web_hook_event.id}/redeliver.json"
expect(response.status).to eq(200)
parsed_event = response.parsed_body["web_hook_event"]
expect(parsed_event["id"]).to eq(web_hook_event.id)
expect(parsed_event["status"]).to eq(402)
expect(JSON.parse(parsed_event["headers"])).to eq({ "aa" => "1", "bb" => "2" })
expect(parsed_event["payload"]).to eq("abc")
expect(JSON.parse(parsed_event["response_headers"])).to eq({ "content-type" => "application/json", "yoo" => "man" })
expect(parsed_event["response_body"]).to eq("efg")
end
it "doesn't emit the web hook if the payload URL resolves to an internal IP" do
FinalDestination::TestHelper.stub_to_fail do
post "/admin/api/web_hooks/#{web_hook.id}/events/#{web_hook_event.id}/redeliver.json"
end
expect(response.status).to eq(200)
parsed_event = response.parsed_body["web_hook_event"]
expect(parsed_event["id"]).to eq(web_hook_event.id)
expect(parsed_event["response_headers"]).to eq({ error: I18n.t("webhooks.payload_url.blocked_or_internal") }.to_json)
expect(parsed_event["status"]).to eq(-1)
expect(parsed_event["response_body"]).to eq(nil)
end
it "doesn't emit the web hook if the payload URL resolves to a blocked IP" do
FinalDestination::TestHelper.stub_to_fail do
post "/admin/api/web_hooks/#{web_hook.id}/events/#{web_hook_event.id}/redeliver.json"
end
expect(response.status).to eq(200)
parsed_event = response.parsed_body["web_hook_event"]
expect(parsed_event["id"]).to eq(web_hook_event.id)
expect(parsed_event["response_headers"]).to eq({ error: I18n.t("webhooks.payload_url.blocked_or_internal") }.to_json)
expect(parsed_event["status"]).to eq(-1)
expect(parsed_event["response_body"]).to eq(nil)
end
end
end end
end end
@ -56,6 +56,10 @@ RSpec.describe ThemesInstallTask do
) )
end end
let :theme_repo_url do
MockGitImporter.register("https://example.com/theme_repo.git", theme_repo)
end
let :component_repo do let :component_repo do
setup_git_repo( setup_git_repo(
"about.json" => about_json(component: true), "about.json" => about_json(component: true),
@ -73,6 +77,10 @@ RSpec.describe ThemesInstallTask do
) )
end end
let :component_repo_url do
MockGitImporter.register("https://example.com/component_repo.git", component_repo)
end
after do after do
`rm -fr #{theme_repo}` `rm -fr #{theme_repo}`
`rm -fr #{component_repo}` `rm -fr #{component_repo}`
@ -83,39 +91,50 @@ RSpec.describe ThemesInstallTask do
expect(Theme.where(name: "fail!").exists?).to eq(false) expect(Theme.where(name: "fail!").exists?).to eq(false)
end end
before do
FinalDestination.stubs(:resolve).with(theme_repo_url).returns(URI.parse(theme_repo_url))
FinalDestination.stubs(:resolve).with(component_repo_url).returns(URI.parse(component_repo_url))
end
around(:each) do |group|
MockGitImporter.with_mock do
group.run
end
end
describe "no options" do describe "no options" do
it 'installs a theme' do it 'installs a theme' do
ThemesInstallTask.install("some_theme": theme_repo) ThemesInstallTask.install("some_theme": theme_repo_url)
expect(Theme.where(name: THEME_NAME).exists?).to eq(true) expect(Theme.where(name: THEME_NAME).exists?).to eq(true)
end end
end end
describe "with options" do describe "with options" do
it 'installs a theme from only a url' do it 'installs a theme from only a url' do
ThemesInstallTask.install({ "some_theme": { "url": theme_repo } }) ThemesInstallTask.install({ "some_theme": { "url": theme_repo_url } })
expect(Theme.where(name: THEME_NAME).exists?).to eq(true) expect(Theme.where(name: THEME_NAME).exists?).to eq(true)
end end
it 'does not set the theme to default if the key/value is not present' do it 'does not set the theme to default if the key/value is not present' do
ThemesInstallTask.install({ "some_theme": { "url": theme_repo } }) ThemesInstallTask.install({ "some_theme": { "url": theme_repo_url } })
theme = Theme.find_by(name: THEME_NAME) theme = Theme.find_by(name: THEME_NAME)
expect(theme.default?).to eq(false) expect(theme.default?).to eq(false)
end end
it 'sets the theme to default if the key/value is true' do it 'sets the theme to default if the key/value is true' do
ThemesInstallTask.install({ "some_theme": { "url": theme_repo, default: true } }) ThemesInstallTask.install({ "some_theme": { "url": theme_repo_url, default: true } })
theme = Theme.find_by(name: THEME_NAME) theme = Theme.find_by(name: THEME_NAME)
expect(theme.default?).to eq(true) expect(theme.default?).to eq(true)
end end
it 'installs theme components, but does not add them to themes' do it 'installs theme components, but does not add them to themes' do
ThemesInstallTask.install({ "some_theme": { "url": component_repo } }) ThemesInstallTask.install({ "some_theme": { "url": component_repo_url } })
theme = Theme.find_by(name: THEME_NAME) theme = Theme.find_by(name: THEME_NAME)
expect(theme.component).to eq(true) expect(theme.component).to eq(true)
end end
it 'adds component to all themes if "add_to_all_themes" is true' do it 'adds component to all themes if "add_to_all_themes" is true' do
ThemesInstallTask.install({ "some_theme": { "url": component_repo, add_to_all_themes: true } }) ThemesInstallTask.install({ "some_theme": { "url": component_repo_url, add_to_all_themes: true } })
theme = Theme.find_by(name: THEME_NAME) theme = Theme.find_by(name: THEME_NAME)
Theme.where(component: false).each do |parent_theme| Theme.where(component: false).each do |parent_theme|
expect(ChildTheme.find_by(parent_theme_id: parent_theme.id, child_theme_id: theme.id).nil?).to eq(false) expect(ChildTheme.find_by(parent_theme_id: parent_theme.id, child_theme_id: theme.id).nil?).to eq(false)
@ -123,7 +142,7 @@ RSpec.describe ThemesInstallTask do
end end
it 'updates theme fields' do it 'updates theme fields' do
ThemesInstallTask.install({ "some_theme": component_repo }) ThemesInstallTask.install({ "some_theme": component_repo_url })
theme = Theme.find_by(name: THEME_NAME) theme = Theme.find_by(name: THEME_NAME)
remote = theme.remote_theme remote = theme.remote_theme
@ -140,7 +159,7 @@ RSpec.describe ThemesInstallTask do
expect(remote.commits_behind).to eq(1) expect(remote.commits_behind).to eq(1)
expect(remote.remote_version).to eq(`cd #{component_repo} && git rev-parse HEAD`.strip) expect(remote.remote_version).to eq(`cd #{component_repo} && git rev-parse HEAD`.strip)
ThemesInstallTask.install({ "some_theme": component_repo }) ThemesInstallTask.install({ "some_theme": component_repo_url })
expect(theme.theme_fields.find_by(name: 'scss', value: scss)).not_to be_nil expect(theme.theme_fields.find_by(name: 'scss', value: scss)).not_to be_nil
expect(remote.reload.commits_behind).to eq(0) expect(remote.reload.commits_behind).to eq(0)
@ -0,0 +1,29 @@
# frozen_string_literal: true
WebMock::HttpLibAdapterRegistry.instance.register(
:final_destination,
Class.new do
OriginalHTTP = FinalDestination::HTTP unless const_defined?(:OriginalHTTP)
def self.enable!
FinalDestination.send(:remove_const, :HTTP)
FinalDestination.send(:const_set, :HTTP, Net::HTTP)
end
def self.disable!
FinalDestination.send(:remove_const, :HTTP)
FinalDestination.send(:const_set, :HTTP, OriginalHTTP)
end
end,
)
module FinalDestination::TestHelper
def self.stub_to_fail(&blk)
WebMock::HttpLibAdapterRegistry.instance.http_lib_adapters[:final_destination].disable!
FinalDestination::SSRFDetector.stubs(:lookup_ips).returns(["0.0.0.0"])
yield
ensure
WebMock::HttpLibAdapterRegistry.instance.http_lib_adapters[:final_destination].enable!
FinalDestination::SSRFDetector.unstub(:lookup_ips)
end
end
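The specs above exercise it like this: inside the block, FinalDestination::HTTP is temporarily swapped back to plain (WebMock-adapted) Net::HTTP and every hostname resolves to 0.0.0.0, so any outbound request trips the SSRF guard:

FinalDestination::TestHelper.stub_to_fail do
  expect {
    Onebox::Helpers.fetch_response("http://example.com/large-file")
  }.to raise_error(FinalDestination::SSRFDetector::DisallowedIpError)
end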
@ -0,0 +1,56 @@
# frozen_string_literal: true
class MockGitImporter < ThemeStore::GitImporter
attr_reader :s3_client
class << self
def with_mock
git_importer = ThemeStore::GitImporter
ThemeStore.send(:remove_const, :GitImporter)
ThemeStore.const_set(:GitImporter, MockGitImporter)
begin
yield
ensure
ThemeStore.send(:remove_const, :GitImporter)
ThemeStore.const_set(:GitImporter, git_importer)
end
end
def register(url, path)
repos[url] = path
url
end
def [](url)
repos.fetch(url)
end
def reset!
@repos = nil
end
private
def repos
@repos ||= {}
end
end
def initialize(url, private_key: nil, branch: nil)
@url = url
@private_key = private_key
@branch = branch
@temp_folder = "#{Pathname.new(Dir.tmpdir).realpath}/discourse_theme_#{SecureRandom.hex}"
end
def import!
begin
path = MockGitImporter[@url]
rescue KeyError
raise_import_error!
end
Discourse::Utils.execute_command("git", "clone", path, @temp_folder)
end
end
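Specs opt in with an around hook and register bare repositories up front, as the RemoteTheme and ThemesInstallTask specs in this commit do:

around(:each) do |group|
  MockGitImporter.with_mock do
    group.run
  end
end

let :theme_repo_url do
  MockGitImporter.register("https://example.com/theme_repo.git", theme_repo)
end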