DEV: Extract shared external upload routes into controller helper (#14984)

This commit refactors the direct external upload routes (generate
presigned put, complete external upload, create/abort/complete
multipart) into a helper which is then included in both
Admin::BackupsController and UploadsController. This is done so
UploadsController doesn't need strange backup logic added to it, and so
each controller implementing this helper can do its own validation and
error handling cleanly.

This is a follow-up to e4350bb966
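
To make the new seam concrete, here is a rough sketch of how a controller
opts into the helper. The Admin::WidgetsController below and its size limit
are made up for illustration only; ExternalUploadHelpers,
ExternalUploadValidationError, validate_before_create_multipart, and
serialize_upload are the hooks this commit introduces:

```ruby
# Hypothetical consumer of the new concern -- not part of this commit.
class Admin::WidgetsController < Admin::AdminController
  include ExternalUploadHelpers

  # Called by ExternalUploadHelpers#create_multipart before any external
  # upload stub is created; raising ExternalUploadValidationError makes
  # the shared endpoint respond with a 422 and this message.
  def validate_before_create_multipart(file_name:, file_size:, upload_type:)
    if file_size > 100.megabytes
      raise ExternalUploadHelpers::ExternalUploadValidationError.new(
        I18n.t("upload.attachments.too_large_humanized", max_size: "100 MB")
      )
    end
  end

  # Used by the shared complete_external_upload_via_manager to decide
  # what JSON to return once the upload has been promoted to a record.
  def self.serialize_upload(upload)
    UploadSerializer.new(upload, root: nil).as_json
  end
end
```

Each controller only supplies validation and serialization like this; the
rate limiting, external upload stub lookup, and S3 multipart plumbing stay
in the shared module.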
Martin Brennan 2021-11-18 09:17:23 +10:00 committed by GitHub
parent b86127ad12
commit b96c10a903
10 changed files with 541 additions and 439 deletions

View File

@@ -7,6 +7,7 @@ export default Component.extend(UppyUploadMixin, {
   tagName: "span",
   type: "backup",
   useMultipartUploadsIfAvailable: true,
+  uploadRootPath: "/admin/backups",

   @discourseComputed("uploading", "uploadProgress")
   uploadButtonText(uploading, progress) {

View File

@@ -33,6 +33,7 @@ import bootbox from "bootbox";
 // functionality and event binding.
 //
 export default Mixin.create(ExtendableUploader, UppyS3Multipart, {
+  uploadRootPath: "/uploads",
   uploadTargetBound: false,

   @observes("composerModel.uploadCancelled")

View File

@@ -1,4 +1,5 @@
 import Mixin from "@ember/object/mixin";
+import getUrl from "discourse-common/lib/get-url";
 import { bind } from "discourse-common/utils/decorators";
 import { Promise } from "rsvp";
 import { ajax } from "discourse/lib/ajax";
@@ -50,7 +51,7 @@ export default Mixin.create({
       data.metadata = { "sha1-checksum": file.meta.sha1_checksum };
     }

-    return ajax("/uploads/create-multipart.json", {
+    return ajax(getUrl(`${this.uploadRootPath}/create-multipart.json`), {
       type: "POST",
       data,
       // uppy is inconsistent, an error here fires the upload-error event
@@ -70,13 +71,16 @@ export default Mixin.create({
     if (file.preparePartsRetryAttempts === undefined) {
       file.preparePartsRetryAttempts = 0;
     }
-    return ajax("/uploads/batch-presign-multipart-parts.json", {
-      type: "POST",
-      data: {
-        part_numbers: partData.partNumbers,
-        unique_identifier: file.meta.unique_identifier,
-      },
-    })
+    return ajax(
+      getUrl(`${this.uploadRootPath}/batch-presign-multipart-parts.json`),
+      {
+        type: "POST",
+        data: {
+          part_numbers: partData.partNumbers,
+          unique_identifier: file.meta.unique_identifier,
+        },
+      }
+    )
       .then((data) => {
         if (file.preparePartsRetryAttempts) {
           delete file.preparePartsRetryAttempts;
@@ -122,7 +126,7 @@ export default Mixin.create({
     const parts = data.parts.map((part) => {
       return { part_number: part.PartNumber, etag: part.ETag };
     });
-    return ajax("/uploads/complete-multipart.json", {
+    return ajax(getUrl(`${this.uploadRootPath}/complete-multipart.json`), {
       type: "POST",
       contentType: "application/json",
       data: JSON.stringify({
@@ -155,7 +159,7 @@ export default Mixin.create({
       return;
     }

-    return ajax("/uploads/abort-multipart.json", {
+    return ajax(getUrl(`${this.uploadRootPath}/abort-multipart.json`), {
       type: "POST",
       data: {
         external_upload_identifier: uploadId,

View File

@@ -27,6 +27,7 @@ export default Mixin.create(UppyS3Multipart, {
   autoStartUploads: true,
   _inProgressUploads: 0,
   id: null,
+  uploadRootPath: "/uploads",

   uploadDone() {
     warn("You should implement `uploadDone`", {
@@ -223,7 +224,7 @@ export default Mixin.create(UppyS3Multipart, {
       data.metadata = { "sha1-checksum": file.meta.sha1_checksum };
     }

-    return ajax(getUrl("/uploads/generate-presigned-put"), {
+    return ajax(getUrl(`${this.uploadRootPath}/generate-presigned-put`), {
       type: "POST",
       data,
     })
@@ -277,7 +278,7 @@ export default Mixin.create(UppyS3Multipart, {
   },

   _completeExternalUpload(file) {
-    return ajax(getUrl("/uploads/complete-external-upload"), {
+    return ajax(getUrl(`${this.uploadRootPath}/complete-external-upload`), {
       type: "POST",
       data: deepMerge(
         { unique_identifier: file.meta.uniqueUploadIdentifier },

View File

@@ -4,6 +4,8 @@ require "backup_restore"
 require "backup_restore/backup_store"

 class Admin::BackupsController < Admin::AdminController
+  include ExternalUploadHelpers
+
   before_action :ensure_backups_enabled

   skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk]
@@ -234,4 +236,24 @@ class Admin::BackupsController < Admin::AdminController
   def render_error(message_key)
     render json: failed_json.merge(message: I18n.t(message_key))
   end
+
+  def validate_before_create_multipart(file_name:, file_size:, upload_type:)
+    raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_extension?(file_name)
+    raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.invalid_filename")) unless valid_filename?(file_name)
+  end
+
+  def self.serialize_upload(_upload)
+    {} # noop, the backup does not create an upload record
+  end
+
+  def create_direct_multipart_upload
+    begin
+      yield
+    rescue BackupRestore::BackupStore::StorageError => err
+      message = debug_upload_error(err, I18n.t("upload.create_multipart_failure", additional_detail: err.message))
+      raise ExternalUploadHelpers::ExternalUploadValidationError.new(message)
+    rescue BackupRestore::BackupStore::BackupFileExists
+      raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.file_exists"))
+    end
+  end
 end

View File

@@ -3,36 +3,17 @@
 require "mini_mime"

 class UploadsController < ApplicationController
+  include ExternalUploadHelpers
+
   requires_login except: [:show, :show_short, :show_secure]

   skip_before_action :preload_json, :check_xhr, :redirect_to_login_if_required, only: [:show, :show_short, :show_secure]
   protect_from_forgery except: :show

   before_action :is_asset_path, :apply_cdn_headers, only: [:show, :show_short, :show_secure]
-  before_action :external_store_check, only: [
-    :show_secure,
-    :generate_presigned_put,
-    :complete_external_upload,
-    :create_multipart,
-    :batch_presign_multipart_parts,
-    :abort_multipart,
-    :complete_multipart
-  ]
-  before_action :direct_s3_uploads_check, only: [
-    :generate_presigned_put,
-    :complete_external_upload,
-    :create_multipart,
-    :batch_presign_multipart_parts,
-    :abort_multipart,
-    :complete_multipart
-  ]
-  before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put]
+  before_action :external_store_check, only: [:show_secure]

   SECURE_REDIRECT_GRACE_SECONDS = 5
-  PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10
-  CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
-  COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
-  BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE = 10

   def create
     # capture current user for block later on
@@ -208,285 +189,25 @@ class UploadsController < ApplicationController
     }
   end

def generate_presigned_put
RateLimiter.new(
current_user, "generate-presigned-put-upload-stub", PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
file_name = params.require(:file_name)
file_size = params.require(:file_size).to_i
type = params.require(:type)
if file_size_too_big?(file_name, file_size)
return render_json_error(
I18n.t("upload.attachments.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)),
status: 422
)
end
external_upload_data = ExternalUploadManager.create_direct_upload(
current_user: current_user,
file_name: file_name,
file_size: file_size,
upload_type: type,
metadata: parse_allowed_metadata(params[:metadata])
)
render json: external_upload_data
end
def complete_external_upload
unique_identifier = params.require(:unique_identifier)
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
complete_external_upload_via_manager(external_upload_stub)
end
def complete_external_upload_via_manager(external_upload_stub)
opts = {
for_private_message: params[:for_private_message]&.to_s == "true",
for_site_setting: params[:for_site_setting]&.to_s == "true",
pasted: params[:pasted]&.to_s == "true",
}
external_upload_manager = ExternalUploadManager.new(external_upload_stub, opts)
hijack do
begin
upload = external_upload_manager.transform!
if upload.errors.empty?
response_serialized = external_upload_stub.upload_type != "backup" ? UploadsController.serialize_upload(upload) : {}
external_upload_stub.destroy!
render json: response_serialized, status: 200
else
render_json_error(upload.errors.to_hash.values.flatten, status: 422)
end
rescue ExternalUploadManager::SizeMismatchError => err
render_json_error(
debug_upload_error(err, "upload.size_mismatch_failure", additional_detail: err.message),
status: 422
)
rescue ExternalUploadManager::ChecksumMismatchError => err
render_json_error(
debug_upload_error(err, "upload.checksum_mismatch_failure", additional_detail: err.message),
status: 422
)
rescue ExternalUploadManager::CannotPromoteError => err
render_json_error(
debug_upload_error(err, "upload.cannot_promote_failure", additional_detail: err.message),
status: 422
)
rescue ExternalUploadManager::DownloadFailedError, Aws::S3::Errors::NotFound => err
render_json_error(
debug_upload_error(err, "upload.download_failure", additional_detail: err.message),
status: 422
)
rescue => err
Discourse.warn_exception(
err, message: "Complete external upload failed unexpectedly for user #{current_user.id}"
)
render_json_error(I18n.t("upload.failed"), status: 422)
end
end
end
def create_multipart
RateLimiter.new(
current_user, "create-multipart-upload", CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
file_name = params.require(:file_name)
file_size = params.require(:file_size).to_i
upload_type = params.require(:upload_type)
if upload_type == "backup"
ensure_staff
return render_json_error(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_backup_extension?(file_name)
return render_json_error(I18n.t("backup.invalid_filename")) unless valid_backup_filename?(file_name)
else
if file_size_too_big?(file_name, file_size)
return render_json_error(
I18n.t("upload.attachments.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)),
status: 422
)
end
end
begin
external_upload_data = ExternalUploadManager.create_direct_multipart_upload(
current_user: current_user,
file_name: file_name,
file_size: file_size,
upload_type: upload_type,
metadata: parse_allowed_metadata(params[:metadata])
)
rescue Aws::S3::Errors::ServiceError => err
return render_json_error(
debug_upload_error(err, "upload.create_multipart_failure", additional_detail: err.message),
status: 422
)
rescue BackupRestore::BackupStore::BackupFileExists
return render_json_error(I18n.t("backup.file_exists"), status: 422)
rescue BackupRestore::BackupStore::StorageError => err
return render_json_error(
debug_upload_error(err, "upload.create_multipart_failure", additional_detail: err.message),
status: 422
)
end
render json: external_upload_data
end
def batch_presign_multipart_parts
part_numbers = params.require(:part_numbers)
unique_identifier = params.require(:unique_identifier)
RateLimiter.new(
current_user, "batch-presign", BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
part_numbers = part_numbers.map do |part_number|
validate_part_number(part_number)
end
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
if !multipart_upload_exists?(external_upload_stub)
return render_404
end
store = multipart_store(external_upload_stub.upload_type)
presigned_urls = {}
part_numbers.each do |part_number|
presigned_urls[part_number] = store.presign_multipart_part(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
part_number: part_number
)
end
render json: { presigned_urls: presigned_urls }
end
def validate_part_number(part_number)
part_number = part_number.to_i
if !part_number.between?(1, 10000)
raise Discourse::InvalidParameters.new(
"Each part number should be between 1 and 10000"
)
end
part_number
end
def multipart_upload_exists?(external_upload_stub)
store = multipart_store(external_upload_stub.upload_type)
begin
store.list_multipart_parts(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
max_parts: 1
)
rescue Aws::S3::Errors::NoSuchUpload => err
debug_upload_error(err, "upload.external_upload_not_found", { additional_detail: "path: #{external_upload_stub.key}" })
return false
end
true
end
def abort_multipart
external_upload_identifier = params.require(:external_upload_identifier)
external_upload_stub = ExternalUploadStub.find_by(
external_upload_identifier: external_upload_identifier
)
# The stub could have already been deleted by an earlier error via
# ExternalUploadManager, so we consider this a great success if the
# stub is already gone.
return render json: success_json if external_upload_stub.blank?
return render_404 if external_upload_stub.created_by_id != current_user.id
store = multipart_store(external_upload_stub.upload_type)
begin
store.abort_multipart(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key
)
rescue Aws::S3::Errors::ServiceError => err
return render_json_error(
debug_upload_error(err, "upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}"),
status: 422
)
end
external_upload_stub.destroy!
render json: success_json
end
def complete_multipart
unique_identifier = params.require(:unique_identifier)
parts = params.require(:parts)
RateLimiter.new(
current_user, "complete-multipart-upload", COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
if !multipart_upload_exists?(external_upload_stub)
return render_404
end
store = multipart_store(external_upload_stub.upload_type)
parts = parts.map do |part|
part_number = part[:part_number]
etag = part[:etag]
part_number = validate_part_number(part_number)
if etag.blank?
raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number")
end
# this is done so it's an array of hashes rather than an array of
# ActionController::Parameters
{ part_number: part_number, etag: etag }
end.sort_by do |part|
part[:part_number]
end
begin
complete_response = store.complete_multipart(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
parts: parts
)
rescue Aws::S3::Errors::ServiceError => err
return render_json_error(
debug_upload_error(err, "upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}"),
status: 422
)
end
complete_external_upload_via_manager(external_upload_stub)
end
   protected

-  def multipart_store(upload_type)
-    ensure_staff if upload_type == "backup"
-    ExternalUploadManager.store_for_upload_type(upload_type)
+  def validate_before_create_multipart(file_name:, file_size:, upload_type:)
+    validate_file_size(file_name: file_name, file_size: file_size)
+  end
+
+  def validate_before_create_direct_upload(file_name:, file_size:, upload_type:)
+    validate_file_size(file_name: file_name, file_size: file_size)
+  end
+
+  def validate_file_size(file_name:, file_size:)
+    if file_size_too_big?(file_name, file_size)
+      raise ExternalUploadValidationError.new(
+        I18n.t(
+          "upload.attachments.too_large_humanized",
+          max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)
+        )
+      )
+    end
   end

   def force_download?
@@ -497,10 +218,6 @@ class UploadsController < ApplicationController
     raise Discourse::InvalidParameters.new("XHR not allowed")
   end

-  def render_404
-    raise Discourse::NotFound
-  end
-
   def self.serialize_upload(data)
     # as_json.as_json is not a typo... as_json in AM serializer returns keys as symbols, we need them
     # as strings here
@@ -556,18 +273,6 @@ class UploadsController < ApplicationController

   private

-  def external_store_check
-    return render_404 if !Discourse.store.external?
-  end
-
-  def direct_s3_uploads_check
-    return render_404 if !SiteSetting.enable_direct_s3_uploads
-  end
-
-  def can_upload_external?
-    raise Discourse::InvalidAccess if !guardian.can_upload_external?
-  end
-
   # We can pre-emptively check size for attachments, but not for images
   # as they may be further reduced in size by UploadCreator (at this point
   # they may have already been reduced in size by preprocessors)
@@ -593,29 +298,12 @@ class UploadsController < ApplicationController
     send_file(file_path, opts)
   end

-  def debug_upload_error(err, translation_key, translation_params = {})
-    return if !SiteSetting.enable_upload_debug_mode
-    message = I18n.t(translation_key, translation_params)
-    Discourse.warn_exception(err, message: message)
-    Rails.env.development? ? message : I18n.t("upload.failed")
-  end
-
-  # don't want people posting arbitrary S3 metadata so we just take the
-  # one we need. all of these will be converted to x-amz-meta- metadata
-  # fields in S3 so it's best to use dashes in the names for consistency
-  #
-  # this metadata is baked into the presigned url and is not altered when
-  # sending the PUT from the clientside to the presigned url
-  def parse_allowed_metadata(metadata)
-    return if metadata.blank?
-    metadata.permit("sha1-checksum").to_h
-  end
-
-  def valid_backup_extension?(filename)
-    /\.(tar\.gz|t?gz)$/i =~ filename
-  end
-
-  def valid_backup_filename?(filename)
-    !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
-  end
+  def create_direct_multipart_upload
+    begin
+      yield
+    rescue Aws::S3::Errors::ServiceError => err
+      message = debug_upload_error(err, I18n.t("upload.create_multipart_failure", additional_detail: err.message))
+      raise ExternalUploadHelpers::ExternalUploadValidationError.new(message)
+    end
+  end
 end

View File

@ -304,6 +304,12 @@ Discourse::Application.routes.draw do
post "restore" => "backups#restore", constraints: { id: RouteFormat.backup } post "restore" => "backups#restore", constraints: { id: RouteFormat.backup }
end end
collection do collection do
# multipart uploads
post "create-multipart" => "backups#create_multipart", format: :json
post "complete-multipart" => "backups#complete_multipart", format: :json
post "abort-multipart" => "backups#abort_multipart", format: :json
post "batch-presign-multipart-parts" => "backups#batch_presign_multipart_parts", format: :json
get "logs" => "backups#logs" get "logs" => "backups#logs"
get "status" => "backups#status" get "status" => "backups#status"
delete "cancel" => "backups#cancel" delete "cancel" => "backups#cancel"

View File

@@ -0,0 +1,347 @@
# frozen_string_literal: true
# Extends controllers with the methods required to do direct
# external uploads.
module ExternalUploadHelpers
extend ActiveSupport::Concern
class ExternalUploadValidationError < StandardError; end
PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10
CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE = 10
included do
before_action :external_store_check, only: [
:generate_presigned_put,
:complete_external_upload,
:create_multipart,
:batch_presign_multipart_parts,
:abort_multipart,
:complete_multipart
]
before_action :direct_s3_uploads_check, only: [
:generate_presigned_put,
:complete_external_upload,
:create_multipart,
:batch_presign_multipart_parts,
:abort_multipart,
:complete_multipart
]
before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put]
end
def generate_presigned_put
RateLimiter.new(
current_user, "generate-presigned-put-upload-stub", ExternalUploadHelpers::PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
file_name = params.require(:file_name)
file_size = params.require(:file_size).to_i
type = params.require(:type)
begin
validate_before_create_direct_upload(
file_name: file_name,
file_size: file_size,
upload_type: type
)
rescue ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
external_upload_data = ExternalUploadManager.create_direct_upload(
current_user: current_user,
file_name: file_name,
file_size: file_size,
upload_type: type,
metadata: parse_allowed_metadata(params[:metadata])
)
render json: external_upload_data
end
def complete_external_upload
unique_identifier = params.require(:unique_identifier)
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
complete_external_upload_via_manager(external_upload_stub)
end
def create_multipart
RateLimiter.new(
current_user, "create-multipart-upload", ExternalUploadHelpers::CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
file_name = params.require(:file_name)
file_size = params.require(:file_size).to_i
upload_type = params.require(:upload_type)
begin
validate_before_create_multipart(
file_name: file_name,
file_size: file_size,
upload_type: upload_type
)
rescue ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
begin
external_upload_data = create_direct_multipart_upload do
ExternalUploadManager.create_direct_multipart_upload(
current_user: current_user,
file_name: file_name,
file_size: file_size,
upload_type: upload_type,
metadata: parse_allowed_metadata(params[:metadata])
)
end
rescue ExternalUploadHelpers::ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
render json: external_upload_data
end
def batch_presign_multipart_parts
part_numbers = params.require(:part_numbers)
unique_identifier = params.require(:unique_identifier)
RateLimiter.new(
current_user, "batch-presign", ExternalUploadHelpers::BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
part_numbers = part_numbers.map do |part_number|
validate_part_number(part_number)
end
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
if !multipart_upload_exists?(external_upload_stub)
return render_404
end
store = multipart_store(external_upload_stub.upload_type)
presigned_urls = {}
part_numbers.each do |part_number|
presigned_urls[part_number] = store.presign_multipart_part(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
part_number: part_number
)
end
render json: { presigned_urls: presigned_urls }
end
def multipart_upload_exists?(external_upload_stub)
store = multipart_store(external_upload_stub.upload_type)
begin
store.list_multipart_parts(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
max_parts: 1
)
rescue Aws::S3::Errors::NoSuchUpload => err
debug_upload_error(err, I18n.t("upload.external_upload_not_found", additional_detail: "path: #{external_upload_stub.key}"))
return false
end
true
end
def abort_multipart
external_upload_identifier = params.require(:external_upload_identifier)
external_upload_stub = ExternalUploadStub.find_by(
external_upload_identifier: external_upload_identifier
)
# The stub could have already been deleted by an earlier error via
# ExternalUploadManager, so we consider this a great success if the
# stub is already gone.
return render json: success_json if external_upload_stub.blank?
return render_404 if external_upload_stub.created_by_id != current_user.id
store = multipart_store(external_upload_stub.upload_type)
begin
store.abort_multipart(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key
)
rescue Aws::S3::Errors::ServiceError => err
return render_json_error(
debug_upload_error(err, I18n.t("upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")),
status: 422
)
end
external_upload_stub.destroy!
render json: success_json
end
def complete_multipart
unique_identifier = params.require(:unique_identifier)
parts = params.require(:parts)
RateLimiter.new(
current_user, "complete-multipart-upload", ExternalUploadHelpers::COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
).performed!
external_upload_stub = ExternalUploadStub.find_by(
unique_identifier: unique_identifier, created_by: current_user
)
return render_404 if external_upload_stub.blank?
if !multipart_upload_exists?(external_upload_stub)
return render_404
end
store = multipart_store(external_upload_stub.upload_type)
parts = parts.map do |part|
part_number = part[:part_number]
etag = part[:etag]
part_number = validate_part_number(part_number)
if etag.blank?
raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number")
end
# this is done so it's an array of hashes rather than an array of
# ActionController::Parameters
{ part_number: part_number, etag: etag }
end.sort_by do |part|
part[:part_number]
end
begin
complete_response = store.complete_multipart(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
parts: parts
)
rescue Aws::S3::Errors::ServiceError => err
return render_json_error(
debug_upload_error(err, I18n.t("upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")),
status: 422
)
end
complete_external_upload_via_manager(external_upload_stub)
end
private
def complete_external_upload_via_manager(external_upload_stub)
opts = {
for_private_message: params[:for_private_message]&.to_s == "true",
for_site_setting: params[:for_site_setting]&.to_s == "true",
pasted: params[:pasted]&.to_s == "true",
}
external_upload_manager = ExternalUploadManager.new(external_upload_stub, opts)
hijack do
begin
upload = external_upload_manager.transform!
if upload.errors.empty?
response_serialized = self.class.serialize_upload(upload)
external_upload_stub.destroy!
render json: response_serialized, status: 200
else
render_json_error(upload.errors.to_hash.values.flatten, status: 422)
end
rescue ExternalUploadManager::SizeMismatchError => err
render_json_error(
debug_upload_error(err, I18n.t("upload.size_mismatch_failure", additional_detail: err.message)),
status: 422
)
rescue ExternalUploadManager::ChecksumMismatchError => err
render_json_error(
debug_upload_error(err, I18n.t("upload.checksum_mismatch_failure", additional_detail: err.message)),
status: 422
)
rescue ExternalUploadManager::CannotPromoteError => err
render_json_error(
debug_upload_error(err, I18n.t("upload.cannot_promote_failure", additional_detail: err.message)),
status: 422
)
rescue ExternalUploadManager::DownloadFailedError, Aws::S3::Errors::NotFound => err
render_json_error(
debug_upload_error(err, I18n.t("upload.download_failure", additional_detail: err.message)),
status: 422
)
rescue => err
Discourse.warn_exception(
err, message: "Complete external upload failed unexpectedly for user #{current_user.id}"
)
render_json_error(I18n.t("upload.failed"), status: 422)
end
end
end
def validate_before_create_direct_upload(file_name:, file_size:, upload_type:)
# noop, should be overridden
end
def validate_before_create_multipart(file_name:, file_size:, upload_type:)
# noop, should be overridden
end
def validate_part_number(part_number)
part_number = part_number.to_i
if !part_number.between?(1, 10000)
raise Discourse::InvalidParameters.new(
"Each part number should be between 1 and 10000"
)
end
part_number
end
def debug_upload_error(err, friendly_message)
return if !SiteSetting.enable_upload_debug_mode
Discourse.warn_exception(err, message: friendly_message)
Rails.env.development? ? friendly_message : I18n.t("upload.failed")
end
def multipart_store(upload_type)
ExternalUploadManager.store_for_upload_type(upload_type)
end
def external_store_check
return render_404 if !Discourse.store.external?
end
def direct_s3_uploads_check
return render_404 if !SiteSetting.enable_direct_s3_uploads
end
def can_upload_external?
raise Discourse::InvalidAccess if !guardian.can_upload_external?
end
# don't want people posting arbitrary S3 metadata so we just take the
# one we need. all of these will be converted to x-amz-meta- metadata
# fields in S3 so it's best to use dashes in the names for consistency
#
# this metadata is baked into the presigned url and is not altered when
# sending the PUT from the clientside to the presigned url
def parse_allowed_metadata(metadata)
return if metadata.blank?
metadata.permit("sha1-checksum").to_h
end
def render_404
raise Discourse::NotFound
end
end

View File

@@ -416,4 +416,120 @@ RSpec.describe Admin::BackupsController do
       expect(response).to be_not_found
     end
   end
describe "S3 multipart uploads" do
let(:upload_type) { "backup" }
let(:test_bucket_prefix) { "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}" }
let(:backup_file_exists_response) { { status: 404 } }
let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" }
before do
setup_s3
SiteSetting.enable_direct_s3_uploads = true
SiteSetting.s3_backup_bucket = "s3-backup-bucket"
SiteSetting.backup_location = BackupLocationSiteSetting::S3
stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/").to_return(status: 200, body: "", headers: {})
stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/default/test.tar.gz").to_return(
backup_file_exists_response
)
end
context "when the user is not admin" do
before do
admin.update(admin: false)
end
it "errors with invalid access error" do
post "/admin/backups/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(404)
end
end
context "when the user is admin" do
def stub_create_multipart_backup_request
BackupRestore::S3BackupStore.any_instance.stubs(:temporary_upload_path).returns(
"temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz"
)
create_multipart_result = <<~BODY
<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
<InitiateMultipartUploadResult>
<Bucket>s3-backup-bucket</Bucket>
<Key>temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz</Key>
<UploadId>#{mock_multipart_upload_id}</UploadId>
</InitiateMultipartUploadResult>
BODY
stub_request(:post, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz?uploads").
to_return(status: 200, body: create_multipart_result)
end
it "creates the multipart upload" do
stub_create_multipart_backup_request
post "/admin/backups/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(200)
result = response.parsed_body
external_upload_stub = ExternalUploadStub.where(
unique_identifier: result["unique_identifier"],
original_filename: "test.tar.gz",
created_by: admin,
upload_type: upload_type,
key: result["key"],
multipart: true
)
expect(external_upload_stub.exists?).to eq(true)
end
context "when backup of same filename already exists" do
let(:backup_file_exists_response) { { status: 200, body: "" } }
it "throws an error" do
post "/admin/backups/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"]).to include(
I18n.t("backup.file_exists")
)
end
end
context "when filename is invalid" do
it "throws an error" do
post "/admin/backups/create-multipart.json", params: {
file_name: "blah $$##.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"]).to include(
I18n.t("backup.invalid_filename")
)
end
end
context "when extension is invalid" do
it "throws an error" do
post "/admin/backups/create-multipart.json", params: {
file_name: "test.png",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"]).to include(
I18n.t("backup.backup_file_should_be_tar_gz")
)
end
end
end
end
end

View File

@@ -764,7 +764,7 @@ describe UploadsController do
       RateLimiter.enable
       RateLimiter.clear_all!

-      stub_const(UploadsController, "PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE", 1) do
+      stub_const(ExternalUploadHelpers, "PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE", 1) do
        post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 }
        post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 }
      end
@@ -793,8 +793,6 @@ describe UploadsController do
      sign_in(user)
      SiteSetting.enable_direct_s3_uploads = true
      setup_s3
-      SiteSetting.s3_backup_bucket = "s3-backup-bucket"
-      SiteSetting.backup_location = BackupLocationSiteSetting::S3
    end

    it "errors if the correct params are not provided" do
@@ -902,7 +900,7 @@ describe UploadsController do
      RateLimiter.clear_all!

      stub_create_multipart_request
-      stub_const(UploadsController, "CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
+      stub_const(ExternalUploadHelpers, "CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
        post "/uploads/create-multipart.json", params: {
          file_name: "test.png",
          upload_type: "composer",
@@ -918,88 +916,6 @@ describe UploadsController do
      expect(response.status).to eq(429)
    end
  end
context "when the upload_type is backup" do
let(:upload_type) { "backup" }
let(:backup_file_exists_response) { { status: 404 } }
before do
stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/").to_return(status: 200, body: "", headers: {})
stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/default/test.tar.gz").to_return(
backup_file_exists_response
)
end
context "when the user is not admin" do
it "errors with invalid access error" do
post "/uploads/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(403)
end
end
context "when the user is admin" do
before do
user.update(admin: true)
end
def stub_create_multipart_backup_request
BackupRestore::S3BackupStore.any_instance.stubs(:temporary_upload_path).returns(
"temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz"
)
create_multipart_result = <<~BODY
<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n
<InitiateMultipartUploadResult>
<Bucket>s3-backup-bucket</Bucket>
<Key>temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz</Key>
<UploadId>#{mock_multipart_upload_id}</UploadId>
</InitiateMultipartUploadResult>
BODY
stub_request(:post, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz?uploads").
to_return(status: 200, body: create_multipart_result)
end
it "creates the multipart upload" do
stub_create_multipart_backup_request
post "/uploads/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(200)
result = response.parsed_body
external_upload_stub = ExternalUploadStub.where(
unique_identifier: result["unique_identifier"],
original_filename: "test.tar.gz",
created_by: user,
upload_type: upload_type,
key: result["key"],
multipart: true
)
expect(external_upload_stub.exists?).to eq(true)
end
context "when backup of same filename already exists" do
let(:backup_file_exists_response) { { status: 200, body: "" } }
it "throws an error" do
post "/uploads/create-multipart.json", params: {
file_name: "test.tar.gz",
upload_type: upload_type,
file_size: 4098
}
expect(response.status).to eq(422)
expect(response.parsed_body["errors"]).to include(
I18n.t("backup.file_exists")
)
end
end
end
end
  end

  context "when the store is not external" do
@@ -1127,7 +1043,7 @@ describe UploadsController do
      RateLimiter.enable
      RateLimiter.clear_all!

-      stub_const(UploadsController, "BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE", 1) do
+      stub_const(ExternalUploadHelpers, "BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE", 1) do
        stub_list_multipart_request
        post "/uploads/batch-presign-multipart-parts.json", params: {
          unique_identifier: external_upload_stub.unique_identifier,
@@ -1316,7 +1232,7 @@ describe UploadsController do
      RateLimiter.enable
      RateLimiter.clear_all!

-      stub_const(UploadsController, "COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
+      stub_const(ExternalUploadHelpers, "COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
        post "/uploads/complete-multipart.json", params: {
          unique_identifier: "blah",
          parts: [{ part_number: 1, etag: "test1" }, { part_number: 2, etag: "test2" }]