mirror of
https://github.com/discourse/discourse.git
synced 2024-12-11 20:55:25 +08:00
e4350bb966
This PR introduces a new `enable_experimental_backup_uploads` site setting (default false and hidden), which when enabled alongside `enable_direct_s3_uploads` will allow for direct S3 multipart uploads of backup .tar.gz files. To make multipart external uploads work with both the S3BackupStore and the S3Store, I've had to move several methods out of S3Store and into S3Helper, including: * presigned_url * create_multipart * abort_multipart * complete_multipart * presign_multipart_part * list_multipart_parts Then, S3Store and S3BackupStore either delegate directly to S3Helper or have their own special methods to call S3Helper for these methods. FileStore.temporary_upload_path has also removed its dependence on upload_path, and can now be used interchangeably between the stores. A similar change was made in the frontend as well, moving the multipart related JS code out of ComposerUppyUpload and into a mixin of its own, so it can also be used by UppyUploadMixin. Some changes to ExternalUploadManager had to be made here as well. The backup direct uploads do not need an Upload record made for them in the database, so they can be moved to their final S3 resting place when completing the multipart upload. This changeset is not perfect; it introduces some special cases in UploadController to handle backups that was previously in BackupController, because UploadController is where the multipart routes are located. A subsequent pull request will pull these routes into a module or some other sharing pattern, along with hooks, so the backup controller and the upload controller (and any future controllers that may need them) can include these routes in a nicer way.
358 lines
15 KiB
Ruby
358 lines
15 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
require 'rails_helper'
|
|
require 'file_store/s3_store'
|
|
|
|
RSpec.describe 'Multisite s3 uploads', type: :multisite do
  let(:original_filename) { "smallest.png" }
  let(:uploaded_file) { file_from_fixtures(original_filename) }
  # SHA1 of the fixture's contents; it appears in the S3 object keys the
  # store generates, so expected paths below are deterministic.
  let(:upload_sha1) { Digest::SHA1.hexdigest(File.read(uploaded_file)) }
  # Per-database upload path prefix (differs between multisite connections).
  let(:upload_path) { Discourse.store.upload_path }

  # Builds (without persisting) an Upload whose sha1 matches the fixture
  # file, so store-generated paths/URLs are stable across examples.
  def build_upload
    Fabricate.build(:upload, sha1: upload_sha1, id: 1, original_filename: original_filename)
  end
|
|
|
|
  context 'uploading to s3' do
    before(:each) do
      setup_s3
    end

    describe "#store_upload" do
      # Stubbed AWS client so no network traffic occurs; the helper is built
      # directly so its #upload call can be mocked below.
      let(:s3_client) { Aws::S3::Client.new(stub_responses: true) }
      let(:s3_helper) { S3Helper.new(SiteSetting.s3_upload_bucket, '', client: s3_client) }
      let(:store) { FileStore::S3Store.new(s3_helper) }
      # Base options every store_upload call is expected to forward to
      # S3Helper#upload; non-image types additionally get a
      # content_disposition (exercised in the contexts below).
      let(:upload_opts) do
        {
          acl: "public-read",
          cache_control: "max-age=31556952, public, immutable",
          content_type: "image/png"
        }
      end

      it "does not provide a content_disposition for images" do
        s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts).returns(["path", "etag"])
        upload = build_upload
        store.store_upload(uploaded_file, upload)
      end

      context "when the file is a PDF" do
        let(:original_filename) { "small.pdf" }
        let(:uploaded_file) { file_from_fixtures("small.pdf", "pdf") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "application/pdf" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      context "when the file is a video" do
        let(:original_filename) { "small.mp4" }
        let(:uploaded_file) { file_from_fixtures("small.mp4", "media") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "application/mp4" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      context "when the file is audio" do
        let(:original_filename) { "small.mp3" }
        let(:uploaded_file) { file_from_fixtures("small.mp3", "media") }

        it "adds an attachment content-disposition with the original filename" do
          disp_opts = { content_disposition: "attachment; filename=\"#{original_filename}\"; filename*=UTF-8''#{original_filename}", content_type: "audio/mpeg" }
          s3_helper.expects(:upload).with(uploaded_file, kind_of(String), upload_opts.merge(disp_opts)).returns(["path", "etag"])
          upload = build_upload
          store.store_upload(uploaded_file, upload)
        end
      end

      it "returns the correct url for default and second multisite db" do
        test_multisite_connection('default') do
          upload = build_upload
          expect(store.store_upload(uploaded_file, upload)).to eq(
            "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png"
          )
          expect(upload.etag).to eq("ETag")
        end

        test_multisite_connection('second') do
          # Shadow the outer let: the second db has its own upload path,
          # which must show up in the returned URL.
          upload_path = Discourse.store.upload_path
          upload = build_upload
          expect(store.store_upload(uploaded_file, upload)).to eq(
            "//#{SiteSetting.s3_upload_bucket}.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/c530c06cf89c410c0355d7852644a73fc3ec8c04.png"
          )
          expect(upload.etag).to eq("ETag")
        end
      end
    end
  end
|
|
|
|
  context 'removal from s3' do
    before do
      setup_s3
    end

    describe "#remove_upload" do
      let(:store) { FileStore::S3Store.new }
      let(:client) { Aws::S3::Client.new(stub_responses: true) }
      let(:resource) { Aws::S3::Resource.new(client: client) }
      let(:s3_bucket) { resource.bucket(SiteSetting.s3_upload_bucket) }
      let(:s3_helper) { store.s3_helper }

      it "removes the file from s3 on multisite" do
        test_multisite_connection('default') do
          upload = build_upload
          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_object = stub

          # Removal copies the object to the tombstone prefix (namespaced by
          # multisite db, here "default") and then deletes the original key.
          s3_bucket.expects(:object).with("uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
          expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:delete)

          store.remove_upload(upload)
        end
      end

      it "removes the file from s3 on another multisite db" do
        test_multisite_connection('second') do
          upload = build_upload
          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_object = stub

          # Same flow as above, but the tombstone key is namespaced by the
          # "second" db.
          s3_bucket.expects(:object).with("uploads/tombstone/second/original/1X/#{upload.sha1}.png").returns(s3_object)
          expect_copy_from(s3_object, "s3-upload-bucket/#{upload_path}/original/1X/#{upload.sha1}.png")
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
          s3_object.expects(:delete)

          store.remove_upload(upload)
        end
      end

      describe "when s3_upload_bucket includes folders path" do
        before do
          SiteSetting.s3_upload_bucket = "s3-upload-bucket/discourse-uploads"
        end

        it "removes the file from s3 on multisite" do
          test_multisite_connection('default') do
            upload = build_upload
            s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
            upload.update!(url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
            s3_object = stub

            # With a folder path in the bucket setting, both the tombstone
            # and original keys are prefixed with that folder.
            s3_bucket.expects(:object).with("discourse-uploads/uploads/tombstone/default/original/1X/#{upload.sha1}.png").returns(s3_object)
            expect_copy_from(s3_object, "s3-upload-bucket/discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png")
            s3_bucket.expects(:object).with("discourse-uploads/#{upload_path}/original/1X/#{upload.sha1}.png").returns(s3_object)
            s3_object.expects(:delete)

            store.remove_upload(upload)
          end
        end
      end
    end
  end
|
|
|
|
  context 'secure uploads' do
    let(:store) { FileStore::S3Store.new }
    let(:client) { Aws::S3::Client.new(stub_responses: true) }
    let(:resource) { Aws::S3::Resource.new(client: client) }
    let(:s3_bucket) { resource.bucket("some-really-cool-bucket") }
    let(:s3_helper) { store.s3_helper }
    let(:s3_object) { stub }

    before(:each) do
      setup_s3
      SiteSetting.s3_upload_bucket = "some-really-cool-bucket"
      SiteSetting.authorized_extensions = "pdf|png|jpg|gif"
    end

    before do
      # Any put on the mocked object returns a canned ETag response.
      s3_object.stubs(:put).returns(Aws::S3::Types::PutObjectOutput.new(etag: "etag"))
    end

    describe "when secure attachments are enabled" do
      it "returns signed URL with correct path" do
        test_multisite_connection('default') do
          upload = Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)
          path = Discourse.store.get_path_for_upload(upload)

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/#{path}").returns(s3_object).at_least_once
          s3_object.expects(:presigned_url).with(:get, expires_in: S3Helper::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS)

          upload.url = store.store_upload(uploaded_file, upload)
          expect(upload.url).to eq(
            "//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/#{path}"
          )
          # For a secure upload, url_for must return a signed URL, not the
          # stored canonical URL.
          expect(store.url_for(upload)).not_to eq(upload.url)
        end
      end
    end

    describe "when secure media are enabled" do
      before do
        SiteSetting.login_required = true
        SiteSetting.secure_media = true
        s3_helper.stubs(:s3_client).returns(client)
        Discourse.stubs(:store).returns(store)
      end

      it "returns signed URL with correct path" do
        test_multisite_connection('default') do
          upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

          signed_url = Discourse.store.signed_url_for_path(upload.url)
          expect(signed_url).to match(/Amz-Expires/)
          expect(signed_url).to match("#{upload_path}")
        end

        test_multisite_connection('second') do
          # Shadow the outer let with the second db's upload path.
          upload_path = Discourse.store.upload_path
          upload = Fabricate.build(:upload_s3, sha1: upload_sha1, id: 1)

          signed_url = Discourse.store.signed_url_for_path(upload.url)
          expect(signed_url).to match(/Amz-Expires/)
          expect(signed_url).to match("#{upload_path}")
        end
      end
    end

    describe "#update_upload_ACL" do
      it "updates correct file for default and second multisite db" do
        test_multisite_connection('default') do
          upload = build_upload
          upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

          # Expect the ACL of the db-specific object key to be set private.
          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object)
          s3_object.expects(:acl).returns(s3_object)
          s3_object.expects(:put).with(acl: "private").returns(s3_object)

          expect(store.update_upload_ACL(upload)).to be_truthy
        end

        test_multisite_connection('second') do
          # Shadow the outer let with the second db's upload path.
          upload_path = Discourse.store.upload_path
          upload = build_upload
          upload.update!(original_filename: "small.pdf", extension: "pdf", secure: true)

          s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
          s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object)
          s3_object.expects(:acl).returns(s3_object)
          s3_object.expects(:put).with(acl: "private").returns(s3_object)

          expect(store.update_upload_ACL(upload)).to be_truthy
        end
      end
    end
  end
|
|
|
|
describe "#has_been_uploaded?" do
|
|
before do
|
|
setup_s3
|
|
SiteSetting.s3_upload_bucket = "s3-upload-bucket/test"
|
|
end
|
|
|
|
let(:store) { FileStore::S3Store.new }
|
|
|
|
it "returns false for blank urls and bad urls" do
|
|
expect(store.has_been_uploaded?("")).to eq(false)
|
|
expect(store.has_been_uploaded?("http://test@test.com:test/test.git")).to eq(false)
|
|
expect(store.has_been_uploaded?("http:///+test@test.com/test.git")).to eq(false)
|
|
end
|
|
|
|
it "returns true if the base hostname is the same for both urls" do
|
|
url = "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
|
|
expect(store.has_been_uploaded?(url)).to eq(true)
|
|
end
|
|
|
|
it "returns false if the base hostname is the same for both urls BUT the bucket name is different in the path" do
|
|
bucket = "someotherbucket"
|
|
url = "https://s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com/#{bucket}/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
|
|
expect(store.has_been_uploaded?(url)).to eq(false)
|
|
end
|
|
|
|
it "returns false if the hostnames do not match and the s3_cdn_url is blank" do
|
|
url = "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
|
|
expect(store.has_been_uploaded?(url)).to eq(false)
|
|
end
|
|
|
|
it "returns true if the s3_cdn_url is present and matches the url hostname" do
|
|
SiteSetting.s3_cdn_url = "https://www.someotherhostname.com"
|
|
url = "https://www.someotherhostname.com/test/original/2X/d/dd7964f5fd13e1103c5244ca30abe1936c0a4b88.png"
|
|
expect(store.has_been_uploaded?(url)).to eq(true)
|
|
end
|
|
|
|
it "returns false if the URI is an invalid mailto link" do
|
|
link = 'mailto: roman;@test.com'
|
|
|
|
expect(store.has_been_uploaded?(link)).to eq(false)
|
|
end
|
|
end
|
|
|
|
describe "#signed_url_for_temporary_upload" do
|
|
before do
|
|
setup_s3
|
|
end
|
|
|
|
let(:store) { FileStore::S3Store.new }
|
|
|
|
context "for a bucket with no folder path" do
|
|
before { SiteSetting.s3_upload_bucket = "s3-upload-bucket" }
|
|
|
|
it "returns a presigned url with the correct params and the key for the temporary file" do
|
|
url = store.signed_url_for_temporary_upload("test.png")
|
|
key = store.s3_helper.path_from_url(url)
|
|
expect(url).to match(/Amz-Expires/)
|
|
expect(key).to match(/temp\/uploads\/default\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
|
|
end
|
|
|
|
it "presigned url contans the metadata when provided" do
|
|
url = store.signed_url_for_temporary_upload("test.png", metadata: { "test-meta": "testing" })
|
|
expect(url).to include("&x-amz-meta-test-meta=testing")
|
|
end
|
|
end
|
|
|
|
context "for a bucket with a folder path" do
|
|
before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/site" }
|
|
|
|
it "returns a presigned url with the correct params and the key for the temporary file" do
|
|
url = store.signed_url_for_temporary_upload("test.png")
|
|
key = store.s3_helper.path_from_url(url)
|
|
expect(url).to match(/Amz-Expires/)
|
|
expect(key).to match(/temp\/site\/uploads\/default\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
|
|
end
|
|
end
|
|
|
|
context "for a multisite site" do
|
|
before { SiteSetting.s3_upload_bucket = "s3-upload-bucket/standard99" }
|
|
|
|
it "returns a presigned url with the correct params and the key for the temporary file" do
|
|
test_multisite_connection('second') do
|
|
url = store.signed_url_for_temporary_upload("test.png")
|
|
key = store.s3_helper.path_from_url(url)
|
|
expect(url).to match(/Amz-Expires/)
|
|
expect(key).to match(/temp\/standard99\/uploads\/second\/test_[0-9]\/[a-zA-z0-9]{0,32}\/[a-zA-z0-9]{0,32}.png/)
|
|
end
|
|
end
|
|
end
|
|
end
|
|
|
|
def expect_copy_from(s3_object, source)
|
|
s3_object.expects(:copy_from).with(
|
|
copy_source: source
|
|
).returns(
|
|
stub(copy_object_result: stub(etag: '"etagtest"'))
|
|
)
|
|
end
|
|
end
|