From d295a16dab994c45e33e1df8da46c337ebd6a917 Mon Sep 17 00:00:00 2001
From: Martin Brennan
Date: Wed, 25 Aug 2021 08:46:54 +1000
Subject: [PATCH] FEATURE: Uppy direct S3 multipart uploads in composer (#14051)

This pull request introduces the endpoints required, and the JavaScript functionality in the `ComposerUppyUpload` mixin, for direct S3 multipart uploads. There are four new endpoints in the uploads controller:

* `create-multipart.json` - Creates the multipart upload in S3 along with an `ExternalUploadStub` record, storing information about the file in the same way as `generate-presigned-put.json` does for regular direct S3 uploads
* `batch-presign-multipart-parts.json` - Takes a list of part numbers and the unique identifier for an `ExternalUploadStub` record, and generates the presigned URLs for those parts if the multipart upload still exists and if the user has permission to access that upload
* `complete-multipart.json` - Completes the multipart upload in S3. Needs the full list of part numbers and their associated ETags, which are returned when each part is uploaded to the presigned URL above. Only works if the user has permission to access the associated `ExternalUploadStub` record and the multipart upload still exists. After we confirm the upload is complete in S3, we go through the regular `UploadCreator` flow, the same as `complete-external-upload.json`, and promote the temporary S3 upload into a full `Upload` record, moving it to its final destination.
* `abort-multipart.json` - Aborts the multipart upload on S3 and destroys the `ExternalUploadStub` record if the user has permission to access that upload.

Also added are a few new columns to `ExternalUploadStub`:

* multipart - Whether or not this is a multipart upload
* external_upload_identifier - The "upload ID" for an S3 multipart upload
* filesize - The size of the file when the `create-multipart.json` or `generate-presigned-put.json` endpoint is called. This is used for validation.

When the user completes a direct S3 upload, either regular or multipart, we take the `filesize` that was captured when the `ExternalUploadStub` was first created and compare it with the final `Content-Length` size of the file where it is stored in S3. If the two do not match, we throw an error, delete the file on S3, and ban the user from uploading files for N (default 5) minutes. This would only happen if the user uploads a different file than the one they first specified or, in the case of multipart uploads, uploads larger chunks than needed. This is done to prevent abuse of S3 storage by bad actors.

Also included in this PR is an update to vendor/uppy.js. This has been built locally from the latest uppy source at https://github.com/transloadit/uppy/commit/d613b849a6591083f8a0968aa8d66537e231bbcd. This must be done so that I can get my multipart upload changes into Discourse. When the Uppy team cuts a proper release, we can bump the package.json versions instead.
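To make the flow concrete for reviewers, here is a rough sketch of the round trip through the four endpoints, using the `ajax` helper from `discourse/lib/ajax`. This is illustrative only: the request shapes match the new controller params, but the single-part happy path, the bare `fetch` call, and the ETag handling are simplified stand-ins for what uppy's `AwsS3Multipart` plugin does for us in the `ComposerUppyUpload` mixin below (reading the ETag response header also assumes the bucket's CORS config exposes it):

    // 1. create the multipart upload and its ExternalUploadStub;
    // external_upload_identifier is kept around in case we need to abort
    const { unique_identifier, external_upload_identifier } = await ajax(
      "/uploads/create-multipart.json",
      {
        type: "POST",
        data: { file_name: file.name, file_size: file.size, upload_type: "composer" },
      }
    );

    // 2. presign only the parts about to be uploaded, in batches, so we
    // never generate thousands of URLs up front for a huge file
    const { presigned_urls } = await ajax(
      "/uploads/batch-presign-multipart-parts.json",
      { type: "POST", data: { unique_identifier, part_numbers: [1] } }
    );

    // 3. PUT the bytes straight to S3; S3 answers each part with an ETag
    const partResponse = await fetch(presigned_urls["1"], {
      method: "PUT",
      body: file.data,
    });
    const etag = partResponse.headers.get("ETag").replace(/"/g, "");

    // 4. completing verifies the filesize and promotes the stub to an Upload
    // via ExternalUploadManager; on failure or user cancel the client calls
    // /uploads/abort-multipart.json with the external_upload_identifier instead
    await ajax("/uploads/complete-multipart.json", {
      type: "POST",
      contentType: "application/json",
      data: JSON.stringify({ unique_identifier, parts: [{ part_number: 1, etag }] }),
    });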
--- app/assets/javascripts/discourse-shims.js | 2 +- .../discourse/app/lib/uppy-checksum-plugin.js | 4 +- .../app/lib/uppy-media-optimization-plugin.js | 9 +- .../app/mixins/composer-upload-uppy.js | 129 +- .../discourse/app/mixins/uppy-upload.js | 6 +- app/controllers/uploads_controller.rb | 250 +- app/models/external_upload_stub.rb | 34 +- app/services/external_upload_manager.rb | 35 +- config/locales/server.en.yml | 5 + config/routes.rb | 11 +- ...d_size_columns_to_external_upload_stubs.rb | 23 + lib/file_store/s3_store.rb | 92 +- lib/guardian/user_guardian.rb | 4 + lib/upload_creator.rb | 8 + .../components/guardian/user_guardian_spec.rb | 13 + .../external_upload_stub_fabricator.rb | 3 + spec/requests/uploads_controller_spec.rb | 582 +- spec/services/external_upload_manager_spec.rb | 35 +- vendor/assets/javascripts/uppy.js | 14941 +++++++--------- 19 files changed, 8108 insertions(+), 8078 deletions(-) create mode 100644 db/migrate/20210812033033_add_multipart_and_size_columns_to_external_upload_stubs.rb diff --git a/app/assets/javascripts/discourse-shims.js b/app/assets/javascripts/discourse-shims.js index 71ce8478e37..3f038c12d85 100644 --- a/app/assets/javascripts/discourse-shims.js +++ b/app/assets/javascripts/discourse-shims.js @@ -32,7 +32,7 @@ define("@popperjs/core", ["exports"], function (__exports__) { define("@uppy/core", ["exports"], function (__exports__) { __exports__.default = window.Uppy.Core; - __exports__.Plugin = window.Uppy.Plugin; + __exports__.BasePlugin = window.Uppy.BasePlugin; }); define("@uppy/aws-s3", ["exports"], function (__exports__) { diff --git a/app/assets/javascripts/discourse/app/lib/uppy-checksum-plugin.js b/app/assets/javascripts/discourse/app/lib/uppy-checksum-plugin.js index 4cd0cea0476..3e74b36252f 100644 --- a/app/assets/javascripts/discourse/app/lib/uppy-checksum-plugin.js +++ b/app/assets/javascripts/discourse/app/lib/uppy-checksum-plugin.js @@ -1,8 +1,8 @@ -import { Plugin } from "@uppy/core"; +import { BasePlugin } from "@uppy/core"; import { warn } from "@ember/debug"; import { Promise } from "rsvp"; -export default class UppyChecksum extends Plugin { +export default class UppyChecksum extends BasePlugin { constructor(uppy, opts) { super(uppy, opts); this.id = opts.id || "uppy-checksum"; diff --git a/app/assets/javascripts/discourse/app/lib/uppy-media-optimization-plugin.js b/app/assets/javascripts/discourse/app/lib/uppy-media-optimization-plugin.js index 3ba4cc8fca0..dcf3c3121ba 100644 --- a/app/assets/javascripts/discourse/app/lib/uppy-media-optimization-plugin.js +++ b/app/assets/javascripts/discourse/app/lib/uppy-media-optimization-plugin.js @@ -1,8 +1,8 @@ -import { Plugin } from "@uppy/core"; +import { BasePlugin } from "@uppy/core"; import { warn } from "@ember/debug"; import { Promise } from "rsvp"; -export default class UppyMediaOptimization extends Plugin { +export default class UppyMediaOptimization extends BasePlugin { constructor(uppy, opts) { super(uppy, opts); this.id = opts.id || "uppy-media-optimization"; @@ -30,7 +30,10 @@ export default class UppyMediaOptimization extends Plugin { id: "discourse.uppy-media-optimization", }); } else { - this.uppy.setFileState(fileId, { data: optimizedFile }); + this.uppy.setFileState(fileId, { + data: optimizedFile, + size: optimizedFile.size, + }); } this.uppy.emit("preprocess-complete", this.pluginClass, file); }) diff --git a/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js b/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js index 
d7b88d7de43..97cd755e4a3 100644 --- a/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js +++ b/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js @@ -1,10 +1,12 @@ import Mixin from "@ember/object/mixin"; +import { ajax } from "discourse/lib/ajax"; import { deepMerge } from "discourse-common/lib/object"; import UppyChecksum from "discourse/lib/uppy-checksum-plugin"; import UppyMediaOptimization from "discourse/lib/uppy-media-optimization-plugin"; import Uppy from "@uppy/core"; import DropTarget from "@uppy/drop-target"; import XHRUpload from "@uppy/xhr-upload"; +import AwsS3Multipart from "@uppy/aws-s3-multipart"; import { warn } from "@ember/debug"; import I18n from "I18n"; import getURL from "discourse-common/lib/get-url"; @@ -70,6 +72,7 @@ export default Mixin.create({ _bindUploadTarget() { this.placeholders = {}; + this._inProgressUploads = 0; this._preProcessorStatus = {}; this.fileInputEl = document.getElementById("file-uploader"); const isPrivateMessage = this.get("composer.privateMessage"); @@ -140,9 +143,12 @@ export default Mixin.create({ // name for the preprocess-X events. this._trackPreProcessorStatus(UppyChecksum); - // TODO (martin) support for direct S3 uploads will come later, for now - // we just want the regular /uploads.json endpoint to work well - this._useXHRUploads(); + // hidden setting like enable_experimental_image_uploader + if (this.siteSettings.enable_direct_s3_uploads) { + this._useS3MultipartUploads(); + } else { + this._useXHRUploads(); + } // TODO (martin) develop upload handler guidance and an API to use; will // likely be using uppy plugins for this @@ -171,6 +177,7 @@ export default Mixin.create({ }); files.forEach((file) => { + this._inProgressUploads++; const placeholder = this._uploadPlaceholder(file); this.placeholders[file.id] = { uploadPlaceholder: placeholder, @@ -199,14 +206,7 @@ export default Mixin.create({ this.appEvents.trigger("composer:upload-success", file.name, upload); }); - this._uppyInstance.on("upload-error", (file, error, response) => { - this._resetUpload(file, { removePlaceholder: true }); - - if (!this.userCancelled) { - displayErrorForUpload(response, this.siteSettings, file.name); - this.appEvents.trigger("composer:upload-error", file); - } - }); + this._uppyInstance.on("upload-error", this._handleUploadError.bind(this)); this._uppyInstance.on("complete", () => { this.appEvents.trigger("composer:all-uploads-complete"); @@ -235,6 +235,20 @@ export default Mixin.create({ this._setupPreprocessing(); }, + _handleUploadError(file, error, response) { + this._inProgressUploads--; + this._resetUpload(file, { removePlaceholder: true }); + + if (!this.userCancelled) { + displayErrorForUpload(response || error, this.siteSettings, file.name); + this.appEvents.trigger("composer:upload-error", file); + } + + if (this._inProgressUploads === 0) { + this._reset(); + } + }, + _setupPreprocessing() { Object.keys(this.uploadProcessorActions).forEach((action) => { switch (action) { @@ -343,6 +357,99 @@ export default Mixin.create({ }); }, + _useS3MultipartUploads() { + const self = this; + + this._uppyInstance.use(AwsS3Multipart, { + // controls how many simultaneous _chunks_ are uploaded, not files, + // which in turn controls the minimum number of chunks presigned + // in each batch (limit / 2) + // + // the default, and minimum, chunk size is 5mb. 
we can control the + // chunk size via getChunkSize(file), so we may want to increase + // the chunk size for larger files + limit: 10, + + createMultipartUpload(file) { + return ajax("/uploads/create-multipart.json", { + type: "POST", + data: { + file_name: file.name, + file_size: file.size, + upload_type: file.meta.upload_type, + }, + // uppy is inconsistent, an error here fires the upload-error event + }).then((data) => { + file.meta.unique_identifier = data.unique_identifier; + return { + uploadId: data.external_upload_identifier, + key: data.key, + }; + }); + }, + + prepareUploadParts(file, partData) { + return ( + ajax("/uploads/batch-presign-multipart-parts.json", { + type: "POST", + data: { + part_numbers: partData.partNumbers, + unique_identifier: file.meta.unique_identifier, + }, + }) + .then((data) => { + return { presignedUrls: data.presigned_urls }; + }) + // uppy is inconsistent, an error here does not fire the upload-error event + .catch((err) => { + self._handleUploadError(file, err); + }) + ); + }, + + completeMultipartUpload(file, data) { + const parts = data.parts.map((part) => { + return { part_number: part.PartNumber, etag: part.ETag }; + }); + return ajax("/uploads/complete-multipart.json", { + type: "POST", + contentType: "application/json", + data: JSON.stringify({ + parts, + unique_identifier: file.meta.unique_identifier, + }), + // uppy is inconsistent, an error here fires the upload-error event + }).then((responseData) => { + return responseData; + }); + }, + + abortMultipartUpload(file, { key, uploadId }) { + // if the user cancels the upload before the key and uploadId + // are stored from the createMultipartUpload response then they + // will not be set, and we don't have to abort the upload because + // it will not exist yet + if (!key || !uploadId) { + return; + } + + return ajax("/uploads/abort-multipart.json", { + type: "POST", + data: { + external_upload_identifier: uploadId, + }, + // uppy is inconsistent, an error here does not fire the upload-error event + }).catch((err) => { + self._handleUploadError(file, err); + }); + }, + + // we will need a listParts function at some point when we want to + // resume multipart uploads; this is used by uppy to figure out + // what parts are uploaded and which still need to be + }); + }, + _reset() { this._uppyInstance?.reset(); this.setProperties({ diff --git a/app/assets/javascripts/discourse/app/mixins/uppy-upload.js b/app/assets/javascripts/discourse/app/mixins/uppy-upload.js index d4d0d4770d9..101fce1ccc9 100644 --- a/app/assets/javascripts/discourse/app/mixins/uppy-upload.js +++ b/app/assets/javascripts/discourse/app/mixins/uppy-upload.js @@ -175,7 +175,11 @@ export default Mixin.create({ this.set("usingS3Uploads", true); this._uppyInstance.use(AwsS3, { getUploadParameters: (file) => { - const data = { file_name: file.name, type: this.type }; + const data = { + file_name: file.name, + file_size: file.size, + type: this.type, + }; // the sha1 checksum is set by the UppyChecksum plugin, except // for in cases where the browser does not support the required diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb index 2048a9fc169..0a5c0bb0560 100644 --- a/app/controllers/uploads_controller.rb +++ b/app/controllers/uploads_controller.rb @@ -9,14 +9,30 @@ class UploadsController < ApplicationController protect_from_forgery except: :show before_action :is_asset_path, :apply_cdn_headers, only: [:show, :show_short, :show_secure] - before_action :external_store_check, only: 
[:show_secure, :generate_presigned_put, :complete_external_upload] + before_action :external_store_check, only: [ + :show_secure, + :generate_presigned_put, + :complete_external_upload, + :create_multipart, + :batch_presign_multipart_parts, + :abort_multipart, + :complete_multipart + ] + before_action :direct_s3_uploads_check, only: [ + :generate_presigned_put, + :complete_external_upload, + :create_multipart, + :batch_presign_multipart_parts, + :abort_multipart, + :complete_multipart + ] + before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put] SECURE_REDIRECT_GRACE_SECONDS = 5 - PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 5 - - def external_store_check - return render_404 if !Discourse.store.external? - end + PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10 + CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 + COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 + BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE = 10 def create # capture current user for block later on @@ -193,15 +209,21 @@ class UploadsController < ApplicationController end def generate_presigned_put - return render_404 if !SiteSetting.enable_direct_s3_uploads - RateLimiter.new( current_user, "generate-presigned-put-upload-stub", PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute ).performed! file_name = params.require(:file_name) + file_size = params.require(:file_size).to_i type = params.require(:type) + if file_size_too_big?(file_name, file_size) + return render_json_error( + I18n.t("upload.attachments.too_large", max_size_kb: SiteSetting.max_attachment_size_kb), + status: 422 + ) + end + # don't want people posting arbitrary S3 metadata so we just take the # one we need. all of these will be converted to x-amz-meta- metadata # fields in S3 so it's best to use dashes in the names for consistency @@ -225,33 +247,37 @@ class UploadsController < ApplicationController key: key, created_by: current_user, original_filename: file_name, - upload_type: type + upload_type: type, + filesize: file_size ) render json: { url: url, key: key, unique_identifier: upload_stub.unique_identifier } end def complete_external_upload - return render_404 if !SiteSetting.enable_direct_s3_uploads - unique_identifier = params.require(:unique_identifier) external_upload_stub = ExternalUploadStub.find_by( unique_identifier: unique_identifier, created_by: current_user ) return render_404 if external_upload_stub.blank? - raise Discourse::InvalidAccess if external_upload_stub.created_by_id != current_user.id - external_upload_manager = ExternalUploadManager.new(external_upload_stub) + complete_external_upload_via_manager(external_upload_stub) + end + def complete_external_upload_via_manager(external_upload_stub) + external_upload_manager = ExternalUploadManager.new(external_upload_stub) hijack do begin upload = external_upload_manager.promote_to_upload! if upload.errors.empty? - external_upload_manager.destroy! + external_upload_stub.destroy! 
render json: UploadsController.serialize_upload(upload), status: 200 else render_json_error(upload.errors.to_hash.values.flatten, status: 422) end + rescue ExternalUploadManager::SizeMismatchError => err + debug_upload_error(err, "upload.size_mismatch_failure", additional_detail: err.message) + render_json_error(I18n.t("upload.failed"), status: 422) rescue ExternalUploadManager::ChecksumMismatchError => err debug_upload_error(err, "upload.checksum_mismatch_failure") render_json_error(I18n.t("upload.failed"), status: 422) @@ -270,6 +296,179 @@ class UploadsController < ApplicationController end end + def create_multipart + RateLimiter.new( + current_user, "create-multipart-upload", CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + file_name = params.require(:file_name) + file_size = params.require(:file_size).to_i + upload_type = params.require(:upload_type) + content_type = MiniMime.lookup_by_filename(file_name)&.content_type + + if file_size_too_big?(file_name, file_size) + return render_json_error( + I18n.t("upload.attachments.too_large", max_size_kb: SiteSetting.max_attachment_size_kb), + status: 422 + ) + end + + begin + multipart_upload = Discourse.store.create_multipart( + file_name, content_type + ) + rescue Aws::S3::Errors::ServiceError => err + debug_upload_error(err, "upload.create_multipart_failure") + return render_json_error(I18n.t("upload.failed"), status: 422) + end + + upload_stub = ExternalUploadStub.create!( + key: multipart_upload[:key], + created_by: current_user, + original_filename: file_name, + upload_type: upload_type, + external_upload_identifier: multipart_upload[:upload_id], + multipart: true, + filesize: file_size + ) + + render json: { + external_upload_identifier: upload_stub.external_upload_identifier, + key: upload_stub.key, + unique_identifier: upload_stub.unique_identifier + } + end + + def batch_presign_multipart_parts + part_numbers = params.require(:part_numbers) + unique_identifier = params.require(:unique_identifier) + + RateLimiter.new( + current_user, "batch-presign", BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + part_numbers = part_numbers.map do |part_number| + validate_part_number(part_number) + end + + external_upload_stub = ExternalUploadStub.find_by( + unique_identifier: unique_identifier, created_by: current_user + ) + return render_404 if external_upload_stub.blank?
+ + if !multipart_upload_exists?(external_upload_stub) + return render_404 + end + + presigned_urls = {} + part_numbers.each do |part_number| + presigned_urls[part_number] = Discourse.store.presign_multipart_part( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key, + part_number: part_number + ) + end + + render json: { presigned_urls: presigned_urls } + end + + def validate_part_number(part_number) + part_number = part_number.to_i + if !part_number.between?(1, 10000) + raise Discourse::InvalidParameters.new( + "Each part number should be between 1 and 10000" + ) + end + part_number + end + + def multipart_upload_exists?(external_upload_stub) + begin + Discourse.store.list_multipart_parts( + upload_id: external_upload_stub.external_upload_identifier, key: external_upload_stub.key + ) + rescue Aws::S3::Errors::NoSuchUpload => err + debug_upload_error(err, "upload.external_upload_not_found", { additional_detail: "path: #{external_upload_stub.key}" }) + return false + end + true + end + + def abort_multipart + external_upload_identifier = params.require(:external_upload_identifier) + external_upload_stub = ExternalUploadStub.find_by( + external_upload_identifier: external_upload_identifier + ) + + # The stub could have already been deleted by an earlier error via + # ExternalUploadManager, so we consider this a great success if the + # stub is already gone. + return render json: success_json if external_upload_stub.blank? + + return render_404 if external_upload_stub.created_by_id != current_user.id + + begin + Discourse.store.abort_multipart( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key + ) + rescue Aws::S3::Errors::ServiceError => err + debug_upload_error(err, "upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}") + return render_json_error(I18n.t("upload.failed"), status: 422) + end + + external_upload_stub.destroy! + + render json: success_json + end + + def complete_multipart + unique_identifier = params.require(:unique_identifier) + parts = params.require(:parts) + + RateLimiter.new( + current_user, "complete-multipart-upload", COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + external_upload_stub = ExternalUploadStub.find_by( + unique_identifier: unique_identifier, created_by: current_user + ) + return render_404 if external_upload_stub.blank? + + if !multipart_upload_exists?(external_upload_stub) + return render_404 + end + + parts = parts.map do |part| + part_number = part[:part_number] + etag = part[:etag] + part_number = validate_part_number(part_number) + + if etag.blank? + raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number") + end + + # this is done so it's an array of hashes rather than an array of + # ActionController::Parameters + { part_number: part_number, etag: etag } + end.sort_by do |part| + part[:part_number] + end + + begin + complete_response = Discourse.store.complete_multipart( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key, + parts: parts + ) + rescue Aws::S3::Errors::ServiceError => err + debug_upload_error(err, "upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}") + return render_json_error(I18n.t("upload.failed"), status: 422) + end + + complete_external_upload_via_manager(external_upload_stub) + end + protected def force_download?
@@ -339,6 +538,25 @@ class UploadsController < ApplicationController private + def external_store_check + return render_404 if !Discourse.store.external? + end + + def direct_s3_uploads_check + return render_404 if !SiteSetting.enable_direct_s3_uploads + end + + def can_upload_external? + raise Discourse::InvalidAccess if !guardian.can_upload_external? + end + + # We can pre-emptively check size for attachments, but not for images + # as they may be further reduced in size by UploadCreator (at this point + # they may have already been reduced in size by preprocessors) + def file_size_too_big?(file_name, file_size) + !FileHelper.is_supported_image?(file_name) && file_size >= SiteSetting.max_attachment_size_kb.kilobytes + end + def send_file_local_upload(upload) opts = { filename: upload.original_filename, @@ -357,8 +575,8 @@ class UploadsController < ApplicationController send_file(file_path, opts) end - def debug_upload_error(translation_key, err) + def debug_upload_error(err, translation_key, translation_params = {}) return if !SiteSetting.enable_upload_debug_mode - Discourse.warn_exception(err, message: I18n.t(translation_key)) + Discourse.warn_exception(err, message: I18n.t(translation_key, translation_params)) end end diff --git a/app/models/external_upload_stub.rb b/app/models/external_upload_stub.rb index 82a20ff2064..6f50a042ecf 100644 --- a/app/models/external_upload_stub.rb +++ b/app/models/external_upload_stub.rb @@ -5,9 +5,14 @@ require "digest/sha1" class ExternalUploadStub < ActiveRecord::Base CREATED_EXPIRY_HOURS = 1 UPLOADED_EXPIRY_HOURS = 24 + FAILED_EXPIRY_HOURS = 48 belongs_to :created_by, class_name: 'User' + validates :filesize, numericality: { + allow_nil: false, only_integer: true, greater_than_or_equal_to: 1 + } + scope :expired_created, -> { where( "status = ? 
AND created_at <= ?", @@ -33,7 +38,6 @@ class ExternalUploadStub < ActiveRecord::Base @statuses ||= Enum.new( created: 1, uploaded: 2, - failed: 3 ) end @@ -50,19 +54,23 @@ end # # Table name: external_upload_stubs # -# id :bigint not null, primary key -# key :string not null -# original_filename :string not null -# status :integer default(1), not null -# unique_identifier :uuid not null -# created_by_id :integer not null -# upload_type :string not null -# created_at :datetime not null -# updated_at :datetime not null +# id :bigint not null, primary key +# key :string not null +# original_filename :string not null +# status :integer default(1), not null +# unique_identifier :uuid not null +# created_by_id :integer not null +# upload_type :string not null +# created_at :datetime not null +# updated_at :datetime not null +# multipart :boolean default(FALSE), not null +# external_upload_identifier :string +# filesize :bigint not null # # Indexes # -# index_external_upload_stubs_on_created_by_id (created_by_id) -# index_external_upload_stubs_on_key (key) UNIQUE -# index_external_upload_stubs_on_status (status) +# index_external_upload_stubs_on_created_by_id (created_by_id) +# index_external_upload_stubs_on_external_upload_identifier (external_upload_identifier) +# index_external_upload_stubs_on_key (key) UNIQUE +# index_external_upload_stubs_on_status (status) # diff --git a/app/services/external_upload_manager.rb b/app/services/external_upload_manager.rb index aacfb99ecad..aa0e7040b2f 100644 --- a/app/services/external_upload_manager.rb +++ b/app/services/external_upload_manager.rb @@ -2,13 +2,24 @@ class ExternalUploadManager DOWNLOAD_LIMIT = 100.megabytes + SIZE_MISMATCH_BAN_MINUTES = 5 + BAN_USER_REDIS_PREFIX = "ban_user_from_external_uploads_" class ChecksumMismatchError < StandardError; end class DownloadFailedError < StandardError; end class CannotPromoteError < StandardError; end + class SizeMismatchError < StandardError; end attr_reader :external_upload_stub + def self.ban_user_from_external_uploads!(user:, ban_minutes: 5) + Discourse.redis.setex("#{BAN_USER_REDIS_PREFIX}#{user.id}", ban_minutes.minutes.to_i, "1") + end + + def self.user_banned?(user) + Discourse.redis.get("#{BAN_USER_REDIS_PREFIX}#{user.id}") == "1" + end + def initialize(external_upload_stub) @external_upload_stub = external_upload_stub end @@ -31,6 +42,19 @@ class ExternalUploadManager # variable as well to check. tempfile = nil should_download = external_size < DOWNLOAD_LIMIT + + # We require that the file size is specified ahead of time, and compare + # it here to make sure that people are not uploading excessively large + # files to the external provider. If this happens, the user will be banned + # from uploading to the external provider for N minutes. + if external_size != external_upload_stub.filesize + ExternalUploadManager.ban_user_from_external_uploads!( + user: external_upload_stub.created_by, + ban_minutes: SIZE_MISMATCH_BAN_MINUTES + ) + raise SizeMismatchError.new("expected: #{external_upload_stub.filesize}, actual: #{external_size}") + end + if should_download tempfile = download(external_upload_stub.key, external_upload_stub.upload_type) @@ -60,16 +84,17 @@ class ExternalUploadManager external_upload_stub.created_by_id ) rescue - external_upload_stub.update!(status: ExternalUploadStub.statuses[:failed]) + # We don't need to do anything special to abort multipart uploads here, + # because at this point (calling promote_to_upload!), the multipart + # upload would already be complete. 
+ Discourse.store.delete_file(external_upload_stub.key) + external_upload_stub.destroy! + raise ensure tempfile&.close! end - def destroy! - external_upload_stub.destroy! - end - private def download(key, type) diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index d7150cf44c2..c19136c70cf 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -4009,6 +4009,11 @@ en: png_to_jpg_conversion_failure_message: "An error happened when converting from PNG to JPG." optimize_failure_message: "An error occurred while optimizing the uploaded image." download_failure: "Downloading the file from the external provider failed." + size_mismatch_failure: "The size of the file uploaded to S3 did not match the external upload stub's intended size. %{additional_detail}" + create_multipart_failure: "Failed to create multipart upload in the external store." + abort_multipart_failure: "Failed to abort multipart upload in the external store." + complete_multipart_failure: "Failed to complete multipart upload in the external store." + external_upload_not_found: "The upload was not found in the external store. %{additional_detail}" checksum_mismatch_failure: "The checksum of the file you uploaded does not match. The file contents may have changed on upload. Please try again." cannot_promote_failure: "The upload cannot be completed, it may have already completed or previously failed." attachments: diff --git a/config/routes.rb b/config/routes.rb index 28cb935b343..c7737d9fce0 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -541,8 +541,15 @@ Discourse::Application.routes.draw do post "uploads" => "uploads#create" post "uploads/lookup-urls" => "uploads#lookup_urls" - post "uploads/generate-presigned-put" => "uploads#generate_presigned_put" - post "uploads/complete-external-upload" => "uploads#complete_external_upload" + # direct to s3 uploads + post "uploads/generate-presigned-put" => "uploads#generate_presigned_put", format: :json + post "uploads/complete-external-upload" => "uploads#complete_external_upload", format: :json + + # multipart uploads + post "uploads/create-multipart" => "uploads#create_multipart", format: :json + post "uploads/complete-multipart" => "uploads#complete_multipart", format: :json + post "uploads/abort-multipart" => "uploads#abort_multipart", format: :json + post "uploads/batch-presign-multipart-parts" => "uploads#batch_presign_multipart_parts", format: :json # used to download original images get "uploads/:site/:sha(.:extension)" => "uploads#show", constraints: { site: /\w+/, sha: /\h{40}/, extension: /[a-z0-9\._]+/i } diff --git a/db/migrate/20210812033033_add_multipart_and_size_columns_to_external_upload_stubs.rb b/db/migrate/20210812033033_add_multipart_and_size_columns_to_external_upload_stubs.rb new file mode 100644 index 00000000000..97fdb885ec1 --- /dev/null +++ b/db/migrate/20210812033033_add_multipart_and_size_columns_to_external_upload_stubs.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +class AddMultipartAndSizeColumnsToExternalUploadStubs < ActiveRecord::Migration[6.1] + def up + add_column :external_upload_stubs, :multipart, :boolean, default: false, null: false + add_column :external_upload_stubs, :external_upload_identifier, :string, null: true + add_column :external_upload_stubs, :filesize, :bigint + + add_index :external_upload_stubs, :external_upload_identifier + + # this feature is not actively used yet so this will be safe; also, the rows in this + # table are regularly deleted + DB.exec("UPDATE
external_upload_stubs SET filesize = 0 WHERE filesize IS NULL") + + change_column_null :external_upload_stubs, :filesize, false + end + + def down + remove_column :external_upload_stubs, :multipart + remove_column :external_upload_stubs, :external_upload_identifier + remove_column :external_upload_stubs, :filesize + end +end diff --git a/lib/file_store/s3_store.rb b/lib/file_store/s3_store.rb index 2c85c77eba2..66642ec91d8 100644 --- a/lib/file_store/s3_store.rb +++ b/lib/file_store/s3_store.rb @@ -97,12 +97,13 @@ module FileStore # if this fails, it will throw an exception if opts[:move_existing] && opts[:existing_external_upload_key] + original_path = opts[:existing_external_upload_key] path, etag = s3_helper.copy( - opts[:existing_external_upload_key], + original_path, path, options: options ) - s3_helper.delete_object(opts[:existing_external_upload_key]) + delete_file(original_path) else path, etag = s3_helper.upload(file, path, options) end @@ -111,6 +112,12 @@ module FileStore [File.join(absolute_base_url, path), etag] end + def delete_file(path) + # delete the object outright without moving to tombstone, + # not recommended for most use cases + s3_helper.delete_object(path) + end + def remove_file(url, path) return unless has_been_uploaded?(url) # copy the removed file to tombstone @@ -217,7 +224,15 @@ module FileStore def signed_url_for_temporary_upload(file_name, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {}) key = temporary_upload_path(file_name) - presigned_put_url(key, expires_in: expires_in, metadata: metadata) + presigned_url( + key, + method: :put_object, + expires_in: expires_in, + opts: { + metadata: metadata, + acl: "private" + } + ) end def temporary_upload_path(file_name) @@ -297,17 +312,72 @@ module FileStore FileUtils.mv(old_upload_path, public_upload_path) if old_upload_path end - private - - def presigned_put_url(key, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {}) - signer = Aws::S3::Presigner.new(client: s3_helper.s3_client) - signer.presigned_url( - :put_object, + def abort_multipart(key:, upload_id:) + s3_helper.s3_client.abort_multipart_upload( bucket: s3_bucket_name, key: key, + upload_id: upload_id + ) + end + + def create_multipart(file_name, content_type) + key = temporary_upload_path(file_name) + response = s3_helper.s3_client.create_multipart_upload( acl: "private", - expires_in: expires_in, - metadata: metadata + bucket: s3_bucket_name, + key: key, + content_type: content_type + ) + { upload_id: response.upload_id, key: key } + end + + def presign_multipart_part(upload_id:, key:, part_number:) + presigned_url( + key, + method: :upload_part, + expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, + opts: { + part_number: part_number, + upload_id: upload_id + } + ) + end + + def list_multipart_parts(upload_id:, key:) + s3_helper.s3_client.list_parts( + bucket: s3_bucket_name, + key: key, + upload_id: upload_id + ) + end + + def complete_multipart(upload_id:, key:, parts:) + s3_helper.s3_client.complete_multipart_upload( + bucket: s3_bucket_name, + key: key, + upload_id: upload_id, + multipart_upload: { + parts: parts + } + ) + end + + private + + def presigned_url( + key, + method:, + expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, + opts: {} + ) + signer = Aws::S3::Presigner.new(client: s3_helper.s3_client) + signer.presigned_url( + method, + { + bucket: s3_bucket_name, + key: key, + expires_in: expires_in, + }.merge(opts) ) end diff --git a/lib/guardian/user_guardian.rb 
b/lib/guardian/user_guardian.rb index cd9dda0921e..ef13588cef2 100644 --- a/lib/guardian/user_guardian.rb +++ b/lib/guardian/user_guardian.rb @@ -176,6 +176,10 @@ module UserGuardian (is_me?(user) && user.has_trust_level?(SiteSetting.min_trust_level_to_allow_user_card_background.to_i)) || is_staff? end + def can_upload_external? + !ExternalUploadManager.user_banned?(user) + end + def can_delete_sso_record?(user) SiteSetting.enable_discourse_connect && user && is_admin? end diff --git a/lib/upload_creator.rb b/lib/upload_creator.rb index d29a3c5d2b5..affbe78775a 100644 --- a/lib/upload_creator.rb +++ b/lib/upload_creator.rb @@ -32,6 +32,9 @@ class UploadCreator @opts = opts @filesize = @opts[:filesize] if @opts[:external_upload_too_big] @opts[:validate] = opts[:skip_validations].present? ? !ActiveRecord::Type::Boolean.new.cast(opts[:skip_validations]) : true + + # TODO (martin) Validate @opts[:type] to make sure only blessed types are passed + # in, since the clientside can pass any type it wants. end def create_for(user_id) @@ -50,6 +53,11 @@ class UploadCreator # so we have not downloaded it to a tempfile. no modifications can be made to the # file in this case because it does not exist; we simply move it to its new location # in S3 + # + # TODO (martin) I've added a bunch of external_upload_too_big checks littered + # throughout the UploadCreator code. It would be better to have two separate + # classes with shared methods, rather than doing all these checks all over the + # place. Needs a refactor. external_upload_too_big = @opts[:external_upload_too_big] sha1_before_changes = Upload.generate_digest(@file) if @file diff --git a/spec/components/guardian/user_guardian_spec.rb b/spec/components/guardian/user_guardian_spec.rb index 63f8b3a5e77..f07a4062b30 100644 --- a/spec/components/guardian/user_guardian_spec.rb +++ b/spec/components/guardian/user_guardian_spec.rb @@ -492,4 +492,17 @@ describe UserGuardian do end end end + + describe "#can_upload_external?"
do + after { Discourse.redis.flushdb } + + it "is true by default" do + expect(Guardian.new(user).can_upload_external?).to eq(true) + end + + it "is false if the user has been banned from external uploads for a time period" do + ExternalUploadManager.ban_user_from_external_uploads!(user: user) + expect(Guardian.new(user).can_upload_external?).to eq(false) + end + end end diff --git a/spec/fabricators/external_upload_stub_fabricator.rb b/spec/fabricators/external_upload_stub_fabricator.rb index 57d26d10a3b..ce7b1ec6163 100644 --- a/spec/fabricators/external_upload_stub_fabricator.rb +++ b/spec/fabricators/external_upload_stub_fabricator.rb @@ -5,15 +5,18 @@ Fabricator(:external_upload_stub) do original_filename "test.txt" key { Discourse.store.temporary_upload_path("test.txt") } upload_type "card_background" + filesize 1024 status 1 end Fabricator(:image_external_upload_stub, from: :external_upload_stub) do original_filename "logo.png" + filesize 1024 key { Discourse.store.temporary_upload_path("logo.png") } end Fabricator(:attachment_external_upload_stub, from: :external_upload_stub) do original_filename "file.pdf" + filesize 1024 key { Discourse.store.temporary_upload_path("file.pdf") } end diff --git a/spec/requests/uploads_controller_spec.rb b/spec/requests/uploads_controller_spec.rb index 737c992d522..c50711a952a 100644 --- a/spec/requests/uploads_controller_spec.rb +++ b/spec/requests/uploads_controller_spec.rb @@ -721,7 +721,9 @@ describe UploadsController do end it "generates a presigned URL and creates an external upload stub" do - post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background" } + post "/uploads/generate-presigned-put.json", params: { + file_name: "test.png", type: "card_background", file_size: 1024 + } expect(response.status).to eq(200) result = response.parsed_body @@ -730,7 +732,8 @@ describe UploadsController do unique_identifier: result["unique_identifier"], original_filename: "test.png", created_by: user, - upload_type: "card_background" + upload_type: "card_background", + filesize: 1024 ) expect(external_upload_stub.exists?).to eq(true) expect(result["key"]).to include(FileStore::S3Store::TEMPORARY_UPLOAD_PREFIX) @@ -742,6 +745,7 @@ describe UploadsController do post "/uploads/generate-presigned-put.json", { params: { file_name: "test.png", + file_size: 1024, type: "card_background", metadata: { "sha1-checksum" => "testing", @@ -761,8 +765,8 @@ describe UploadsController do RateLimiter.clear_all! 
stub_const(UploadsController, "PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE", 1) do - post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background" } - post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background" } + post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 } + post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 } end expect(response.status).to eq(429) end @@ -774,7 +778,566 @@ describe UploadsController do end it "returns 404" do - post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background" } + post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 } + expect(response.status).to eq(404) + end + end + end + + describe "#create_multipart" do + context "when the store is external" do + let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" } + + before do + sign_in(user) + SiteSetting.enable_direct_s3_uploads = true + setup_s3 + FileStore::S3Store.any_instance.stubs(:temporary_upload_path).returns( + "uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png" + ) + end + + it "errors if the correct params are not provided" do + post "/uploads/create-multipart.json", params: { file_name: "test.png" } + expect(response.status).to eq(400) + post "/uploads/create-multipart.json", params: { upload_type: "composer" } + expect(response.status).to eq(400) + post "/uploads/create-multipart.json", params: { content_type: "image/jpeg" } + expect(response.status).to eq(400) + end + + it "returns 422 when the create request errors" do + FileStore::S3Store.any_instance.stubs(:create_multipart).raises(Aws::S3::Errors::ServiceError.new({}, "test")) + post "/uploads/create-multipart.json", { + params: { + file_name: "test.png", + file_size: 1024, + upload_type: "composer", + content_type: "image/png" + } + } + expect(response.status).to eq(422) + end + + it "returns 422 when the file is an attachment and it's too big" do + SiteSetting.max_attachment_size_kb = 1000 + post "/uploads/create-multipart.json", { + params: { + file_name: "test.zip", + file_size: 9999999, + upload_type: "composer", + content_type: "application/zip" + } + } + expect(response.status).to eq(422) + expect(response.body).to include(I18n.t("upload.attachments.too_large", max_size_kb: SiteSetting.max_attachment_size_kb)) + end + + def stub_create_multipart_request + create_multipart_result = <<~BODY + \n + + s3-upload-bucket + uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png + #{mock_multipart_upload_id} + + BODY + stub_request( + :post, + "https://s3-upload-bucket.s3.us-west-1.amazonaws.com/uploads/default/test_0/temp/28fccf8259bbe75b873a2bd2564b778c/test.png?uploads" + ).to_return({ status: 200, body: create_multipart_result }) + end + + it "creates a multipart upload and creates an external upload stub that is marked as multipart" do + stub_create_multipart_request + post "/uploads/create-multipart.json", { + params: { + file_name: "test.png", + file_size: 1024, + upload_type: "composer", + content_type: "image/png" + } + } + + expect(response.status).to eq(200) + result = response.parsed_body + + external_upload_stub = ExternalUploadStub.where( + unique_identifier: result["unique_identifier"], + original_filename: 
"test.png", + created_by: user, + upload_type: "composer", + key: result["key"], + external_upload_identifier: mock_multipart_upload_id, + multipart: true, + filesize: 1024 + ) + expect(external_upload_stub.exists?).to eq(true) + expect(result["key"]).to include(FileStore::S3Store::TEMPORARY_UPLOAD_PREFIX) + expect(result["external_upload_identifier"]).to eq(mock_multipart_upload_id) + expect(result["key"]).to eq(external_upload_stub.last.key) + end + + it "rate limits" do + RateLimiter.enable + RateLimiter.clear_all! + + stub_create_multipart_request + stub_const(UploadsController, "CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do + post "/uploads/create-multipart.json", params: { + file_name: "test.png", + upload_type: "composer", + content_type: "image/png", + file_size: 1024 + } + expect(response.status).to eq(200) + + post "/uploads/create-multipart.json", params: { + file_name: "test.png", + upload_type: "composer", + content_type: "image/png", + file_size: 1024 + } + expect(response.status).to eq(429) + end + end + end + + context "when the store is not external" do + before do + sign_in(user) + end + + it "returns 404" do + post "/uploads/create-multipart.json", params: { + file_name: "test.png", + upload_type: "composer", + content_type: "image/png", + file_size: 1024 + } + expect(response.status).to eq(404) + end + end + end + + describe "#batch_presign_multipart_parts" do + fab!(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" } + fab!(:external_upload_stub) do + Fabricate(:image_external_upload_stub, created_by: user, multipart: true, external_upload_identifier: mock_multipart_upload_id) + end + + context "when the store is external" do + before do + sign_in(user) + SiteSetting.enable_direct_s3_uploads = true + setup_s3 + end + + def stub_list_multipart_request + list_multipart_result = <<~BODY + \n + + s3-upload-bucket + #{external_upload_stub.key} + #{mock_multipart_upload_id} + 0 + 0 + 1 + false + + test + #{Time.zone.now} + 1 + #{5.megabytes} + + + test-upload-user + arn:aws:iam::123:user/test-upload-user + + + + 12345 + + STANDARD + + BODY + stub_request(:get, "https://s3-upload-bucket.s3.us-west-1.amazonaws.com/#{external_upload_stub.key}?uploadId=#{mock_multipart_upload_id}").to_return({ status: 200, body: list_multipart_result }) + end + + it "errors if the correct params are not provided" do + post "/uploads/batch-presign-multipart-parts.json", params: {} + expect(response.status).to eq(400) + end + + it "errors if the part_numbers do not contain numbers between 1 and 10000" do + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [-1, 0, 1, 2, 3, 4] + } + expect(response.status).to eq(400) + expect(response.body).to include("You supplied invalid parameters to the request: Each part number should be between 1 and 10000") + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [3, 4, "blah"] + } + expect(response.status).to eq(400) + expect(response.body).to include("You supplied invalid parameters to the request: Each part number should be between 1 and 10000") + end + + it "returns 404 when the upload stub does not exist" do + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: "unknown", + part_numbers: [1, 2, 3] + } + expect(response.status).to eq(404) + end + + it "returns 404 when the upload stub 
does not belong to the user" do + external_upload_stub.update!(created_by: Fabricate(:user)) + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [1, 2, 3] + } + expect(response.status).to eq(404) + end + + it "returns 404 when the multipart upload does not exist" do + FileStore::S3Store.any_instance.stubs(:list_multipart_parts).raises(Aws::S3::Errors::NoSuchUpload.new("test", "test")) + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [1, 2, 3] + } + expect(response.status).to eq(404) + end + + it "returns an object with the presigned URLs with the part numbers as keys" do + stub_list_multipart_request + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [2, 3, 4] + } + + expect(response.status).to eq(200) + result = response.parsed_body + expect(result["presigned_urls"].keys).to eq(["2", "3", "4"]) + expect(result["presigned_urls"]["2"]).to include("?partNumber=2&uploadId=#{mock_multipart_upload_id}") + expect(result["presigned_urls"]["3"]).to include("?partNumber=3&uploadId=#{mock_multipart_upload_id}") + expect(result["presigned_urls"]["4"]).to include("?partNumber=4&uploadId=#{mock_multipart_upload_id}") + end + + it "rate limits" do + RateLimiter.enable + RateLimiter.clear_all! + + stub_const(UploadsController, "BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE", 1) do + stub_list_multipart_request + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [1, 2, 3] + } + + expect(response.status).to eq(200) + + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [1, 2, 3] + } + + expect(response.status).to eq(429) + end + end + end + + context "when the store is not external" do + before do + sign_in(user) + end + + it "returns 404" do + post "/uploads/batch-presign-multipart-parts.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + part_numbers: [1, 2, 3] + } + expect(response.status).to eq(404) + end + end + end + + describe "#complete_multipart" do + let(:upload_base_url) { "https://#{SiteSetting.s3_upload_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" } + let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" } + let!(:external_upload_stub) do + Fabricate(:image_external_upload_stub, created_by: user, multipart: true, external_upload_identifier: mock_multipart_upload_id) + end + + context "when the store is external" do + before do + sign_in(user) + SiteSetting.enable_direct_s3_uploads = true + setup_s3 + end + + def stub_list_multipart_request + list_multipart_result = <<~BODY + \n + + s3-upload-bucket + #{external_upload_stub.key} + #{mock_multipart_upload_id} + 0 + 0 + 1 + false + + test + #{Time.zone.now} + 1 + #{5.megabytes} + + + test-upload-user + arn:aws:iam::123:user/test-upload-user + + + + 12345 + + STANDARD + + BODY + stub_request(:get, "#{upload_base_url}/#{external_upload_stub.key}?uploadId=#{mock_multipart_upload_id}").to_return({ status: 200, body: list_multipart_result }) + end + + it "errors if the correct params are not provided" do + post "/uploads/complete-multipart.json", params: {} + expect(response.status).to eq(400) + end + + it "errors if the 
part_numbers do not contain numbers between 1 and 10000" do + stub_list_multipart_request + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: -1, etag: "test1" }] + } + expect(response.status).to eq(400) + expect(response.body).to include("You supplied invalid parameters to the request: Each part number should be between 1 and 10000") + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 20001, etag: "test1" }] + } + expect(response.status).to eq(400) + expect(response.body).to include("You supplied invalid parameters to the request: Each part number should be between 1 and 10000") + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: "blah", etag: "test1" }] + } + expect(response.status).to eq(400) + expect(response.body).to include("You supplied invalid parameters to the request: Each part number should be between 1 and 10000") + end + + it "errors if any of the parts objects have missing values" do + stub_list_multipart_request + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 1 }] + } + expect(response.status).to eq(400) + expect(response.body).to include("All parts must have an etag") + end + + it "returns 404 when the upload stub does not exist" do + post "/uploads/complete-multipart.json", params: { + unique_identifier: "unknown", + parts: [{ part_number: 1, etag: "test1" }] + } + expect(response.status).to eq(404) + end + + it "returns 422 when the complete request errors" do + FileStore::S3Store.any_instance.stubs(:complete_multipart).raises(Aws::S3::Errors::ServiceError.new({}, "test")) + stub_list_multipart_request + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 1, etag: "test1" }] + } + expect(response.status).to eq(422) + end + + it "returns 404 when the upload stub does not belong to the user" do + external_upload_stub.update!(created_by: Fabricate(:user)) + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 1, etag: "test1" }] + } + expect(response.status).to eq(404) + end + + it "returns 404 when the multipart upload does not exist" do + FileStore::S3Store.any_instance.stubs(:list_multipart_parts).raises(Aws::S3::Errors::NoSuchUpload.new("test", "test")) + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 1, etag: "test1" }] + } + expect(response.status).to eq(404) + end + + it "completes the multipart upload, creates the Upload record, and returns a serialized Upload record" do + temp_location = "#{upload_base_url}/#{external_upload_stub.key}" + stub_list_multipart_request + stub_request( + :post, + "#{temp_location}?uploadId=#{external_upload_stub.external_upload_identifier}" + ).with( + body: "<CompleteMultipartUpload xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\n <Part>\n <ETag>test1</ETag>\n <PartNumber>1</PartNumber>\n </Part>\n <Part>\n <ETag>test2</ETag>\n <PartNumber>2</PartNumber>\n </Part>\n</CompleteMultipartUpload>\n" + ).to_return(status: 200, body: <<~XML) + <?xml version="1.0" encoding="UTF-8"?> + <CompleteMultipartUploadResult> + <Location>#{temp_location}</Location> + <Bucket>s3-upload-bucket</Bucket> + <Key>#{external_upload_stub.key}</Key> + <ETag>testfinal</ETag> + </CompleteMultipartUploadResult> + XML + + # all the functionality for ExternalUploadManager is already tested along + # with stubs to S3 in its own test, we can just stub the response here + upload = Fabricate(:upload) +
ExternalUploadManager.any_instance.stubs(:promote_to_upload!).returns(upload) + + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.unique_identifier, + parts: [{ part_number: 1, etag: "test1" }, { part_number: 2, etag: "test2" }] + } + + expect(response.status).to eq(200) + result = response.parsed_body + expect(result[:upload]).to eq(JSON.parse(UploadSerializer.new(upload).to_json)[:upload]) + end + + it "rate limits" do + RateLimiter.enable + RateLimiter.clear_all! + + stub_const(UploadsController, "COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do + post "/uploads/complete-multipart.json", params: { + unique_identifier: "blah", + parts: [{ part_number: 1, etag: "test1" }, { part_number: 2, etag: "test2" }] + } + post "/uploads/complete-multipart.json", params: { + unique_identifier: "blah", + parts: [{ part_number: 1, etag: "test1" }, { part_number: 2, etag: "test2" }] + } + end + expect(response.status).to eq(429) + end + end + + context "when the store is not external" do + before do + sign_in(user) + end + + it "returns 404" do + post "/uploads/complete-multipart.json", params: { + unique_identifier: external_upload_stub.external_upload_identifier, + parts: [ + { + part_number: 1, + etag: "test1" + }, + { + part_number: 2, + etag: "test2" + } + ] + } + expect(response.status).to eq(404) + end + end + end + + describe "#abort_multipart" do + let(:upload_base_url) { "https://#{SiteSetting.s3_upload_bucket}.s3.#{SiteSetting.s3_region}.amazonaws.com" } + let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" } + let!(:external_upload_stub) do + Fabricate(:image_external_upload_stub, created_by: user, multipart: true, external_upload_identifier: mock_multipart_upload_id) + end + + context "when the store is external" do + before do + sign_in(user) + SiteSetting.enable_direct_s3_uploads = true + setup_s3 + end + + def stub_abort_request + temp_location = "#{upload_base_url}/#{external_upload_stub.key}" + stub_request( + :delete, + "#{temp_location}?uploadId=#{external_upload_stub.external_upload_identifier}" + ).to_return(status: 200, body: "") + end + + it "errors if the correct params are not provided" do + post "/uploads/abort-multipart.json", params: {} + expect(response.status).to eq(400) + end + + it "returns 200 when the stub does not exist, assumes it has already been deleted" do + FileStore::S3Store.any_instance.expects(:abort_multipart).never + post "/uploads/abort-multipart.json", params: { + external_upload_identifier: "unknown", + } + expect(response.status).to eq(200) + end + + it "returns 404 when the upload stub does not belong to the user" do + external_upload_stub.update!(created_by: Fabricate(:user)) + post "/uploads/abort-multipart.json", params: { + external_upload_identifier: external_upload_stub.external_upload_identifier + } + expect(response.status).to eq(404) + end + + it "aborts the multipart upload and deletes the stub" do + stub_abort_request + + post "/uploads/abort-multipart.json", params: { + external_upload_identifier: external_upload_stub.external_upload_identifier + } + + expect(response.status).to eq(200) + expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false) + end + + it "returns 422 when the abort request errors" do + FileStore::S3Store.any_instance.stubs(:abort_multipart).raises(Aws::S3::Errors::ServiceError.new({}, "test")) + post "/uploads/abort-multipart.json", params: { + external_upload_identifier: 
@@ -786,7 +1349,7 @@ describe UploadsController do
     end
 
     context "when the store is external" do
-      fab!(:external_upload_stub) { Fabricate(:external_upload_stub, created_by: user) }
+      fab!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) }
       let(:upload) { Fabricate(:upload) }
 
       before do
@@ -813,6 +1376,13 @@ describe UploadsController do
         expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
       end
 
+      it "handles SizeMismatchError" do
+        ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::SizeMismatchError.new("expected: 10, actual: 1000"))
+        post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
+        expect(response.status).to eq(422)
+        expect(response.parsed_body["errors"].first).to eq(I18n.t("upload.failed"))
+      end
+
       it "handles CannotPromoteError" do
         ExternalUploadManager.any_instance.stubs(:promote_to_upload!).raises(ExternalUploadManager::CannotPromoteError)
         post "/uploads/complete-external-upload.json", params: { unique_identifier: external_upload_stub.unique_identifier }
diff --git a/spec/services/external_upload_manager_spec.rb b/spec/services/external_upload_manager_spec.rb
index 59a7ad92818..aafad046341 100644
--- a/spec/services/external_upload_manager_spec.rb
+++ b/spec/services/external_upload_manager_spec.rb
@@ -31,6 +31,15 @@ RSpec.describe ExternalUploadManager do
     stub_delete_object
   end
 
+  describe "#ban_user_from_external_uploads!" do
+    after { Discourse.redis.flushdb }
+
+    it "bans the user from external uploads using a redis key" do
+      ExternalUploadManager.ban_user_from_external_uploads!(user: user)
+      expect(ExternalUploadManager.user_banned?(user)).to eq(true)
+    end
+  end
+
   describe "#can_promote?" do
     it "returns false if the external stub status is not created" do
       external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
@@ -40,7 +49,7 @@ RSpec.describe ExternalUploadManager do
 
   describe "#promote_to_upload!" do
     context "when stubbed upload is < DOWNLOAD_LIMIT (small enough to download + generate sha)" do
-      let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) }
+      let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user, filesize: object_size) }
       let(:object_size) { 1.megabyte }
       let(:object_file) { logo_file }
 
@@ -114,18 +123,36 @@ RSpec.describe ExternalUploadManager do
 
         context "when the downloaded file sha1 does not match the client sha1" do
           let(:client_sha1) { "blahblah" }
 
-          it "raises an error and marks upload as failed" do
+          it "raises an error and deletes the stub" do
             expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::ChecksumMismatchError)
-            expect(external_upload_stub.reload.status).to eq(ExternalUploadStub.statuses[:failed])
+            expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
          end
        end
      end
+
+      context "when the downloaded file size does not match the expected file size for the upload stub" do
+        before do
+          external_upload_stub.update!(filesize: 10)
+        end
+
+        after { Discourse.redis.flushdb }
+
+        it "raises an error, deletes the file immediately, and prevents the user from uploading external files for a few minutes" do
+          expect { subject.promote_to_upload! }.to raise_error(ExternalUploadManager::SizeMismatchError)
+          expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
+          expect(Discourse.redis.get("#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}")).to eq("1")
+          expect(WebMock).to have_requested(
+            :delete,
+            "#{upload_base_url}/#{external_upload_stub.key}"
+          )
+        end
+      end
     end
 
     context "when stubbed upload is > DOWNLOAD_LIMIT (too big to download, generate a fake sha)" do
       let(:object_size) { 200.megabytes }
       let(:object_file) { pdf_file }
-      let!(:external_upload_stub) { Fabricate(:attachment_external_upload_stub, created_by: user) }
+      let!(:external_upload_stub) { Fabricate(:attachment_external_upload_stub, created_by: user, filesize: object_size) }
 
       before do
         UploadCreator.any_instance.stubs(:generate_fake_sha1_hash).returns("testbc60eb18e8f974cbfae8bb0f069c3a311024")
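The ban asserted in the size-mismatch spec above is just a short-lived Redis key: `BAN_USER_REDIS_PREFIX` plus the user id, set to `"1"`. A minimal sketch of how such a ban can be implemented, consistent with what the specs exercise; the prefix value and TTL below are assumptions for illustration, not copied from the Discourse source:

```ruby
class ExternalUploadManager
  BAN_USER_REDIS_PREFIX = "ban_user_from_external_uploads_" # assumed value
  BAN_MINUTES = 5                                           # assumed default

  def self.ban_user_from_external_uploads!(user:, ban_minutes: BAN_MINUTES)
    # SETEX gives the key a TTL, so the ban lifts itself automatically.
    Discourse.redis.setex("#{BAN_USER_REDIS_PREFIX}#{user.id}", 60 * ban_minutes, "1")
  end

  def self.user_banned?(user)
    Discourse.redis.get("#{BAN_USER_REDIS_PREFIX}#{user.id}") == "1"
  end
end
```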
diff --git a/vendor/assets/javascripts/uppy.js b/vendor/assets/javascripts/uppy.js
index 36cf75c3923..ef9e4517322 100644
--- a/vendor/assets/javascripts/uppy.js
+++ b/vendor/assets/javascripts/uppy.js
@@ -1,4 +1,18 @@
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i= need) { - break; - } - } - - candidates.forEach(function (index) { - _this3._uploadPartRetryable(index).then(function () { - // Continue uploading parts - _this3._uploadParts(); - }, function (err) { - _this3._onError(err); - }); - }); - }; - - _proto._retryable = function _retryable(_ref) { - var _this4 = this; - - var before = _ref.before, - attempt = _ref.attempt, - after = _ref.after; - var retryDelays = this.options.retryDelays; - var signal = this.abortController.signal; - if (before) before(); - - function shouldRetry(err) { - if (err.source && typeof err.source.status === 'number') { - var status = err.source.status; // 0 probably indicates network failure - - return status === 0 || status === 409 || status === 423 || status >= 500 && status < 600; - } - - return false; - } - - var doAttempt = function doAttempt(retryAttempt) { - return attempt().catch(function (err) { - if (_this4._aborted()) throw createAbortError(); - - if (shouldRetry(err) && retryAttempt < retryDelays.length) { - return delay(retryDelays[retryAttempt], { - signal: signal - }).then(function () { - return doAttempt(retryAttempt + 1); - }); - } - - throw err; - }); - }; - - return doAttempt(0).then(function (result) { - if (after) after(); - return result; - }, function (err) { - if (after) after(); - throw err; - }); - }; - - _proto._uploadPartRetryable = function _uploadPartRetryable(index) { - var _this5 = this; -
- return this._retryable({ - before: function before() { - _this5.partsInProgress += 1; - }, - attempt: function attempt() { - return _this5._uploadPart(index); - }, - after: function after() { - _this5.partsInProgress -= 1; - } - }); - }; - - _proto._uploadPart = function _uploadPart(index) { - var _this6 = this; - - var body = this.chunks[index]; - this.chunkState[index].busy = true; - return Promise.resolve().then(function () { - return _this6.options.prepareUploadPart({ - key: _this6.key, - uploadId: _this6.uploadId, - body: body, - number: index + 1 - }); - }).then(function (result) { - var valid = typeof result === 'object' && result && typeof result.url === 'string'; - - if (!valid) { - throw new TypeError('AwsS3/Multipart: Got incorrect result from `prepareUploadPart()`, expected an object `{ url }`.'); - } - - return result; - }).then(function (_ref2) { - var url = _ref2.url, - headers = _ref2.headers; - - if (_this6._aborted()) { - _this6.chunkState[index].busy = false; - throw createAbortError(); - } - - return _this6._uploadPartBytes(index, url, headers); - }); - }; - - _proto._onPartProgress = function _onPartProgress(index, sent, total) { - this.chunkState[index].uploaded = ensureInt(sent); - var totalUploaded = this.chunkState.reduce(function (n, c) { - return n + c.uploaded; - }, 0); - this.options.onProgress(totalUploaded, this.file.size); - }; - - _proto._onPartComplete = function _onPartComplete(index, etag) { - this.chunkState[index].etag = etag; - this.chunkState[index].done = true; - var part = { - PartNumber: index + 1, - ETag: etag - }; - this.parts.push(part); - this.options.onPartComplete(part); - }; - - _proto._uploadPartBytes = function _uploadPartBytes(index, url, headers) { - var _this7 = this; - - var body = this.chunks[index]; - var signal = this.abortController.signal; - var defer; - var promise = new Promise(function (resolve, reject) { - defer = { - resolve: resolve, - reject: reject - }; - }); - var xhr = new XMLHttpRequest(); - xhr.open('PUT', url, true); - - if (headers) { - Object.keys(headers).map(function (key) { - xhr.setRequestHeader(key, headers[key]); - }); - } - - xhr.responseType = 'text'; - - function cleanup() { - signal.removeEventListener('abort', onabort); - } - - function onabort() { - xhr.abort(); - } - - signal.addEventListener('abort', onabort); - xhr.upload.addEventListener('progress', function (ev) { - if (!ev.lengthComputable) return; - - _this7._onPartProgress(index, ev.loaded, ev.total); - }); - xhr.addEventListener('abort', function (ev) { - cleanup(); - _this7.chunkState[index].busy = false; - defer.reject(createAbortError()); - }); - xhr.addEventListener('load', function (ev) { - cleanup(); - _this7.chunkState[index].busy = false; - - if (ev.target.status < 200 || ev.target.status >= 300) { - var error = new Error('Non 2xx'); - error.source = ev.target; - defer.reject(error); - return; - } - - _this7._onPartProgress(index, body.size, body.size); // NOTE This must be allowed by CORS. - - - var etag = ev.target.getResponseHeader('ETag'); - - if (etag === null) { - defer.reject(new Error('AwsS3/Multipart: Could not read the ETag header. This likely means CORS is not configured correctly on the S3 Bucket. 
See https://uppy.io/docs/aws-s3-multipart#S3-Bucket-Configuration for instructions.')); - return; - } - - _this7._onPartComplete(index, etag); - - defer.resolve(); - }); - xhr.addEventListener('error', function (ev) { - cleanup(); - _this7.chunkState[index].busy = false; - var error = new Error('Unknown error'); - error.source = ev.target; - defer.reject(error); - }); - xhr.send(body); - return promise; - }; - - _proto._completeUpload = function _completeUpload() { - var _this8 = this; - - // Parts may not have completed uploading in sorted order, if limit > 1. - this.parts.sort(function (a, b) { - return a.PartNumber - b.PartNumber; - }); - return Promise.resolve().then(function () { - return _this8.options.completeMultipartUpload({ - key: _this8.key, - uploadId: _this8.uploadId, - parts: _this8.parts - }); - }).then(function (result) { - _this8.options.onSuccess(result); - }, function (err) { - _this8._onError(err); - }); - }; - - _proto._abortUpload = function _abortUpload() { - var _this9 = this; - - this.abortController.abort(); - this.createdPromise.then(function () { - _this9.options.abortMultipartUpload({ - key: _this9.key, - uploadId: _this9.uploadId - }); - }, function () {// if the creation failed we do not need to abort - }); - }; - - _proto._onError = function _onError(err) { - if (err && err.name === 'AbortError') { - return; - } - - this.options.onError(err); - }; - - _proto.start = function start() { - this.isPaused = false; - - if (this.uploadId) { - this._resumeUpload(); - } else { - this._createUpload(); - } - }; - - _proto.pause = function pause() { - this.abortController.abort(); // Swap it out for a new controller, because this instance may be resumed later. - - this.abortController = new AbortController(); - this.isPaused = true; - }; - - _proto.abort = function abort(opts) { - if (opts === void 0) { - opts = {}; - } - - var really = opts.really || false; - if (!really) return this.pause(); - - this._abortUpload(); - }; - - return MultipartUploader; -}(); - -module.exports = MultipartUploader; -},{"@uppy/utils/lib/AbortController":21,"@uppy/utils/lib/delay":27}],3:[function(require,module,exports){ -var _class, _temp; - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } - -var _require = require('@uppy/core'), - Plugin = _require.Plugin; - -var _require2 = require('@uppy/companion-client'), - Socket = _require2.Socket, - Provider = _require2.Provider, - RequestClient = _require2.RequestClient; - -var EventTracker = require('@uppy/utils/lib/EventTracker'); - -var emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress'); - -var getSocketHost = require('@uppy/utils/lib/getSocketHost'); - -var RateLimitedQueue = require('@uppy/utils/lib/RateLimitedQueue'); - -var Uploader 
= require('./MultipartUploader'); - -function assertServerError(res) { - if (res && res.error) { - var error = new Error(res.message); - - _extends(error, res.error); - - throw error; - } - - return res; -} - -module.exports = (_temp = _class = /*#__PURE__*/function (_Plugin) { - _inheritsLoose(AwsS3Multipart, _Plugin); - - function AwsS3Multipart(uppy, opts) { - var _this; - - _this = _Plugin.call(this, uppy, opts) || this; - _this.type = 'uploader'; - _this.id = _this.opts.id || 'AwsS3Multipart'; - _this.title = 'AWS S3 Multipart'; - _this.client = new RequestClient(uppy, opts); - var defaultOptions = { - timeout: 30 * 1000, - limit: 0, - retryDelays: [0, 1000, 3000, 5000], - createMultipartUpload: _this.createMultipartUpload.bind(_assertThisInitialized(_this)), - listParts: _this.listParts.bind(_assertThisInitialized(_this)), - prepareUploadPart: _this.prepareUploadPart.bind(_assertThisInitialized(_this)), - abortMultipartUpload: _this.abortMultipartUpload.bind(_assertThisInitialized(_this)), - completeMultipartUpload: _this.completeMultipartUpload.bind(_assertThisInitialized(_this)) - }; - _this.opts = _extends({}, defaultOptions, opts); - _this.upload = _this.upload.bind(_assertThisInitialized(_this)); - _this.requests = new RateLimitedQueue(_this.opts.limit); - _this.uploaders = Object.create(null); - _this.uploaderEvents = Object.create(null); - _this.uploaderSockets = Object.create(null); - return _this; - } - /** - * Clean up all references for a file's upload: the MultipartUploader instance, - * any events related to the file, and the Companion WebSocket connection. - * - * Set `opts.abort` to tell S3 that the multipart upload is cancelled and must be removed. - * This should be done when the user cancels the upload, not when the upload is completed or errored. 
- */ - - - var _proto = AwsS3Multipart.prototype; - - _proto.resetUploaderReferences = function resetUploaderReferences(fileID, opts) { - if (opts === void 0) { - opts = {}; - } - - if (this.uploaders[fileID]) { - this.uploaders[fileID].abort({ - really: opts.abort || false - }); - this.uploaders[fileID] = null; - } - - if (this.uploaderEvents[fileID]) { - this.uploaderEvents[fileID].remove(); - this.uploaderEvents[fileID] = null; - } - - if (this.uploaderSockets[fileID]) { - this.uploaderSockets[fileID].close(); - this.uploaderSockets[fileID] = null; - } - }; - - _proto.assertHost = function assertHost(method) { - if (!this.opts.companionUrl) { - throw new Error("Expected a `companionUrl` option containing a Companion address, or if you are not using Companion, a custom `" + method + "` implementation."); - } - }; - - _proto.createMultipartUpload = function createMultipartUpload(file) { - this.assertHost('createMultipartUpload'); - var metadata = {}; - Object.keys(file.meta).map(function (key) { - if (file.meta[key] != null) { - metadata[key] = file.meta[key].toString(); - } - }); - return this.client.post('s3/multipart', { - filename: file.name, - type: file.type, - metadata: metadata - }).then(assertServerError); - }; - - _proto.listParts = function listParts(file, _ref) { - var key = _ref.key, - uploadId = _ref.uploadId; - this.assertHost('listParts'); - var filename = encodeURIComponent(key); - return this.client.get("s3/multipart/" + uploadId + "?key=" + filename).then(assertServerError); - }; - - _proto.prepareUploadPart = function prepareUploadPart(file, _ref2) { - var key = _ref2.key, - uploadId = _ref2.uploadId, - number = _ref2.number; - this.assertHost('prepareUploadPart'); - var filename = encodeURIComponent(key); - return this.client.get("s3/multipart/" + uploadId + "/" + number + "?key=" + filename).then(assertServerError); - }; - - _proto.completeMultipartUpload = function completeMultipartUpload(file, _ref3) { - var key = _ref3.key, - uploadId = _ref3.uploadId, - parts = _ref3.parts; - this.assertHost('completeMultipartUpload'); - var filename = encodeURIComponent(key); - var uploadIdEnc = encodeURIComponent(uploadId); - return this.client.post("s3/multipart/" + uploadIdEnc + "/complete?key=" + filename, { - parts: parts - }).then(assertServerError); - }; - - _proto.abortMultipartUpload = function abortMultipartUpload(file, _ref4) { - var key = _ref4.key, - uploadId = _ref4.uploadId; - this.assertHost('abortMultipartUpload'); - var filename = encodeURIComponent(key); - var uploadIdEnc = encodeURIComponent(uploadId); - return this.client.delete("s3/multipart/" + uploadIdEnc + "?key=" + filename).then(assertServerError); - }; - - _proto.uploadFile = function uploadFile(file) { - var _this2 = this; - - return new Promise(function (resolve, reject) { - var onStart = function onStart(data) { - var cFile = _this2.uppy.getFile(file.id); - - _this2.uppy.setFileState(file.id, { - s3Multipart: _extends({}, cFile.s3Multipart, { - key: data.key, - uploadId: data.uploadId - }) - }); - }; - - var onProgress = function onProgress(bytesUploaded, bytesTotal) { - _this2.uppy.emit('upload-progress', file, { - uploader: _this2, - bytesUploaded: bytesUploaded, - bytesTotal: bytesTotal - }); - }; - - var onError = function onError(err) { - _this2.uppy.log(err); - - _this2.uppy.emit('upload-error', file, err); - - queuedRequest.done(); - - _this2.resetUploaderReferences(file.id); - - reject(err); - }; - - var onSuccess = function onSuccess(result) { - var uploadResp = { - body: _extends({}, 
result), - uploadURL: result.location - }; - queuedRequest.done(); - - _this2.resetUploaderReferences(file.id); - - var cFile = _this2.uppy.getFile(file.id); - - _this2.uppy.emit('upload-success', cFile || file, uploadResp); - - if (result.location) { - _this2.uppy.log("Download " + upload.file.name + " from " + result.location); - } - - resolve(upload); - }; - - var onPartComplete = function onPartComplete(part) { - var cFile = _this2.uppy.getFile(file.id); - - if (!cFile) { - return; - } - - _this2.uppy.emit('s3-multipart:part-uploaded', cFile, part); - }; - - var upload = new Uploader(file.data, _extends({ - // .bind to pass the file object to each handler. - createMultipartUpload: _this2.opts.createMultipartUpload.bind(_this2, file), - listParts: _this2.opts.listParts.bind(_this2, file), - prepareUploadPart: _this2.opts.prepareUploadPart.bind(_this2, file), - completeMultipartUpload: _this2.opts.completeMultipartUpload.bind(_this2, file), - abortMultipartUpload: _this2.opts.abortMultipartUpload.bind(_this2, file), - getChunkSize: _this2.opts.getChunkSize ? _this2.opts.getChunkSize.bind(_this2) : null, - onStart: onStart, - onProgress: onProgress, - onError: onError, - onSuccess: onSuccess, - onPartComplete: onPartComplete, - limit: _this2.opts.limit || 5, - retryDelays: _this2.opts.retryDelays || [] - }, file.s3Multipart)); - _this2.uploaders[file.id] = upload; - _this2.uploaderEvents[file.id] = new EventTracker(_this2.uppy); - - var queuedRequest = _this2.requests.run(function () { - if (!file.isPaused) { - upload.start(); - } // Don't do anything here, the caller will take care of cancelling the upload itself - // using resetUploaderReferences(). This is because resetUploaderReferences() has to be - // called when this request is still in the queue, and has not been started yet, too. At - // that point this cancellation function is not going to be called. - - - return function () {}; - }); - - _this2.onFileRemove(file.id, function (removed) { - queuedRequest.abort(); - - _this2.resetUploaderReferences(file.id, { - abort: true - }); - - resolve("upload " + removed.id + " was removed"); - }); - - _this2.onCancelAll(file.id, function () { - queuedRequest.abort(); - - _this2.resetUploaderReferences(file.id, { - abort: true - }); - - resolve("upload " + file.id + " was canceled"); - }); - - _this2.onFilePause(file.id, function (isPaused) { - if (isPaused) { - // Remove this file from the queue so another file can start in its place. - queuedRequest.abort(); - upload.pause(); - } else { - // Resuming an upload should be queued, else you could pause and then resume a queued upload to make it skip the queue. 
- queuedRequest.abort(); - queuedRequest = _this2.requests.run(function () { - upload.start(); - return function () {}; - }); - } - }); - - _this2.onPauseAll(file.id, function () { - queuedRequest.abort(); - upload.pause(); - }); - - _this2.onResumeAll(file.id, function () { - queuedRequest.abort(); - - if (file.error) { - upload.abort(); - } - - queuedRequest = _this2.requests.run(function () { - upload.start(); - return function () {}; - }); - }); // Don't double-emit upload-started for Golden Retriever-restored files that were already started - - - if (!file.progress.uploadStarted || !file.isRestored) { - _this2.uppy.emit('upload-started', file); - } - }); - }; - - _proto.uploadRemote = function uploadRemote(file) { - var _this3 = this; - - this.resetUploaderReferences(file.id); // Don't double-emit upload-started for Golden Retriever-restored files that were already started - - if (!file.progress.uploadStarted || !file.isRestored) { - this.uppy.emit('upload-started', file); - } - - if (file.serverToken) { - return this.connectToServerSocket(file); - } - - return new Promise(function (resolve, reject) { - var Client = file.remote.providerOptions.provider ? Provider : RequestClient; - var client = new Client(_this3.uppy, file.remote.providerOptions); - client.post(file.remote.url, _extends({}, file.remote.body, { - protocol: 's3-multipart', - size: file.data.size, - metadata: file.meta - })).then(function (res) { - _this3.uppy.setFileState(file.id, { - serverToken: res.token - }); - - file = _this3.uppy.getFile(file.id); - return file; - }).then(function (file) { - return _this3.connectToServerSocket(file); - }).then(function () { - resolve(); - }).catch(function (err) { - _this3.uppy.emit('upload-error', file, err); - - reject(err); - }); - }); - }; - - _proto.connectToServerSocket = function connectToServerSocket(file) { - var _this4 = this; - - return new Promise(function (resolve, reject) { - var token = file.serverToken; - var host = getSocketHost(file.remote.companionUrl); - var socket = new Socket({ - target: host + "/api/" + token, - autoOpen: false - }); - _this4.uploaderSockets[file.id] = socket; - _this4.uploaderEvents[file.id] = new EventTracker(_this4.uppy); - - _this4.onFileRemove(file.id, function (removed) { - queuedRequest.abort(); - socket.send('pause', {}); - - _this4.resetUploaderReferences(file.id, { - abort: true - }); - - resolve("upload " + file.id + " was removed"); - }); - - _this4.onFilePause(file.id, function (isPaused) { - if (isPaused) { - // Remove this file from the queue so another file can start in its place. - queuedRequest.abort(); - socket.send('pause', {}); - } else { - // Resuming an upload should be queued, else you could pause and then resume a queued upload to make it skip the queue. 
- queuedRequest.abort(); - queuedRequest = _this4.requests.run(function () { - socket.send('resume', {}); - return function () {}; - }); - } - }); - - _this4.onPauseAll(file.id, function () { - queuedRequest.abort(); - socket.send('pause', {}); - }); - - _this4.onCancelAll(file.id, function () { - queuedRequest.abort(); - socket.send('pause', {}); - - _this4.resetUploaderReferences(file.id); - - resolve("upload " + file.id + " was canceled"); - }); - - _this4.onResumeAll(file.id, function () { - queuedRequest.abort(); - - if (file.error) { - socket.send('pause', {}); - } - - queuedRequest = _this4.requests.run(function () { - socket.send('resume', {}); - }); - }); - - _this4.onRetry(file.id, function () { - // Only do the retry if the upload is actually in progress; - // else we could try to send these messages when the upload is still queued. - // We may need a better check for this since the socket may also be closed - // for other reasons, like network failures. - if (socket.isOpen) { - socket.send('pause', {}); - socket.send('resume', {}); - } - }); - - _this4.onRetryAll(file.id, function () { - if (socket.isOpen) { - socket.send('pause', {}); - socket.send('resume', {}); - } - }); - - socket.on('progress', function (progressData) { - return emitSocketProgress(_this4, progressData, file); - }); - socket.on('error', function (errData) { - _this4.uppy.emit('upload-error', file, new Error(errData.error)); - - _this4.resetUploaderReferences(file.id); - - queuedRequest.done(); - reject(new Error(errData.error)); - }); - socket.on('success', function (data) { - var uploadResp = { - uploadURL: data.url - }; - - _this4.uppy.emit('upload-success', file, uploadResp); - - _this4.resetUploaderReferences(file.id); - - queuedRequest.done(); - resolve(); - }); - - var queuedRequest = _this4.requests.run(function () { - socket.open(); - - if (file.isPaused) { - socket.send('pause', {}); - } - - return function () {}; - }); - }); - }; - - _proto.upload = function upload(fileIDs) { - var _this5 = this; - - if (fileIDs.length === 0) return Promise.resolve(); - var promises = fileIDs.map(function (id) { - var file = _this5.uppy.getFile(id); - - if (file.isRemote) { - return _this5.uploadRemote(file); - } - - return _this5.uploadFile(file); - }); - return Promise.all(promises); - }; - - _proto.onFileRemove = function onFileRemove(fileID, cb) { - this.uploaderEvents[fileID].on('file-removed', function (file) { - if (fileID === file.id) cb(file.id); - }); - }; - - _proto.onFilePause = function onFilePause(fileID, cb) { - this.uploaderEvents[fileID].on('upload-pause', function (targetFileID, isPaused) { - if (fileID === targetFileID) { - // const isPaused = this.uppy.pauseResume(fileID) - cb(isPaused); - } - }); - }; - - _proto.onRetry = function onRetry(fileID, cb) { - this.uploaderEvents[fileID].on('upload-retry', function (targetFileID) { - if (fileID === targetFileID) { - cb(); - } - }); - }; - - _proto.onRetryAll = function onRetryAll(fileID, cb) { - var _this6 = this; - - this.uploaderEvents[fileID].on('retry-all', function (filesToRetry) { - if (!_this6.uppy.getFile(fileID)) return; - cb(); - }); - }; - - _proto.onPauseAll = function onPauseAll(fileID, cb) { - var _this7 = this; - - this.uploaderEvents[fileID].on('pause-all', function () { - if (!_this7.uppy.getFile(fileID)) return; - cb(); - }); - }; - - _proto.onCancelAll = function onCancelAll(fileID, cb) { - var _this8 = this; - - this.uploaderEvents[fileID].on('cancel-all', function () { - if (!_this8.uppy.getFile(fileID)) return; - cb(); - }); - 
}; - - _proto.onResumeAll = function onResumeAll(fileID, cb) { - var _this9 = this; - - this.uploaderEvents[fileID].on('resume-all', function () { - if (!_this9.uppy.getFile(fileID)) return; - cb(); - }); - }; - - _proto.install = function install() { - var _this$uppy$getState = this.uppy.getState(), - capabilities = _this$uppy$getState.capabilities; - - this.uppy.setState({ - capabilities: _extends({}, capabilities, { - resumableUploads: true - }) - }); - this.uppy.addUploader(this.upload); - }; - - _proto.uninstall = function uninstall() { - var _this$uppy$getState2 = this.uppy.getState(), - capabilities = _this$uppy$getState2.capabilities; - - this.uppy.setState({ - capabilities: _extends({}, capabilities, { - resumableUploads: false - }) - }); - this.uppy.removeUploader(this.upload); - }; - - return AwsS3Multipart; -}(Plugin), _class.VERSION = "1.8.18", _temp); -},{"./MultipartUploader":2,"@uppy/companion-client":12,"@uppy/core":15,"@uppy/utils/lib/EventTracker":22,"@uppy/utils/lib/RateLimitedQueue":25,"@uppy/utils/lib/emitSocketProgress":28,"@uppy/utils/lib/getSocketHost":40}],4:[function(require,module,exports){ -function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } - -var cuid = require('cuid'); - -var _require = require('@uppy/companion-client'), - Provider = _require.Provider, - RequestClient = _require.RequestClient, - Socket = _require.Socket; - -var emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress'); - -var getSocketHost = require('@uppy/utils/lib/getSocketHost'); - -var EventTracker = require('@uppy/utils/lib/EventTracker'); - -var ProgressTimeout = require('@uppy/utils/lib/ProgressTimeout'); - -var NetworkError = require('@uppy/utils/lib/NetworkError'); - -var isNetworkError = require('@uppy/utils/lib/isNetworkError'); // See XHRUpload - - -function buildResponseError(xhr, error) { - // No error message - if (!error) error = new Error('Upload error'); // Got an error message string - - if (typeof error === 'string') error = new Error(error); // Got something else - - if (!(error instanceof Error)) { - error = _extends(new Error('Upload error'), { - data: error - }); - } - - if (isNetworkError(xhr)) { - error = new NetworkError(error, xhr); - return error; - } - - error.request = xhr; - return error; -} // See XHRUpload - - -function setTypeInBlob(file) { - var dataWithUpdatedType = file.data.slice(0, file.data.size, file.meta.type); - return dataWithUpdatedType; -} - -module.exports = /*#__PURE__*/function () { - function MiniXHRUpload(uppy, opts) { - this.uppy = uppy; - this.opts = _extends({ - validateStatus: function validateStatus(status, responseText, response) { - return status >= 200 && status < 300; - } - }, opts); - this.requests = opts.__queue; - this.uploaderEvents = Object.create(null); - this.i18n = opts.i18n; - } - - var _proto = MiniXHRUpload.prototype; - - _proto._getOptions = function _getOptions(file) { - var uppy = this.uppy; - var overrides = uppy.getState().xhrUpload; - - var opts = _extends({}, this.opts, overrides || {}, file.xhrUpload || {}, { - headers: {} - }); - - _extends(opts.headers, this.opts.headers); - - if (overrides) { - _extends(opts.headers, overrides.headers); - } - - if (file.xhrUpload) { - _extends(opts.headers, file.xhrUpload.headers); - } - - return opts; - }; 
- - _proto.uploadFile = function uploadFile(id, current, total) { - var file = this.uppy.getFile(id); - - if (file.error) { - throw new Error(file.error); - } else if (file.isRemote) { - return this._uploadRemoteFile(file, current, total); - } - - return this._uploadLocalFile(file, current, total); - }; - - _proto._addMetadata = function _addMetadata(formData, meta, opts) { - var metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default. - : Object.keys(meta); - metaFields.forEach(function (item) { - formData.append(item, meta[item]); - }); - }; - - _proto._createFormDataUpload = function _createFormDataUpload(file, opts) { - var formPost = new FormData(); - - this._addMetadata(formPost, file.meta, opts); - - var dataWithUpdatedType = setTypeInBlob(file); - - if (file.name) { - formPost.append(opts.fieldName, dataWithUpdatedType, file.meta.name); - } else { - formPost.append(opts.fieldName, dataWithUpdatedType); - } - - return formPost; - }; - - _proto._createBareUpload = function _createBareUpload(file, opts) { - return file.data; - }; - - _proto._onFileRemoved = function _onFileRemoved(fileID, cb) { - this.uploaderEvents[fileID].on('file-removed', function (file) { - if (fileID === file.id) cb(file.id); - }); - }; - - _proto._onRetry = function _onRetry(fileID, cb) { - this.uploaderEvents[fileID].on('upload-retry', function (targetFileID) { - if (fileID === targetFileID) { - cb(); - } - }); - }; - - _proto._onRetryAll = function _onRetryAll(fileID, cb) { - var _this = this; - - this.uploaderEvents[fileID].on('retry-all', function (filesToRetry) { - if (!_this.uppy.getFile(fileID)) return; - cb(); - }); - }; - - _proto._onCancelAll = function _onCancelAll(fileID, cb) { - var _this2 = this; - - this.uploaderEvents[fileID].on('cancel-all', function () { - if (!_this2.uppy.getFile(fileID)) return; - cb(); - }); - }; - - _proto._uploadLocalFile = function _uploadLocalFile(file, current, total) { - var _this3 = this; - - var opts = this._getOptions(file); - - this.uppy.log("uploading " + current + " of " + total); - return new Promise(function (resolve, reject) { - // This is done in index.js in the S3 plugin. - // this.uppy.emit('upload-started', file) - var data = opts.formData ? 
_this3._createFormDataUpload(file, opts) : _this3._createBareUpload(file, opts); - var xhr = new XMLHttpRequest(); - _this3.uploaderEvents[file.id] = new EventTracker(_this3.uppy); - var timer = new ProgressTimeout(opts.timeout, function () { - xhr.abort(); - queuedRequest.done(); - var error = new Error(_this3.i18n('timedOut', { - seconds: Math.ceil(opts.timeout / 1000) - })); - - _this3.uppy.emit('upload-error', file, error); - - reject(error); - }); - var id = cuid(); - xhr.upload.addEventListener('loadstart', function (ev) { - _this3.uppy.log("[AwsS3/XHRUpload] " + id + " started"); - }); - xhr.upload.addEventListener('progress', function (ev) { - _this3.uppy.log("[AwsS3/XHRUpload] " + id + " progress: " + ev.loaded + " / " + ev.total); // Begin checking for timeouts when progress starts, instead of loading, - // to avoid timing out requests on browser concurrency queue - - - timer.progress(); - - if (ev.lengthComputable) { - _this3.uppy.emit('upload-progress', file, { - uploader: _this3, - bytesUploaded: ev.loaded, - bytesTotal: ev.total - }); - } - }); - xhr.addEventListener('load', function (ev) { - _this3.uppy.log("[AwsS3/XHRUpload] " + id + " finished"); - - timer.done(); - queuedRequest.done(); - - if (_this3.uploaderEvents[file.id]) { - _this3.uploaderEvents[file.id].remove(); - - _this3.uploaderEvents[file.id] = null; - } - - if (opts.validateStatus(ev.target.status, xhr.responseText, xhr)) { - var _body = opts.getResponseData(xhr.responseText, xhr); - - var uploadURL = _body[opts.responseUrlFieldName]; - var uploadResp = { - status: ev.target.status, - body: _body, - uploadURL: uploadURL - }; - - _this3.uppy.emit('upload-success', file, uploadResp); - - if (uploadURL) { - _this3.uppy.log("Download " + file.name + " from " + uploadURL); - } - - return resolve(file); - } - - var body = opts.getResponseData(xhr.responseText, xhr); - var error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr)); - var response = { - status: ev.target.status, - body: body - }; - - _this3.uppy.emit('upload-error', file, error, response); - - return reject(error); - }); - xhr.addEventListener('error', function (ev) { - _this3.uppy.log("[AwsS3/XHRUpload] " + id + " errored"); - - timer.done(); - queuedRequest.done(); - - if (_this3.uploaderEvents[file.id]) { - _this3.uploaderEvents[file.id].remove(); - - _this3.uploaderEvents[file.id] = null; - } - - var error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr)); - - _this3.uppy.emit('upload-error', file, error); - - return reject(error); - }); - xhr.open(opts.method.toUpperCase(), opts.endpoint, true); // IE10 does not allow setting `withCredentials` and `responseType` - // before `open()` is called. 
- - xhr.withCredentials = opts.withCredentials; - - if (opts.responseType !== '') { - xhr.responseType = opts.responseType; - } - - Object.keys(opts.headers).forEach(function (header) { - xhr.setRequestHeader(header, opts.headers[header]); - }); - - var queuedRequest = _this3.requests.run(function () { - xhr.send(data); - return function () { - timer.done(); - xhr.abort(); - }; - }, { - priority: 1 - }); - - _this3._onFileRemoved(file.id, function () { - queuedRequest.abort(); - reject(new Error('File removed')); - }); - - _this3._onCancelAll(file.id, function () { - queuedRequest.abort(); - reject(new Error('Upload cancelled')); - }); - }); - }; - - _proto._uploadRemoteFile = function _uploadRemoteFile(file, current, total) { - var _this4 = this; - - var opts = this._getOptions(file); - - return new Promise(function (resolve, reject) { - // This is done in index.js in the S3 plugin. - // this.uppy.emit('upload-started', file) - var fields = {}; - var metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default. - : Object.keys(file.meta); - metaFields.forEach(function (name) { - fields[name] = file.meta[name]; - }); - var Client = file.remote.providerOptions.provider ? Provider : RequestClient; - var client = new Client(_this4.uppy, file.remote.providerOptions); - client.post(file.remote.url, _extends({}, file.remote.body, { - endpoint: opts.endpoint, - size: file.data.size, - fieldname: opts.fieldName, - metadata: fields, - httpMethod: opts.method, - useFormData: opts.formData, - headers: opts.headers - })).then(function (res) { - var token = res.token; - var host = getSocketHost(file.remote.companionUrl); - var socket = new Socket({ - target: host + "/api/" + token, - autoOpen: false - }); - _this4.uploaderEvents[file.id] = new EventTracker(_this4.uppy); - - _this4._onFileRemoved(file.id, function () { - socket.send('pause', {}); - queuedRequest.abort(); - resolve("upload " + file.id + " was removed"); - }); - - _this4._onCancelAll(file.id, function () { - socket.send('pause', {}); - queuedRequest.abort(); - resolve("upload " + file.id + " was canceled"); - }); - - _this4._onRetry(file.id, function () { - socket.send('pause', {}); - socket.send('resume', {}); - }); - - _this4._onRetryAll(file.id, function () { - socket.send('pause', {}); - socket.send('resume', {}); - }); - - socket.on('progress', function (progressData) { - return emitSocketProgress(_this4, progressData, file); - }); - socket.on('success', function (data) { - var body = opts.getResponseData(data.response.responseText, data.response); - var uploadURL = body[opts.responseUrlFieldName]; - var uploadResp = { - status: data.response.status, - body: body, - uploadURL: uploadURL - }; - - _this4.uppy.emit('upload-success', file, uploadResp); - - queuedRequest.done(); - - if (_this4.uploaderEvents[file.id]) { - _this4.uploaderEvents[file.id].remove(); - - _this4.uploaderEvents[file.id] = null; - } - - return resolve(); - }); - socket.on('error', function (errData) { - var resp = errData.response; - var error = resp ? 
opts.getResponseError(resp.responseText, resp) : _extends(new Error(errData.error.message), { - cause: errData.error - }); - - _this4.uppy.emit('upload-error', file, error); - - queuedRequest.done(); - - if (_this4.uploaderEvents[file.id]) { - _this4.uploaderEvents[file.id].remove(); - - _this4.uploaderEvents[file.id] = null; - } - - reject(error); - }); - - var queuedRequest = _this4.requests.run(function () { - socket.open(); - - if (file.isPaused) { - socket.send('pause', {}); - } - - return function () { - return socket.close(); - }; - }); - }).catch(function (err) { - _this4.uppy.emit('upload-error', file, err); - - reject(err); - }); - }); - }; - - return MiniXHRUpload; -}(); -},{"@uppy/companion-client":12,"@uppy/utils/lib/EventTracker":22,"@uppy/utils/lib/NetworkError":23,"@uppy/utils/lib/ProgressTimeout":24,"@uppy/utils/lib/emitSocketProgress":28,"@uppy/utils/lib/getSocketHost":40,"@uppy/utils/lib/isNetworkError":44,"cuid":50}],5:[function(require,module,exports){ -var _class, _temp; - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } - -/** - * This plugin is currently a A Big Hack™! The core reason for that is how this plugin - * interacts with Uppy's current pipeline design. The pipeline can handle files in steps, - * including preprocessing, uploading, and postprocessing steps. This plugin initially - * was designed to do its work in a preprocessing step, and let XHRUpload deal with the - * actual file upload as an uploading step. However, Uppy runs steps on all files at once, - * sequentially: first, all files go through a preprocessing step, then, once they are all - * done, they go through the uploading step. - * - * For S3, this causes severely broken behaviour when users upload many files. The - * preprocessing step will request S3 upload URLs that are valid for a short time only, - * but it has to do this for _all_ files, which can take a long time if there are hundreds - * or even thousands of files. By the time the uploader step starts, the first URLs may - * already have expired. If not, the uploading might take such a long time that later URLs - * will expire before some files can be uploaded. - * - * The long-term solution to this problem is to change the upload pipeline so that files - * can be sent to the next step individually. That requires a breaking change, so it is - * planned for some future Uppy version. - * - * In the mean time, this plugin is stuck with a hackier approach: the necessary parts - * of the XHRUpload implementation were copied into this plugin, as the MiniXHRUpload - * class, and this plugin calls into it immediately once it receives an upload URL. 
- * This isn't as nicely modular as we'd like and requires us to maintain two copies of - * the XHRUpload code, but at least it's not horrifically broken :) - */ -// If global `URL` constructor is available, use it -var URL_ = typeof URL === 'function' ? URL : require('url-parse'); - -var _require = require('@uppy/core'), - Plugin = _require.Plugin; - -var Translator = require('@uppy/utils/lib/Translator'); - -var RateLimitedQueue = require('@uppy/utils/lib/RateLimitedQueue'); - -var settle = require('@uppy/utils/lib/settle'); - -var hasProperty = require('@uppy/utils/lib/hasProperty'); - -var _require2 = require('@uppy/companion-client'), - RequestClient = _require2.RequestClient; - -var qsStringify = require('qs-stringify'); - -var MiniXHRUpload = require('./MiniXHRUpload'); - -var isXml = require('./isXml'); - -function resolveUrl(origin, link) { - return origin ? new URL_(link, origin).toString() : new URL_(link).toString(); -} -/** - * Get the contents of a named tag in an XML source string. - * - * @param {string} source - The XML source string. - * @param {string} tagName - The name of the tag. - * @returns {string} The contents of the tag, or the empty string if the tag does not exist. - */ - - -function getXmlValue(source, tagName) { - var start = source.indexOf("<" + tagName + ">"); - var end = source.indexOf("</" + tagName + ">", start); - return start !== -1 && end !== -1 ? source.slice(start + tagName.length + 2, end) : ''; -} - -function assertServerError(res) { - if (res && res.error) { - var error = new Error(res.message); - - _extends(error, res.error); - - throw error; - } - - return res; -} // warning deduplication flag: see `getResponseData()` XHRUpload option definition - - -var warnedSuccessActionStatus = false; -module.exports = (_temp = _class = /*#__PURE__*/function (_Plugin) { - _inheritsLoose(AwsS3, _Plugin); - - function AwsS3(uppy, opts) { - var _this; - - _this = _Plugin.call(this, uppy, opts) || this; - _this.type = 'uploader'; - _this.id = _this.opts.id || 'AwsS3'; - _this.title = 'AWS S3'; - _this.defaultLocale = { - strings: { - timedOut: 'Upload stalled for %{seconds} seconds, aborting.'
- } - }; - var defaultOptions = { - timeout: 30 * 1000, - limit: 0, - metaFields: [], - // have to opt in - getUploadParameters: _this.getUploadParameters.bind(_assertThisInitialized(_this)) - }; - _this.opts = _extends({}, defaultOptions, opts); - - _this.i18nInit(); - - _this.client = new RequestClient(uppy, opts); - _this.handleUpload = _this.handleUpload.bind(_assertThisInitialized(_this)); - _this.requests = new RateLimitedQueue(_this.opts.limit); - return _this; - } - - var _proto = AwsS3.prototype; - - _proto.setOptions = function setOptions(newOpts) { - _Plugin.prototype.setOptions.call(this, newOpts); - - this.i18nInit(); - }; - - _proto.i18nInit = function i18nInit() { - this.translator = new Translator([this.defaultLocale, this.uppy.locale, this.opts.locale]); - this.i18n = this.translator.translate.bind(this.translator); - this.setPluginState(); // so that UI re-renders and we see the updated locale - }; - - _proto.getUploadParameters = function getUploadParameters(file) { - if (!this.opts.companionUrl) { - throw new Error('Expected a `companionUrl` option containing a Companion address.'); - } - - var filename = file.meta.name; - var type = file.meta.type; - var metadata = {}; - this.opts.metaFields.forEach(function (key) { - if (file.meta[key] != null) { - metadata[key] = file.meta[key].toString(); - } - }); - var query = qsStringify({ - filename: filename, - type: type, - metadata: metadata - }); - return this.client.get("s3/params?" + query).then(assertServerError); - }; - - _proto.validateParameters = function validateParameters(file, params) { - var valid = typeof params === 'object' && params && typeof params.url === 'string' && (typeof params.fields === 'object' || params.fields == null); - - if (!valid) { - var err = new TypeError("AwsS3: got incorrect result from 'getUploadParameters()' for file '" + file.name + "', expected an object '{ url, method, fields, headers }' but got '" + JSON.stringify(params) + "' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format."); - console.error(err); - throw err; - } - - var methodIsValid = params.method == null || /^(put|post)$/i.test(params.method); - - if (!methodIsValid) { - var _err = new TypeError("AwsS3: got incorrect method from 'getUploadParameters()' for file '" + file.name + "', expected 'put' or 'post' but got '" + params.method + "' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format."); - - console.error(_err); - throw _err; - } - }; - - _proto.handleUpload = function handleUpload(fileIDs) { - var _this2 = this; - - /** - * keep track of `getUploadParameters()` responses - * so we can cancel the calls individually using just a file ID - * - * @type {object.} - */ - var paramsPromises = Object.create(null); - - function onremove(file) { - var id = file.id; - - if (hasProperty(paramsPromises, id)) { - paramsPromises[id].abort(); - } - } - - this.uppy.on('file-removed', onremove); - fileIDs.forEach(function (id) { - var file = _this2.uppy.getFile(id); - - _this2.uppy.emit('upload-started', file); - }); - var getUploadParameters = this.requests.wrapPromiseFunction(function (file) { - return _this2.opts.getUploadParameters(file); - }); - var numberOfFiles = fileIDs.length; - return settle(fileIDs.map(function (id, index) { - paramsPromises[id] = getUploadParameters(_this2.uppy.getFile(id)); - return paramsPromises[id].then(function (params) { - delete paramsPromises[id]; - - var file = _this2.uppy.getFile(id); - - 
_this2.validateParameters(file, params); - - var _params$method = params.method, - method = _params$method === void 0 ? 'post' : _params$method, - url = params.url, - fields = params.fields, - headers = params.headers; - var xhrOpts = { - method: method, - formData: method.toLowerCase() === 'post', - endpoint: url, - metaFields: fields ? Object.keys(fields) : [] - }; - - if (headers) { - xhrOpts.headers = headers; - } - - _this2.uppy.setFileState(file.id, { - meta: _extends({}, file.meta, fields), - xhrUpload: xhrOpts - }); - - return _this2._uploader.uploadFile(file.id, index, numberOfFiles); - }).catch(function (error) { - delete paramsPromises[id]; - - var file = _this2.uppy.getFile(id); - - _this2.uppy.emit('upload-error', file, error); - }); - })).then(function (settled) { - // cleanup. - _this2.uppy.off('file-removed', onremove); - - return settled; - }); - }; - - _proto.install = function install() { - var uppy = this.uppy; - this.uppy.addUploader(this.handleUpload); // Get the response data from a successful XMLHttpRequest instance. - // `content` is the S3 response as a string. - // `xhr` is the XMLHttpRequest instance. - - function defaultGetResponseData(content, xhr) { - var opts = this; // If no response, we've hopefully done a PUT request to the file - // in the bucket on its full URL. - - if (!isXml(content, xhr)) { - if (opts.method.toUpperCase() === 'POST') { - if (!warnedSuccessActionStatus) { - uppy.log('[AwsS3] No response data found, make sure to set the success_action_status AWS SDK option to 201. See https://uppy.io/docs/aws-s3/#POST-Uploads', 'warning'); - warnedSuccessActionStatus = true; - } // The responseURL won't contain the object key. Give up. - - - return { - location: null - }; - } // responseURL is not available in older browsers. - - - if (!xhr.responseURL) { - return { - location: null - }; - } // Trim the query string because it's going to be a bunch of presign - // parameters for a PUT request—doing a GET request with those will - // always result in an error - - - return { - location: xhr.responseURL.replace(/\?.*$/, '') - }; - } - - return { - // Some S3 alternatives do not reply with an absolute URL. - // Eg DigitalOcean Spaces uses /$bucketName/xyz - location: resolveUrl(xhr.responseURL, getXmlValue(content, 'Location')), - bucket: getXmlValue(content, 'Bucket'), - key: getXmlValue(content, 'Key'), - etag: getXmlValue(content, 'ETag') - }; - } // Get the error data from a failed XMLHttpRequest instance. - // `content` is the S3 response as a string. - // `xhr` is the XMLHttpRequest instance. - - - function defaultGetResponseError(content, xhr) { - // If no response, we don't have a specific error message, use the default. - if (!isXml(content, xhr)) { - return; - } - - var error = getXmlValue(content, 'Message'); - return new Error(error); - } - - var xhrOptions = { - fieldName: 'file', - responseUrlFieldName: 'location', - timeout: this.opts.timeout, - // Share the rate limiting queue with XHRUpload. 
- __queue: this.requests, - responseType: 'text', - getResponseData: this.opts.getResponseData || defaultGetResponseData, - getResponseError: defaultGetResponseError - }; // Only for MiniXHRUpload, remove once we can depend on XHRUpload directly again - - xhrOptions.i18n = this.i18n; // Revert to `this.uppy.use(XHRUpload)` once the big comment block at the top of - // this file is solved - - this._uploader = new MiniXHRUpload(this.uppy, xhrOptions); - }; - - _proto.uninstall = function uninstall() { - this.uppy.removeUploader(this.handleUpload); - }; - - return AwsS3; -}(Plugin), _class.VERSION = "1.7.12", _temp); -},{"./MiniXHRUpload":4,"./isXml":6,"@uppy/companion-client":12,"@uppy/core":15,"@uppy/utils/lib/RateLimitedQueue":25,"@uppy/utils/lib/Translator":26,"@uppy/utils/lib/hasProperty":42,"@uppy/utils/lib/settle":46,"qs-stringify":58,"url-parse":61}],6:[function(require,module,exports){ -/** - * Remove parameters like `charset=utf-8` from the end of a mime type string. - * - * @param {string} mimeType - The mime type string that may have optional parameters. - * @returns {string} The "base" mime type, i.e. only 'category/type'. - */ -function removeMimeParameters(mimeType) { - return mimeType.replace(/;.*$/, ''); -} -/** - * Check if a response contains XML based on the response object and its text content. - * - * @param {string} content - The text body of the response. - * @param {object|XMLHttpRequest} xhr - The XHR object or response object from Companion. - * @returns {bool} Whether the content is (probably) XML. - */ - - -function isXml(content, xhr) { - var rawContentType = xhr.headers ? xhr.headers['content-type'] : xhr.getResponseHeader('Content-Type'); - - if (typeof rawContentType === 'string') { - var contentType = removeMimeParameters(rawContentType).toLowerCase(); - - if (contentType === 'application/xml' || contentType === 'text/xml') { - return true; - } // GCS uses text/html for some reason - // https://github.com/transloadit/uppy/issues/896 - - - if (contentType === 'text/html' && /^<\?xml /.test(content)) { - return true; - } - } - - return false; -} - -module.exports = isXml; -},{}],7:[function(require,module,exports){ -'use strict'; - -function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); } - -function _wrapNativeSuper(Class) { var _cache = typeof Map === "function" ? 
new Map() : undefined; _wrapNativeSuper = function _wrapNativeSuper(Class) { if (Class === null || !_isNativeFunction(Class)) return Class; if (typeof Class !== "function") { throw new TypeError("Super expression must either be null or a function"); } if (typeof _cache !== "undefined") { if (_cache.has(Class)) return _cache.get(Class); _cache.set(Class, Wrapper); } function Wrapper() { return _construct(Class, arguments, _getPrototypeOf(this).constructor); } Wrapper.prototype = Object.create(Class.prototype, { constructor: { value: Wrapper, enumerable: false, writable: true, configurable: true } }); return _setPrototypeOf(Wrapper, Class); }; return _wrapNativeSuper(Class); }
-
-function _construct(Parent, args, Class) { if (_isNativeReflectConstruct()) { _construct = Reflect.construct; } else { _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) _setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); }
-
-function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
-
-function _isNativeFunction(fn) { return Function.toString.call(fn).indexOf("[native code]") !== -1; }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
-
-var AuthError = /*#__PURE__*/function (_Error) {
-  _inheritsLoose(AuthError, _Error);
-
-  function AuthError() {
-    var _this;
-
-    _this = _Error.call(this, 'Authorization required') || this;
-    _this.name = 'AuthError';
-    _this.isAuthError = true;
-    return _this;
-  }
-
-  return AuthError;
-}( /*#__PURE__*/_wrapNativeSuper(Error));
-
-module.exports = AuthError;
-},{}],8:[function(require,module,exports){
-'use strict';
-
-function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
-
-function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-var qsStringify = require('qs-stringify');
-
-var URL = require('url-parse');
-
-var RequestClient = require('./RequestClient');
-
-var tokenStorage = require('./tokenStorage');
-
-var _getName = function _getName(id) {
-  return id.split('-').map(function (s) {
-    return s.charAt(0).toUpperCase() + s.slice(1);
-  }).join(' ');
-};
-
-module.exports = /*#__PURE__*/function (_RequestClient) {
-  _inheritsLoose(Provider, _RequestClient);
-
-  function Provider(uppy, opts) {
-    var _this;
-
-    _this = _RequestClient.call(this, uppy, opts) || this;
-    _this.provider = opts.provider;
-    _this.id = _this.provider;
-    _this.name = _this.opts.name || _getName(_this.id);
-    _this.pluginId = _this.opts.pluginId;
-    _this.tokenKey = "companion-" + _this.pluginId + "-auth-token";
-    _this.companionKeysParams = _this.opts.companionKeysParams;
-    _this.preAuthToken = null;
-    return _this;
-  }
-
-  var _proto = Provider.prototype;
-
-  _proto.headers = function headers() {
-    var _this2 = this;
-
-    return Promise.all([_RequestClient.prototype.headers.call(this), this.getAuthToken()]).then(function (_ref) {
-      var headers = _ref[0],
-          token = _ref[1];
-      var authHeaders = {};
-
-      if (token) {
-        authHeaders['uppy-auth-token'] = token;
-      }
-
-      if (_this2.companionKeysParams) {
-        authHeaders['uppy-credentials-params'] = btoa(JSON.stringify({
-          params: _this2.companionKeysParams
-        }));
-      }
-
-      return _extends({}, headers, authHeaders);
-    });
-  };
-
-  _proto.onReceiveResponse = function onReceiveResponse(response) {
-    response = _RequestClient.prototype.onReceiveResponse.call(this, response);
-    var plugin = this.uppy.getPlugin(this.pluginId);
-    var oldAuthenticated = plugin.getPluginState().authenticated;
-    var authenticated = oldAuthenticated ? response.status !== 401 : response.status < 400;
-    plugin.setPluginState({
-      authenticated: authenticated
-    });
-    return response;
-  } // @todo(i.olarewaju) consider whether or not this method should be exposed
-  ;
-
-  _proto.setAuthToken = function setAuthToken(token) {
-    return this.uppy.getPlugin(this.pluginId).storage.setItem(this.tokenKey, token);
-  };
-
-  _proto.getAuthToken = function getAuthToken() {
-    return this.uppy.getPlugin(this.pluginId).storage.getItem(this.tokenKey);
-  };
-
-  _proto.authUrl = function authUrl(queries) {
-    if (queries === void 0) {
-      queries = {};
-    }
-
-    if (this.preAuthToken) {
-      queries.uppyPreAuthToken = this.preAuthToken;
-    }
-
-    var strigifiedQueries = qsStringify(queries);
-    strigifiedQueries = strigifiedQueries ? "?" + strigifiedQueries : strigifiedQueries;
-    return this.hostname + "/" + this.id + "/connect" + strigifiedQueries;
-  };
-
-  _proto.fileUrl = function fileUrl(id) {
-    return this.hostname + "/" + this.id + "/get/" + id;
-  };
-
-  _proto.fetchPreAuthToken = function fetchPreAuthToken() {
-    var _this3 = this;
-
-    if (!this.companionKeysParams) {
-      return Promise.resolve();
-    }
-
-    return this.post(this.id + "/preauth/", {
-      params: this.companionKeysParams
-    }).then(function (res) {
-      _this3.preAuthToken = res.token;
-    }).catch(function (err) {
-      _this3.uppy.log("[CompanionClient] unable to fetch preAuthToken " + err, 'warning');
-    });
-  };
-
-  _proto.list = function list(directory) {
-    return this.get(this.id + "/list/" + (directory || ''));
-  };
-
-  _proto.logout = function logout() {
-    var _this4 = this;
-
-    return this.get(this.id + "/logout").then(function (response) {
-      return Promise.all([response, _this4.uppy.getPlugin(_this4.pluginId).storage.removeItem(_this4.tokenKey)]);
-    }).then(function (_ref2) {
-      var response = _ref2[0];
-      return response;
-    });
-  };
-
-  Provider.initPlugin = function initPlugin(plugin, opts, defaultOpts) {
-    plugin.type = 'acquirer';
-    plugin.files = [];
-
-    if (defaultOpts) {
-      plugin.opts = _extends({}, defaultOpts, opts);
-    }
-
-    if (opts.serverUrl || opts.serverPattern) {
-      throw new Error('`serverUrl` and `serverPattern` have been renamed to `companionUrl` and `companionAllowedHosts` respectively in the 0.30.5 release. Please consult the docs (for example, https://uppy.io/docs/instagram/ for the Instagram plugin) and use the updated options.');
-    }
-
-    if (opts.companionAllowedHosts) {
-      var pattern = opts.companionAllowedHosts; // validate companionAllowedHosts param
-
-      if (typeof pattern !== 'string' && !Array.isArray(pattern) && !(pattern instanceof RegExp)) {
-        throw new TypeError(plugin.id + ": the option \"companionAllowedHosts\" must be one of string, Array, RegExp");
-      }
-
-      plugin.opts.companionAllowedHosts = pattern;
-    } else {
-      // does not start with https://
-      if (/^(?!https?:\/\/).*$/i.test(opts.companionUrl)) {
-        plugin.opts.companionAllowedHosts = "https://" + opts.companionUrl.replace(/^\/\//, '');
-      } else {
-        plugin.opts.companionAllowedHosts = new URL(opts.companionUrl).origin;
-      }
-    }
-
-    plugin.storage = plugin.opts.storage || tokenStorage;
-  };
-
-  return Provider;
-}(RequestClient);
-},{"./RequestClient":9,"./tokenStorage":13,"qs-stringify":58,"url-parse":61}],9:[function(require,module,exports){
-'use strict';
-
-var _class, _temp;
-
-function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
-
-function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
-
-function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
-
-var AuthError = require('./AuthError');
-
-var fetchWithNetworkError = require('@uppy/utils/lib/fetchWithNetworkError'); // Remove the trailing slash so we can always safely append /xyz.
-
-
-function stripSlash(url) {
-  return url.replace(/\/$/, '');
-}
-
-module.exports = (_temp = _class = /*#__PURE__*/function () {
-  function RequestClient(uppy, opts) {
-    this.uppy = uppy;
-    this.opts = opts;
-    this.onReceiveResponse = this.onReceiveResponse.bind(this);
-    this.allowedHeaders = ['accept', 'content-type', 'uppy-auth-token'];
-    this.preflightDone = false;
-  }
-
-  var _proto = RequestClient.prototype;
-
-  _proto.headers = function headers() {
-    var userHeaders = this.opts.companionHeaders || this.opts.serverHeaders || {};
-    return Promise.resolve(_extends({}, this.defaultHeaders, userHeaders));
-  };
-
-  _proto._getPostResponseFunc = function _getPostResponseFunc(skip) {
-    var _this = this;
-
-    return function (response) {
-      if (!skip) {
-        return _this.onReceiveResponse(response);
-      }
-
-      return response;
-    };
-  };
-
-  _proto.onReceiveResponse = function onReceiveResponse(response) {
-    var state = this.uppy.getState();
-    var companion = state.companion || {};
-    var host = this.opts.companionUrl;
-    var headers = response.headers; // Store the self-identified domain name for the Companion instance we just hit.
-
-    if (headers.has('i-am') && headers.get('i-am') !== companion[host]) {
-      var _extends2;
-
-      this.uppy.setState({
-        companion: _extends({}, companion, (_extends2 = {}, _extends2[host] = headers.get('i-am'), _extends2))
-      });
-    }
-
-    return response;
-  };
-
-  _proto._getUrl = function _getUrl(url) {
-    if (/^(https?:|)\/\//.test(url)) {
-      return url;
-    }
-
-    return this.hostname + "/" + url;
-  };
-
-  _proto._json = function _json(res) {
-    if (res.status === 401) {
-      throw new AuthError();
-    }
-
-    if (res.status < 200 || res.status > 300) {
-      var errMsg = "Failed request with status: " + res.status + ". " + res.statusText;
-      return res.json().then(function (errData) {
-        errMsg = errData.message ? errMsg + " message: " + errData.message : errMsg;
-        errMsg = errData.requestId ? errMsg + " request-Id: " + errData.requestId : errMsg;
-        throw new Error(errMsg);
-      }).catch(function () {
-        throw new Error(errMsg);
-      });
-    }
-
-    return res.json();
-  };
-
-  _proto.preflight = function preflight(path) {
-    var _this2 = this;
-
-    if (this.preflightDone) {
-      return Promise.resolve(this.allowedHeaders.slice());
-    }
-
-    return fetch(this._getUrl(path), {
-      method: 'OPTIONS'
-    }).then(function (response) {
-      if (response.headers.has('access-control-allow-headers')) {
-        _this2.allowedHeaders = response.headers.get('access-control-allow-headers').split(',').map(function (headerName) {
-          return headerName.trim().toLowerCase();
-        });
-      }
-
-      _this2.preflightDone = true;
-      return _this2.allowedHeaders.slice();
-    }).catch(function (err) {
-      _this2.uppy.log("[CompanionClient] unable to make preflight request " + err, 'warning');
-
-      _this2.preflightDone = true;
-      return _this2.allowedHeaders.slice();
-    });
-  };
-
-  _proto.preflightAndHeaders = function preflightAndHeaders(path) {
-    var _this3 = this;
-
-    return Promise.all([this.preflight(path), this.headers()]).then(function (_ref) {
-      var allowedHeaders = _ref[0],
-          headers = _ref[1];
-      // filter to keep only allowed Headers
-      Object.keys(headers).forEach(function (header) {
-        if (allowedHeaders.indexOf(header.toLowerCase()) === -1) {
-          _this3.uppy.log("[CompanionClient] excluding unallowed header " + header);
-
-          delete headers[header];
-        }
-      });
-      return headers;
-    });
-  };
-
-  _proto.get = function get(path, skipPostResponse) {
-    var _this4 = this;
-
-    return this.preflightAndHeaders(path).then(function (headers) {
-      return fetchWithNetworkError(_this4._getUrl(path), {
-        method: 'get',
-        headers: headers,
-        credentials: _this4.opts.companionCookiesRule || 'same-origin'
-      });
-    }).then(this._getPostResponseFunc(skipPostResponse)).then(function (res) {
-      return _this4._json(res);
-    }).catch(function (err) {
-      if (!err.isAuthError) {
-        err.message = "Could not get " + _this4._getUrl(path) + ". " + err.message;
-      }
-
-      return Promise.reject(err);
-    });
-  };
-
-  _proto.post = function post(path, data, skipPostResponse) {
-    var _this5 = this;
-
-    return this.preflightAndHeaders(path).then(function (headers) {
-      return fetchWithNetworkError(_this5._getUrl(path), {
-        method: 'post',
-        headers: headers,
-        credentials: _this5.opts.companionCookiesRule || 'same-origin',
-        body: JSON.stringify(data)
-      });
-    }).then(this._getPostResponseFunc(skipPostResponse)).then(function (res) {
-      return _this5._json(res);
-    }).catch(function (err) {
-      if (!err.isAuthError) {
-        err.message = "Could not post " + _this5._getUrl(path) + ". " + err.message;
-      }
-
-      return Promise.reject(err);
-    });
-  };
-
-  _proto.delete = function _delete(path, data, skipPostResponse) {
-    var _this6 = this;
-
-    return this.preflightAndHeaders(path).then(function (headers) {
-      return fetchWithNetworkError(_this6.hostname + "/" + path, {
-        method: 'delete',
-        headers: headers,
-        credentials: _this6.opts.companionCookiesRule || 'same-origin',
-        body: data ? JSON.stringify(data) : null
-      });
-    }).then(this._getPostResponseFunc(skipPostResponse)).then(function (res) {
-      return _this6._json(res);
-    }).catch(function (err) {
-      if (!err.isAuthError) {
-        err.message = "Could not delete " + _this6._getUrl(path) + ". " + err.message;
-      }
-
-      return Promise.reject(err);
-    });
-  };
-
-  _createClass(RequestClient, [{
-    key: "hostname",
-    get: function get() {
-      var _this$uppy$getState = this.uppy.getState(),
-          companion = _this$uppy$getState.companion;
-
-      var host = this.opts.companionUrl;
-      return stripSlash(companion && companion[host] ? companion[host] : host);
-    }
-  }, {
-    key: "defaultHeaders",
-    get: function get() {
-      return {
-        Accept: 'application/json',
-        'Content-Type': 'application/json',
-        'Uppy-Versions': "@uppy/companion-client=" + RequestClient.VERSION
-      };
-    }
-  }]);
-
-  return RequestClient;
-}(), _class.VERSION = "1.10.2", _temp);
-},{"./AuthError":7,"@uppy/utils/lib/fetchWithNetworkError":29}],10:[function(require,module,exports){
-'use strict';
-
-function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
-
-function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
-
-var RequestClient = require('./RequestClient');
-
-var _getName = function _getName(id) {
-  return id.split('-').map(function (s) {
-    return s.charAt(0).toUpperCase() + s.slice(1);
-  }).join(' ');
-};
-
-module.exports = /*#__PURE__*/function (_RequestClient) {
-  _inheritsLoose(SearchProvider, _RequestClient);
-
-  function SearchProvider(uppy, opts) {
-    var _this;
-
-    _this = _RequestClient.call(this, uppy, opts) || this;
-    _this.provider = opts.provider;
-    _this.id = _this.provider;
-    _this.name = _this.opts.name || _getName(_this.id);
-    _this.pluginId = _this.opts.pluginId;
-    return _this;
-  }
-
-  var _proto = SearchProvider.prototype;
-
-  _proto.fileUrl = function fileUrl(id) {
-    return this.hostname + "/search/" + this.id + "/get/" + id;
-  };
-
-  _proto.search = function search(text, queries) {
-    queries = queries ? "&" + queries : '';
-    return this.get("search/" + this.id + "/list?q=" + encodeURIComponent(text) + queries);
-  };
-
-  return SearchProvider;
-}(RequestClient);
-},{"./RequestClient":9}],11:[function(require,module,exports){
-var ee = require('namespace-emitter');
-
-module.exports = /*#__PURE__*/function () {
-  function UppySocket(opts) {
-    this.opts = opts;
-    this._queued = [];
-    this.isOpen = false;
-    this.emitter = ee();
-    this._handleMessage = this._handleMessage.bind(this);
-    this.close = this.close.bind(this);
-    this.emit = this.emit.bind(this);
-    this.on = this.on.bind(this);
-    this.once = this.once.bind(this);
-    this.send = this.send.bind(this);
-
-    if (!opts || opts.autoOpen !== false) {
-      this.open();
-    }
-  }
-
-  var _proto = UppySocket.prototype;
-
-  _proto.open = function open() {
-    var _this = this;
-
-    this.socket = new WebSocket(this.opts.target);
-
-    this.socket.onopen = function (e) {
-      _this.isOpen = true;
-
-      while (_this._queued.length > 0 && _this.isOpen) {
-        var first = _this._queued[0];
-
-        _this.send(first.action, first.payload);
-
-        _this._queued = _this._queued.slice(1);
-      }
-    };
-
-    this.socket.onclose = function (e) {
-      _this.isOpen = false;
-    };
-
-    this.socket.onmessage = this._handleMessage;
-  };
-
-  _proto.close = function close() {
-    if (this.socket) {
-      this.socket.close();
-    }
-  };
-
-  _proto.send = function send(action, payload) {
-    // attach uuid
-    if (!this.isOpen) {
-      this._queued.push({
-        action: action,
-        payload: payload
-      });
-
-      return;
-    }
-
-    this.socket.send(JSON.stringify({
-      action: action,
-      payload: payload
-    }));
-  };
-
-  _proto.on = function on(action, handler) {
-    this.emitter.on(action, handler);
-  };
-
-  _proto.emit = function emit(action, payload) {
-    this.emitter.emit(action, payload);
-  };
-
-  _proto.once = function once(action, handler) {
-    this.emitter.once(action, handler);
-  };
-
-  _proto._handleMessage = function _handleMessage(e) {
-    try {
-      var message = JSON.parse(e.data);
-      this.emit(message.action, message.payload);
-    } catch (err) {
-      console.log(err);
-    }
-  };
-
-  return UppySocket;
-}();
-},{"namespace-emitter":56}],12:[function(require,module,exports){
-'use strict';
-/**
- * Manages communications with Companion
- */
-
-var RequestClient = require('./RequestClient');
-
-var Provider = require('./Provider');
-
-var SearchProvider = require('./SearchProvider');
-
-var Socket = require('./Socket');
-
-module.exports = {
-  RequestClient: RequestClient,
-  Provider: Provider,
-  SearchProvider: SearchProvider,
-  Socket: Socket
-};
-},{"./Provider":8,"./RequestClient":9,"./SearchProvider":10,"./Socket":11}],13:[function(require,module,exports){
-'use strict';
-/**
- * This module serves as an Async wrapper for LocalStorage
- */
-
-module.exports.setItem = function (key, value) {
-  return new Promise(function (resolve) {
-    localStorage.setItem(key, value);
-    resolve();
-  });
-};
-
-module.exports.getItem = function (key) {
-  return Promise.resolve(localStorage.getItem(key));
-};
-
-module.exports.removeItem = function (key) {
-  return new Promise(function (resolve) {
-    localStorage.removeItem(key);
-    resolve();
-  });
-};
-},{}],14:[function(require,module,exports){
-function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
-
-var preact = require('preact');
-
-var findDOMElement = require('@uppy/utils/lib/findDOMElement');
-/**
- * Defer a frequent call to the microtask queue.
- */
-
-
-function debounce(fn) {
-  var calling = null;
-  var latestArgs = null;
-  return function () {
-    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
-      args[_key] = arguments[_key];
-    }
-
-    latestArgs = args;
-
-    if (!calling) {
-      calling = Promise.resolve().then(function () {
-        calling = null; // At this point `args` may be different from the most
-        // recent state, if multiple calls happened since this task
-        // was queued. So we use the `latestArgs`, which definitely
-        // is the most recent call.
-
-        return fn.apply(void 0, latestArgs);
-      });
-    }
-
-    return calling;
-  };
-}
-/**
- * Boilerplate that all Plugins share - and should not be used
- * directly. It also shows which methods final plugins should implement/override,
- * this deciding on structure.
- *
- * @param {object} main Uppy core object
- * @param {object} object with plugin options
- * @returns {Array|string} files or success/fail message
- */
-
-
-module.exports = /*#__PURE__*/function () {
-  function Plugin(uppy, opts) {
-    this.uppy = uppy;
-    this.opts = opts || {};
-    this.update = this.update.bind(this);
-    this.mount = this.mount.bind(this);
-    this.install = this.install.bind(this);
-    this.uninstall = this.uninstall.bind(this);
-  }
-
-  var _proto = Plugin.prototype;
-
-  _proto.getPluginState = function getPluginState() {
-    var _this$uppy$getState = this.uppy.getState(),
-        plugins = _this$uppy$getState.plugins;
-
-    return plugins[this.id] || {};
-  };
-
-  _proto.setPluginState = function setPluginState(update) {
-    var _extends2;
-
-    var _this$uppy$getState2 = this.uppy.getState(),
-        plugins = _this$uppy$getState2.plugins;
-
-    this.uppy.setState({
-      plugins: _extends({}, plugins, (_extends2 = {}, _extends2[this.id] = _extends({}, plugins[this.id], update), _extends2))
-    });
-  };
-
-  _proto.setOptions = function setOptions(newOpts) {
-    this.opts = _extends({}, this.opts, newOpts);
-    this.setPluginState(); // so that UI re-renders with new options
-  };
-
-  _proto.update = function update(state) {
-    if (typeof this.el === 'undefined') {
-      return;
-    }
-
-    if (this._updateUI) {
-      this._updateUI(state);
-    }
-  } // Called after every state update, after everything's mounted. Debounced.
-  ;
-
-  _proto.afterUpdate = function afterUpdate() {}
-  /**
-   * Called when plugin is mounted, whether in DOM or into another plugin.
-   * Needed because sometimes plugins are mounted separately/after `install`,
-   * so this.el and this.parent might not be available in `install`.
-   * This is the case with @uppy/react plugins, for example.
-   */
-  ;
-
-  _proto.onMount = function onMount() {}
-  /**
-   * Check if supplied `target` is a DOM element or an `object`.
-   * If it’s an object — target is a plugin, and we search `plugins`
-   * for a plugin with same name and return its target.
-   *
-   * @param {string|object} target
-   *
-   */
-  ;
-
-  _proto.mount = function mount(target, plugin) {
-    var _this = this;
-
-    var callerPluginName = plugin.id;
-    var targetElement = findDOMElement(target);
-
-    if (targetElement) {
-      this.isTargetDOMEl = true; // API for plugins that require a synchronous rerender.
-
-      this.rerender = function (state) {
-        // plugin could be removed, but this.rerender is debounced below,
-        // so it could still be called even after uppy.removePlugin or uppy.close
-        // hence the check
-        if (!_this.uppy.getPlugin(_this.id)) return;
-        _this.el = preact.render(_this.render(state), targetElement, _this.el);
-
-        _this.afterUpdate();
-      };
-
-      this._updateUI = debounce(this.rerender);
-      this.uppy.log("Installing " + callerPluginName + " to a DOM element '" + target + "'"); // clear everything inside the target container
-
-      if (this.opts.replaceTargetContent) {
-        targetElement.innerHTML = '';
-      }
-
-      this.el = preact.render(this.render(this.uppy.getState()), targetElement);
-      this.onMount();
-      return this.el;
-    }
-
-    var targetPlugin;
-
-    if (typeof target === 'object' && target instanceof Plugin) {
-      // Targeting a plugin *instance*
-      targetPlugin = target;
-    } else if (typeof target === 'function') {
-      // Targeting a plugin type
-      var Target = target; // Find the target plugin instance.
-
-      this.uppy.iteratePlugins(function (plugin) {
-        if (plugin instanceof Target) {
-          targetPlugin = plugin;
-          return false;
-        }
-      });
-    }
-
-    if (targetPlugin) {
-      this.uppy.log("Installing " + callerPluginName + " to " + targetPlugin.id);
-      this.parent = targetPlugin;
-      this.el = targetPlugin.addTarget(plugin);
-      this.onMount();
-      return this.el;
-    }
-
-    this.uppy.log("Not installing " + callerPluginName);
-    var message = "Invalid target option given to " + callerPluginName + ".";
-
-    if (typeof target === 'function') {
-      message += ' The given target is not a Plugin class. ' + 'Please check that you\'re not specifying a React Component instead of a plugin. ' + 'If you are using @uppy/* packages directly, make sure you have only 1 version of @uppy/core installed: ' + 'run `npm ls @uppy/core` on the command line and verify that all the versions match and are deduped correctly.';
-    } else {
-      message += 'If you meant to target an HTML element, please make sure that the element exists. ' + 'Check that the