Mirror of https://github.com/discourse/discourse.git (synced 2024-11-23 01:47:22 +08:00)
Commit 49c49e8ae0: This takes the uppy chunking algorithm and combines it with the form-style submission used by resumable.js, for parity with the current backup controller, so that local backup uploads work with uppy. Once stable, this will let us replace the resumable-upload component and the resumable.js library in our codebase. The new path is disabled by default, so people using local backups are not affected; the enable_experimental_backup_uploader site setting must be enabled for it to take effect.
7588 lines · 237 KiB · JavaScript
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
// Adapted from https://github.com/Flet/prettier-bytes/
// Changing 1000 bytes to 1024, so we can keep uppercase KB vs kB
// ISC License (c) Dan Flettre https://github.com/Flet/prettier-bytes/blob/master/LICENSE
module.exports = function prettierBytes (num) {
  if (typeof num !== 'number' || isNaN(num)) {
    throw new TypeError('Expected a number, got ' + typeof num)
  }

  var neg = num < 0
  var units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']

  if (neg) {
    num = -num
  }

  if (num < 1) {
    return (neg ? '-' : '') + num + ' B'
  }

  var exponent = Math.min(Math.floor(Math.log(num) / Math.log(1024)), units.length - 1)
  num = Number(num / Math.pow(1024, exponent))
  var unit = units[exponent]

  if (num >= 10 || num % 1 === 0) {
    // Do not show decimals when the number is two-digit, or if the number has no
    // decimal component.
    return (neg ? '-' : '') + num.toFixed(0) + ' ' + unit
  } else {
    return (neg ? '-' : '') + num.toFixed(1) + ' ' + unit
  }
}

},{}],2:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const {
|
||
AbortController,
|
||
createAbortError
|
||
} = require('@uppy/utils/lib/AbortController');
|
||
|
||
const delay = require('@uppy/utils/lib/delay');
|
||
|
||
const MB = 1024 * 1024;
|
||
const defaultOptions = {
|
||
limit: 1,
|
||
retryDelays: [0, 1000, 3000, 5000],
|
||
|
||
getChunkSize(file) {
|
||
return Math.ceil(file.size / 10000);
|
||
},
|
||
|
||
onStart() {},
|
||
|
||
onProgress() {},
|
||
|
||
onPartComplete() {},
|
||
|
||
onSuccess() {},
|
||
|
||
onError(err) {
|
||
throw err;
|
||
}
|
||
|
||
};
|
||
|
||
function ensureInt(value) {
|
||
if (typeof value === 'string') {
|
||
return parseInt(value, 10);
|
||
}
|
||
|
||
if (typeof value === 'number') {
|
||
return value;
|
||
}
|
||
|
||
throw new TypeError('Expected a number');
|
||
}
|
||
|
||
var _aborted = /*#__PURE__*/_classPrivateFieldLooseKey("aborted");
|
||
|
||
var _initChunks = /*#__PURE__*/_classPrivateFieldLooseKey("initChunks");
|
||
|
||
var _createUpload = /*#__PURE__*/_classPrivateFieldLooseKey("createUpload");
|
||
|
||
var _resumeUpload = /*#__PURE__*/_classPrivateFieldLooseKey("resumeUpload");
|
||
|
||
var _uploadParts = /*#__PURE__*/_classPrivateFieldLooseKey("uploadParts");
|
||
|
||
var _retryable = /*#__PURE__*/_classPrivateFieldLooseKey("retryable");
|
||
|
||
var _prepareUploadParts = /*#__PURE__*/_classPrivateFieldLooseKey("prepareUploadParts");
|
||
|
||
var _uploadPartRetryable = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPartRetryable");
|
||
|
||
var _uploadPart = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPart");
|
||
|
||
var _onPartProgress = /*#__PURE__*/_classPrivateFieldLooseKey("onPartProgress");
|
||
|
||
var _onPartComplete = /*#__PURE__*/_classPrivateFieldLooseKey("onPartComplete");
|
||
|
||
var _uploadPartBytes = /*#__PURE__*/_classPrivateFieldLooseKey("uploadPartBytes");
|
||
|
||
var _completeUpload = /*#__PURE__*/_classPrivateFieldLooseKey("completeUpload");
|
||
|
||
var _abortUpload = /*#__PURE__*/_classPrivateFieldLooseKey("abortUpload");
|
||
|
||
var _onError = /*#__PURE__*/_classPrivateFieldLooseKey("onError");
|
||
|
||
class MultipartUploader {
|
||
constructor(file, options) {
|
||
Object.defineProperty(this, _onError, {
|
||
value: _onError2
|
||
});
|
||
Object.defineProperty(this, _abortUpload, {
|
||
value: _abortUpload2
|
||
});
|
||
Object.defineProperty(this, _completeUpload, {
|
||
value: _completeUpload2
|
||
});
|
||
Object.defineProperty(this, _uploadPartBytes, {
|
||
value: _uploadPartBytes2
|
||
});
|
||
Object.defineProperty(this, _onPartComplete, {
|
||
value: _onPartComplete2
|
||
});
|
||
Object.defineProperty(this, _onPartProgress, {
|
||
value: _onPartProgress2
|
||
});
|
||
Object.defineProperty(this, _uploadPart, {
|
||
value: _uploadPart2
|
||
});
|
||
Object.defineProperty(this, _uploadPartRetryable, {
|
||
value: _uploadPartRetryable2
|
||
});
|
||
Object.defineProperty(this, _prepareUploadParts, {
|
||
value: _prepareUploadParts2
|
||
});
|
||
Object.defineProperty(this, _retryable, {
|
||
value: _retryable2
|
||
});
|
||
Object.defineProperty(this, _uploadParts, {
|
||
value: _uploadParts2
|
||
});
|
||
Object.defineProperty(this, _resumeUpload, {
|
||
value: _resumeUpload2
|
||
});
|
||
Object.defineProperty(this, _createUpload, {
|
||
value: _createUpload2
|
||
});
|
||
Object.defineProperty(this, _initChunks, {
|
||
value: _initChunks2
|
||
});
|
||
Object.defineProperty(this, _aborted, {
|
||
value: _aborted2
|
||
});
|
||
this.options = { ...defaultOptions,
|
||
...options
|
||
}; // Use default `getChunkSize` if it was null or something
|
||
|
||
if (!this.options.getChunkSize) {
|
||
this.options.getChunkSize = defaultOptions.getChunkSize;
|
||
}
|
||
|
||
this.file = file;
|
||
this.abortController = new AbortController();
|
||
this.key = this.options.key || null;
|
||
this.uploadId = this.options.uploadId || null;
|
||
this.parts = []; // Do `this.createdPromise.then(OP)` to execute an operation `OP` _only_ if the
|
||
// upload was created already. That also ensures that the sequencing is right
|
||
// (so the `OP` definitely happens if the upload is created).
|
||
//
|
||
// This mostly exists to make `#abortUpload` work well: only sending the abort request if
|
||
// the upload was already created, and if the createMultipartUpload request is still in flight,
|
||
// aborting it immediately after it finishes.
|
||
|
||
this.createdPromise = Promise.reject(); // eslint-disable-line prefer-promise-reject-errors
|
||
|
||
this.isPaused = false;
|
||
this.partsInProgress = 0;
|
||
this.chunks = null;
|
||
this.chunkState = null;
|
||
this.lockedCandidatesForBatch = [];
|
||
|
||
_classPrivateFieldLooseBase(this, _initChunks)[_initChunks]();
|
||
|
||
this.createdPromise.catch(() => {}); // silence uncaught rejection warning
|
||
}
|
||
/**
|
||
* Was this upload aborted?
|
||
*
|
||
* If yes, we may need to throw an AbortError.
|
||
*
|
||
* @returns {boolean}
|
||
*/
|
||
|
||
|
||
start() {
|
||
this.isPaused = false;
|
||
|
||
if (this.uploadId) {
|
||
_classPrivateFieldLooseBase(this, _resumeUpload)[_resumeUpload]();
|
||
} else {
|
||
_classPrivateFieldLooseBase(this, _createUpload)[_createUpload]();
|
||
}
|
||
}
|
||
|
||
pause() {
|
||
this.abortController.abort(); // Swap it out for a new controller, because this instance may be resumed later.
|
||
|
||
this.abortController = new AbortController();
|
||
this.isPaused = true;
|
||
}
|
||
|
||
abort(opts = undefined) {
|
||
if (opts != null && opts.really) _classPrivateFieldLooseBase(this, _abortUpload)[_abortUpload]();else this.pause();
|
||
}
|
||
|
||
}
|
||
|
||
function _aborted2() {
|
||
return this.abortController.signal.aborted;
|
||
}
|
||
|
||
function _initChunks2() {
|
||
const chunks = [];
|
||
const desiredChunkSize = this.options.getChunkSize(this.file); // at least 5MB per request, at most 10k requests
|
||
|
||
const minChunkSize = Math.max(5 * MB, Math.ceil(this.file.size / 10000));
|
||
const chunkSize = Math.max(desiredChunkSize, minChunkSize); // Upload zero-sized files in one zero-sized chunk
|
||
|
||
if (this.file.size === 0) {
|
||
chunks.push(this.file);
|
||
} else {
|
||
for (let i = 0; i < this.file.size; i += chunkSize) {
|
||
const end = Math.min(this.file.size, i + chunkSize);
|
||
chunks.push(this.file.slice(i, end));
|
||
}
|
||
}
|
||
|
||
this.chunks = chunks;
|
||
this.chunkState = chunks.map(() => ({
|
||
uploaded: 0,
|
||
busy: false,
|
||
done: false
|
||
}));
|
||
}
|
||
|
||
function _createUpload2() {
|
||
this.createdPromise = Promise.resolve().then(() => this.options.createMultipartUpload());
|
||
return this.createdPromise.then(result => {
|
||
if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) throw createAbortError();
|
||
const valid = typeof result === 'object' && result && typeof result.uploadId === 'string' && typeof result.key === 'string';
|
||
|
||
if (!valid) {
|
||
throw new TypeError('AwsS3/Multipart: Got incorrect result from `createMultipartUpload()`, expected an object `{ uploadId, key }`.');
|
||
}
|
||
|
||
this.key = result.key;
|
||
this.uploadId = result.uploadId;
|
||
this.options.onStart(result);
|
||
|
||
_classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts]();
|
||
}).catch(err => {
|
||
_classPrivateFieldLooseBase(this, _onError)[_onError](err);
|
||
});
|
||
}
|
||
|
||
async function _resumeUpload2() {
|
||
try {
|
||
const parts = await this.options.listParts({
|
||
uploadId: this.uploadId,
|
||
key: this.key
|
||
});
|
||
if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) throw createAbortError();
|
||
parts.forEach(part => {
|
||
const i = part.PartNumber - 1;
|
||
this.chunkState[i] = {
|
||
uploaded: ensureInt(part.Size),
|
||
etag: part.ETag,
|
||
done: true
|
||
}; // Only add if we did not yet know about this part.
|
||
|
||
if (!this.parts.some(p => p.PartNumber === part.PartNumber)) {
|
||
this.parts.push({
|
||
PartNumber: part.PartNumber,
|
||
ETag: part.ETag
|
||
});
|
||
}
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts]();
|
||
} catch (err) {
|
||
_classPrivateFieldLooseBase(this, _onError)[_onError](err);
|
||
}
|
||
}
|
||
|
||
function _uploadParts2() {
|
||
if (this.isPaused) return; // All parts are uploaded.
|
||
|
||
if (this.chunkState.every(state => state.done)) {
|
||
_classPrivateFieldLooseBase(this, _completeUpload)[_completeUpload]();
|
||
|
||
return;
|
||
} // For a 100MB file, with the default min chunk size of 5MB and a limit of 10:
|
||
//
|
||
// Total 20 parts
|
||
// ---------
|
||
// Need 1 is 10
|
||
// Need 2 is 5
|
||
// Need 3 is 5
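// ("Need N" is the size of the Nth batch of presigned URLs requested via
// prepareUploadParts: a new batch is only requested once the number of free
// upload slots reaches Math.ceil(limit / 2), or the number of remaining
// chunks if that is smaller.)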
|
||
|
||
|
||
const need = this.options.limit - this.partsInProgress;
|
||
const completeChunks = this.chunkState.filter(state => state.done).length;
|
||
const remainingChunks = this.chunks.length - completeChunks;
|
||
let minNeeded = Math.ceil(this.options.limit / 2);
|
||
|
||
if (minNeeded > remainingChunks) {
|
||
minNeeded = remainingChunks;
|
||
}
|
||
|
||
if (need < minNeeded) return;
|
||
const candidates = [];
|
||
|
||
for (let i = 0; i < this.chunkState.length; i++) {
|
||
// eslint-disable-next-line no-continue
|
||
if (this.lockedCandidatesForBatch.includes(i)) continue;
|
||
const state = this.chunkState[i]; // eslint-disable-next-line no-continue
|
||
|
||
if (state.done || state.busy) continue;
|
||
candidates.push(i);
|
||
|
||
if (candidates.length >= need) {
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (candidates.length === 0) return;
|
||
|
||
_classPrivateFieldLooseBase(this, _prepareUploadParts)[_prepareUploadParts](candidates).then(result => {
|
||
candidates.forEach(index => {
|
||
const partNumber = index + 1;
|
||
const prePreparedPart = {
|
||
url: result.presignedUrls[partNumber],
|
||
headers: result.headers
|
||
};
|
||
|
||
_classPrivateFieldLooseBase(this, _uploadPartRetryable)[_uploadPartRetryable](index, prePreparedPart).then(() => {
|
||
_classPrivateFieldLooseBase(this, _uploadParts)[_uploadParts]();
|
||
}, err => {
|
||
_classPrivateFieldLooseBase(this, _onError)[_onError](err);
|
||
});
|
||
});
|
||
});
|
||
}
|
||
|
||
function _retryable2({
|
||
before,
|
||
attempt,
|
||
after
|
||
}) {
|
||
const {
|
||
retryDelays
|
||
} = this.options;
|
||
const {
|
||
signal
|
||
} = this.abortController;
|
||
if (before) before();
|
||
|
||
function shouldRetry(err) {
|
||
if (err.source && typeof err.source.status === 'number') {
|
||
const {
|
||
status
|
||
} = err.source; // 0 probably indicates network failure
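// besides status 0, the check below also treats 409 Conflict, 423 Locked
// and any 5xx server error as retryable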
|
||
|
||
return status === 0 || status === 409 || status === 423 || status >= 500 && status < 600;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
|
||
const doAttempt = retryAttempt => attempt().catch(err => {
|
||
if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) throw createAbortError();
|
||
|
||
if (shouldRetry(err) && retryAttempt < retryDelays.length) {
|
||
return delay(retryDelays[retryAttempt], {
|
||
signal
|
||
}).then(() => doAttempt(retryAttempt + 1));
|
||
}
|
||
|
||
throw err;
|
||
});
|
||
|
||
return doAttempt(0).then(result => {
|
||
if (after) after();
|
||
return result;
|
||
}, err => {
|
||
if (after) after();
|
||
throw err;
|
||
});
|
||
}
|
||
|
||
async function _prepareUploadParts2(candidates) {
|
||
this.lockedCandidatesForBatch.push(...candidates);
|
||
const result = await _classPrivateFieldLooseBase(this, _retryable)[_retryable]({
|
||
attempt: () => this.options.prepareUploadParts({
|
||
key: this.key,
|
||
uploadId: this.uploadId,
|
||
partNumbers: candidates.map(index => index + 1)
|
||
})
|
||
});
|
||
|
||
if (typeof (result == null ? void 0 : result.presignedUrls) !== 'object') {
|
||
throw new TypeError('AwsS3/Multipart: Got incorrect result from `prepareUploadParts()`, expected an object `{ presignedUrls }`.');
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
function _uploadPartRetryable2(index, prePreparedPart) {
|
||
return _classPrivateFieldLooseBase(this, _retryable)[_retryable]({
|
||
before: () => {
|
||
this.partsInProgress += 1;
|
||
},
|
||
attempt: () => _classPrivateFieldLooseBase(this, _uploadPart)[_uploadPart](index, prePreparedPart),
|
||
after: () => {
|
||
this.partsInProgress -= 1;
|
||
}
|
||
});
|
||
}
|
||
|
||
function _uploadPart2(index, prePreparedPart) {
|
||
this.chunkState[index].busy = true;
|
||
const valid = typeof (prePreparedPart == null ? void 0 : prePreparedPart.url) === 'string';
|
||
|
||
if (!valid) {
|
||
throw new TypeError('AwsS3/Multipart: Got incorrect result for `prePreparedPart`, expected an object `{ url }`.');
|
||
}
|
||
|
||
const {
|
||
url,
|
||
headers
|
||
} = prePreparedPart;
|
||
|
||
if (_classPrivateFieldLooseBase(this, _aborted)[_aborted]()) {
|
||
this.chunkState[index].busy = false;
|
||
throw createAbortError();
|
||
}
|
||
|
||
return _classPrivateFieldLooseBase(this, _uploadPartBytes)[_uploadPartBytes](index, url, headers);
|
||
}
|
||
|
||
function _onPartProgress2(index, sent) {
|
||
this.chunkState[index].uploaded = ensureInt(sent);
|
||
const totalUploaded = this.chunkState.reduce((n, c) => n + c.uploaded, 0);
|
||
this.options.onProgress(totalUploaded, this.file.size);
|
||
}
|
||
|
||
function _onPartComplete2(index, etag) {
|
||
this.chunkState[index].etag = etag;
|
||
this.chunkState[index].done = true;
|
||
const part = {
|
||
PartNumber: index + 1,
|
||
ETag: etag
|
||
};
|
||
this.parts.push(part);
|
||
this.options.onPartComplete(part);
|
||
}
|
||
|
||
function _uploadPartBytes2(index, url, headers) {
|
||
const body = this.chunks[index];
|
||
const {
|
||
signal
|
||
} = this.abortController;
|
||
let defer;
|
||
const promise = new Promise((resolve, reject) => {
|
||
defer = {
|
||
resolve,
|
||
reject
|
||
};
|
||
});
|
||
const xhr = new XMLHttpRequest();
|
||
xhr.open('PUT', url, true);
|
||
|
||
if (headers) {
|
||
Object.keys(headers).forEach(key => {
|
||
xhr.setRequestHeader(key, headers[key]);
|
||
});
|
||
}
|
||
|
||
xhr.responseType = 'text';
|
||
|
||
function cleanup() {
|
||
// eslint-disable-next-line no-use-before-define
|
||
signal.removeEventListener('abort', onabort);
|
||
}
|
||
|
||
function onabort() {
|
||
xhr.abort();
|
||
}
|
||
|
||
signal.addEventListener('abort', onabort);
|
||
xhr.upload.addEventListener('progress', ev => {
|
||
if (!ev.lengthComputable) return;
|
||
|
||
_classPrivateFieldLooseBase(this, _onPartProgress)[_onPartProgress](index, ev.loaded, ev.total);
|
||
});
|
||
xhr.addEventListener('abort', () => {
|
||
cleanup();
|
||
this.chunkState[index].busy = false;
|
||
defer.reject(createAbortError());
|
||
});
|
||
xhr.addEventListener('load', ev => {
|
||
cleanup();
|
||
this.chunkState[index].busy = false;
|
||
|
||
if (ev.target.status < 200 || ev.target.status >= 300) {
|
||
const error = new Error('Non 2xx');
|
||
error.source = ev.target;
|
||
defer.reject(error);
|
||
return;
|
||
} // This avoids the net::ERR_OUT_OF_MEMORY in Chromium Browsers.
|
||
|
||
|
||
this.chunks[index] = null;
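// (dropping the reference lets the uploaded chunk's Blob slice be garbage-collected)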
|
||
|
||
_classPrivateFieldLooseBase(this, _onPartProgress)[_onPartProgress](index, body.size, body.size); // NOTE This must be allowed by CORS.
|
||
|
||
|
||
const etag = ev.target.getResponseHeader('ETag');
|
||
|
||
if (etag === null) {
|
||
defer.reject(new Error('AwsS3/Multipart: Could not read the ETag header. This likely means CORS is not configured correctly on the S3 Bucket. See https://uppy.io/docs/aws-s3-multipart#S3-Bucket-Configuration for instructions.'));
|
||
return;
|
||
}
|
||
|
||
_classPrivateFieldLooseBase(this, _onPartComplete)[_onPartComplete](index, etag);
|
||
|
||
defer.resolve();
|
||
});
|
||
xhr.addEventListener('error', ev => {
|
||
cleanup();
|
||
this.chunkState[index].busy = false;
|
||
const error = new Error('Unknown error');
|
||
error.source = ev.target;
|
||
defer.reject(error);
|
||
});
|
||
xhr.send(body);
|
||
return promise;
|
||
}
|
||
|
||
async function _completeUpload2() {
|
||
// Parts may not have completed uploading in sorted order, if limit > 1.
|
||
this.parts.sort((a, b) => a.PartNumber - b.PartNumber);
|
||
|
||
try {
|
||
const result = await this.options.completeMultipartUpload({
|
||
key: this.key,
|
||
uploadId: this.uploadId,
|
||
parts: this.parts
|
||
});
|
||
this.options.onSuccess(result);
|
||
} catch (err) {
|
||
_classPrivateFieldLooseBase(this, _onError)[_onError](err);
|
||
}
|
||
}
|
||
|
||
function _abortUpload2() {
|
||
this.abortController.abort();
|
||
this.createdPromise.then(() => {
|
||
this.options.abortMultipartUpload({
|
||
key: this.key,
|
||
uploadId: this.uploadId
|
||
});
|
||
}, () => {// if the creation failed we do not need to abort
|
||
});
|
||
}
|
||
|
||
function _onError2(err) {
|
||
if (err && err.name === 'AbortError') {
|
||
return;
|
||
}
|
||
|
||
this.options.onError(err);
|
||
}
|
||
|
||
module.exports = MultipartUploader;
|
||
},{"@uppy/utils/lib/AbortController":23,"@uppy/utils/lib/delay":29}],3:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _class, _temp;
|
||
|
||
const BasePlugin = require('@uppy/core/lib/BasePlugin');
|
||
|
||
const {
|
||
Socket,
|
||
Provider,
|
||
RequestClient
|
||
} = require('@uppy/companion-client');
|
||
|
||
const EventTracker = require('@uppy/utils/lib/EventTracker');
|
||
|
||
const emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress');
|
||
|
||
const getSocketHost = require('@uppy/utils/lib/getSocketHost');
|
||
|
||
const {
|
||
RateLimitedQueue
|
||
} = require('@uppy/utils/lib/RateLimitedQueue');
|
||
|
||
const Uploader = require('./MultipartUploader');
|
||
|
||
function assertServerError(res) {
|
||
if (res && res.error) {
|
||
const error = new Error(res.message);
|
||
Object.assign(error, res.error);
|
||
throw error;
|
||
}
|
||
|
||
return res;
|
||
}
|
||
|
||
module.exports = (_temp = _class = class AwsS3Multipart extends BasePlugin {
|
||
constructor(uppy, opts) {
|
||
super(uppy, opts);
|
||
this.type = 'uploader';
|
||
this.id = this.opts.id || 'AwsS3Multipart';
|
||
this.title = 'AWS S3 Multipart';
|
||
this.client = new RequestClient(uppy, opts);
|
||
const defaultOptions = {
|
||
timeout: 30 * 1000,
|
||
limit: 0,
|
||
retryDelays: [0, 1000, 3000, 5000],
|
||
createMultipartUpload: this.createMultipartUpload.bind(this),
|
||
listParts: this.listParts.bind(this),
|
||
prepareUploadParts: this.prepareUploadParts.bind(this),
|
||
abortMultipartUpload: this.abortMultipartUpload.bind(this),
|
||
completeMultipartUpload: this.completeMultipartUpload.bind(this)
|
||
};
|
||
this.opts = { ...defaultOptions,
|
||
...opts
|
||
};
|
||
this.upload = this.upload.bind(this);
|
||
this.requests = new RateLimitedQueue(this.opts.limit);
|
||
this.uploaders = Object.create(null);
|
||
this.uploaderEvents = Object.create(null);
|
||
this.uploaderSockets = Object.create(null);
|
||
}
|
||
/**
|
||
* Clean up all references for a file's upload: the MultipartUploader instance,
|
||
* any events related to the file, and the Companion WebSocket connection.
|
||
*
|
||
* Set `opts.abort` to tell S3 that the multipart upload is cancelled and must be removed.
|
||
* This should be done when the user cancels the upload, not when the upload is completed or errored.
|
||
*/
|
||
|
||
|
||
resetUploaderReferences(fileID, opts = {}) {
|
||
if (this.uploaders[fileID]) {
|
||
this.uploaders[fileID].abort({
|
||
really: opts.abort || false
|
||
});
|
||
this.uploaders[fileID] = null;
|
||
}
|
||
|
||
if (this.uploaderEvents[fileID]) {
|
||
this.uploaderEvents[fileID].remove();
|
||
this.uploaderEvents[fileID] = null;
|
||
}
|
||
|
||
if (this.uploaderSockets[fileID]) {
|
||
this.uploaderSockets[fileID].close();
|
||
this.uploaderSockets[fileID] = null;
|
||
}
|
||
}
|
||
|
||
assertHost(method) {
|
||
if (!this.opts.companionUrl) {
|
||
throw new Error(`Expected a \`companionUrl\` option containing a Companion address, or if you are not using Companion, a custom \`${method}\` implementation.`);
|
||
}
|
||
}
|
||
|
||
createMultipartUpload(file) {
|
||
this.assertHost('createMultipartUpload');
|
||
const metadata = {};
|
||
Object.keys(file.meta).forEach(key => {
|
||
if (file.meta[key] != null) {
|
||
metadata[key] = file.meta[key].toString();
|
||
}
|
||
});
|
||
return this.client.post('s3/multipart', {
|
||
filename: file.name,
|
||
type: file.type,
|
||
metadata
|
||
}).then(assertServerError);
|
||
}
|
||
|
||
listParts(file, {
|
||
key,
|
||
uploadId
|
||
}) {
|
||
this.assertHost('listParts');
|
||
const filename = encodeURIComponent(key);
|
||
return this.client.get(`s3/multipart/${uploadId}?key=${filename}`).then(assertServerError);
|
||
}
|
||
|
||
prepareUploadParts(file, {
|
||
key,
|
||
uploadId,
|
||
partNumbers
|
||
}) {
|
||
this.assertHost('prepareUploadParts');
|
||
const filename = encodeURIComponent(key);
|
||
return this.client.get(`s3/multipart/${uploadId}/batch?key=${filename}&partNumbers=${partNumbers.join(',')}`).then(assertServerError);
|
||
}
|
||
|
||
completeMultipartUpload(file, {
|
||
key,
|
||
uploadId,
|
||
parts
|
||
}) {
|
||
this.assertHost('completeMultipartUpload');
|
||
const filename = encodeURIComponent(key);
|
||
const uploadIdEnc = encodeURIComponent(uploadId);
|
||
return this.client.post(`s3/multipart/${uploadIdEnc}/complete?key=${filename}`, {
|
||
parts
|
||
}).then(assertServerError);
|
||
}
|
||
|
||
abortMultipartUpload(file, {
|
||
key,
|
||
uploadId
|
||
}) {
|
||
this.assertHost('abortMultipartUpload');
|
||
const filename = encodeURIComponent(key);
|
||
const uploadIdEnc = encodeURIComponent(uploadId);
|
||
return this.client.delete(`s3/multipart/${uploadIdEnc}?key=${filename}`).then(assertServerError);
|
||
}
|
||
|
||
uploadFile(file) {
|
||
return new Promise((resolve, reject) => {
|
||
const onStart = data => {
|
||
const cFile = this.uppy.getFile(file.id);
|
||
this.uppy.setFileState(file.id, {
|
||
s3Multipart: { ...cFile.s3Multipart,
|
||
key: data.key,
|
||
uploadId: data.uploadId
|
||
}
|
||
});
|
||
};
|
||
|
||
const onProgress = (bytesUploaded, bytesTotal) => {
|
||
this.uppy.emit('upload-progress', file, {
|
||
uploader: this,
|
||
bytesUploaded,
|
||
bytesTotal
|
||
});
|
||
};
|
||
|
||
const onError = err => {
|
||
this.uppy.log(err);
|
||
this.uppy.emit('upload-error', file, err);
|
||
queuedRequest.done();
|
||
this.resetUploaderReferences(file.id);
|
||
reject(err);
|
||
};
|
||
|
||
const onSuccess = result => {
|
||
const uploadResp = {
|
||
body: { ...result
|
||
},
|
||
uploadURL: result.location
|
||
};
|
||
queuedRequest.done();
|
||
this.resetUploaderReferences(file.id);
|
||
const cFile = this.uppy.getFile(file.id);
|
||
this.uppy.emit('upload-success', cFile || file, uploadResp);
|
||
|
||
if (result.location) {
|
||
this.uppy.log(`Download ${upload.file.name} from ${result.location}`);
|
||
}
|
||
|
||
resolve(upload);
|
||
};
|
||
|
||
const onPartComplete = part => {
|
||
const cFile = this.uppy.getFile(file.id);
|
||
|
||
if (!cFile) {
|
||
return;
|
||
}
|
||
|
||
this.uppy.emit('s3-multipart:part-uploaded', cFile, part);
|
||
};
|
||
|
||
const upload = new Uploader(file.data, {
|
||
// .bind to pass the file object to each handler.
|
||
createMultipartUpload: this.opts.createMultipartUpload.bind(this, file),
|
||
listParts: this.opts.listParts.bind(this, file),
|
||
prepareUploadParts: this.opts.prepareUploadParts.bind(this, file),
|
||
completeMultipartUpload: this.opts.completeMultipartUpload.bind(this, file),
|
||
abortMultipartUpload: this.opts.abortMultipartUpload.bind(this, file),
|
||
getChunkSize: this.opts.getChunkSize ? this.opts.getChunkSize.bind(this) : null,
|
||
onStart,
|
||
onProgress,
|
||
onError,
|
||
onSuccess,
|
||
onPartComplete,
|
||
limit: this.opts.limit || 5,
|
||
retryDelays: this.opts.retryDelays || [],
|
||
...file.s3Multipart
|
||
});
|
||
this.uploaders[file.id] = upload;
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
let queuedRequest = this.requests.run(() => {
|
||
if (!file.isPaused) {
|
||
upload.start();
|
||
} // Don't do anything here, the caller will take care of cancelling the upload itself
|
||
// using resetUploaderReferences(). This is because resetUploaderReferences() has to be
|
||
// called when this request is still in the queue, and has not been started yet, too. At
|
||
// that point this cancellation function is not going to be called.
|
||
|
||
|
||
return () => {};
|
||
});
|
||
this.onFileRemove(file.id, removed => {
|
||
queuedRequest.abort();
|
||
this.resetUploaderReferences(file.id, {
|
||
abort: true
|
||
});
|
||
resolve(`upload ${removed.id} was removed`);
|
||
});
|
||
this.onCancelAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
this.resetUploaderReferences(file.id, {
|
||
abort: true
|
||
});
|
||
resolve(`upload ${file.id} was canceled`);
|
||
});
|
||
this.onFilePause(file.id, isPaused => {
|
||
if (isPaused) {
|
||
// Remove this file from the queue so another file can start in its place.
|
||
queuedRequest.abort();
|
||
upload.pause();
|
||
} else {
|
||
// Resuming an upload should be queued, else you could pause and then
|
||
// resume a queued upload to make it skip the queue.
|
||
queuedRequest.abort();
|
||
queuedRequest = this.requests.run(() => {
|
||
upload.start();
|
||
return () => {};
|
||
});
|
||
}
|
||
});
|
||
this.onPauseAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
upload.pause();
|
||
});
|
||
this.onResumeAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
|
||
if (file.error) {
|
||
upload.abort();
|
||
}
|
||
|
||
queuedRequest = this.requests.run(() => {
|
||
upload.start();
|
||
return () => {};
|
||
});
|
||
}); // Don't double-emit upload-started for Golden Retriever-restored files that were already started
|
||
|
||
if (!file.progress.uploadStarted || !file.isRestored) {
|
||
this.uppy.emit('upload-started', file);
|
||
}
|
||
});
|
||
}
|
||
|
||
uploadRemote(file) {
|
||
this.resetUploaderReferences(file.id); // Don't double-emit upload-started for Golden Retriever-restored files that were already started
|
||
|
||
if (!file.progress.uploadStarted || !file.isRestored) {
|
||
this.uppy.emit('upload-started', file);
|
||
}
|
||
|
||
if (file.serverToken) {
|
||
return this.connectToServerSocket(file);
|
||
}
|
||
|
||
return new Promise((resolve, reject) => {
|
||
const Client = file.remote.providerOptions.provider ? Provider : RequestClient;
|
||
const client = new Client(this.uppy, file.remote.providerOptions);
|
||
client.post(file.remote.url, { ...file.remote.body,
|
||
protocol: 's3-multipart',
|
||
size: file.data.size,
|
||
metadata: file.meta
|
||
}).then(res => {
|
||
this.uppy.setFileState(file.id, {
|
||
serverToken: res.token
|
||
});
|
||
file = this.uppy.getFile(file.id);
|
||
return file;
|
||
}).then(file => {
|
||
return this.connectToServerSocket(file);
|
||
}).then(() => {
|
||
resolve();
|
||
}).catch(err => {
|
||
this.uppy.emit('upload-error', file, err);
|
||
reject(err);
|
||
});
|
||
});
|
||
}
|
||
|
||
connectToServerSocket(file) {
|
||
return new Promise((resolve, reject) => {
|
||
const token = file.serverToken;
|
||
const host = getSocketHost(file.remote.companionUrl);
|
||
const socket = new Socket({
|
||
target: `${host}/api/${token}`,
|
||
autoOpen: false
|
||
});
|
||
this.uploaderSockets[file.id] = socket;
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
this.onFileRemove(file.id, () => {
|
||
queuedRequest.abort();
|
||
socket.send('pause', {});
|
||
this.resetUploaderReferences(file.id, {
|
||
abort: true
|
||
});
|
||
resolve(`upload ${file.id} was removed`);
|
||
});
|
||
this.onFilePause(file.id, isPaused => {
|
||
if (isPaused) {
|
||
// Remove this file from the queue so another file can start in its place.
|
||
queuedRequest.abort();
|
||
socket.send('pause', {});
|
||
} else {
|
||
// Resuming an upload should be queued, else you could pause and then
|
||
// resume a queued upload to make it skip the queue.
|
||
queuedRequest.abort();
|
||
queuedRequest = this.requests.run(() => {
|
||
socket.send('resume', {});
|
||
return () => {};
|
||
});
|
||
}
|
||
});
|
||
this.onPauseAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
socket.send('pause', {});
|
||
});
|
||
this.onCancelAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
socket.send('pause', {});
|
||
this.resetUploaderReferences(file.id);
|
||
resolve(`upload ${file.id} was canceled`);
|
||
});
|
||
this.onResumeAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
|
||
if (file.error) {
|
||
socket.send('pause', {});
|
||
}
|
||
|
||
queuedRequest = this.requests.run(() => {
|
||
socket.send('resume', {});
|
||
});
|
||
});
|
||
this.onRetry(file.id, () => {
|
||
// Only do the retry if the upload is actually in progress;
|
||
// else we could try to send these messages when the upload is still queued.
|
||
// We may need a better check for this since the socket may also be closed
|
||
// for other reasons, like network failures.
|
||
if (socket.isOpen) {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
}
|
||
});
|
||
this.onRetryAll(file.id, () => {
|
||
if (socket.isOpen) {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
}
|
||
});
|
||
socket.on('progress', progressData => emitSocketProgress(this, progressData, file));
|
||
socket.on('error', errData => {
|
||
this.uppy.emit('upload-error', file, new Error(errData.error));
|
||
this.resetUploaderReferences(file.id);
|
||
queuedRequest.done();
|
||
reject(new Error(errData.error));
|
||
});
|
||
socket.on('success', data => {
|
||
const uploadResp = {
|
||
uploadURL: data.url
|
||
};
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
this.resetUploaderReferences(file.id);
|
||
queuedRequest.done();
|
||
resolve();
|
||
});
|
||
let queuedRequest = this.requests.run(() => {
|
||
socket.open();
|
||
|
||
if (file.isPaused) {
|
||
socket.send('pause', {});
|
||
}
|
||
|
||
return () => {};
|
||
});
|
||
});
|
||
}
|
||
|
||
upload(fileIDs) {
|
||
if (fileIDs.length === 0) return Promise.resolve();
|
||
const promises = fileIDs.map(id => {
|
||
const file = this.uppy.getFile(id);
|
||
|
||
if (file.isRemote) {
|
||
return this.uploadRemote(file);
|
||
}
|
||
|
||
return this.uploadFile(file);
|
||
});
|
||
return Promise.all(promises);
|
||
}
|
||
|
||
onFileRemove(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('file-removed', file => {
|
||
if (fileID === file.id) cb(file.id);
|
||
});
|
||
}
|
||
|
||
onFilePause(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('upload-pause', (targetFileID, isPaused) => {
|
||
if (fileID === targetFileID) {
|
||
// const isPaused = this.uppy.pauseResume(fileID)
|
||
cb(isPaused);
|
||
}
|
||
});
|
||
}
|
||
|
||
onRetry(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('upload-retry', targetFileID => {
|
||
if (fileID === targetFileID) {
|
||
cb();
|
||
}
|
||
});
|
||
}
|
||
|
||
onRetryAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('retry-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
onPauseAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('pause-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
onCancelAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('cancel-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
onResumeAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('resume-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
install() {
|
||
const {
|
||
capabilities
|
||
} = this.uppy.getState();
|
||
this.uppy.setState({
|
||
capabilities: { ...capabilities,
|
||
resumableUploads: true
|
||
}
|
||
});
|
||
this.uppy.addUploader(this.upload);
|
||
}
|
||
|
||
uninstall() {
|
||
const {
|
||
capabilities
|
||
} = this.uppy.getState();
|
||
this.uppy.setState({
|
||
capabilities: { ...capabilities,
|
||
resumableUploads: false
|
||
}
|
||
});
|
||
this.uppy.removeUploader(this.upload);
|
||
}
|
||
|
||
}, _class.VERSION = "2.1.1", _temp);
|
||
},{"./MultipartUploader":2,"@uppy/companion-client":12,"@uppy/core/lib/BasePlugin":14,"@uppy/utils/lib/EventTracker":24,"@uppy/utils/lib/RateLimitedQueue":27,"@uppy/utils/lib/emitSocketProgress":30,"@uppy/utils/lib/getSocketHost":41}],4:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _getOptions, _addEventHandlerForFile, _addEventHandlerIfFileStillExists, _uploadLocalFile, _uploadRemoteFile;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const {
|
||
nanoid
|
||
} = require('nanoid');
|
||
|
||
const {
|
||
Provider,
|
||
RequestClient,
|
||
Socket
|
||
} = require('@uppy/companion-client');
|
||
|
||
const emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress');
|
||
|
||
const getSocketHost = require('@uppy/utils/lib/getSocketHost');
|
||
|
||
const EventTracker = require('@uppy/utils/lib/EventTracker');
|
||
|
||
const ProgressTimeout = require('@uppy/utils/lib/ProgressTimeout');
|
||
|
||
const NetworkError = require('@uppy/utils/lib/NetworkError');
|
||
|
||
const isNetworkError = require('@uppy/utils/lib/isNetworkError');
|
||
|
||
const {
|
||
internalRateLimitedQueue
|
||
} = require('@uppy/utils/lib/RateLimitedQueue'); // See XHRUpload
|
||
|
||
|
||
function buildResponseError(xhr, error) {
|
||
if (isNetworkError(xhr)) return new NetworkError(error, xhr); // TODO: when we drop support for browsers that do not support this syntax, use:
|
||
// return new Error('Upload error', { cause: error, request: xhr })
|
||
|
||
const err = new Error('Upload error');
|
||
err.cause = error;
|
||
err.request = xhr;
|
||
return err;
|
||
} // See XHRUpload
|
||
|
||
|
||
function setTypeInBlob(file) {
|
||
const dataWithUpdatedType = file.data.slice(0, file.data.size, file.meta.type);
|
||
return dataWithUpdatedType;
|
||
}
|
||
|
||
function addMetadata(formData, meta, opts) {
|
||
const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default.
|
||
: Object.keys(meta);
|
||
metaFields.forEach(item => {
|
||
formData.append(item, meta[item]);
|
||
});
|
||
}
|
||
|
||
function createFormDataUpload(file, opts) {
|
||
const formPost = new FormData();
|
||
addMetadata(formPost, file.meta, opts);
|
||
const dataWithUpdatedType = setTypeInBlob(file);
|
||
|
||
if (file.name) {
|
||
formPost.append(opts.fieldName, dataWithUpdatedType, file.meta.name);
|
||
} else {
|
||
formPost.append(opts.fieldName, dataWithUpdatedType);
|
||
}
|
||
|
||
return formPost;
|
||
}
|
||
|
||
const createBareUpload = file => file.data;
|
||
|
||
module.exports = (_getOptions = /*#__PURE__*/_classPrivateFieldLooseKey("getOptions"), _addEventHandlerForFile = /*#__PURE__*/_classPrivateFieldLooseKey("addEventHandlerForFile"), _addEventHandlerIfFileStillExists = /*#__PURE__*/_classPrivateFieldLooseKey("addEventHandlerIfFileStillExists"), _uploadLocalFile = /*#__PURE__*/_classPrivateFieldLooseKey("uploadLocalFile"), _uploadRemoteFile = /*#__PURE__*/_classPrivateFieldLooseKey("uploadRemoteFile"), class MiniXHRUpload {
|
||
constructor(_uppy, _opts) {
|
||
Object.defineProperty(this, _uploadRemoteFile, {
|
||
value: _uploadRemoteFile2
|
||
});
|
||
Object.defineProperty(this, _uploadLocalFile, {
|
||
value: _uploadLocalFile2
|
||
});
|
||
Object.defineProperty(this, _addEventHandlerIfFileStillExists, {
|
||
value: _addEventHandlerIfFileStillExists2
|
||
});
|
||
Object.defineProperty(this, _addEventHandlerForFile, {
|
||
value: _addEventHandlerForFile2
|
||
});
|
||
Object.defineProperty(this, _getOptions, {
|
||
value: _getOptions2
|
||
});
|
||
this.uppy = _uppy;
|
||
this.opts = {
|
||
validateStatus(status) {
|
||
return status >= 200 && status < 300;
|
||
},
|
||
|
||
..._opts
|
||
};
|
||
this.requests = _opts[internalRateLimitedQueue];
|
||
this.uploaderEvents = Object.create(null);
|
||
this.i18n = _opts.i18n;
|
||
}
|
||
|
||
uploadFile(id, current, total) {
|
||
const file = this.uppy.getFile(id);
|
||
|
||
if (file.error) {
|
||
throw new Error(file.error);
|
||
} else if (file.isRemote) {
|
||
return _classPrivateFieldLooseBase(this, _uploadRemoteFile)[_uploadRemoteFile](file, current, total);
|
||
}
|
||
|
||
return _classPrivateFieldLooseBase(this, _uploadLocalFile)[_uploadLocalFile](file, current, total);
|
||
}
|
||
|
||
});
|
||
|
||
function _getOptions2(file) {
|
||
var _file$xhrUpload;
|
||
|
||
const {
|
||
uppy
|
||
} = this;
|
||
const overrides = uppy.getState().xhrUpload;
|
||
const opts = { ...this.opts,
|
||
...(overrides || {}),
|
||
...(file.xhrUpload || {}),
|
||
headers: { ...this.opts.headers,
|
||
...(overrides == null ? void 0 : overrides.headers),
|
||
...((_file$xhrUpload = file.xhrUpload) == null ? void 0 : _file$xhrUpload.headers)
|
||
}
|
||
};
|
||
return opts;
|
||
}
|
||
|
||
function _addEventHandlerForFile2(eventName, fileID, eventHandler) {
|
||
this.uploaderEvents[fileID].on(eventName, targetFileID => {
|
||
if (fileID === targetFileID) eventHandler();
|
||
});
|
||
}
|
||
|
||
function _addEventHandlerIfFileStillExists2(eventName, fileID, eventHandler) {
|
||
this.uploaderEvents[fileID].on(eventName, () => {
|
||
if (this.uppy.getFile(fileID)) eventHandler();
|
||
});
|
||
}
|
||
|
||
function _uploadLocalFile2(file, current, total) {
|
||
const opts = _classPrivateFieldLooseBase(this, _getOptions)[_getOptions](file);
|
||
|
||
this.uppy.log(`uploading ${current} of ${total}`);
|
||
return new Promise((resolve, reject) => {
|
||
// This is done in index.js in the S3 plugin.
|
||
// this.uppy.emit('upload-started', file)
|
||
const data = opts.formData ? createFormDataUpload(file, opts) : createBareUpload(file, opts);
|
||
const xhr = new XMLHttpRequest();
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
const timer = new ProgressTimeout(opts.timeout, () => {
|
||
xhr.abort(); // eslint-disable-next-line no-use-before-define
|
||
|
||
queuedRequest.done();
|
||
const error = new Error(this.i18n('timedOut', {
|
||
seconds: Math.ceil(opts.timeout / 1000)
|
||
}));
|
||
this.uppy.emit('upload-error', file, error);
|
||
reject(error);
|
||
});
|
||
const id = nanoid();
|
||
xhr.upload.addEventListener('loadstart', () => {
|
||
this.uppy.log(`[AwsS3/XHRUpload] ${id} started`);
|
||
});
|
||
xhr.upload.addEventListener('progress', ev => {
|
||
this.uppy.log(`[AwsS3/XHRUpload] ${id} progress: ${ev.loaded} / ${ev.total}`); // Begin checking for timeouts when progress starts, instead of loading,
|
||
// to avoid timing out requests on browser concurrency queue
|
||
|
||
timer.progress();
|
||
|
||
if (ev.lengthComputable) {
|
||
this.uppy.emit('upload-progress', file, {
|
||
uploader: this,
|
||
bytesUploaded: ev.loaded,
|
||
bytesTotal: ev.total
|
||
});
|
||
}
|
||
});
|
||
xhr.addEventListener('load', ev => {
|
||
this.uppy.log(`[AwsS3/XHRUpload] ${id} finished`);
|
||
timer.done(); // eslint-disable-next-line no-use-before-define
|
||
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
if (opts.validateStatus(ev.target.status, xhr.responseText, xhr)) {
|
||
const body = opts.getResponseData(xhr.responseText, xhr);
|
||
const uploadURL = body[opts.responseUrlFieldName];
|
||
const uploadResp = {
|
||
status: ev.target.status,
|
||
body,
|
||
uploadURL
|
||
};
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
|
||
if (uploadURL) {
|
||
this.uppy.log(`Download ${file.name} from ${uploadURL}`);
|
||
}
|
||
|
||
return resolve(file);
|
||
}
|
||
|
||
const body = opts.getResponseData(xhr.responseText, xhr);
|
||
const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr));
|
||
const response = {
|
||
status: ev.target.status,
|
||
body
|
||
};
|
||
this.uppy.emit('upload-error', file, error, response);
|
||
return reject(error);
|
||
});
|
||
xhr.addEventListener('error', () => {
|
||
this.uppy.log(`[AwsS3/XHRUpload] ${id} errored`);
|
||
timer.done(); // eslint-disable-next-line no-use-before-define
|
||
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr));
|
||
this.uppy.emit('upload-error', file, error);
|
||
return reject(error);
|
||
});
|
||
xhr.open(opts.method.toUpperCase(), opts.endpoint, true); // IE10 does not allow setting `withCredentials` and `responseType`
|
||
// before `open()` is called. It’s important to set withCredentials
|
||
// to a boolean, otherwise React Native crashes
|
||
|
||
xhr.withCredentials = Boolean(opts.withCredentials);
|
||
|
||
if (opts.responseType !== '') {
|
||
xhr.responseType = opts.responseType;
|
||
}
|
||
|
||
Object.keys(opts.headers).forEach(header => {
|
||
xhr.setRequestHeader(header, opts.headers[header]);
|
||
});
|
||
const queuedRequest = this.requests.run(() => {
|
||
xhr.send(data);
|
||
return () => {
|
||
// eslint-disable-next-line no-use-before-define
|
||
timer.done();
|
||
xhr.abort();
|
||
};
|
||
}, {
|
||
priority: 1
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('file-removed', file.id, () => {
|
||
queuedRequest.abort();
|
||
reject(new Error('File removed'));
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('cancel-all', file.id, () => {
|
||
queuedRequest.abort();
|
||
reject(new Error('Upload cancelled'));
|
||
});
|
||
});
|
||
}
|
||
|
||
function _uploadRemoteFile2(file) {
|
||
const opts = _classPrivateFieldLooseBase(this, _getOptions)[_getOptions](file); // This is done in index.js in the S3 plugin.
|
||
// this.uppy.emit('upload-started', file)
|
||
|
||
|
||
const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default.
|
||
: Object.keys(file.meta);
|
||
const Client = file.remote.providerOptions.provider ? Provider : RequestClient;
|
||
const client = new Client(this.uppy, file.remote.providerOptions);
|
||
return client.post(file.remote.url, { ...file.remote.body,
|
||
endpoint: opts.endpoint,
|
||
size: file.data.size,
|
||
fieldname: opts.fieldName,
|
||
metadata: Object.fromEntries(metaFields.map(name => [name, file.meta[name]])),
|
||
httpMethod: opts.method,
|
||
useFormData: opts.formData,
|
||
headers: opts.headers
|
||
}).then(res => new Promise((resolve, reject) => {
|
||
const {
|
||
token
|
||
} = res;
|
||
const host = getSocketHost(file.remote.companionUrl);
|
||
const socket = new Socket({
|
||
target: `${host}/api/${token}`,
|
||
autoOpen: false
|
||
});
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
const queuedRequest = this.requests.run(() => {
|
||
socket.open();
|
||
|
||
if (file.isPaused) {
|
||
socket.send('pause', {});
|
||
}
|
||
|
||
return () => socket.close();
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('file-removed', file.id, () => {
|
||
socket.send('pause', {});
|
||
queuedRequest.abort();
|
||
resolve(`upload ${file.id} was removed`);
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('cancel-all', file.id, () => {
|
||
socket.send('pause', {});
|
||
queuedRequest.abort();
|
||
resolve(`upload ${file.id} was canceled`);
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerForFile)[_addEventHandlerForFile]('upload-retry', file.id, () => {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _addEventHandlerIfFileStillExists)[_addEventHandlerIfFileStillExists]('retry-all', file.id, () => {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
});
|
||
|
||
socket.on('progress', progressData => emitSocketProgress(this, progressData, file));
|
||
socket.on('success', data => {
|
||
const body = opts.getResponseData(data.response.responseText, data.response);
|
||
const uploadURL = body[opts.responseUrlFieldName];
|
||
const uploadResp = {
|
||
status: data.response.status,
|
||
body,
|
||
uploadURL,
|
||
bytesUploaded: data.bytesUploaded
|
||
};
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
return resolve();
|
||
});
|
||
socket.on('error', errData => {
|
||
const resp = errData.response;
|
||
const error = resp ? opts.getResponseError(resp.responseText, resp) : Object.assign(new Error(errData.error.message), {
|
||
cause: errData.error
|
||
});
|
||
this.uppy.emit('upload-error', file, error);
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
reject(error);
|
||
});
|
||
}).catch(err => {
|
||
this.uppy.emit('upload-error', file, err);
|
||
return Promise.reject(err);
|
||
}));
|
||
}
|
||
},{"@uppy/companion-client":12,"@uppy/utils/lib/EventTracker":24,"@uppy/utils/lib/NetworkError":25,"@uppy/utils/lib/ProgressTimeout":26,"@uppy/utils/lib/RateLimitedQueue":27,"@uppy/utils/lib/emitSocketProgress":30,"@uppy/utils/lib/getSocketHost":41,"@uppy/utils/lib/isNetworkError":45,"nanoid":53}],5:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _class, _client, _requests, _uploader, _handleUpload, _temp;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
/**
|
||
* This plugin is currently a A Big Hack™! The core reason for that is how this plugin
|
||
* interacts with Uppy's current pipeline design. The pipeline can handle files in steps,
|
||
* including preprocessing, uploading, and postprocessing steps. This plugin initially
|
||
* was designed to do its work in a preprocessing step, and let XHRUpload deal with the
|
||
* actual file upload as an uploading step. However, Uppy runs steps on all files at once,
|
||
* sequentially: first, all files go through a preprocessing step, then, once they are all
|
||
* done, they go through the uploading step.
|
||
*
|
||
* For S3, this causes severely broken behaviour when users upload many files. The
|
||
* preprocessing step will request S3 upload URLs that are valid for a short time only,
|
||
* but it has to do this for _all_ files, which can take a long time if there are hundreds
|
||
* or even thousands of files. By the time the uploader step starts, the first URLs may
|
||
* already have expired. If not, the uploading might take such a long time that later URLs
|
||
* will expire before some files can be uploaded.
|
||
*
|
||
* The long-term solution to this problem is to change the upload pipeline so that files
|
||
* can be sent to the next step individually. That requires a breaking change, so it is
|
||
* planned for some future Uppy version.
|
||
*
|
||
* In the mean time, this plugin is stuck with a hackier approach: the necessary parts
|
||
* of the XHRUpload implementation were copied into this plugin, as the MiniXHRUpload
|
||
* class, and this plugin calls into it immediately once it receives an upload URL.
|
||
* This isn't as nicely modular as we'd like and requires us to maintain two copies of
|
||
* the XHRUpload code, but at least it's not horrifically broken :)
|
||
*/
|
||
const BasePlugin = require('@uppy/core/lib/BasePlugin');
|
||
|
||
const {
|
||
RateLimitedQueue,
|
||
internalRateLimitedQueue
|
||
} = require('@uppy/utils/lib/RateLimitedQueue');
|
||
|
||
const settle = require('@uppy/utils/lib/settle');
|
||
|
||
const {
|
||
RequestClient
|
||
} = require('@uppy/companion-client');
|
||
|
||
const MiniXHRUpload = require('./MiniXHRUpload');
|
||
|
||
const isXml = require('./isXml');
|
||
|
||
function resolveUrl(origin, link) {
|
||
return new URL(link, origin || undefined).toString();
|
||
}
|
||
/**
|
||
* Get the contents of a named tag in an XML source string.
|
||
*
|
||
* @param {string} source - The XML source string.
|
||
* @param {string} tagName - The name of the tag.
|
||
* @returns {string} The contents of the tag, or the empty string if the tag does not exist.
|
||
*/
|
||
|
||
|
||
function getXmlValue(source, tagName) {
|
||
const start = source.indexOf(`<${tagName}>`);
|
||
const end = source.indexOf(`</${tagName}>`, start);
|
||
return start !== -1 && end !== -1 ? source.slice(start + tagName.length + 2, end) : '';
|
||
}
|
||
|
||
function assertServerError(res) {
|
||
if (res && res.error) {
|
||
const error = new Error(res.message);
|
||
Object.assign(error, res.error);
|
||
throw error;
|
||
}
|
||
|
||
return res;
|
||
}
|
||
|
||
function validateParameters(file, params) {
|
||
const valid = params != null && typeof params.url === 'string' && (typeof params.fields === 'object' || params.fields == null);
|
||
|
||
if (!valid) {
|
||
const err = new TypeError(`AwsS3: got incorrect result from 'getUploadParameters()' for file '${file.name}', expected an object '{ url, method, fields, headers }' but got '${JSON.stringify(params)}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`);
|
||
throw err;
|
||
}
|
||
|
||
const methodIsValid = params.method == null || /^p(u|os)t$/i.test(params.method);
|
||
|
||
if (!methodIsValid) {
|
||
const err = new TypeError(`AwsS3: got incorrect method from 'getUploadParameters()' for file '${file.name}', expected 'put' or 'post' but got '${params.method}' instead.\nSee https://uppy.io/docs/aws-s3/#getUploadParameters-file for more on the expected format.`);
|
||
throw err;
|
||
}
|
||
} // Get the error data from a failed XMLHttpRequest instance.
|
||
// `content` is the S3 response as a string.
|
||
// `xhr` is the XMLHttpRequest instance.
|
||
|
||
|
||
function defaultGetResponseError(content, xhr) {
|
||
// If no response, we don't have a specific error message, use the default.
|
||
if (!isXml(content, xhr)) {
|
||
return undefined;
|
||
}
|
||
|
||
const error = getXmlValue(content, 'Message');
|
||
return new Error(error);
|
||
} // warning deduplication flag: see `getResponseData()` XHRUpload option definition
|
||
|
||
|
||
let warnedSuccessActionStatus = false;
|
||
module.exports = (_temp = (_client = /*#__PURE__*/_classPrivateFieldLooseKey("client"), _requests = /*#__PURE__*/_classPrivateFieldLooseKey("requests"), _uploader = /*#__PURE__*/_classPrivateFieldLooseKey("uploader"), _handleUpload = /*#__PURE__*/_classPrivateFieldLooseKey("handleUpload"), _class = class AwsS3 extends BasePlugin {
|
||
// eslint-disable-next-line global-require
|
||
constructor(uppy, opts) {
|
||
super(uppy, opts);
|
||
Object.defineProperty(this, _client, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _requests, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _uploader, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _handleUpload, {
|
||
writable: true,
|
||
value: fileIDs => {
|
||
/**
|
||
* keep track of `getUploadParameters()` responses
|
||
* so we can cancel the calls individually using just a file ID
|
||
*
|
||
* @type {object.<string, Promise>}
|
||
*/
|
||
const paramsPromises = Object.create(null);
|
||
|
||
function onremove(file) {
|
||
var _paramsPromises$id;
|
||
|
||
const {
|
||
id
|
||
} = file;
|
||
(_paramsPromises$id = paramsPromises[id]) == null ? void 0 : _paramsPromises$id.abort();
|
||
}
|
||
|
||
this.uppy.on('file-removed', onremove);
|
||
fileIDs.forEach(id => {
|
||
const file = this.uppy.getFile(id);
|
||
this.uppy.emit('upload-started', file);
|
||
});
|
||
|
||
const getUploadParameters = _classPrivateFieldLooseBase(this, _requests)[_requests].wrapPromiseFunction(file => {
|
||
return this.opts.getUploadParameters(file);
|
||
});
|
||
|
||
const numberOfFiles = fileIDs.length;
|
||
return settle(fileIDs.map((id, index) => {
|
||
paramsPromises[id] = getUploadParameters(this.uppy.getFile(id));
|
||
return paramsPromises[id].then(params => {
|
||
delete paramsPromises[id];
|
||
const file = this.uppy.getFile(id);
|
||
validateParameters(file, params);
|
||
const {
|
||
method = 'post',
|
||
url,
|
||
fields,
|
||
headers
|
||
} = params;
|
||
const xhrOpts = {
|
||
method,
|
||
formData: method.toLowerCase() === 'post',
|
||
endpoint: url,
|
||
metaFields: fields ? Object.keys(fields) : []
|
||
};
|
||
|
||
if (headers) {
|
||
xhrOpts.headers = headers;
|
||
}
|
||
|
||
this.uppy.setFileState(file.id, {
|
||
meta: { ...file.meta,
|
||
...fields
|
||
},
|
||
xhrUpload: xhrOpts
|
||
});
|
||
return _classPrivateFieldLooseBase(this, _uploader)[_uploader].uploadFile(file.id, index, numberOfFiles);
|
||
}).catch(error => {
|
||
delete paramsPromises[id];
|
||
const file = this.uppy.getFile(id);
|
||
this.uppy.emit('upload-error', file, error);
|
||
});
|
||
})).finally(() => {
|
||
// cleanup.
|
||
this.uppy.off('file-removed', onremove);
|
||
});
|
||
}
|
||
});
    this.type = 'uploader';
    this.id = this.opts.id || 'AwsS3';
    this.title = 'AWS S3';
    this.defaultLocale = {
      strings: {
        timedOut: 'Upload stalled for %{seconds} seconds, aborting.'
      }
    };
    const defaultOptions = {
      timeout: 30 * 1000,
      limit: 0,
      metaFields: [],
      // have to opt in
      getUploadParameters: this.getUploadParameters.bind(this)
    };
    this.opts = { ...defaultOptions,
      ...opts
    }; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload

    this.i18nInit();
    _classPrivateFieldLooseBase(this, _client)[_client] = new RequestClient(uppy, opts);
    _classPrivateFieldLooseBase(this, _requests)[_requests] = new RateLimitedQueue(this.opts.limit);
  }

  getUploadParameters(file) {
    if (!this.opts.companionUrl) {
      throw new Error('Expected a `companionUrl` option containing a Companion address.');
    }

    const filename = file.meta.name;
    const {
      type
    } = file.meta;
    const metadata = Object.fromEntries(this.opts.metaFields.filter(key => file.meta[key] != null).map(key => [`metadata[${key}]`, file.meta[key].toString()]));
    const query = new URLSearchParams({
      filename,
      type,
      ...metadata
    });
    return _classPrivateFieldLooseBase(this, _client)[_client].get(`s3/params?${query}`).then(assertServerError);
  }

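  /*
   * Usage sketch (illustrative only, not shipped with this bundle): when Companion is
   * not used, the `getUploadParameters` option can be supplied directly. The `/s3/sign`
   * endpoint below is hypothetical; the only requirement imposed by the code above is
   * that the resolved object matches the shape the upload handler destructures:
   * { method = 'post', url, fields, headers }.
   *
   *   uppy.use(AwsS3, {
   *     getUploadParameters (file) {
   *       return fetch('/s3/sign', {
   *         method: 'POST',
   *         headers: { 'content-type': 'application/json' },
   *         body: JSON.stringify({ filename: file.name, contentType: file.type }),
   *       }).then((response) => response.json())
   *     },
   *   })
   */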
install() {
|
||
const {
|
||
uppy
|
||
} = this;
|
||
uppy.addUploader(_classPrivateFieldLooseBase(this, _handleUpload)[_handleUpload]); // Get the response data from a successful XMLHttpRequest instance.
|
||
// `content` is the S3 response as a string.
|
||
// `xhr` is the XMLHttpRequest instance.
|
||
|
||
function defaultGetResponseData(content, xhr) {
|
||
const opts = this; // If no response, we've hopefully done a PUT request to the file
|
||
// in the bucket on its full URL.
|
||
|
||
if (!isXml(content, xhr)) {
|
||
if (opts.method.toUpperCase() === 'POST') {
|
||
if (!warnedSuccessActionStatus) {
|
||
uppy.log('[AwsS3] No response data found, make sure to set the success_action_status AWS SDK option to 201. See https://uppy.io/docs/aws-s3/#POST-Uploads', 'warning');
|
||
warnedSuccessActionStatus = true;
|
||
} // The responseURL won't contain the object key. Give up.
|
||
|
||
|
||
return {
|
||
location: null
|
||
};
|
||
} // responseURL is not available in older browsers.
|
||
|
||
|
||
if (!xhr.responseURL) {
|
||
return {
|
||
location: null
|
||
};
|
||
} // Trim the query string because it's going to be a bunch of presign
|
||
// parameters for a PUT request—doing a GET request with those will
|
||
// always result in an error
|
||
|
||
|
||
return {
|
||
location: xhr.responseURL.replace(/\?.*$/, '')
|
||
};
|
||
}
|
||
|
||
return {
|
||
// Some S3 alternatives do not reply with an absolute URL.
|
||
// Eg DigitalOcean Spaces uses /$bucketName/xyz
|
||
location: resolveUrl(xhr.responseURL, getXmlValue(content, 'Location')),
|
||
bucket: getXmlValue(content, 'Bucket'),
|
||
key: getXmlValue(content, 'Key'),
|
||
etag: getXmlValue(content, 'ETag')
|
||
};
|
||
}
|
||
|
||
const xhrOptions = {
|
||
fieldName: 'file',
|
||
responseUrlFieldName: 'location',
|
||
timeout: this.opts.timeout,
|
||
// Share the rate limiting queue with XHRUpload.
|
||
[internalRateLimitedQueue]: _classPrivateFieldLooseBase(this, _requests)[_requests],
|
||
responseType: 'text',
|
||
getResponseData: this.opts.getResponseData || defaultGetResponseData,
|
||
getResponseError: defaultGetResponseError
|
||
}; // TODO: remove i18n once we can depend on XHRUpload instead of MiniXHRUpload
|
||
|
||
xhrOptions.i18n = this.i18n; // Revert to `uppy.use(XHRUpload)` once the big comment block at the top of
|
||
// this file is solved
|
||
|
||
_classPrivateFieldLooseBase(this, _uploader)[_uploader] = new MiniXHRUpload(uppy, xhrOptions);
|
||
}
|
||
|
||
uninstall() {
|
||
this.uppy.removeUploader(_classPrivateFieldLooseBase(this, _handleUpload)[_handleUpload]);
|
||
}
|
||
|
||
}), _class.VERSION = "2.0.5", _temp);
},{"./MiniXHRUpload":4,"./isXml":6,"@uppy/companion-client":12,"@uppy/core/lib/BasePlugin":14,"@uppy/utils/lib/RateLimitedQueue":27,"@uppy/utils/lib/settle":47}],6:[function(require,module,exports){
"use strict";

/**
 * Remove parameters like `charset=utf-8` from the end of a mime type string.
 *
 * @param {string} mimeType - The mime type string that may have optional parameters.
 * @returns {string} The "base" mime type, i.e. only 'category/type'.
 */
function removeMimeParameters(mimeType) {
  return mimeType.replace(/;.*$/, '');
}
/**
 * Check if a response contains XML based on the response object and its text content.
 *
 * @param {string} content - The text body of the response.
 * @param {object|XMLHttpRequest} xhr - The XHR object or response object from Companion.
 * @returns {bool} Whether the content is (probably) XML.
 */


function isXml(content, xhr) {
  const rawContentType = xhr.headers ? xhr.headers['content-type'] : xhr.getResponseHeader('Content-Type');

  if (typeof rawContentType === 'string') {
    const contentType = removeMimeParameters(rawContentType).toLowerCase();

    if (contentType === 'application/xml' || contentType === 'text/xml') {
      return true;
    } // GCS uses text/html for some reason
    // https://github.com/transloadit/uppy/issues/896


    if (contentType === 'text/html' && /^<\?xml /.test(content)) {
      return true;
    }
  }

  return false;
}

module.exports = isXml;
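/*
 * Usage sketch (illustrative only): `isXml` looks at the Content-Type header and, for
 * the text/html special case, at the start of the body. The header values below are
 * hypothetical inputs, not fixtures from this bundle.
 *
 *   isXml('<?xml version="1.0"?>...', { headers: { 'content-type': 'text/html' } })              // true
 *   isXml('<PostResponse>...</PostResponse>', { headers: { 'content-type': 'application/xml; charset=utf-8' } }) // true
 *   isXml('ok', { headers: { 'content-type': 'text/plain' } })                                   // false
 */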
},{}],7:[function(require,module,exports){
'use strict';

class AuthError extends Error {
  constructor() {
    super('Authorization required');
    this.name = 'AuthError';
    this.isAuthError = true;
  }

}

module.exports = AuthError;
},{}],8:[function(require,module,exports){
|
||
'use strict';
|
||
|
||
const RequestClient = require('./RequestClient');
|
||
|
||
const tokenStorage = require('./tokenStorage');
|
||
|
||
const getName = id => {
|
||
return id.split('-').map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(' ');
|
||
};
|
||
|
||
module.exports = class Provider extends RequestClient {
|
||
constructor(uppy, opts) {
|
||
super(uppy, opts);
|
||
this.provider = opts.provider;
|
||
this.id = this.provider;
|
||
this.name = this.opts.name || getName(this.id);
|
||
this.pluginId = this.opts.pluginId;
|
||
this.tokenKey = `companion-${this.pluginId}-auth-token`;
|
||
this.companionKeysParams = this.opts.companionKeysParams;
|
||
this.preAuthToken = null;
|
||
}
|
||
|
||
headers() {
|
||
return Promise.all([super.headers(), this.getAuthToken()]).then(([headers, token]) => {
|
||
const authHeaders = {};
|
||
|
||
if (token) {
|
||
authHeaders['uppy-auth-token'] = token;
|
||
}
|
||
|
||
if (this.companionKeysParams) {
|
||
authHeaders['uppy-credentials-params'] = btoa(JSON.stringify({
|
||
params: this.companionKeysParams
|
||
}));
|
||
}
|
||
|
||
return { ...headers,
|
||
...authHeaders
|
||
};
|
||
});
|
||
}
|
||
|
||
onReceiveResponse(response) {
|
||
response = super.onReceiveResponse(response);
|
||
const plugin = this.uppy.getPlugin(this.pluginId);
|
||
const oldAuthenticated = plugin.getPluginState().authenticated;
|
||
const authenticated = oldAuthenticated ? response.status !== 401 : response.status < 400;
|
||
plugin.setPluginState({
|
||
authenticated
|
||
});
|
||
return response;
|
||
}
|
||
|
||
setAuthToken(token) {
|
||
return this.uppy.getPlugin(this.pluginId).storage.setItem(this.tokenKey, token);
|
||
}
|
||
|
||
getAuthToken() {
|
||
return this.uppy.getPlugin(this.pluginId).storage.getItem(this.tokenKey);
|
||
}
|
||
|
||
authUrl(queries = {}) {
|
||
if (this.preAuthToken) {
|
||
queries.uppyPreAuthToken = this.preAuthToken;
|
||
}
|
||
|
||
return `${this.hostname}/${this.id}/connect?${new URLSearchParams(queries)}`;
|
||
}
|
||
|
||
fileUrl(id) {
|
||
return `${this.hostname}/${this.id}/get/${id}`;
|
||
}
|
||
|
||
fetchPreAuthToken() {
|
||
if (!this.companionKeysParams) {
|
||
return Promise.resolve();
|
||
}
|
||
|
||
return this.post(`${this.id}/preauth/`, {
|
||
params: this.companionKeysParams
|
||
}).then(res => {
|
||
this.preAuthToken = res.token;
|
||
}).catch(err => {
|
||
this.uppy.log(`[CompanionClient] unable to fetch preAuthToken ${err}`, 'warning');
|
||
});
|
||
}
|
||
|
||
list(directory) {
|
||
return this.get(`${this.id}/list/${directory || ''}`);
|
||
}
|
||
|
||
logout() {
|
||
return this.get(`${this.id}/logout`).then(response => Promise.all([response, this.uppy.getPlugin(this.pluginId).storage.removeItem(this.tokenKey)])).then(([response]) => response);
|
||
}
|
||
|
||
static initPlugin(plugin, opts, defaultOpts) {
|
||
plugin.type = 'acquirer';
|
||
plugin.files = [];
|
||
|
||
if (defaultOpts) {
|
||
plugin.opts = { ...defaultOpts,
|
||
...opts
|
||
};
|
||
}
|
||
|
||
if (opts.serverUrl || opts.serverPattern) {
|
||
throw new Error('`serverUrl` and `serverPattern` have been renamed to `companionUrl` and `companionAllowedHosts` respectively in the 0.30.5 release. Please consult the docs (for example, https://uppy.io/docs/instagram/ for the Instagram plugin) and use the updated options.`');
|
||
}
|
||
|
||
if (opts.companionAllowedHosts) {
|
||
const pattern = opts.companionAllowedHosts; // validate companionAllowedHosts param
|
||
|
||
if (typeof pattern !== 'string' && !Array.isArray(pattern) && !(pattern instanceof RegExp)) {
|
||
throw new TypeError(`${plugin.id}: the option "companionAllowedHosts" must be one of string, Array, RegExp`);
|
||
}
|
||
|
||
plugin.opts.companionAllowedHosts = pattern;
|
||
} else if (/^(?!https?:\/\/).*$/i.test(opts.companionUrl)) {
|
||
// does not start with https://
|
||
plugin.opts.companionAllowedHosts = `https://${opts.companionUrl.replace(/^\/\//, '')}`;
|
||
} else {
|
||
plugin.opts.companionAllowedHosts = new URL(opts.companionUrl).origin;
|
||
}
|
||
|
||
plugin.storage = plugin.opts.storage || tokenStorage;
|
||
}
|
||
|
||
};
|
||
},{"./RequestClient":9,"./tokenStorage":13}],9:[function(require,module,exports){
|
||
'use strict';
|
||
|
||
var _class, _getPostResponseFunc, _getUrl, _errorHandler, _temp;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const fetchWithNetworkError = require('@uppy/utils/lib/fetchWithNetworkError');
|
||
|
||
const AuthError = require('./AuthError'); // Remove the trailing slash so we can always safely append /xyz.
|
||
|
||
|
||
function stripSlash(url) {
|
||
return url.replace(/\/$/, '');
|
||
}
|
||
|
||
async function handleJSONResponse(res) {
|
||
if (res.status === 401) {
|
||
throw new AuthError();
|
||
}
|
||
|
||
const jsonPromise = res.json();
|
||
|
||
if (res.status < 200 || res.status > 300) {
|
||
let errMsg = `Failed request with status: ${res.status}. ${res.statusText}`;
|
||
|
||
try {
|
||
const errData = await jsonPromise;
|
||
errMsg = errData.message ? `${errMsg} message: ${errData.message}` : errMsg;
|
||
errMsg = errData.requestId ? `${errMsg} request-Id: ${errData.requestId}` : errMsg;
|
||
} finally {
|
||
// eslint-disable-next-line no-unsafe-finally
|
||
throw new Error(errMsg);
|
||
}
|
||
}
|
||
|
||
return jsonPromise;
|
||
}
|
||
|
||
module.exports = (_temp = (_getPostResponseFunc = /*#__PURE__*/_classPrivateFieldLooseKey("getPostResponseFunc"), _getUrl = /*#__PURE__*/_classPrivateFieldLooseKey("getUrl"), _errorHandler = /*#__PURE__*/_classPrivateFieldLooseKey("errorHandler"), _class = class RequestClient {
|
||
// eslint-disable-next-line global-require
|
||
constructor(uppy, opts) {
|
||
Object.defineProperty(this, _errorHandler, {
|
||
value: _errorHandler2
|
||
});
|
||
Object.defineProperty(this, _getUrl, {
|
||
value: _getUrl2
|
||
});
|
||
Object.defineProperty(this, _getPostResponseFunc, {
|
||
writable: true,
|
||
value: skip => response => skip ? response : this.onReceiveResponse(response)
|
||
});
|
||
this.uppy = uppy;
|
||
this.opts = opts;
|
||
this.onReceiveResponse = this.onReceiveResponse.bind(this);
|
||
this.allowedHeaders = ['accept', 'content-type', 'uppy-auth-token'];
|
||
this.preflightDone = false;
|
||
}
|
||
|
||
get hostname() {
|
||
const {
|
||
companion
|
||
} = this.uppy.getState();
|
||
const host = this.opts.companionUrl;
|
||
return stripSlash(companion && companion[host] ? companion[host] : host);
|
||
}
|
||
|
||
headers() {
|
||
const userHeaders = this.opts.companionHeaders || {};
|
||
return Promise.resolve({ ...RequestClient.defaultHeaders,
|
||
...userHeaders
|
||
});
|
||
}
|
||
|
||
onReceiveResponse(response) {
|
||
const state = this.uppy.getState();
|
||
const companion = state.companion || {};
|
||
const host = this.opts.companionUrl;
|
||
const {
|
||
headers
|
||
} = response; // Store the self-identified domain name for the Companion instance we just hit.
|
||
|
||
if (headers.has('i-am') && headers.get('i-am') !== companion[host]) {
|
||
this.uppy.setState({
|
||
companion: { ...companion,
|
||
[host]: headers.get('i-am')
|
||
}
|
||
});
|
||
}
|
||
|
||
return response;
|
||
}
|
||
|
||
preflight(path) {
|
||
if (this.preflightDone) {
|
||
return Promise.resolve(this.allowedHeaders.slice());
|
||
}
|
||
|
||
return fetch(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), {
|
||
method: 'OPTIONS'
|
||
}).then(response => {
|
||
if (response.headers.has('access-control-allow-headers')) {
|
||
this.allowedHeaders = response.headers.get('access-control-allow-headers').split(',').map(headerName => headerName.trim().toLowerCase());
|
||
}
|
||
|
||
this.preflightDone = true;
|
||
return this.allowedHeaders.slice();
|
||
}).catch(err => {
|
||
this.uppy.log(`[CompanionClient] unable to make preflight request ${err}`, 'warning');
|
||
this.preflightDone = true;
|
||
return this.allowedHeaders.slice();
|
||
});
|
||
}
|
||
|
||
preflightAndHeaders(path) {
|
||
return Promise.all([this.preflight(path), this.headers()]).then(([allowedHeaders, headers]) => {
|
||
// filter to keep only allowed Headers
|
||
Object.keys(headers).forEach(header => {
|
||
if (!allowedHeaders.includes(header.toLowerCase())) {
|
||
this.uppy.log(`[CompanionClient] excluding disallowed header ${header}`);
|
||
delete headers[header]; // eslint-disable-line no-param-reassign
|
||
}
|
||
});
|
||
return headers;
|
||
});
|
||
}
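  /*
   * Illustrative sketch (not part of the upstream code): the filtering above means a
   * custom Companion header is only sent if the Companion server lists it in its
   * `Access-Control-Allow-Headers` preflight response. The header name `x-tenant-id`
   * is hypothetical.
   *
   *   // OPTIONS <companionUrl>/<path>  ->  Access-Control-Allow-Headers: accept, content-type, uppy-auth-token
   *   // companionHeaders: { 'x-tenant-id': 'acme' }
   *   // result: the request is sent without `x-tenant-id`, and
   *   // "[CompanionClient] excluding disallowed header x-tenant-id" is logged.
   */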
|
||
|
||
get(path, skipPostResponse) {
|
||
const method = 'get';
|
||
return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), {
|
||
method,
|
||
headers,
|
||
credentials: this.opts.companionCookiesRule || 'same-origin'
|
||
})).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path));
|
||
}
|
||
|
||
post(path, data, skipPostResponse) {
|
||
const method = 'post';
|
||
return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path), {
|
||
method,
|
||
headers,
|
||
credentials: this.opts.companionCookiesRule || 'same-origin',
|
||
body: JSON.stringify(data)
|
||
})).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path));
|
||
}
|
||
|
||
delete(path, data, skipPostResponse) {
|
||
const method = 'delete';
|
||
return this.preflightAndHeaders(path).then(headers => fetchWithNetworkError(`${this.hostname}/${path}`, {
|
||
method,
|
||
headers,
|
||
credentials: this.opts.companionCookiesRule || 'same-origin',
|
||
body: data ? JSON.stringify(data) : null
|
||
})).then(_classPrivateFieldLooseBase(this, _getPostResponseFunc)[_getPostResponseFunc](skipPostResponse)).then(handleJSONResponse).catch(_classPrivateFieldLooseBase(this, _errorHandler)[_errorHandler](method, path));
|
||
}
|
||
|
||
}), _class.VERSION = "2.0.3", _class.defaultHeaders = {
|
||
Accept: 'application/json',
|
||
'Content-Type': 'application/json',
|
||
'Uppy-Versions': `@uppy/companion-client=${_class.VERSION}`
|
||
}, _temp);
|
||
|
||
function _getUrl2(url) {
|
||
if (/^(https?:|)\/\//.test(url)) {
|
||
return url;
|
||
}
|
||
|
||
return `${this.hostname}/${url}`;
|
||
}
|
||
|
||
function _errorHandler2(method, path) {
|
||
return err => {
|
||
var _err;
|
||
|
||
if (!((_err = err) != null && _err.isAuthError)) {
|
||
const error = new Error(`Could not ${method} ${_classPrivateFieldLooseBase(this, _getUrl)[_getUrl](path)}`);
|
||
error.cause = err;
|
||
err = error; // eslint-disable-line no-param-reassign
|
||
}
|
||
|
||
return Promise.reject(err);
|
||
};
|
||
}
},{"./AuthError":7,"@uppy/utils/lib/fetchWithNetworkError":31}],10:[function(require,module,exports){
'use strict';

const RequestClient = require('./RequestClient');

const getName = id => {
  return id.split('-').map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(' ');
};

module.exports = class SearchProvider extends RequestClient {
  constructor(uppy, opts) {
    super(uppy, opts);
    this.provider = opts.provider;
    this.id = this.provider;
    this.name = this.opts.name || getName(this.id);
    this.pluginId = this.opts.pluginId;
  }

  fileUrl(id) {
    return `${this.hostname}/search/${this.id}/get/${id}`;
  }

  search(text, queries) {
    queries = queries ? `&${queries}` : '';
    return this.get(`search/${this.id}/list?q=${encodeURIComponent(text)}${queries}`);
  }

};
},{"./RequestClient":9}],11:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _queued, _emitter, _isOpen, _socket, _handleMessage;
|
||
|
||
let _Symbol$for, _Symbol$for2;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const ee = require('namespace-emitter');
|
||
|
||
module.exports = (_queued = /*#__PURE__*/_classPrivateFieldLooseKey("queued"), _emitter = /*#__PURE__*/_classPrivateFieldLooseKey("emitter"), _isOpen = /*#__PURE__*/_classPrivateFieldLooseKey("isOpen"), _socket = /*#__PURE__*/_classPrivateFieldLooseKey("socket"), _handleMessage = /*#__PURE__*/_classPrivateFieldLooseKey("handleMessage"), _Symbol$for = Symbol.for('uppy test: getSocket'), _Symbol$for2 = Symbol.for('uppy test: getQueued'), class UppySocket {
|
||
constructor(opts) {
|
||
Object.defineProperty(this, _queued, {
|
||
writable: true,
|
||
value: []
|
||
});
|
||
Object.defineProperty(this, _emitter, {
|
||
writable: true,
|
||
value: ee()
|
||
});
|
||
Object.defineProperty(this, _isOpen, {
|
||
writable: true,
|
||
value: false
|
||
});
|
||
Object.defineProperty(this, _socket, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _handleMessage, {
|
||
writable: true,
|
||
value: e => {
|
||
try {
|
||
const message = JSON.parse(e.data);
|
||
this.emit(message.action, message.payload);
|
||
} catch (err) {
|
||
// TODO: use a more robust error handler.
|
||
console.log(err); // eslint-disable-line no-console
|
||
}
|
||
}
|
||
});
|
||
this.opts = opts;
|
||
|
||
if (!opts || opts.autoOpen !== false) {
|
||
this.open();
|
||
}
|
||
}
|
||
|
||
get isOpen() {
|
||
return _classPrivateFieldLooseBase(this, _isOpen)[_isOpen];
|
||
}
|
||
|
||
[_Symbol$for]() {
|
||
return _classPrivateFieldLooseBase(this, _socket)[_socket];
|
||
}
|
||
|
||
[_Symbol$for2]() {
|
||
return _classPrivateFieldLooseBase(this, _queued)[_queued];
|
||
}
|
||
|
||
open() {
|
||
_classPrivateFieldLooseBase(this, _socket)[_socket] = new WebSocket(this.opts.target);
|
||
|
||
_classPrivateFieldLooseBase(this, _socket)[_socket].onopen = () => {
|
||
_classPrivateFieldLooseBase(this, _isOpen)[_isOpen] = true;
|
||
|
||
while (_classPrivateFieldLooseBase(this, _queued)[_queued].length > 0 && _classPrivateFieldLooseBase(this, _isOpen)[_isOpen]) {
|
||
const first = _classPrivateFieldLooseBase(this, _queued)[_queued].shift();
|
||
|
||
this.send(first.action, first.payload);
|
||
}
|
||
};
|
||
|
||
_classPrivateFieldLooseBase(this, _socket)[_socket].onclose = () => {
|
||
_classPrivateFieldLooseBase(this, _isOpen)[_isOpen] = false;
|
||
};
|
||
|
||
_classPrivateFieldLooseBase(this, _socket)[_socket].onmessage = _classPrivateFieldLooseBase(this, _handleMessage)[_handleMessage];
|
||
}
|
||
|
||
close() {
|
||
var _classPrivateFieldLoo;
|
||
|
||
(_classPrivateFieldLoo = _classPrivateFieldLooseBase(this, _socket)[_socket]) == null ? void 0 : _classPrivateFieldLoo.close();
|
||
}
|
||
|
||
send(action, payload) {
|
||
// attach uuid
|
||
if (!_classPrivateFieldLooseBase(this, _isOpen)[_isOpen]) {
|
||
_classPrivateFieldLooseBase(this, _queued)[_queued].push({
|
||
action,
|
||
payload
|
||
});
|
||
|
||
return;
|
||
}
|
||
|
||
_classPrivateFieldLooseBase(this, _socket)[_socket].send(JSON.stringify({
|
||
action,
|
||
payload
|
||
}));
|
||
}
|
||
|
||
on(action, handler) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].on(action, handler);
|
||
}
|
||
|
||
emit(action, payload) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].emit(action, payload);
|
||
}
|
||
|
||
once(action, handler) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].once(action, handler);
|
||
}
|
||
|
||
});
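/*
 * Usage sketch (illustrative only): `send()` calls made before the WebSocket is open
 * are pushed onto the private queue and flushed by the `onopen` handler above. The
 * target URL and the 'pause'/'progress' action names are hypothetical placeholders;
 * the class is the one re-exported as `Socket` by the companion-client index module.
 *
 *   const socket = new Socket({ target: 'wss://companion.example.com/api/<token>' })
 *   socket.on('progress', (payload) => console.log(payload))
 *   socket.send('pause', {}) // queued while connecting, sent as soon as `onopen` fires
 */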
|
||
},{"namespace-emitter":52}],12:[function(require,module,exports){
'use strict';
/**
 * Manages communications with Companion
 */

const RequestClient = require('./RequestClient');

const Provider = require('./Provider');

const SearchProvider = require('./SearchProvider');

const Socket = require('./Socket');

module.exports = {
  RequestClient,
  Provider,
  SearchProvider,
  Socket
};
},{"./Provider":8,"./RequestClient":9,"./SearchProvider":10,"./Socket":11}],13:[function(require,module,exports){
'use strict';
/**
 * This module serves as an Async wrapper for LocalStorage
 */

module.exports.setItem = (key, value) => {
  return new Promise(resolve => {
    localStorage.setItem(key, value);
    resolve();
  });
};

module.exports.getItem = key => {
  return Promise.resolve(localStorage.getItem(key));
};

module.exports.removeItem = key => {
  return new Promise(resolve => {
    localStorage.removeItem(key);
    resolve();
  });
};
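/*
 * Usage sketch (illustrative only): the wrapper above merely promisifies synchronous
 * localStorage access so callers such as Provider can `await` token reads and writes.
 * Provider builds keys of the form `companion-<pluginId>-auth-token`; the plugin id
 * below is hypothetical.
 *
 *   await module.exports.setItem('companion-GoogleDrive-auth-token', token)
 *   const saved = await module.exports.getItem('companion-GoogleDrive-auth-token')
 *   await module.exports.removeItem('companion-GoogleDrive-auth-token')
 */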
},{}],14:[function(require,module,exports){
"use strict";

/**
 * Core plugin logic that all plugins share.
 *
 * BasePlugin does not contain DOM rendering so it can be used for plugins
 * without a user interface.
 *
 * See `Plugin` for the extended version with Preact rendering for interfaces.
 */
const Translator = require('@uppy/utils/lib/Translator');

module.exports = class BasePlugin {
  constructor(uppy, opts = {}) {
    this.uppy = uppy;
    this.opts = opts;
  }

  getPluginState() {
    const {
      plugins
    } = this.uppy.getState();
    return plugins[this.id] || {};
  }

  setPluginState(update) {
    const {
      plugins
    } = this.uppy.getState();
    this.uppy.setState({
      plugins: { ...plugins,
        [this.id]: { ...plugins[this.id],
          ...update
        }
      }
    });
  }

  setOptions(newOpts) {
    this.opts = { ...this.opts,
      ...newOpts
    };
    this.setPluginState(); // so that UI re-renders with new options

    this.i18nInit();
  }

  i18nInit() {
    const translator = new Translator([this.defaultLocale, this.uppy.locale, this.opts.locale]);
    this.i18n = translator.translate.bind(translator);
    this.i18nArray = translator.translateArray.bind(translator);
    this.setPluginState(); // so that UI re-renders and we see the updated locale
  }
  /**
   * Extendable methods
   * ==================
   * These methods are here to serve as an overview of the extendable methods as well as
   * making them not conditional in use, such as `if (this.afterUpdate)`.
   */
  // eslint-disable-next-line class-methods-use-this


  addTarget() {
    throw new Error('Extend the addTarget method to add your plugin to another plugin\'s target');
  } // eslint-disable-next-line class-methods-use-this


  install() {} // eslint-disable-next-line class-methods-use-this


  uninstall() {}
  /**
   * Called when plugin is mounted, whether in DOM or into another plugin.
   * Needed because sometimes plugins are mounted separately/after `install`,
   * so this.el and this.parent might not be available in `install`.
   * This is the case with @uppy/react plugins, for example.
   */


  render() {
    throw new Error('Extend the render method to add your plugin to a DOM element');
  } // eslint-disable-next-line class-methods-use-this


  update() {} // Called after every state update, after everything's mounted. Debounced.
  // eslint-disable-next-line class-methods-use-this


  afterUpdate() {}

};
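/*
 * Usage sketch (illustrative only): a minimal headless plugin built on BasePlugin.
 * The `upload-success` event and `uppy.use()` call follow the public Uppy API; the
 * plugin name and its logging behaviour are made up for illustration.
 *
 *   class LogUploads extends BasePlugin {
 *     constructor (uppy, opts) {
 *       super(uppy, opts)
 *       this.id = (opts && opts.id) || 'LogUploads'
 *       this.type = 'debugger'
 *       this.handleSuccess = (file) => this.uppy.log(`uploaded ${file.name}`)
 *     }
 *
 *     install () { this.uppy.on('upload-success', this.handleSuccess) }
 *
 *     uninstall () { this.uppy.off('upload-success', this.handleSuccess) }
 *   }
 *
 *   uppy.use(LogUploads)
 */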
},{"@uppy/utils/lib/Translator":28}],15:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const {
|
||
render
|
||
} = require('preact');
|
||
|
||
const findDOMElement = require('@uppy/utils/lib/findDOMElement');
|
||
|
||
const BasePlugin = require('./BasePlugin');
|
||
/**
|
||
* Defer a frequent call to the microtask queue.
|
||
*
|
||
* @param {() => T} fn
|
||
* @returns {Promise<T>}
|
||
*/
|
||
|
||
|
||
function debounce(fn) {
|
||
let calling = null;
|
||
let latestArgs = null;
|
||
return (...args) => {
|
||
latestArgs = args;
|
||
|
||
if (!calling) {
|
||
calling = Promise.resolve().then(() => {
|
||
calling = null; // At this point `args` may be different from the most
|
||
// recent state, if multiple calls happened since this task
|
||
// was queued. So we use the `latestArgs`, which definitely
|
||
// is the most recent call.
|
||
|
||
return fn(...latestArgs);
|
||
});
|
||
}
|
||
|
||
return calling;
|
||
};
|
||
}
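/*
 * Usage sketch (illustrative only): because the wrapper above defers to the microtask
 * queue, several synchronous calls collapse into a single invocation with the latest
 * arguments. The values are arbitrary.
 *
 *   const update = debounce((state) => console.log('render', state))
 *   update(1)
 *   update(2)
 *   update(3)
 *   // logs "render 3" once, after the current task completes
 */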
|
||
/**
|
||
* UIPlugin is the extended version of BasePlugin to incorporate rendering with Preact.
|
||
* Use this for plugins that need a user interface.
|
||
*
|
||
* For plugins without an user interface, see BasePlugin.
|
||
*/
|
||
|
||
|
||
var _updateUI = /*#__PURE__*/_classPrivateFieldLooseKey("updateUI");
|
||
|
||
class UIPlugin extends BasePlugin {
|
||
constructor(...args) {
|
||
super(...args);
|
||
Object.defineProperty(this, _updateUI, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
}
|
||
|
||
/**
|
||
* Check if supplied `target` is a DOM element or an `object`.
|
||
* If it’s an object — target is a plugin, and we search `plugins`
|
||
* for a plugin with same name and return its target.
|
||
*/
|
||
mount(target, plugin) {
|
||
const callerPluginName = plugin.id;
|
||
const targetElement = findDOMElement(target);
|
||
|
||
if (targetElement) {
|
||
this.isTargetDOMEl = true; // When target is <body> with a single <div> element,
|
||
// Preact thinks it’s the Uppy root element in there when doing a diff,
|
||
// and destroys it. So we are creating a fragment (could be empty div)
|
||
|
||
const uppyRootElement = document.createDocumentFragment(); // API for plugins that require a synchronous rerender.
|
||
|
||
_classPrivateFieldLooseBase(this, _updateUI)[_updateUI] = debounce(state => {
|
||
// plugin could be removed, but this.rerender is debounced below,
|
||
// so it could still be called even after uppy.removePlugin or uppy.close
|
||
// hence the check
|
||
if (!this.uppy.getPlugin(this.id)) return;
|
||
render(this.render(state), uppyRootElement);
|
||
this.afterUpdate();
|
||
});
|
||
this.uppy.log(`Installing ${callerPluginName} to a DOM element '${target}'`);
|
||
|
||
if (this.opts.replaceTargetContent) {
|
||
// Doing render(h(null), targetElement), which should have been
|
||
// a better way, since because the component might need to do additional cleanup when it is removed,
|
||
// stopped working — Preact just adds null into target, not replacing
|
||
targetElement.innerHTML = '';
|
||
}
|
||
|
||
render(this.render(this.uppy.getState()), uppyRootElement);
|
||
this.el = uppyRootElement.firstElementChild;
|
||
targetElement.appendChild(uppyRootElement);
|
||
this.onMount();
|
||
return this.el;
|
||
}
|
||
|
||
let targetPlugin;
|
||
|
||
if (typeof target === 'object' && target instanceof UIPlugin) {
|
||
// Targeting a plugin *instance*
|
||
targetPlugin = target;
|
||
} else if (typeof target === 'function') {
|
||
// Targeting a plugin type
|
||
const Target = target; // Find the target plugin instance.
|
||
|
||
this.uppy.iteratePlugins(p => {
|
||
if (p instanceof Target) {
|
||
targetPlugin = p;
|
||
return false;
|
||
}
|
||
});
|
||
}
|
||
|
||
if (targetPlugin) {
|
||
this.uppy.log(`Installing ${callerPluginName} to ${targetPlugin.id}`);
|
||
this.parent = targetPlugin;
|
||
this.el = targetPlugin.addTarget(plugin);
|
||
this.onMount();
|
||
return this.el;
|
||
}
|
||
|
||
this.uppy.log(`Not installing ${callerPluginName}`);
|
||
let message = `Invalid target option given to ${callerPluginName}.`;
|
||
|
||
if (typeof target === 'function') {
|
||
message += ' The given target is not a Plugin class. ' + 'Please check that you\'re not specifying a React Component instead of a plugin. ' + 'If you are using @uppy/* packages directly, make sure you have only 1 version of @uppy/core installed: ' + 'run `npm ls @uppy/core` on the command line and verify that all the versions match and are deduped correctly.';
|
||
} else {
|
||
message += 'If you meant to target an HTML element, please make sure that the element exists. ' + 'Check that the <script> tag initializing Uppy is right before the closing </body> tag at the end of the page. ' + '(see https://github.com/transloadit/uppy/issues/1042)\n\n' + 'If you meant to target a plugin, please confirm that your `import` statements or `require` calls are correct.';
|
||
}
|
||
|
||
throw new Error(message);
|
||
}
|
||
|
||
update(state) {
|
||
if (this.el != null) {
|
||
var _classPrivateFieldLoo, _classPrivateFieldLoo2;
|
||
|
||
(_classPrivateFieldLoo = (_classPrivateFieldLoo2 = _classPrivateFieldLooseBase(this, _updateUI))[_updateUI]) == null ? void 0 : _classPrivateFieldLoo.call(_classPrivateFieldLoo2, state);
|
||
}
|
||
}
|
||
|
||
unmount() {
|
||
if (this.isTargetDOMEl) {
|
||
var _this$el;
|
||
|
||
(_this$el = this.el) == null ? void 0 : _this$el.remove();
|
||
}
|
||
|
||
this.onUnmount();
|
||
} // eslint-disable-next-line class-methods-use-this
|
||
|
||
|
||
onMount() {} // eslint-disable-next-line class-methods-use-this
|
||
|
||
|
||
onUnmount() {}
|
||
|
||
}
|
||
|
||
module.exports = UIPlugin;
|
||
},{"./BasePlugin":14,"@uppy/utils/lib/findDOMElement":32,"preact":55}],16:[function(require,module,exports){
|
||
/* global AggregateError */
|
||
'use strict';
|
||
|
||
let _Symbol$for, _Symbol$for2;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
const Translator = require('@uppy/utils/lib/Translator');
|
||
|
||
const ee = require('namespace-emitter');
|
||
|
||
const {
|
||
nanoid
|
||
} = require('nanoid');
|
||
|
||
const throttle = require('lodash.throttle');
|
||
|
||
const prettierBytes = require('@transloadit/prettier-bytes');
|
||
|
||
const match = require('mime-match');
|
||
|
||
const DefaultStore = require('@uppy/store-default');
|
||
|
||
const getFileType = require('@uppy/utils/lib/getFileType');
|
||
|
||
const getFileNameAndExtension = require('@uppy/utils/lib/getFileNameAndExtension');
|
||
|
||
const generateFileID = require('@uppy/utils/lib/generateFileID');
|
||
|
||
const supportsUploadProgress = require('./supportsUploadProgress');
|
||
|
||
const getFileName = require('./getFileName');
|
||
|
||
const {
|
||
justErrorsLogger,
|
||
debugLogger
|
||
} = require('./loggers'); // Exported from here.
|
||
|
||
|
||
class RestrictionError extends Error {
|
||
constructor(...args) {
|
||
super(...args);
|
||
this.isRestriction = true;
|
||
}
|
||
|
||
}
|
||
|
||
if (typeof AggregateError === 'undefined') {
|
||
// eslint-disable-next-line no-global-assign
|
||
globalThis.AggregateError = class AggregateError extends Error {
|
||
constructor(message, errors) {
|
||
super(message);
|
||
this.errors = errors;
|
||
}
|
||
|
||
};
|
||
}
|
||
|
||
class AggregateRestrictionError extends AggregateError {
|
||
constructor(...args) {
|
||
super(...args);
|
||
this.isRestriction = true;
|
||
}
|
||
|
||
}
|
||
/**
|
||
* Uppy Core module.
|
||
* Manages plugins, state updates, acts as an event bus,
|
||
* adds/removes files and metadata.
|
||
*/
|
||
|
||
|
||
var _plugins = /*#__PURE__*/_classPrivateFieldLooseKey("plugins");
|
||
|
||
var _storeUnsubscribe = /*#__PURE__*/_classPrivateFieldLooseKey("storeUnsubscribe");
|
||
|
||
var _emitter = /*#__PURE__*/_classPrivateFieldLooseKey("emitter");
|
||
|
||
var _preProcessors = /*#__PURE__*/_classPrivateFieldLooseKey("preProcessors");
|
||
|
||
var _uploaders = /*#__PURE__*/_classPrivateFieldLooseKey("uploaders");
|
||
|
||
var _postProcessors = /*#__PURE__*/_classPrivateFieldLooseKey("postProcessors");
|
||
|
||
var _checkRestrictions = /*#__PURE__*/_classPrivateFieldLooseKey("checkRestrictions");
|
||
|
||
var _checkMinNumberOfFiles = /*#__PURE__*/_classPrivateFieldLooseKey("checkMinNumberOfFiles");
|
||
|
||
var _checkRequiredMetaFields = /*#__PURE__*/_classPrivateFieldLooseKey("checkRequiredMetaFields");
|
||
|
||
var _showOrLogErrorAndThrow = /*#__PURE__*/_classPrivateFieldLooseKey("showOrLogErrorAndThrow");
|
||
|
||
var _assertNewUploadAllowed = /*#__PURE__*/_classPrivateFieldLooseKey("assertNewUploadAllowed");
|
||
|
||
var _checkAndCreateFileStateObject = /*#__PURE__*/_classPrivateFieldLooseKey("checkAndCreateFileStateObject");
|
||
|
||
var _startIfAutoProceed = /*#__PURE__*/_classPrivateFieldLooseKey("startIfAutoProceed");
|
||
|
||
var _addListeners = /*#__PURE__*/_classPrivateFieldLooseKey("addListeners");
|
||
|
||
var _updateOnlineStatus = /*#__PURE__*/_classPrivateFieldLooseKey("updateOnlineStatus");
|
||
|
||
var _createUpload = /*#__PURE__*/_classPrivateFieldLooseKey("createUpload");
|
||
|
||
var _getUpload = /*#__PURE__*/_classPrivateFieldLooseKey("getUpload");
|
||
|
||
var _removeUpload = /*#__PURE__*/_classPrivateFieldLooseKey("removeUpload");
|
||
|
||
var _runUpload = /*#__PURE__*/_classPrivateFieldLooseKey("runUpload");
|
||
|
||
_Symbol$for = Symbol.for('uppy test: getPlugins');
|
||
_Symbol$for2 = Symbol.for('uppy test: createUpload');
|
||
|
||
class Uppy {
|
||
// eslint-disable-next-line global-require
|
||
|
||
/** @type {Record<string, BasePlugin[]>} */
|
||
|
||
/**
|
||
* Instantiate Uppy
|
||
*
|
||
* @param {object} opts — Uppy options
|
||
*/
|
||
constructor(_opts) {
|
||
Object.defineProperty(this, _runUpload, {
|
||
value: _runUpload2
|
||
});
|
||
Object.defineProperty(this, _removeUpload, {
|
||
value: _removeUpload2
|
||
});
|
||
Object.defineProperty(this, _getUpload, {
|
||
value: _getUpload2
|
||
});
|
||
Object.defineProperty(this, _createUpload, {
|
||
value: _createUpload2
|
||
});
|
||
Object.defineProperty(this, _addListeners, {
|
||
value: _addListeners2
|
||
});
|
||
Object.defineProperty(this, _startIfAutoProceed, {
|
||
value: _startIfAutoProceed2
|
||
});
|
||
Object.defineProperty(this, _checkAndCreateFileStateObject, {
|
||
value: _checkAndCreateFileStateObject2
|
||
});
|
||
Object.defineProperty(this, _assertNewUploadAllowed, {
|
||
value: _assertNewUploadAllowed2
|
||
});
|
||
Object.defineProperty(this, _showOrLogErrorAndThrow, {
|
||
value: _showOrLogErrorAndThrow2
|
||
});
|
||
Object.defineProperty(this, _checkRequiredMetaFields, {
|
||
value: _checkRequiredMetaFields2
|
||
});
|
||
Object.defineProperty(this, _checkMinNumberOfFiles, {
|
||
value: _checkMinNumberOfFiles2
|
||
});
|
||
Object.defineProperty(this, _checkRestrictions, {
|
||
value: _checkRestrictions2
|
||
});
|
||
Object.defineProperty(this, _plugins, {
|
||
writable: true,
|
||
value: Object.create(null)
|
||
});
|
||
Object.defineProperty(this, _storeUnsubscribe, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _emitter, {
|
||
writable: true,
|
||
value: ee()
|
||
});
|
||
Object.defineProperty(this, _preProcessors, {
|
||
writable: true,
|
||
value: new Set()
|
||
});
|
||
Object.defineProperty(this, _uploaders, {
|
||
writable: true,
|
||
value: new Set()
|
||
});
|
||
Object.defineProperty(this, _postProcessors, {
|
||
writable: true,
|
||
value: new Set()
|
||
});
|
||
Object.defineProperty(this, _updateOnlineStatus, {
|
||
writable: true,
|
||
value: this.updateOnlineStatus.bind(this)
|
||
});
|
||
this.defaultLocale = {
|
||
strings: {
|
||
addBulkFilesFailed: {
|
||
0: 'Failed to add %{smart_count} file due to an internal error',
|
||
1: 'Failed to add %{smart_count} files due to internal errors'
|
||
},
|
||
youCanOnlyUploadX: {
|
||
0: 'You can only upload %{smart_count} file',
|
||
1: 'You can only upload %{smart_count} files'
|
||
},
|
||
youHaveToAtLeastSelectX: {
|
||
0: 'You have to select at least %{smart_count} file',
|
||
1: 'You have to select at least %{smart_count} files'
|
||
},
|
||
exceedsSize: '%{file} exceeds maximum allowed size of %{size}',
|
||
missingRequiredMetaField: 'Missing required meta fields',
|
||
missingRequiredMetaFieldOnFile: 'Missing required meta fields in %{fileName}',
|
||
inferiorSize: 'This file is smaller than the allowed size of %{size}',
|
||
youCanOnlyUploadFileTypes: 'You can only upload: %{types}',
|
||
noMoreFilesAllowed: 'Cannot add more files',
|
||
noDuplicates: 'Cannot add the duplicate file \'%{fileName}\', it already exists',
|
||
companionError: 'Connection with Companion failed',
|
||
authAborted: 'Authentication aborted',
|
||
companionUnauthorizeHint: 'To unauthorize to your %{provider} account, please go to %{url}',
|
||
failedToUpload: 'Failed to upload %{file}',
|
||
noInternetConnection: 'No Internet connection',
|
||
connectedToInternet: 'Connected to the Internet',
|
||
// Strings for remote providers
|
||
noFilesFound: 'You have no files or folders here',
|
||
selectX: {
|
||
0: 'Select %{smart_count}',
|
||
1: 'Select %{smart_count}'
|
||
},
|
||
allFilesFromFolderNamed: 'All files from folder %{name}',
|
||
openFolderNamed: 'Open folder %{name}',
|
||
cancel: 'Cancel',
|
||
logOut: 'Log out',
|
||
filter: 'Filter',
|
||
resetFilter: 'Reset filter',
|
||
loading: 'Loading...',
|
||
authenticateWithTitle: 'Please authenticate with %{pluginName} to select files',
|
||
authenticateWith: 'Connect to %{pluginName}',
|
||
signInWithGoogle: 'Sign in with Google',
|
||
searchImages: 'Search for images',
|
||
enterTextToSearch: 'Enter text to search for images',
|
||
backToSearch: 'Back to Search',
|
||
emptyFolderAdded: 'No files were added from empty folder',
|
||
folderAlreadyAdded: 'The folder "%{folder}" was already added',
|
||
folderAdded: {
|
||
0: 'Added %{smart_count} file from %{folder}',
|
||
1: 'Added %{smart_count} files from %{folder}'
|
||
}
|
||
}
|
||
};
|
||
const defaultOptions = {
|
||
id: 'uppy',
|
||
autoProceed: false,
|
||
|
||
/**
|
||
* @deprecated The method should not be used
|
||
*/
|
||
allowMultipleUploads: true,
|
||
allowMultipleUploadBatches: true,
|
||
debug: false,
|
||
restrictions: {
|
||
maxFileSize: null,
|
||
minFileSize: null,
|
||
maxTotalFileSize: null,
|
||
maxNumberOfFiles: null,
|
||
minNumberOfFiles: null,
|
||
allowedFileTypes: null,
|
||
requiredMetaFields: []
|
||
},
|
||
meta: {},
|
||
onBeforeFileAdded: currentFile => currentFile,
|
||
onBeforeUpload: files => files,
|
||
store: DefaultStore(),
|
||
logger: justErrorsLogger,
|
||
infoTimeout: 5000
|
||
}; // Merge default options with the ones set by user,
|
||
// making sure to merge restrictions too
|
||
|
||
this.opts = { ...defaultOptions,
|
||
..._opts,
|
||
restrictions: { ...defaultOptions.restrictions,
|
||
...(_opts && _opts.restrictions)
|
||
}
|
||
}; // Support debug: true for backwards-compatability, unless logger is set in opts
|
||
// opts instead of this.opts to avoid comparing objects — we set logger: justErrorsLogger in defaultOptions
|
||
|
||
if (_opts && _opts.logger && _opts.debug) {
|
||
this.log('You are using a custom `logger`, but also set `debug: true`, which uses built-in logger to output logs to console. Ignoring `debug: true` and using your custom `logger`.', 'warning');
|
||
} else if (_opts && _opts.debug) {
|
||
this.opts.logger = debugLogger;
|
||
}
|
||
|
||
this.log(`Using Core v${this.constructor.VERSION}`);
|
||
|
||
if (this.opts.restrictions.allowedFileTypes && this.opts.restrictions.allowedFileTypes !== null && !Array.isArray(this.opts.restrictions.allowedFileTypes)) {
|
||
throw new TypeError('`restrictions.allowedFileTypes` must be an array');
|
||
}
|
||
|
||
this.i18nInit(); // ___Why throttle at 500ms?
|
||
// - We must throttle at >250ms for superfocus in Dashboard to work well
|
||
// (because animation takes 0.25s, and we want to wait for all animations to be over before refocusing).
|
||
// [Practical Check]: if thottle is at 100ms, then if you are uploading a file,
|
||
// and click 'ADD MORE FILES', - focus won't activate in Firefox.
|
||
// - We must throttle at around >500ms to avoid performance lags.
|
||
// [Practical Check] Firefox, try to upload a big file for a prolonged period of time. Laptop will start to heat up.
|
||
|
||
this.calculateProgress = throttle(this.calculateProgress.bind(this), 500, {
|
||
leading: true,
|
||
trailing: true
|
||
});
|
||
this.store = this.opts.store;
|
||
this.setState({
|
||
plugins: {},
|
||
files: {},
|
||
currentUploads: {},
|
||
allowNewUpload: true,
|
||
capabilities: {
|
||
uploadProgress: supportsUploadProgress(),
|
||
individualCancellation: true,
|
||
resumableUploads: false
|
||
},
|
||
totalProgress: 0,
|
||
meta: { ...this.opts.meta
|
||
},
|
||
info: [],
|
||
recoveredState: null
|
||
});
|
||
_classPrivateFieldLooseBase(this, _storeUnsubscribe)[_storeUnsubscribe] = this.store.subscribe((prevState, nextState, patch) => {
|
||
this.emit('state-update', prevState, nextState, patch);
|
||
this.updateAll(nextState);
|
||
}); // Exposing uppy object on window for debugging and testing
|
||
|
||
if (this.opts.debug && typeof window !== 'undefined') {
|
||
window[this.opts.id] = this;
|
||
}
|
||
|
||
_classPrivateFieldLooseBase(this, _addListeners)[_addListeners]();
|
||
}
|
||
|
||
emit(event, ...args) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].emit(event, ...args);
|
||
}
|
||
|
||
on(event, callback) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].on(event, callback);
|
||
|
||
return this;
|
||
}
|
||
|
||
once(event, callback) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].once(event, callback);
|
||
|
||
return this;
|
||
}
|
||
|
||
off(event, callback) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].off(event, callback);
|
||
|
||
return this;
|
||
}
|
||
/**
|
||
* Iterate on all plugins and run `update` on them.
|
||
* Called each time state changes.
|
||
*
|
||
*/
|
||
|
||
|
||
updateAll(state) {
|
||
this.iteratePlugins(plugin => {
|
||
plugin.update(state);
|
||
});
|
||
}
|
||
/**
|
||
* Updates state with a patch
|
||
*
|
||
* @param {object} patch {foo: 'bar'}
|
||
*/
|
||
|
||
|
||
setState(patch) {
|
||
this.store.setState(patch);
|
||
}
|
||
/**
|
||
* Returns current state.
|
||
*
|
||
* @returns {object}
|
||
*/
|
||
|
||
|
||
getState() {
|
||
return this.store.getState();
|
||
}
|
||
/**
|
||
* Back compat for when uppy.state is used instead of uppy.getState().
|
||
*
|
||
* @deprecated
|
||
*/
|
||
|
||
|
||
get state() {
|
||
// Here, state is a non-enumerable property.
|
||
return this.getState();
|
||
}
|
||
/**
|
||
* Shorthand to set state for a specific file.
|
||
*/
|
||
|
||
|
||
setFileState(fileID, state) {
|
||
if (!this.getState().files[fileID]) {
|
||
throw new Error(`Can’t set state for ${fileID} (the file could have been removed)`);
|
||
}
|
||
|
||
this.setState({
|
||
files: { ...this.getState().files,
|
||
[fileID]: { ...this.getState().files[fileID],
|
||
...state
|
||
}
|
||
}
|
||
});
|
||
}
|
||
|
||
i18nInit() {
|
||
const translator = new Translator([this.defaultLocale, this.opts.locale]);
|
||
this.i18n = translator.translate.bind(translator);
|
||
this.i18nArray = translator.translateArray.bind(translator);
|
||
this.locale = translator.locale;
|
||
}
|
||
|
||
setOptions(newOpts) {
|
||
this.opts = { ...this.opts,
|
||
...newOpts,
|
||
restrictions: { ...this.opts.restrictions,
|
||
...(newOpts && newOpts.restrictions)
|
||
}
|
||
};
|
||
|
||
if (newOpts.meta) {
|
||
this.setMeta(newOpts.meta);
|
||
}
|
||
|
||
this.i18nInit();
|
||
|
||
if (newOpts.locale) {
|
||
this.iteratePlugins(plugin => {
|
||
plugin.setOptions();
|
||
});
|
||
} // Note: this is not the preact `setState`, it's an internal function that has the same name.
|
||
|
||
|
||
this.setState(); // so that UI re-renders with new options
|
||
}
|
||
|
||
resetProgress() {
|
||
const defaultProgress = {
|
||
percentage: 0,
|
||
bytesUploaded: 0,
|
||
uploadComplete: false,
|
||
uploadStarted: null
|
||
};
|
||
const files = { ...this.getState().files
|
||
};
|
||
const updatedFiles = {};
|
||
Object.keys(files).forEach(fileID => {
|
||
const updatedFile = { ...files[fileID]
|
||
};
|
||
updatedFile.progress = { ...updatedFile.progress,
|
||
...defaultProgress
|
||
};
|
||
updatedFiles[fileID] = updatedFile;
|
||
});
|
||
this.setState({
|
||
files: updatedFiles,
|
||
totalProgress: 0
|
||
});
|
||
this.emit('reset-progress');
|
||
}
|
||
|
||
addPreProcessor(fn) {
|
||
_classPrivateFieldLooseBase(this, _preProcessors)[_preProcessors].add(fn);
|
||
}
|
||
|
||
removePreProcessor(fn) {
|
||
return _classPrivateFieldLooseBase(this, _preProcessors)[_preProcessors].delete(fn);
|
||
}
|
||
|
||
addPostProcessor(fn) {
|
||
_classPrivateFieldLooseBase(this, _postProcessors)[_postProcessors].add(fn);
|
||
}
|
||
|
||
removePostProcessor(fn) {
|
||
return _classPrivateFieldLooseBase(this, _postProcessors)[_postProcessors].delete(fn);
|
||
}
|
||
|
||
addUploader(fn) {
|
||
_classPrivateFieldLooseBase(this, _uploaders)[_uploaders].add(fn);
|
||
}
|
||
|
||
removeUploader(fn) {
|
||
return _classPrivateFieldLooseBase(this, _uploaders)[_uploaders].delete(fn);
|
||
}
|
||
|
||
setMeta(data) {
|
||
const updatedMeta = { ...this.getState().meta,
|
||
...data
|
||
};
|
||
const updatedFiles = { ...this.getState().files
|
||
};
|
||
Object.keys(updatedFiles).forEach(fileID => {
|
||
updatedFiles[fileID] = { ...updatedFiles[fileID],
|
||
meta: { ...updatedFiles[fileID].meta,
|
||
...data
|
||
}
|
||
};
|
||
});
|
||
this.log('Adding metadata:');
|
||
this.log(data);
|
||
this.setState({
|
||
meta: updatedMeta,
|
||
files: updatedFiles
|
||
});
|
||
}
|
||
|
||
setFileMeta(fileID, data) {
|
||
const updatedFiles = { ...this.getState().files
|
||
};
|
||
|
||
if (!updatedFiles[fileID]) {
|
||
this.log('Was trying to set metadata for a file that has been removed: ', fileID);
|
||
return;
|
||
}
|
||
|
||
const newMeta = { ...updatedFiles[fileID].meta,
|
||
...data
|
||
};
|
||
updatedFiles[fileID] = { ...updatedFiles[fileID],
|
||
meta: newMeta
|
||
};
|
||
this.setState({
|
||
files: updatedFiles
|
||
});
|
||
}
|
||
/**
|
||
* Get a file object.
|
||
*
|
||
* @param {string} fileID The ID of the file object to return.
|
||
*/
|
||
|
||
|
||
getFile(fileID) {
|
||
return this.getState().files[fileID];
|
||
}
|
||
/**
|
||
* Get all files in an array.
|
||
*/
|
||
|
||
|
||
getFiles() {
|
||
const {
|
||
files
|
||
} = this.getState();
|
||
return Object.values(files);
|
||
}
|
||
|
||
getObjectOfFilesPerState() {
|
||
const {
|
||
files: filesObject,
|
||
totalProgress,
|
||
error
|
||
} = this.getState();
|
||
const files = Object.values(filesObject);
|
||
const inProgressFiles = files.filter(({
|
||
progress
|
||
}) => !progress.uploadComplete && progress.uploadStarted);
|
||
const newFiles = files.filter(file => !file.progress.uploadStarted);
|
||
const startedFiles = files.filter(file => file.progress.uploadStarted || file.progress.preprocess || file.progress.postprocess);
|
||
const uploadStartedFiles = files.filter(file => file.progress.uploadStarted);
|
||
const pausedFiles = files.filter(file => file.isPaused);
|
||
const completeFiles = files.filter(file => file.progress.uploadComplete);
|
||
const erroredFiles = files.filter(file => file.error);
|
||
const inProgressNotPausedFiles = inProgressFiles.filter(file => !file.isPaused);
|
||
const processingFiles = files.filter(file => file.progress.preprocess || file.progress.postprocess);
|
||
return {
|
||
newFiles,
|
||
startedFiles,
|
||
uploadStartedFiles,
|
||
pausedFiles,
|
||
completeFiles,
|
||
erroredFiles,
|
||
inProgressFiles,
|
||
inProgressNotPausedFiles,
|
||
processingFiles,
|
||
isUploadStarted: uploadStartedFiles.length > 0,
|
||
isAllComplete: totalProgress === 100 && completeFiles.length === files.length && processingFiles.length === 0,
|
||
isAllErrored: !!error && erroredFiles.length === files.length,
|
||
isAllPaused: inProgressFiles.length !== 0 && pausedFiles.length === inProgressFiles.length,
|
||
isUploadInProgress: inProgressFiles.length > 0,
|
||
isSomeGhost: files.some(file => file.isGhost)
|
||
};
|
||
}
|
||
/**
|
||
* A public wrapper for _checkRestrictions — checks if a file passes a set of restrictions.
|
||
* For use in UI pluigins (like Providers), to disallow selecting files that won’t pass restrictions.
|
||
*
|
||
* @param {object} file object to check
|
||
* @param {Array} [files] array to check maxNumberOfFiles and maxTotalFileSize
|
||
* @returns {object} { result: true/false, reason: why file didn’t pass restrictions }
|
||
*/
|
||
|
||
|
||
validateRestrictions(file, files) {
|
||
try {
|
||
_classPrivateFieldLooseBase(this, _checkRestrictions)[_checkRestrictions](file, files);
|
||
|
||
return {
|
||
result: true
|
||
};
|
||
} catch (err) {
|
||
return {
|
||
result: false,
|
||
reason: err.message
|
||
};
|
||
}
|
||
}
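  /*
   * Usage sketch (illustrative only): UI code can call this before adding a file to
   * explain why it would be rejected. The restriction value and the file object below
   * are hypothetical.
   *
   *   // with restrictions: { maxFileSize: 1024 * 1024 }
   *   const { result, reason } = uppy.validateRestrictions({ name: 'video.mp4', type: 'video/mp4', size: 5 * 1024 * 1024 })
   *   if (!result) uppy.info(reason, 'error', 5000)
   */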
|
||
/**
|
||
* Check if file passes a set of restrictions set in options: maxFileSize, minFileSize,
|
||
* maxNumberOfFiles and allowedFileTypes.
|
||
*
|
||
* @param {object} file object to check
|
||
* @param {Array} [files] array to check maxNumberOfFiles and maxTotalFileSize
|
||
* @private
|
||
*/
|
||
|
||
|
||
checkIfFileAlreadyExists(fileID) {
|
||
const {
|
||
files
|
||
} = this.getState();
|
||
|
||
if (files[fileID] && !files[fileID].isGhost) {
|
||
return true;
|
||
}
|
||
|
||
return false;
|
||
}
|
||
/**
|
||
* Create a file state object based on user-provided `addFile()` options.
|
||
*
|
||
* Note this is extremely side-effectful and should only be done when a file state object
|
||
* will be added to state immediately afterward!
|
||
*
|
||
* The `files` value is passed in because it may be updated by the caller without updating the store.
|
||
*/
|
||
|
||
|
||
/**
|
||
* Add a new file to `state.files`. This will run `onBeforeFileAdded`,
|
||
* try to guess file type in a clever way, check file against restrictions,
|
||
* and start an upload if `autoProceed === true`.
|
||
*
|
||
* @param {object} file object to add
|
||
* @returns {string} id for the added file
|
||
*/
|
||
addFile(file) {
|
||
_classPrivateFieldLooseBase(this, _assertNewUploadAllowed)[_assertNewUploadAllowed](file);
|
||
|
||
const {
|
||
files
|
||
} = this.getState();
|
||
|
||
let newFile = _classPrivateFieldLooseBase(this, _checkAndCreateFileStateObject)[_checkAndCreateFileStateObject](files, file); // Users are asked to re-select recovered files without data,
|
||
// and to keep the progress, meta and everthing else, we only replace said data
|
||
|
||
|
||
if (files[newFile.id] && files[newFile.id].isGhost) {
|
||
newFile = { ...files[newFile.id],
|
||
data: file.data,
|
||
isGhost: false
|
||
};
|
||
this.log(`Replaced the blob in the restored ghost file: ${newFile.name}, ${newFile.id}`);
|
||
}
|
||
|
||
this.setState({
|
||
files: { ...files,
|
||
[newFile.id]: newFile
|
||
}
|
||
});
|
||
this.emit('file-added', newFile);
|
||
this.emit('files-added', [newFile]);
|
||
this.log(`Added file: ${newFile.name}, ${newFile.id}, mime type: ${newFile.type}`);
|
||
|
||
_classPrivateFieldLooseBase(this, _startIfAutoProceed)[_startIfAutoProceed]();
|
||
|
||
return newFile.id;
|
||
}
|
||
/**
|
||
* Add multiple files to `state.files`. See the `addFile()` documentation.
|
||
*
|
||
* If an error occurs while adding a file, it is logged and the user is notified.
|
||
* This is good for UI plugins, but not for programmatic use.
|
||
* Programmatic users should usually still use `addFile()` on individual files.
|
||
*/
|
||
|
||
|
||
addFiles(fileDescriptors) {
|
||
_classPrivateFieldLooseBase(this, _assertNewUploadAllowed)[_assertNewUploadAllowed](); // create a copy of the files object only once
|
||
|
||
|
||
const files = { ...this.getState().files
|
||
};
|
||
const newFiles = [];
|
||
const errors = [];
|
||
|
||
for (let i = 0; i < fileDescriptors.length; i++) {
|
||
try {
|
||
let newFile = _classPrivateFieldLooseBase(this, _checkAndCreateFileStateObject)[_checkAndCreateFileStateObject](files, fileDescriptors[i]); // Users are asked to re-select recovered files without data,
|
||
// and to keep the progress, meta and everthing else, we only replace said data
|
||
|
||
|
||
if (files[newFile.id] && files[newFile.id].isGhost) {
|
||
newFile = { ...files[newFile.id],
|
||
data: fileDescriptors[i].data,
|
||
isGhost: false
|
||
};
|
||
this.log(`Replaced blob in a ghost file: ${newFile.name}, ${newFile.id}`);
|
||
}
|
||
|
||
files[newFile.id] = newFile;
|
||
newFiles.push(newFile);
|
||
} catch (err) {
|
||
if (!err.isRestriction) {
|
||
errors.push(err);
|
||
}
|
||
}
|
||
}
|
||
|
||
this.setState({
|
||
files
|
||
});
|
||
newFiles.forEach(newFile => {
|
||
this.emit('file-added', newFile);
|
||
});
|
||
this.emit('files-added', newFiles);
|
||
|
||
if (newFiles.length > 5) {
|
||
this.log(`Added batch of ${newFiles.length} files`);
|
||
} else {
|
||
Object.keys(newFiles).forEach(fileID => {
|
||
this.log(`Added file: ${newFiles[fileID].name}\n id: ${newFiles[fileID].id}\n type: ${newFiles[fileID].type}`);
|
||
});
|
||
}
|
||
|
||
if (newFiles.length > 0) {
|
||
_classPrivateFieldLooseBase(this, _startIfAutoProceed)[_startIfAutoProceed]();
|
||
}
|
||
|
||
if (errors.length > 0) {
|
||
let message = 'Multiple errors occurred while adding files:\n';
|
||
errors.forEach(subError => {
|
||
message += `\n * ${subError.message}`;
|
||
});
|
||
this.info({
|
||
message: this.i18n('addBulkFilesFailed', {
|
||
smart_count: errors.length
|
||
}),
|
||
details: message
|
||
}, 'error', this.opts.infoTimeout);
|
||
|
||
if (typeof AggregateError === 'function') {
|
||
throw new AggregateError(errors, message);
|
||
} else {
|
||
const err = new Error(message);
|
||
err.errors = errors;
|
||
throw err;
|
||
}
|
||
}
|
||
}
|
||
|
||
removeFiles(fileIDs, reason) {
|
||
const {
|
||
files,
|
||
currentUploads
|
||
} = this.getState();
|
||
const updatedFiles = { ...files
|
||
};
|
||
const updatedUploads = { ...currentUploads
|
||
};
|
||
const removedFiles = Object.create(null);
|
||
fileIDs.forEach(fileID => {
|
||
if (files[fileID]) {
|
||
removedFiles[fileID] = files[fileID];
|
||
delete updatedFiles[fileID];
|
||
}
|
||
}); // Remove files from the `fileIDs` list in each upload.
|
||
|
||
function fileIsNotRemoved(uploadFileID) {
|
||
return removedFiles[uploadFileID] === undefined;
|
||
}
|
||
|
||
Object.keys(updatedUploads).forEach(uploadID => {
|
||
const newFileIDs = currentUploads[uploadID].fileIDs.filter(fileIsNotRemoved); // Remove the upload if no files are associated with it anymore.
|
||
|
||
if (newFileIDs.length === 0) {
|
||
delete updatedUploads[uploadID];
|
||
return;
|
||
}
|
||
|
||
updatedUploads[uploadID] = { ...currentUploads[uploadID],
|
||
fileIDs: newFileIDs
|
||
};
|
||
});
|
||
const stateUpdate = {
|
||
currentUploads: updatedUploads,
|
||
files: updatedFiles
|
||
}; // If all files were removed - allow new uploads,
|
||
// and clear recoveredState
|
||
|
||
if (Object.keys(updatedFiles).length === 0) {
|
||
stateUpdate.allowNewUpload = true;
|
||
stateUpdate.error = null;
|
||
stateUpdate.recoveredState = null;
|
||
}
|
||
|
||
this.setState(stateUpdate);
|
||
this.calculateTotalProgress();
|
||
const removedFileIDs = Object.keys(removedFiles);
|
||
removedFileIDs.forEach(fileID => {
|
||
this.emit('file-removed', removedFiles[fileID], reason);
|
||
});
|
||
|
||
if (removedFileIDs.length > 5) {
|
||
this.log(`Removed ${removedFileIDs.length} files`);
|
||
} else {
|
||
this.log(`Removed files: ${removedFileIDs.join(', ')}`);
|
||
}
|
||
}
|
||
|
||
removeFile(fileID, reason = null) {
|
||
this.removeFiles([fileID], reason);
|
||
}
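  // Usage sketch (comment only): removing files by id. `fileId` would be the id
  // generated when the file was added; the optional `reason` is forwarded to the
  // 'file-removed' event, as removeFiles() above shows.
  //
  //   uppy.removeFile(fileId, 'removed-by-user');
  //   uppy.removeFiles([idA, idB], 'cancel-all');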
|
||
|
||
pauseResume(fileID) {
|
||
if (!this.getState().capabilities.resumableUploads || this.getFile(fileID).uploadComplete) {
|
||
return undefined;
|
||
}
|
||
|
||
const wasPaused = this.getFile(fileID).isPaused || false;
|
||
const isPaused = !wasPaused;
|
||
this.setFileState(fileID, {
|
||
isPaused
|
||
});
|
||
this.emit('upload-pause', fileID, isPaused);
|
||
return isPaused;
|
||
}
|
||
|
||
pauseAll() {
|
||
const updatedFiles = { ...this.getState().files
|
||
};
|
||
const inProgressUpdatedFiles = Object.keys(updatedFiles).filter(file => {
|
||
return !updatedFiles[file].progress.uploadComplete && updatedFiles[file].progress.uploadStarted;
|
||
});
|
||
inProgressUpdatedFiles.forEach(file => {
|
||
const updatedFile = { ...updatedFiles[file],
|
||
isPaused: true
|
||
};
|
||
updatedFiles[file] = updatedFile;
|
||
});
|
||
this.setState({
|
||
files: updatedFiles
|
||
});
|
||
this.emit('pause-all');
|
||
}
|
||
|
||
resumeAll() {
|
||
const updatedFiles = { ...this.getState().files
|
||
};
|
||
const inProgressUpdatedFiles = Object.keys(updatedFiles).filter(file => {
|
||
return !updatedFiles[file].progress.uploadComplete && updatedFiles[file].progress.uploadStarted;
|
||
});
|
||
inProgressUpdatedFiles.forEach(file => {
|
||
const updatedFile = { ...updatedFiles[file],
|
||
isPaused: false,
|
||
error: null
|
||
};
|
||
updatedFiles[file] = updatedFile;
|
||
});
|
||
this.setState({
|
||
files: updatedFiles
|
||
});
|
||
this.emit('resume-all');
|
||
}
|
||
|
||
retryAll() {
|
||
const updatedFiles = { ...this.getState().files
|
||
};
|
||
const filesToRetry = Object.keys(updatedFiles).filter(file => {
|
||
return updatedFiles[file].error;
|
||
});
|
||
filesToRetry.forEach(file => {
|
||
const updatedFile = { ...updatedFiles[file],
|
||
isPaused: false,
|
||
error: null
|
||
};
|
||
updatedFiles[file] = updatedFile;
|
||
});
|
||
this.setState({
|
||
files: updatedFiles,
|
||
error: null
|
||
});
|
||
this.emit('retry-all', filesToRetry);
|
||
|
||
if (filesToRetry.length === 0) {
|
||
return Promise.resolve({
|
||
successful: [],
|
||
failed: []
|
||
});
|
||
}
|
||
|
||
const uploadID = _classPrivateFieldLooseBase(this, _createUpload)[_createUpload](filesToRetry, {
|
||
forceAllowNewUpload: true // create new upload even if allowNewUpload: false
|
||
|
||
});
|
||
|
||
return _classPrivateFieldLooseBase(this, _runUpload)[_runUpload](uploadID);
|
||
}
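  // Usage sketch (comment only): retrying every errored file. retryAll()
  // resolves with the upload result, so the successful/failed split can be
  // inspected once the retried upload finishes.
  //
  //   uppy.retryAll().then(({ successful, failed }) => {
  //     console.log(`retried: ${successful.length} ok, ${failed.length} failed`);
  //   });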
|
||
|
||
cancelAll() {
|
||
this.emit('cancel-all');
|
||
const {
|
||
files
|
||
} = this.getState();
|
||
const fileIDs = Object.keys(files);
|
||
|
||
if (fileIDs.length) {
|
||
this.removeFiles(fileIDs, 'cancel-all');
|
||
}
|
||
|
||
this.setState({
|
||
totalProgress: 0,
|
||
error: null,
|
||
recoveredState: null
|
||
});
|
||
}
|
||
|
||
retryUpload(fileID) {
|
||
this.setFileState(fileID, {
|
||
error: null,
|
||
isPaused: false
|
||
});
|
||
this.emit('upload-retry', fileID);
|
||
|
||
const uploadID = _classPrivateFieldLooseBase(this, _createUpload)[_createUpload]([fileID], {
|
||
forceAllowNewUpload: true // create new upload even if allowNewUpload: false
|
||
|
||
});
|
||
|
||
return _classPrivateFieldLooseBase(this, _runUpload)[_runUpload](uploadID);
|
||
}
|
||
|
||
reset() {
|
||
this.cancelAll();
|
||
}
|
||
|
||
logout() {
|
||
this.iteratePlugins(plugin => {
|
||
if (plugin.provider && plugin.provider.logout) {
|
||
plugin.provider.logout();
|
||
}
|
||
});
|
||
}
|
||
|
||
calculateProgress(file, data) {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
} // bytesTotal may be null or zero; in that case we can't divide by it
|
||
|
||
|
||
const canHavePercentage = Number.isFinite(data.bytesTotal) && data.bytesTotal > 0;
|
||
this.setFileState(file.id, {
|
||
progress: { ...this.getFile(file.id).progress,
|
||
bytesUploaded: data.bytesUploaded,
|
||
bytesTotal: data.bytesTotal,
|
||
percentage: canHavePercentage ? Math.round(data.bytesUploaded / data.bytesTotal * 100) : 0
|
||
}
|
||
});
|
||
this.calculateTotalProgress();
|
||
}
|
||
|
||
calculateTotalProgress() {
|
||
    // calculate total progress, using the number of files currently uploading,
    // multiplied by 100, and the sum of the individual progress of each file
|
||
const files = this.getFiles();
|
||
const inProgress = files.filter(file => {
|
||
return file.progress.uploadStarted || file.progress.preprocess || file.progress.postprocess;
|
||
});
|
||
|
||
if (inProgress.length === 0) {
|
||
this.emit('progress', 0);
|
||
this.setState({
|
||
totalProgress: 0
|
||
});
|
||
return;
|
||
}
|
||
|
||
const sizedFiles = inProgress.filter(file => file.progress.bytesTotal != null);
|
||
const unsizedFiles = inProgress.filter(file => file.progress.bytesTotal == null);
|
||
|
||
if (sizedFiles.length === 0) {
|
||
const progressMax = inProgress.length * 100;
|
||
const currentProgress = unsizedFiles.reduce((acc, file) => {
|
||
return acc + file.progress.percentage;
|
||
}, 0);
|
||
const totalProgress = Math.round(currentProgress / progressMax * 100);
|
||
this.setState({
|
||
totalProgress
|
||
});
|
||
return;
|
||
}
|
||
|
||
let totalSize = sizedFiles.reduce((acc, file) => {
|
||
return acc + file.progress.bytesTotal;
|
||
}, 0);
|
||
const averageSize = totalSize / sizedFiles.length;
|
||
totalSize += averageSize * unsizedFiles.length;
|
||
let uploadedSize = 0;
|
||
sizedFiles.forEach(file => {
|
||
uploadedSize += file.progress.bytesUploaded;
|
||
});
|
||
unsizedFiles.forEach(file => {
|
||
uploadedSize += averageSize * (file.progress.percentage || 0) / 100;
|
||
});
|
||
let totalProgress = totalSize === 0 ? 0 : Math.round(uploadedSize / totalSize * 100); // hot fix, because:
|
||
// uploadedSize ended up larger than totalSize, resulting in 1325% total
|
||
|
||
if (totalProgress > 100) {
|
||
totalProgress = 100;
|
||
}
|
||
|
||
this.setState({
|
||
totalProgress
|
||
});
|
||
this.emit('progress', totalProgress);
|
||
}
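  // Worked example of the weighting above (comment only, illustrative numbers):
  // two sized files of 80 MB (40 MB uploaded) and 20 MB (20 MB uploaded), plus
  // one file of unknown size at 50%. The unknown file is assumed to be the
  // average size of the sized files (50 MB), so totalSize = 80 + 20 + 50 = 150,
  // uploadedSize = 40 + 20 + 25 = 85, and totalProgress = round(85 / 150 * 100) = 57.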
|
||
/**
|
||
* Registers listeners for all global actions, like:
|
||
* `error`, `file-removed`, `upload-progress`
|
||
*/
|
||
|
||
|
||
updateOnlineStatus() {
|
||
const online = typeof window.navigator.onLine !== 'undefined' ? window.navigator.onLine : true;
|
||
|
||
if (!online) {
|
||
this.emit('is-offline');
|
||
this.info(this.i18n('noInternetConnection'), 'error', 0);
|
||
this.wasOffline = true;
|
||
} else {
|
||
this.emit('is-online');
|
||
|
||
if (this.wasOffline) {
|
||
this.emit('back-online');
|
||
this.info(this.i18n('connectedToInternet'), 'success', 3000);
|
||
this.wasOffline = false;
|
||
}
|
||
}
|
||
}
|
||
|
||
getID() {
|
||
return this.opts.id;
|
||
}
|
||
/**
|
||
* Registers a plugin with Core.
|
||
*
|
||
   * @param {object} Plugin Plugin class to instantiate and register
|
||
* @param {object} [opts] object with options to be passed to Plugin
|
||
* @returns {object} self for chaining
|
||
*/
|
||
// eslint-disable-next-line no-shadow
|
||
|
||
|
||
use(Plugin, opts) {
|
||
if (typeof Plugin !== 'function') {
|
||
const msg = `Expected a plugin class, but got ${Plugin === null ? 'null' : typeof Plugin}.` + ' Please verify that the plugin was imported and spelled correctly.';
|
||
throw new TypeError(msg);
|
||
} // Instantiate
|
||
|
||
|
||
const plugin = new Plugin(this, opts);
|
||
const pluginId = plugin.id;
|
||
|
||
if (!pluginId) {
|
||
throw new Error('Your plugin must have an id');
|
||
}
|
||
|
||
if (!plugin.type) {
|
||
throw new Error('Your plugin must have a type');
|
||
}
|
||
|
||
const existsPluginAlready = this.getPlugin(pluginId);
|
||
|
||
if (existsPluginAlready) {
|
||
const msg = `Already found a plugin named '${existsPluginAlready.id}'. ` + `Tried to use: '${pluginId}'.\n` + 'Uppy plugins must have unique `id` options. See https://uppy.io/docs/plugins/#id.';
|
||
throw new Error(msg);
|
||
}
|
||
|
||
if (Plugin.VERSION) {
|
||
this.log(`Using ${pluginId} v${Plugin.VERSION}`);
|
||
}
|
||
|
||
if (plugin.type in _classPrivateFieldLooseBase(this, _plugins)[_plugins]) {
|
||
_classPrivateFieldLooseBase(this, _plugins)[_plugins][plugin.type].push(plugin);
|
||
} else {
|
||
_classPrivateFieldLooseBase(this, _plugins)[_plugins][plugin.type] = [plugin];
|
||
}
|
||
|
||
plugin.install();
|
||
return this;
|
||
}
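  // Usage sketch (comment only): registering a plugin class. Assumes an existing
  // `uppy` instance; DropTarget refers to the plugin bundled further down in this
  // file. `use()` returns the instance, so calls can be chained, and each plugin
  // id must be unique.
  //
  //   uppy.use(DropTarget, { target: document.body });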
|
||
/**
|
||
* Find one Plugin by name.
|
||
*
|
||
* @param {string} id plugin id
|
||
* @returns {BasePlugin|undefined}
|
||
*/
|
||
|
||
|
||
getPlugin(id) {
|
||
for (const plugins of Object.values(_classPrivateFieldLooseBase(this, _plugins)[_plugins])) {
|
||
const foundPlugin = plugins.find(plugin => plugin.id === id);
|
||
if (foundPlugin != null) return foundPlugin;
|
||
}
|
||
|
||
return undefined;
|
||
}
|
||
|
||
[_Symbol$for](type) {
|
||
return _classPrivateFieldLooseBase(this, _plugins)[_plugins][type];
|
||
}
|
||
/**
|
||
* Iterate through all `use`d plugins.
|
||
*
|
||
* @param {Function} method that will be run on each plugin
|
||
*/
|
||
|
||
|
||
iteratePlugins(method) {
|
||
Object.values(_classPrivateFieldLooseBase(this, _plugins)[_plugins]).flat(1).forEach(method);
|
||
}
|
||
/**
|
||
* Uninstall and remove a plugin.
|
||
*
|
||
* @param {object} instance The plugin instance to remove.
|
||
*/
|
||
|
||
|
||
removePlugin(instance) {
|
||
this.log(`Removing plugin ${instance.id}`);
|
||
this.emit('plugin-remove', instance);
|
||
|
||
if (instance.uninstall) {
|
||
instance.uninstall();
|
||
}
|
||
|
||
const list = _classPrivateFieldLooseBase(this, _plugins)[_plugins][instance.type]; // list.indexOf failed here, because Vue3 converted the plugin instance
|
||
// to a Proxy object, which failed the strict comparison test:
|
||
// obj !== objProxy
|
||
|
||
|
||
const index = list.findIndex(item => item.id === instance.id);
|
||
|
||
if (index !== -1) {
|
||
list.splice(index, 1);
|
||
}
|
||
|
||
const state = this.getState();
|
||
const updatedState = {
|
||
plugins: { ...state.plugins,
|
||
[instance.id]: undefined
|
||
}
|
||
};
|
||
this.setState(updatedState);
|
||
}
|
||
/**
|
||
* Uninstall all plugins and close down this Uppy instance.
|
||
*/
|
||
|
||
|
||
close() {
|
||
this.log(`Closing Uppy instance ${this.opts.id}: removing all files and uninstalling plugins`);
|
||
this.reset();
|
||
|
||
_classPrivateFieldLooseBase(this, _storeUnsubscribe)[_storeUnsubscribe]();
|
||
|
||
this.iteratePlugins(plugin => {
|
||
this.removePlugin(plugin);
|
||
});
|
||
|
||
if (typeof window !== 'undefined' && window.removeEventListener) {
|
||
window.removeEventListener('online', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
|
||
window.removeEventListener('offline', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
|
||
}
|
||
}
|
||
|
||
hideInfo() {
|
||
const {
|
||
info
|
||
} = this.getState();
|
||
this.setState({
|
||
info: info.slice(1)
|
||
});
|
||
this.emit('info-hidden');
|
||
}
|
||
/**
|
||
* Set info message in `state.info`, so that UI plugins like `Informer`
|
||
* can display the message.
|
||
*
|
||
* @param {string | object} message Message to be displayed by the informer
|
||
* @param {string} [type]
|
||
* @param {number} [duration]
|
||
*/
|
||
|
||
|
||
info(message, type = 'info', duration = 3000) {
|
||
const isComplexMessage = typeof message === 'object';
|
||
this.setState({
|
||
info: [...this.getState().info, {
|
||
type,
|
||
message: isComplexMessage ? message.message : message,
|
||
details: isComplexMessage ? message.details : null
|
||
}]
|
||
});
|
||
setTimeout(() => this.hideInfo(), duration);
|
||
this.emit('info-visible');
|
||
}
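  // Usage sketch (comment only): showing a temporary message through an
  // Informer-style UI plugin. The object form carries extra details, the second
  // argument is the type, and the third is the display duration in milliseconds.
  //
  //   uppy.info('File uploaded', 'success', 3000);
  //   uppy.info({ message: 'Upload failed', details: 'Server returned 500' }, 'error', 5000);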
|
||
/**
|
||
* Passes messages to a function, provided in `opts.logger`.
|
||
* If `opts.logger: Uppy.debugLogger` or `opts.debug: true`, logs to the browser console.
|
||
*
|
||
* @param {string|object} message to log
|
||
* @param {string} [type] optional `error` or `warning`
|
||
*/
|
||
|
||
|
||
log(message, type) {
|
||
const {
|
||
logger
|
||
} = this.opts;
|
||
|
||
switch (type) {
|
||
case 'error':
|
||
logger.error(message);
|
||
break;
|
||
|
||
case 'warning':
|
||
logger.warn(message);
|
||
break;
|
||
|
||
default:
|
||
logger.debug(message);
|
||
break;
|
||
}
|
||
}
|
||
/**
|
||
* Restore an upload by its ID.
|
||
*/
|
||
|
||
|
||
restore(uploadID) {
|
||
this.log(`Core: attempting to restore upload "${uploadID}"`);
|
||
|
||
if (!this.getState().currentUploads[uploadID]) {
|
||
_classPrivateFieldLooseBase(this, _removeUpload)[_removeUpload](uploadID);
|
||
|
||
return Promise.reject(new Error('Nonexistent upload'));
|
||
}
|
||
|
||
return _classPrivateFieldLooseBase(this, _runUpload)[_runUpload](uploadID);
|
||
}
|
||
/**
|
||
* Create an upload for a bunch of files.
|
||
*
|
||
* @param {Array<string>} fileIDs File IDs to include in this upload.
|
||
* @returns {string} ID of this upload.
|
||
*/
|
||
|
||
|
||
[_Symbol$for2](...args) {
|
||
return _classPrivateFieldLooseBase(this, _createUpload)[_createUpload](...args);
|
||
}
|
||
|
||
/**
|
||
* Add data to an upload's result object.
|
||
*
|
||
* @param {string} uploadID The ID of the upload.
|
||
* @param {object} data Data properties to add to the result object.
|
||
*/
|
||
addResultData(uploadID, data) {
|
||
if (!_classPrivateFieldLooseBase(this, _getUpload)[_getUpload](uploadID)) {
|
||
this.log(`Not setting result for an upload that has been removed: ${uploadID}`);
|
||
return;
|
||
}
|
||
|
||
const {
|
||
currentUploads
|
||
} = this.getState();
|
||
const currentUpload = { ...currentUploads[uploadID],
|
||
result: { ...currentUploads[uploadID].result,
|
||
...data
|
||
}
|
||
};
|
||
this.setState({
|
||
currentUploads: { ...currentUploads,
|
||
[uploadID]: currentUpload
|
||
}
|
||
});
|
||
}
|
||
/**
|
||
   * Remove an upload, e.g. if it has been canceled or completed.
|
||
*
|
||
* @param {string} uploadID The ID of the upload.
|
||
*/
|
||
|
||
|
||
/**
|
||
* Start an upload for all the files that are not currently being uploaded.
|
||
*
|
||
* @returns {Promise}
|
||
*/
|
||
upload() {
|
||
var _classPrivateFieldLoo;
|
||
|
||
if (!((_classPrivateFieldLoo = _classPrivateFieldLooseBase(this, _plugins)[_plugins].uploader) != null && _classPrivateFieldLoo.length)) {
|
||
this.log('No uploader type plugins are used', 'warning');
|
||
}
|
||
|
||
let {
|
||
files
|
||
} = this.getState();
|
||
const onBeforeUploadResult = this.opts.onBeforeUpload(files);
|
||
|
||
if (onBeforeUploadResult === false) {
|
||
return Promise.reject(new Error('Not starting the upload because onBeforeUpload returned false'));
|
||
}
|
||
|
||
if (onBeforeUploadResult && typeof onBeforeUploadResult === 'object') {
|
||
files = onBeforeUploadResult; // Updating files in state, because uploader plugins receive file IDs,
|
||
// and then fetch the actual file object from state
|
||
|
||
this.setState({
|
||
files
|
||
});
|
||
}
|
||
|
||
return Promise.resolve().then(() => {
|
||
_classPrivateFieldLooseBase(this, _checkMinNumberOfFiles)[_checkMinNumberOfFiles](files);
|
||
|
||
_classPrivateFieldLooseBase(this, _checkRequiredMetaFields)[_checkRequiredMetaFields](files);
|
||
}).catch(err => {
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](err);
|
||
}).then(() => {
|
||
const {
|
||
currentUploads
|
||
} = this.getState(); // get a list of files that are currently assigned to uploads
|
||
|
||
const currentlyUploadingFiles = Object.values(currentUploads).flatMap(curr => curr.fileIDs);
|
||
const waitingFileIDs = [];
|
||
Object.keys(files).forEach(fileID => {
|
||
const file = this.getFile(fileID); // if the file hasn't started uploading and hasn't already been assigned to an upload..
|
||
|
||
if (!file.progress.uploadStarted && currentlyUploadingFiles.indexOf(fileID) === -1) {
|
||
waitingFileIDs.push(file.id);
|
||
}
|
||
});
|
||
|
||
const uploadID = _classPrivateFieldLooseBase(this, _createUpload)[_createUpload](waitingFileIDs);
|
||
|
||
return _classPrivateFieldLooseBase(this, _runUpload)[_runUpload](uploadID);
|
||
}).catch(err => {
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](err, {
|
||
showInformer: false
|
||
});
|
||
});
|
||
}
|
||
|
||
}
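// End-to-end sketch (comment only; assumes an `uppy` instance created elsewhere
// and a Blob named `data`): add a file, listen for completion, then start the
// upload.
//
//   uppy.on('complete', (result) => {
//     console.log('ok:', result.successful.length, 'failed:', result.failed.length);
//   });
//   uppy.addFile({ source: 'MyApp', name: 'backup.tar.gz', type: 'application/gzip', data });
//   uppy.upload();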
|
||
|
||
function _checkRestrictions2(file, files = this.getFiles()) {
|
||
const {
|
||
maxFileSize,
|
||
minFileSize,
|
||
maxTotalFileSize,
|
||
maxNumberOfFiles,
|
||
allowedFileTypes
|
||
} = this.opts.restrictions;
|
||
|
||
if (maxNumberOfFiles) {
|
||
if (files.length + 1 > maxNumberOfFiles) {
|
||
throw new RestrictionError(`${this.i18n('youCanOnlyUploadX', {
|
||
smart_count: maxNumberOfFiles
|
||
})}`);
|
||
}
|
||
}
|
||
|
||
if (allowedFileTypes) {
|
||
const isCorrectFileType = allowedFileTypes.some(type => {
|
||
// check if this is a mime-type
|
||
if (type.indexOf('/') > -1) {
|
||
if (!file.type) return false;
|
||
return match(file.type.replace(/;.*?$/, ''), type);
|
||
} // otherwise this is likely an extension
|
||
|
||
|
||
if (type[0] === '.' && file.extension) {
|
||
return file.extension.toLowerCase() === type.substr(1).toLowerCase();
|
||
}
|
||
|
||
return false;
|
||
});
|
||
|
||
if (!isCorrectFileType) {
|
||
const allowedFileTypesString = allowedFileTypes.join(', ');
|
||
throw new RestrictionError(this.i18n('youCanOnlyUploadFileTypes', {
|
||
types: allowedFileTypesString
|
||
}));
|
||
}
|
||
} // We can't check maxTotalFileSize if the size is unknown.
|
||
|
||
|
||
if (maxTotalFileSize && file.size != null) {
|
||
let totalFilesSize = 0;
|
||
totalFilesSize += file.size;
|
||
files.forEach(f => {
|
||
totalFilesSize += f.size;
|
||
});
|
||
|
||
if (totalFilesSize > maxTotalFileSize) {
|
||
throw new RestrictionError(this.i18n('exceedsSize', {
|
||
size: prettierBytes(maxTotalFileSize),
|
||
file: file.name
|
||
}));
|
||
}
|
||
} // We can't check maxFileSize if the size is unknown.
|
||
|
||
|
||
if (maxFileSize && file.size != null) {
|
||
if (file.size > maxFileSize) {
|
||
throw new RestrictionError(this.i18n('exceedsSize', {
|
||
size: prettierBytes(maxFileSize),
|
||
file: file.name
|
||
}));
|
||
}
|
||
} // We can't check minFileSize if the size is unknown.
|
||
|
||
|
||
if (minFileSize && file.size != null) {
|
||
if (file.size < minFileSize) {
|
||
throw new RestrictionError(this.i18n('inferiorSize', {
|
||
size: prettierBytes(minFileSize)
|
||
}));
|
||
}
|
||
}
|
||
}
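// Configuration sketch (comment only): the restrictions consulted above come
// from `this.opts.restrictions`, which suggests they are passed as constructor
// options. The values below are illustrative, not defaults.
//
//   const uppy = new Uppy({
//     restrictions: {
//       maxFileSize: 100 * 1024 * 1024, // 100 MB, in bytes
//       maxNumberOfFiles: 5,
//       allowedFileTypes: ['image/*', '.pdf'], // mime patterns or extensions
//     },
//   });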
|
||
|
||
function _checkMinNumberOfFiles2(files) {
|
||
const {
|
||
minNumberOfFiles
|
||
} = this.opts.restrictions;
|
||
|
||
if (Object.keys(files).length < minNumberOfFiles) {
|
||
throw new RestrictionError(`${this.i18n('youHaveToAtLeastSelectX', {
|
||
smart_count: minNumberOfFiles
|
||
})}`);
|
||
}
|
||
}
|
||
|
||
function _checkRequiredMetaFields2(files) {
|
||
const {
|
||
requiredMetaFields
|
||
} = this.opts.restrictions;
|
||
const {
|
||
hasOwnProperty
|
||
} = Object.prototype;
|
||
const errors = [];
|
||
|
||
for (const fileID of Object.keys(files)) {
|
||
const file = this.getFile(fileID);
|
||
|
||
for (let i = 0; i < requiredMetaFields.length; i++) {
|
||
if (!hasOwnProperty.call(file.meta, requiredMetaFields[i]) || file.meta[requiredMetaFields[i]] === '') {
|
||
const err = new RestrictionError(`${this.i18n('missingRequiredMetaFieldOnFile', {
|
||
fileName: file.name
|
||
})}`);
|
||
errors.push(err);
|
||
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](err, {
|
||
file,
|
||
showInformer: false,
|
||
throwErr: false
|
||
});
|
||
}
|
||
}
|
||
}
|
||
|
||
if (errors.length) {
|
||
throw new AggregateRestrictionError(`${this.i18n('missingRequiredMetaField')}`, errors);
|
||
}
|
||
}
|
||
|
||
function _showOrLogErrorAndThrow2(err, {
|
||
showInformer = true,
|
||
file = null,
|
||
throwErr = true
|
||
} = {}) {
|
||
const message = typeof err === 'object' ? err.message : err;
|
||
const details = typeof err === 'object' && err.details ? err.details : ''; // Restriction errors should be logged, but not as errors,
|
||
// as they are expected and shown in the UI.
|
||
|
||
let logMessageWithDetails = message;
|
||
|
||
if (details) {
|
||
logMessageWithDetails += ` ${details}`;
|
||
}
|
||
|
||
if (err.isRestriction) {
|
||
this.log(logMessageWithDetails);
|
||
this.emit('restriction-failed', file, err);
|
||
} else {
|
||
this.log(logMessageWithDetails, 'error');
|
||
} // Sometimes informer has to be shown manually by the developer,
|
||
// for example, in `onBeforeFileAdded`.
|
||
|
||
|
||
if (showInformer) {
|
||
this.info({
|
||
message,
|
||
details
|
||
}, 'error', this.opts.infoTimeout);
|
||
}
|
||
|
||
if (throwErr) {
|
||
throw typeof err === 'object' ? err : new Error(err);
|
||
}
|
||
}
|
||
|
||
function _assertNewUploadAllowed2(file) {
|
||
const {
|
||
allowNewUpload
|
||
} = this.getState();
|
||
|
||
if (allowNewUpload === false) {
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](new RestrictionError(this.i18n('noMoreFilesAllowed')), {
|
||
file
|
||
});
|
||
}
|
||
}
|
||
|
||
function _checkAndCreateFileStateObject2(files, fileDescriptor) {
|
||
const fileType = getFileType(fileDescriptor);
|
||
const fileName = getFileName(fileType, fileDescriptor);
|
||
const fileExtension = getFileNameAndExtension(fileName).extension;
|
||
const isRemote = Boolean(fileDescriptor.isRemote);
|
||
const fileID = generateFileID({ ...fileDescriptor,
|
||
type: fileType
|
||
});
|
||
|
||
if (this.checkIfFileAlreadyExists(fileID)) {
|
||
const error = new RestrictionError(this.i18n('noDuplicates', {
|
||
fileName
|
||
}));
|
||
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](error, {
|
||
file: fileDescriptor
|
||
});
|
||
}
|
||
|
||
const meta = fileDescriptor.meta || {};
|
||
meta.name = fileName;
|
||
meta.type = fileType; // `null` means the size is unknown.
|
||
|
||
const size = Number.isFinite(fileDescriptor.data.size) ? fileDescriptor.data.size : null;
|
||
let newFile = {
|
||
source: fileDescriptor.source || '',
|
||
id: fileID,
|
||
name: fileName,
|
||
extension: fileExtension || '',
|
||
meta: { ...this.getState().meta,
|
||
...meta
|
||
},
|
||
type: fileType,
|
||
data: fileDescriptor.data,
|
||
progress: {
|
||
percentage: 0,
|
||
bytesUploaded: 0,
|
||
bytesTotal: size,
|
||
uploadComplete: false,
|
||
uploadStarted: null
|
||
},
|
||
size,
|
||
isRemote,
|
||
remote: fileDescriptor.remote || '',
|
||
preview: fileDescriptor.preview
|
||
};
|
||
const onBeforeFileAddedResult = this.opts.onBeforeFileAdded(newFile, files);
|
||
|
||
if (onBeforeFileAddedResult === false) {
|
||
// Don’t show UI info for this error, as it should be done by the developer
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](new RestrictionError('Cannot add the file because onBeforeFileAdded returned false.'), {
|
||
showInformer: false,
|
||
file: fileDescriptor
|
||
});
|
||
} else if (typeof onBeforeFileAddedResult === 'object' && onBeforeFileAddedResult !== null) {
|
||
newFile = onBeforeFileAddedResult;
|
||
}
|
||
|
||
try {
|
||
const filesArray = Object.keys(files).map(i => files[i]);
|
||
|
||
_classPrivateFieldLooseBase(this, _checkRestrictions)[_checkRestrictions](newFile, filesArray);
|
||
} catch (err) {
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](err, {
|
||
file: newFile
|
||
});
|
||
}
|
||
|
||
return newFile;
|
||
}
|
||
|
||
function _startIfAutoProceed2() {
|
||
if (this.opts.autoProceed && !this.scheduledAutoProceed) {
|
||
this.scheduledAutoProceed = setTimeout(() => {
|
||
this.scheduledAutoProceed = null;
|
||
this.upload().catch(err => {
|
||
if (!err.isRestriction) {
|
||
this.log(err.stack || err.message || err);
|
||
}
|
||
});
|
||
}, 4);
|
||
}
|
||
}
|
||
|
||
function _addListeners2() {
|
||
/**
|
||
* @param {Error} error
|
||
* @param {object} [file]
|
||
* @param {object} [response]
|
||
*/
|
||
const errorHandler = (error, file, response) => {
|
||
let errorMsg = error.message || 'Unknown error';
|
||
|
||
if (error.details) {
|
||
errorMsg += ` ${error.details}`;
|
||
}
|
||
|
||
this.setState({
|
||
error: errorMsg
|
||
});
|
||
|
||
if (file != null && file.id in this.getState().files) {
|
||
this.setFileState(file.id, {
|
||
error: errorMsg,
|
||
response
|
||
});
|
||
}
|
||
};
|
||
|
||
this.on('error', errorHandler);
|
||
this.on('upload-error', (file, error, response) => {
|
||
errorHandler(error, file, response);
|
||
|
||
if (typeof error === 'object' && error.message) {
|
||
const newError = new Error(error.message);
|
||
newError.details = error.message;
|
||
|
||
if (error.details) {
|
||
newError.details += ` ${error.details}`;
|
||
}
|
||
|
||
newError.message = this.i18n('failedToUpload', {
|
||
file: file.name
|
||
});
|
||
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](newError, {
|
||
throwErr: false
|
||
});
|
||
} else {
|
||
_classPrivateFieldLooseBase(this, _showOrLogErrorAndThrow)[_showOrLogErrorAndThrow](error, {
|
||
throwErr: false
|
||
});
|
||
}
|
||
});
|
||
this.on('upload', () => {
|
||
this.setState({
|
||
error: null
|
||
});
|
||
});
|
||
this.on('upload-started', file => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
this.setFileState(file.id, {
|
||
progress: {
|
||
uploadStarted: Date.now(),
|
||
uploadComplete: false,
|
||
percentage: 0,
|
||
bytesUploaded: 0,
|
||
bytesTotal: file.size
|
||
}
|
||
});
|
||
});
|
||
this.on('upload-progress', this.calculateProgress);
|
||
this.on('upload-success', (file, uploadResp) => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
const currentProgress = this.getFile(file.id).progress;
|
||
this.setFileState(file.id, {
|
||
progress: { ...currentProgress,
|
||
postprocess: _classPrivateFieldLooseBase(this, _postProcessors)[_postProcessors].size > 0 ? {
|
||
mode: 'indeterminate'
|
||
} : null,
|
||
uploadComplete: true,
|
||
percentage: 100,
|
||
bytesUploaded: currentProgress.bytesTotal
|
||
},
|
||
response: uploadResp,
|
||
uploadURL: uploadResp.uploadURL,
|
||
isPaused: false
|
||
}); // Remote providers sometimes don't tell us the file size,
|
||
// but we can know how many bytes we uploaded once the upload is complete.
|
||
|
||
if (file.size == null) {
|
||
this.setFileState(file.id, {
|
||
size: uploadResp.bytesUploaded || currentProgress.bytesTotal
|
||
});
|
||
}
|
||
|
||
this.calculateTotalProgress();
|
||
});
|
||
this.on('preprocess-progress', (file, progress) => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
this.setFileState(file.id, {
|
||
progress: { ...this.getFile(file.id).progress,
|
||
preprocess: progress
|
||
}
|
||
});
|
||
});
|
||
this.on('preprocess-complete', file => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
const files = { ...this.getState().files
|
||
};
|
||
files[file.id] = { ...files[file.id],
|
||
progress: { ...files[file.id].progress
|
||
}
|
||
};
|
||
delete files[file.id].progress.preprocess;
|
||
this.setState({
|
||
files
|
||
});
|
||
});
|
||
this.on('postprocess-progress', (file, progress) => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
this.setFileState(file.id, {
|
||
progress: { ...this.getState().files[file.id].progress,
|
||
postprocess: progress
|
||
}
|
||
});
|
||
});
|
||
this.on('postprocess-complete', file => {
|
||
if (!this.getFile(file.id)) {
|
||
this.log(`Not setting progress for a file that has been removed: ${file.id}`);
|
||
return;
|
||
}
|
||
|
||
const files = { ...this.getState().files
|
||
};
|
||
files[file.id] = { ...files[file.id],
|
||
progress: { ...files[file.id].progress
|
||
}
|
||
};
|
||
delete files[file.id].progress.postprocess;
|
||
this.setState({
|
||
files
|
||
});
|
||
});
|
||
this.on('restored', () => {
|
||
// Files may have changed--ensure progress is still accurate.
|
||
this.calculateTotalProgress();
|
||
}); // show informer if offline
|
||
|
||
if (typeof window !== 'undefined' && window.addEventListener) {
|
||
window.addEventListener('online', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
|
||
window.addEventListener('offline', _classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus]);
|
||
setTimeout(_classPrivateFieldLooseBase(this, _updateOnlineStatus)[_updateOnlineStatus], 3000);
|
||
}
|
||
}
|
||
|
||
function _createUpload2(fileIDs, opts = {}) {
|
||
// uppy.retryAll sets this to true — when retrying we want to ignore `allowNewUpload: false`
|
||
const {
|
||
forceAllowNewUpload = false
|
||
} = opts;
|
||
const {
|
||
allowNewUpload,
|
||
currentUploads
|
||
} = this.getState();
|
||
|
||
if (!allowNewUpload && !forceAllowNewUpload) {
|
||
throw new Error('Cannot create a new upload: already uploading.');
|
||
}
|
||
|
||
const uploadID = nanoid();
|
||
this.emit('upload', {
|
||
id: uploadID,
|
||
fileIDs
|
||
});
|
||
this.setState({
|
||
allowNewUpload: this.opts.allowMultipleUploadBatches !== false && this.opts.allowMultipleUploads !== false,
|
||
currentUploads: { ...currentUploads,
|
||
[uploadID]: {
|
||
fileIDs,
|
||
step: 0,
|
||
result: {}
|
||
}
|
||
}
|
||
});
|
||
return uploadID;
|
||
}
|
||
|
||
function _getUpload2(uploadID) {
|
||
const {
|
||
currentUploads
|
||
} = this.getState();
|
||
return currentUploads[uploadID];
|
||
}
|
||
|
||
function _removeUpload2(uploadID) {
|
||
const currentUploads = { ...this.getState().currentUploads
|
||
};
|
||
delete currentUploads[uploadID];
|
||
this.setState({
|
||
currentUploads
|
||
});
|
||
}
|
||
|
||
async function _runUpload2(uploadID) {
|
||
let {
|
||
currentUploads
|
||
} = this.getState();
|
||
let currentUpload = currentUploads[uploadID];
|
||
const restoreStep = currentUpload.step || 0;
|
||
const steps = [..._classPrivateFieldLooseBase(this, _preProcessors)[_preProcessors], ..._classPrivateFieldLooseBase(this, _uploaders)[_uploaders], ..._classPrivateFieldLooseBase(this, _postProcessors)[_postProcessors]];
|
||
|
||
try {
|
||
for (let step = restoreStep; step < steps.length; step++) {
|
||
if (!currentUpload) {
|
||
break;
|
||
}
|
||
|
||
const fn = steps[step];
|
||
const updatedUpload = { ...currentUpload,
|
||
step
|
||
};
|
||
this.setState({
|
||
currentUploads: { ...currentUploads,
|
||
[uploadID]: updatedUpload
|
||
}
|
||
}); // TODO give this the `updatedUpload` object as its only parameter maybe?
|
||
// Otherwise when more metadata may be added to the upload this would keep getting more parameters
|
||
|
||
await fn(updatedUpload.fileIDs, uploadID); // Update currentUpload value in case it was modified asynchronously.
|
||
|
||
currentUploads = this.getState().currentUploads;
|
||
currentUpload = currentUploads[uploadID];
|
||
}
|
||
} catch (err) {
|
||
this.emit('error', err);
|
||
|
||
_classPrivateFieldLooseBase(this, _removeUpload)[_removeUpload](uploadID);
|
||
|
||
throw err;
|
||
} // Set result data.
|
||
|
||
|
||
if (currentUpload) {
|
||
// Mark postprocessing step as complete if necessary; this addresses a case where we might get
|
||
// stuck in the postprocessing UI while the upload is fully complete.
|
||
// If the postprocessing steps do not do any work, they may not emit postprocessing events at
|
||
// all, and never mark the postprocessing as complete. This is fine on its own but we
|
||
// introduced code in the @uppy/core upload-success handler to prepare postprocessing progress
|
||
// state if any postprocessors are registered. That is to avoid a "flash of completed state"
|
||
// before the postprocessing plugins can emit events.
|
||
//
|
||
// So, just in case an upload with postprocessing plugins *has* completed *without* emitting
|
||
// postprocessing completion, we do it instead.
|
||
currentUpload.fileIDs.forEach(fileID => {
|
||
const file = this.getFile(fileID);
|
||
|
||
if (file && file.progress.postprocess) {
|
||
this.emit('postprocess-complete', file);
|
||
}
|
||
});
|
||
const files = currentUpload.fileIDs.map(fileID => this.getFile(fileID));
|
||
const successful = files.filter(file => !file.error);
|
||
const failed = files.filter(file => file.error);
|
||
await this.addResultData(uploadID, {
|
||
successful,
|
||
failed,
|
||
uploadID
|
||
}); // Update currentUpload value in case it was modified asynchronously.
|
||
|
||
currentUploads = this.getState().currentUploads;
|
||
currentUpload = currentUploads[uploadID];
|
||
} // Emit completion events.
|
||
// This is in a separate function so that the `currentUploads` variable
|
||
// always refers to the latest state. In the handler right above it refers
|
||
// to an outdated object without the `.result` property.
|
||
|
||
|
||
let result;
|
||
|
||
if (currentUpload) {
|
||
result = currentUpload.result;
|
||
this.emit('complete', result);
|
||
|
||
_classPrivateFieldLooseBase(this, _removeUpload)[_removeUpload](uploadID);
|
||
}
|
||
|
||
if (result == null) {
|
||
this.log(`Not setting result for an upload that has been removed: ${uploadID}`);
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
Uppy.VERSION = "2.1.1";
|
||
module.exports = Uppy;
|
||
},{"./getFileName":17,"./loggers":19,"./supportsUploadProgress":20,"@transloadit/prettier-bytes":1,"@uppy/store-default":22,"@uppy/utils/lib/Translator":28,"@uppy/utils/lib/generateFileID":33,"@uppy/utils/lib/getFileNameAndExtension":39,"@uppy/utils/lib/getFileType":40,"lodash.throttle":50,"mime-match":51,"namespace-emitter":52,"nanoid":53}],17:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
module.exports = function getFileName(fileType, fileDescriptor) {
|
||
if (fileDescriptor.name) {
|
||
return fileDescriptor.name;
|
||
}
|
||
|
||
if (fileType.split('/')[0] === 'image') {
|
||
return `${fileType.split('/')[0]}.${fileType.split('/')[1]}`;
|
||
}
|
||
|
||
return 'noname';
|
||
};
|
||
},{}],18:[function(require,module,exports){
|
||
'use strict';
|
||
|
||
const Uppy = require('./Uppy');
|
||
|
||
const UIPlugin = require('./UIPlugin');
|
||
|
||
const BasePlugin = require('./BasePlugin');
|
||
|
||
const {
|
||
debugLogger
|
||
} = require('./loggers');
|
||
|
||
module.exports = Uppy;
|
||
module.exports.Uppy = Uppy;
|
||
module.exports.UIPlugin = UIPlugin;
|
||
module.exports.BasePlugin = BasePlugin;
|
||
module.exports.debugLogger = debugLogger;
|
||
},{"./BasePlugin":14,"./UIPlugin":15,"./Uppy":16,"./loggers":19}],19:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
/* eslint-disable no-console */
|
||
const getTimeStamp = require('@uppy/utils/lib/getTimeStamp'); // Swallow all logs except errors.
// This is the default when `logger` is not set and `debug` is false.
|
||
|
||
|
||
const justErrorsLogger = {
|
||
debug: () => {},
|
||
warn: () => {},
|
||
error: (...args) => console.error(`[Uppy] [${getTimeStamp()}]`, ...args)
|
||
}; // Print logs to console with namespace + timestamp,
|
||
// set by logger: Uppy.debugLogger or debug: true
|
||
|
||
const debugLogger = {
|
||
debug: (...args) => console.debug(`[Uppy] [${getTimeStamp()}]`, ...args),
|
||
warn: (...args) => console.warn(`[Uppy] [${getTimeStamp()}]`, ...args),
|
||
error: (...args) => console.error(`[Uppy] [${getTimeStamp()}]`, ...args)
|
||
};
|
||
module.exports = {
|
||
justErrorsLogger,
|
||
debugLogger
|
||
};
|
||
},{"@uppy/utils/lib/getTimeStamp":42}],20:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
// Edge 15.x does not fire 'progress' events on uploads.
|
||
// See https://github.com/transloadit/uppy/issues/945
|
||
// And https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/12224510/
|
||
module.exports = function supportsUploadProgress(userAgent) {
|
||
// Allow passing in userAgent for tests
|
||
if (userAgent == null) {
|
||
userAgent = typeof navigator !== 'undefined' ? navigator.userAgent : null;
|
||
} // Assume it works because basically everything supports progress events.
|
||
|
||
|
||
if (!userAgent) return true;
|
||
const m = /Edge\/(\d+\.\d+)/.exec(userAgent);
|
||
if (!m) return true;
|
||
const edgeVersion = m[1];
|
||
let [major, minor] = edgeVersion.split('.');
|
||
major = parseInt(major, 10);
|
||
minor = parseInt(minor, 10); // Worked before:
|
||
// Edge 40.15063.0.0
|
||
// Microsoft EdgeHTML 15.15063
|
||
|
||
if (major < 15 || major === 15 && minor < 15063) {
|
||
return true;
|
||
} // Fixed in:
|
||
// Microsoft EdgeHTML 18.18218
|
||
|
||
|
||
if (major > 18 || major === 18 && minor >= 18218) {
|
||
return true;
|
||
} // other versions don't work.
|
||
|
||
|
||
return false;
|
||
};
|
||
},{}],21:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _class, _temp;
|
||
|
||
const BasePlugin = require('@uppy/core/lib/BasePlugin');
|
||
|
||
const getDroppedFiles = require('@uppy/utils/lib/getDroppedFiles');
|
||
|
||
const toArray = require('@uppy/utils/lib/toArray');
|
||
/**
|
||
* Drop Target plugin
|
||
*
|
||
*/
|
||
|
||
|
||
module.exports = (_temp = _class = class DropTarget extends BasePlugin {
|
||
constructor(uppy, opts) {
|
||
super(uppy, opts);
|
||
|
||
this.addFiles = files => {
|
||
const descriptors = files.map(file => ({
|
||
source: this.id,
|
||
name: file.name,
|
||
type: file.type,
|
||
data: file,
|
||
meta: {
|
||
// path of the file relative to the ancestor directory the user selected.
|
||
// e.g. 'docs/Old Prague/airbnb.pdf'
|
||
relativePath: file.relativePath || null
|
||
}
|
||
}));
|
||
|
||
try {
|
||
this.uppy.addFiles(descriptors);
|
||
} catch (err) {
|
||
this.uppy.log(err);
|
||
}
|
||
};
|
||
|
||
this.handleDrop = async event => {
|
||
var _this$opts$onDrop, _this$opts;
|
||
|
||
event.preventDefault();
|
||
event.stopPropagation();
|
||
clearTimeout(this.removeDragOverClassTimeout); // 2. Remove dragover class
|
||
|
||
event.currentTarget.classList.remove('uppy-is-drag-over');
|
||
this.setPluginState({
|
||
isDraggingOver: false
|
||
}); // 3. Add all dropped files
|
||
|
||
this.uppy.log('[DropTarget] Files were dropped');
|
||
|
||
const logDropError = error => {
|
||
this.uppy.log(error, 'error');
|
||
};
|
||
|
||
const files = await getDroppedFiles(event.dataTransfer, {
|
||
logDropError
|
||
});
|
||
this.addFiles(files);
|
||
(_this$opts$onDrop = (_this$opts = this.opts).onDrop) == null ? void 0 : _this$opts$onDrop.call(_this$opts, event);
|
||
};
|
||
|
||
this.handleDragOver = event => {
|
||
var _this$opts$onDragOver, _this$opts2;
|
||
|
||
event.preventDefault();
|
||
event.stopPropagation(); // 1. Add a small (+) icon on drop
|
||
// (and prevent browsers from interpreting this as files being _moved_ into the browser,
|
||
// https://github.com/transloadit/uppy/issues/1978)
|
||
|
||
event.dataTransfer.dropEffect = 'copy';
|
||
clearTimeout(this.removeDragOverClassTimeout);
|
||
event.currentTarget.classList.add('uppy-is-drag-over');
|
||
this.setPluginState({
|
||
isDraggingOver: true
|
||
});
|
||
(_this$opts$onDragOver = (_this$opts2 = this.opts).onDragOver) == null ? void 0 : _this$opts$onDragOver.call(_this$opts2, event);
|
||
};
|
||
|
||
this.handleDragLeave = event => {
|
||
var _this$opts$onDragLeav, _this$opts3;
|
||
|
||
event.preventDefault();
|
||
event.stopPropagation();
|
||
const {
|
||
currentTarget
|
||
} = event;
|
||
clearTimeout(this.removeDragOverClassTimeout); // Timeout against flickering, this solution is taken from drag-drop library.
|
||
// Solution with 'pointer-events: none' didn't work across browsers.
|
||
|
||
this.removeDragOverClassTimeout = setTimeout(() => {
|
||
currentTarget.classList.remove('uppy-is-drag-over');
|
||
this.setPluginState({
|
||
isDraggingOver: false
|
||
});
|
||
}, 50);
|
||
(_this$opts$onDragLeav = (_this$opts3 = this.opts).onDragLeave) == null ? void 0 : _this$opts$onDragLeav.call(_this$opts3, event);
|
||
};
|
||
|
||
this.addListeners = () => {
|
||
const {
|
||
target
|
||
} = this.opts;
|
||
|
||
if (target instanceof Element) {
|
||
this.nodes = [target];
|
||
} else if (typeof target === 'string') {
|
||
this.nodes = toArray(document.querySelectorAll(target));
|
||
}
|
||
|
||
      if (!this.nodes || this.nodes.length === 0) {
|
||
throw new Error(`"${target}" does not match any HTML elements`);
|
||
}
|
||
|
||
this.nodes.forEach(node => {
|
||
node.addEventListener('dragover', this.handleDragOver, false);
|
||
node.addEventListener('dragleave', this.handleDragLeave, false);
|
||
node.addEventListener('drop', this.handleDrop, false);
|
||
});
|
||
};
|
||
|
||
this.removeListeners = () => {
|
||
if (this.nodes) {
|
||
this.nodes.forEach(node => {
|
||
node.removeEventListener('dragover', this.handleDragOver, false);
|
||
node.removeEventListener('dragleave', this.handleDragLeave, false);
|
||
node.removeEventListener('drop', this.handleDrop, false);
|
||
});
|
||
}
|
||
};
|
||
|
||
this.type = 'acquirer';
|
||
this.id = this.opts.id || 'DropTarget';
|
||
this.title = 'Drop Target'; // Default options
|
||
|
||
const defaultOpts = {
|
||
target: null
|
||
}; // Merge default options with the ones set by user
|
||
|
||
this.opts = { ...defaultOpts,
|
||
...opts
|
||
};
|
||
this.removeDragOverClassTimeout = null;
|
||
}
|
||
|
||
install() {
|
||
this.setPluginState({
|
||
isDraggingOver: false
|
||
});
|
||
this.addListeners();
|
||
}
|
||
|
||
uninstall() {
|
||
this.removeListeners();
|
||
}
|
||
|
||
}, _class.VERSION = "1.1.1", _temp);
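// Usage sketch (comment only): letting users drop files anywhere on the page.
// Per addListeners() above, `target` may be an Element or a CSS selector
// string; the optional onDrop/onDragOver/onDragLeave callbacks receive the
// original DOM event.
//
//   uppy.use(DropTarget, {
//     target: 'body',
//     onDrop: (event) => console.log('files dropped', event),
//   });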
|
||
},{"@uppy/core/lib/BasePlugin":14,"@uppy/utils/lib/getDroppedFiles":34,"@uppy/utils/lib/toArray":48}],22:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
var _publish = /*#__PURE__*/_classPrivateFieldLooseKey("publish");
|
||
|
||
/**
|
||
* Default store that keeps state in a simple object.
|
||
*/
|
||
class DefaultStore {
|
||
constructor() {
|
||
Object.defineProperty(this, _publish, {
|
||
value: _publish2
|
||
});
|
||
this.state = {};
|
||
this.callbacks = [];
|
||
}
|
||
|
||
getState() {
|
||
return this.state;
|
||
}
|
||
|
||
setState(patch) {
|
||
const prevState = { ...this.state
|
||
};
|
||
const nextState = { ...this.state,
|
||
...patch
|
||
};
|
||
this.state = nextState;
|
||
|
||
_classPrivateFieldLooseBase(this, _publish)[_publish](prevState, nextState, patch);
|
||
}
|
||
|
||
subscribe(listener) {
|
||
this.callbacks.push(listener);
|
||
return () => {
|
||
// Remove the listener.
|
||
this.callbacks.splice(this.callbacks.indexOf(listener), 1);
|
||
};
|
||
}
|
||
|
||
}
|
||
|
||
function _publish2(...args) {
|
||
this.callbacks.forEach(listener => {
|
||
listener(...args);
|
||
});
|
||
}
|
||
|
||
DefaultStore.VERSION = "2.0.2";
|
||
|
||
module.exports = function defaultStore() {
|
||
return new DefaultStore();
|
||
};
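// Usage sketch (comment only, assuming the exported factory is in scope as
// `defaultStore`): the default store is a plain object container with
// getState/setState/subscribe, which is what Uppy core drives above.
//
//   const store = defaultStore();
//   const unsubscribe = store.subscribe((prev, next, patch) => console.log(patch));
//   store.setState({ ready: true });
//   unsubscribe();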
|
||
},{}],23:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
/**
|
||
* Little AbortController proxy module so we can swap out the implementation easily later.
|
||
*/
|
||
exports.AbortController = globalThis.AbortController;
|
||
exports.AbortSignal = globalThis.AbortSignal;
|
||
|
||
exports.createAbortError = (message = 'Aborted') => new DOMException(message, 'AbortError');
|
||
},{}],24:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _emitter, _events;
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
/**
|
||
* Create a wrapper around an event emitter with a `remove` method to remove
|
||
* all events that were added using the wrapped emitter.
|
||
*/
|
||
module.exports = (_emitter = /*#__PURE__*/_classPrivateFieldLooseKey("emitter"), _events = /*#__PURE__*/_classPrivateFieldLooseKey("events"), class EventTracker {
|
||
constructor(emitter) {
|
||
Object.defineProperty(this, _emitter, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _events, {
|
||
writable: true,
|
||
value: []
|
||
});
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter] = emitter;
|
||
}
|
||
|
||
on(event, fn) {
|
||
_classPrivateFieldLooseBase(this, _events)[_events].push([event, fn]);
|
||
|
||
return _classPrivateFieldLooseBase(this, _emitter)[_emitter].on(event, fn);
|
||
}
|
||
|
||
remove() {
|
||
for (const [event, fn] of _classPrivateFieldLooseBase(this, _events)[_events].splice(0)) {
|
||
_classPrivateFieldLooseBase(this, _emitter)[_emitter].off(event, fn);
|
||
}
|
||
}
|
||
|
||
});
|
||
},{}],25:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
class NetworkError extends Error {
|
||
constructor(error, xhr = null) {
|
||
super(`This looks like a network error, the endpoint might be blocked by an internet provider or a firewall.`);
|
||
this.cause = error;
|
||
this.isNetworkError = true;
|
||
this.request = xhr;
|
||
}
|
||
|
||
}
|
||
|
||
module.exports = NetworkError;
|
||
},{}],26:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
var _aliveTimer = /*#__PURE__*/_classPrivateFieldLooseKey("aliveTimer");
|
||
|
||
var _isDone = /*#__PURE__*/_classPrivateFieldLooseKey("isDone");
|
||
|
||
var _onTimedOut = /*#__PURE__*/_classPrivateFieldLooseKey("onTimedOut");
|
||
|
||
var _timeout = /*#__PURE__*/_classPrivateFieldLooseKey("timeout");
|
||
|
||
/**
|
||
* Helper to abort upload requests if there has not been any progress for `timeout` ms.
|
||
* Create an instance using `timer = new ProgressTimeout(10000, onTimeout)`
|
||
* Call `timer.progress()` to signal that there has been progress of any kind.
|
||
* Call `timer.done()` when the upload has completed.
|
||
*/
|
||
class ProgressTimeout {
|
||
constructor(timeout, timeoutHandler) {
|
||
Object.defineProperty(this, _aliveTimer, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _isDone, {
|
||
writable: true,
|
||
value: false
|
||
});
|
||
Object.defineProperty(this, _onTimedOut, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
Object.defineProperty(this, _timeout, {
|
||
writable: true,
|
||
value: void 0
|
||
});
|
||
_classPrivateFieldLooseBase(this, _timeout)[_timeout] = timeout;
|
||
_classPrivateFieldLooseBase(this, _onTimedOut)[_onTimedOut] = timeoutHandler;
|
||
}
|
||
|
||
progress() {
|
||
// Some browsers fire another progress event when the upload is
|
||
// cancelled, so we have to ignore progress after the timer was
|
||
// told to stop.
|
||
if (_classPrivateFieldLooseBase(this, _isDone)[_isDone]) return;
|
||
|
||
if (_classPrivateFieldLooseBase(this, _timeout)[_timeout] > 0) {
|
||
clearTimeout(_classPrivateFieldLooseBase(this, _aliveTimer)[_aliveTimer]);
|
||
_classPrivateFieldLooseBase(this, _aliveTimer)[_aliveTimer] = setTimeout(_classPrivateFieldLooseBase(this, _onTimedOut)[_onTimedOut], _classPrivateFieldLooseBase(this, _timeout)[_timeout]);
|
||
}
|
||
}
|
||
|
||
done() {
|
||
if (!_classPrivateFieldLooseBase(this, _isDone)[_isDone]) {
|
||
clearTimeout(_classPrivateFieldLooseBase(this, _aliveTimer)[_aliveTimer]);
|
||
_classPrivateFieldLooseBase(this, _aliveTimer)[_aliveTimer] = null;
|
||
_classPrivateFieldLooseBase(this, _isDone)[_isDone] = true;
|
||
}
|
||
}
|
||
|
||
}
|
||
|
||
module.exports = ProgressTimeout;
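// Usage sketch (comment only), following the JSDoc above: abort an upload if no
// progress arrives for 30 seconds. `xhr` is an assumed XMLHttpRequest instance.
//
//   const timer = new ProgressTimeout(30000, () => xhr.abort());
//   xhr.upload.addEventListener('progress', () => timer.progress());
//   xhr.addEventListener('loadend', () => timer.done());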
|
||
},{}],27:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }
|
||
|
||
var id = 0;
|
||
|
||
function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }
|
||
|
||
function createCancelError() {
|
||
return new Error('Cancelled');
|
||
}
|
||
|
||
var _activeRequests = /*#__PURE__*/_classPrivateFieldLooseKey("activeRequests");
|
||
|
||
var _queuedHandlers = /*#__PURE__*/_classPrivateFieldLooseKey("queuedHandlers");
|
||
|
||
var _call = /*#__PURE__*/_classPrivateFieldLooseKey("call");
|
||
|
||
var _queueNext = /*#__PURE__*/_classPrivateFieldLooseKey("queueNext");
|
||
|
||
var _next = /*#__PURE__*/_classPrivateFieldLooseKey("next");
|
||
|
||
var _queue = /*#__PURE__*/_classPrivateFieldLooseKey("queue");
|
||
|
||
var _dequeue = /*#__PURE__*/_classPrivateFieldLooseKey("dequeue");
|
||
|
||
class RateLimitedQueue {
|
||
constructor(limit) {
|
||
Object.defineProperty(this, _dequeue, {
|
||
value: _dequeue2
|
||
});
|
||
Object.defineProperty(this, _queue, {
|
||
value: _queue2
|
||
});
|
||
Object.defineProperty(this, _next, {
|
||
value: _next2
|
||
});
|
||
Object.defineProperty(this, _queueNext, {
|
||
value: _queueNext2
|
||
});
|
||
Object.defineProperty(this, _call, {
|
||
value: _call2
|
||
});
|
||
Object.defineProperty(this, _activeRequests, {
|
||
writable: true,
|
||
value: 0
|
||
});
|
||
Object.defineProperty(this, _queuedHandlers, {
|
||
writable: true,
|
||
value: []
|
||
});
|
||
|
||
if (typeof limit !== 'number' || limit === 0) {
|
||
this.limit = Infinity;
|
||
} else {
|
||
this.limit = limit;
|
||
}
|
||
}
|
||
|
||
run(fn, queueOptions) {
|
||
if (_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] < this.limit) {
|
||
return _classPrivateFieldLooseBase(this, _call)[_call](fn);
|
||
}
|
||
|
||
return _classPrivateFieldLooseBase(this, _queue)[_queue](fn, queueOptions);
|
||
}
|
||
|
||
wrapPromiseFunction(fn, queueOptions) {
|
||
return (...args) => {
|
||
let queuedRequest;
|
||
const outerPromise = new Promise((resolve, reject) => {
|
||
queuedRequest = this.run(() => {
|
||
let cancelError;
|
||
let innerPromise;
|
||
|
||
try {
|
||
innerPromise = Promise.resolve(fn(...args));
|
||
} catch (err) {
|
||
innerPromise = Promise.reject(err);
|
||
}
|
||
|
||
innerPromise.then(result => {
|
||
if (cancelError) {
|
||
reject(cancelError);
|
||
} else {
|
||
queuedRequest.done();
|
||
resolve(result);
|
||
}
|
||
}, err => {
|
||
if (cancelError) {
|
||
reject(cancelError);
|
||
} else {
|
||
queuedRequest.done();
|
||
reject(err);
|
||
}
|
||
});
|
||
return () => {
|
||
cancelError = createCancelError();
|
||
};
|
||
}, queueOptions);
|
||
});
|
||
|
||
outerPromise.abort = () => {
|
||
queuedRequest.abort();
|
||
};
|
||
|
||
return outerPromise;
|
||
};
|
||
}
|
||
|
||
}
|
||
|
||
function _call2(fn) {
|
||
_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] += 1;
|
||
let done = false;
|
||
let cancelActive;
|
||
|
||
try {
|
||
cancelActive = fn();
|
||
} catch (err) {
|
||
_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
|
||
throw err;
|
||
}
|
||
|
||
return {
|
||
abort: () => {
|
||
if (done) return;
|
||
done = true;
|
||
_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
|
||
cancelActive();
|
||
|
||
_classPrivateFieldLooseBase(this, _queueNext)[_queueNext]();
|
||
},
|
||
done: () => {
|
||
if (done) return;
|
||
done = true;
|
||
_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] -= 1;
|
||
|
||
_classPrivateFieldLooseBase(this, _queueNext)[_queueNext]();
|
||
}
|
||
};
|
||
}
|
||
|
||
function _queueNext2() {
|
||
// Do it soon but not immediately, this allows clearing out the entire queue synchronously
|
||
// one by one without continuously _advancing_ it (and starting new tasks before immediately
|
||
// aborting them)
|
||
queueMicrotask(() => _classPrivateFieldLooseBase(this, _next)[_next]());
|
||
}
|
||
|
||
function _next2() {
|
||
if (_classPrivateFieldLooseBase(this, _activeRequests)[_activeRequests] >= this.limit) {
|
||
return;
|
||
}
|
||
|
||
if (_classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].length === 0) {
|
||
return;
|
||
} // Dispatch the next request, and update the abort/done handlers
|
||
// so that cancelling it does the Right Thing (and doesn't just try
|
||
// to dequeue an already-running request).
|
||
|
||
|
||
const next = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].shift();
|
||
|
||
const handler = _classPrivateFieldLooseBase(this, _call)[_call](next.fn);
|
||
|
||
next.abort = handler.abort;
|
||
next.done = handler.done;
|
||
}
|
||
|
||
function _queue2(fn, options = {}) {
|
||
const handler = {
|
||
fn,
|
||
priority: options.priority || 0,
|
||
abort: () => {
|
||
_classPrivateFieldLooseBase(this, _dequeue)[_dequeue](handler);
|
||
},
|
||
done: () => {
|
||
throw new Error('Cannot mark a queued request as done: this indicates a bug');
|
||
}
|
||
};
|
||
|
||
const index = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].findIndex(other => {
|
||
return handler.priority > other.priority;
|
||
});
|
||
|
||
if (index === -1) {
|
||
_classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].push(handler);
|
||
} else {
|
||
_classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].splice(index, 0, handler);
|
||
}
|
||
|
||
return handler;
|
||
}
|
||
|
||
function _dequeue2(handler) {
|
||
const index = _classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].indexOf(handler);
|
||
|
||
if (index !== -1) {
|
||
_classPrivateFieldLooseBase(this, _queuedHandlers)[_queuedHandlers].splice(index, 1);
|
||
}
|
||
}
|
||
|
||
module.exports = {
|
||
RateLimitedQueue,
|
||
internalRateLimitedQueue: Symbol('__queue')
|
||
};
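// Usage sketch (comment only): limiting concurrency to 3 requests at a time.
// wrapPromiseFunction() returns a wrapper whose promise gains an abort()
// method; the URL below is purely illustrative.
//
//   const queue = new RateLimitedQueue(3);
//   const limitedFetch = queue.wrapPromiseFunction((url) => fetch(url));
//   const request = limitedFetch('/example-endpoint');
//   // request.abort(); // cancels the request or removes it from the queue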
|
||
},{}],28:[function(require,module,exports){
"use strict";

var _apply;

function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }

var id = 0;

function _classPrivateFieldLooseKey(name) { return "__private_" + id++ + "_" + name; }

const has = require('./hasProperty');

function insertReplacement(source, rx, replacement) {
  const newParts = [];
  source.forEach(chunk => {
    // When the source contains multiple placeholders for interpolation,
    // we should ignore chunks that are not strings, because those
    // can be JSX objects and will be otherwise incorrectly turned into strings.
    // Without this condition we’d get this: [object Object] hello [object Object] my <button>
    if (typeof chunk !== 'string') {
      return newParts.push(chunk);
    }

    return rx[Symbol.split](chunk).forEach((raw, i, list) => {
      if (raw !== '') {
        newParts.push(raw);
      } // Interlace with the `replacement` value


      if (i < list.length - 1) {
        newParts.push(replacement);
      }
    });
  });
  return newParts;
}
/**
 * Takes a string with placeholder variables like `%{smart_count} file selected`
 * and replaces it with values from options `{smart_count: 5}`
 *
 * @license https://github.com/airbnb/polyglot.js/blob/master/LICENSE
 * taken from https://github.com/airbnb/polyglot.js/blob/master/lib/polyglot.js#L299
 *
 * @param {string} phrase that needs interpolation, with placeholders
 * @param {object} options with values that will be used to replace placeholders
 * @returns {any[]} interpolated
 */


function interpolate(phrase, options) {
  const dollarRegex = /\$/g;
  const dollarBillsYall = '$$$$';
  let interpolated = [phrase];
  if (options == null) return interpolated;

  for (const arg of Object.keys(options)) {
    if (arg !== '_') {
      // Ensure replacement value is escaped to prevent special $-prefixed
      // regex replace tokens. the "$$$$" is needed because each "$" needs to
      // be escaped with "$" itself, and we need two in the resulting output.
      let replacement = options[arg];

      if (typeof replacement === 'string') {
        replacement = dollarRegex[Symbol.replace](replacement, dollarBillsYall);
      } // We create a new `RegExp` each time instead of using a more-efficient
      // string replace so that the same argument can be replaced multiple times
      // in the same phrase.


      interpolated = insertReplacement(interpolated, new RegExp(`%\\{${arg}\\}`, 'g'), replacement);
    }
  }

  return interpolated;
}
/**
 * Translates strings with interpolation & pluralization support.
 * Extensible with custom dictionaries and pluralization functions.
 *
 * Borrows heavily from and inspired by Polyglot https://github.com/airbnb/polyglot.js,
 * basically a stripped-down version of it. Differences: pluralization functions are not hardcoded
 * and can be easily added among with dictionaries, nested objects are used for pluralization
 * as opposed to `||||` delimeter
 *
 * Usage example: `translator.translate('files_chosen', {smart_count: 3})`
 */


module.exports = (_apply = /*#__PURE__*/_classPrivateFieldLooseKey("apply"), class Translator {
  /**
   * @param {object|Array<object>} locales - locale or list of locales.
   */
  constructor(locales) {
    Object.defineProperty(this, _apply, {
      value: _apply2
    });
    this.locale = {
      strings: {},

      pluralize(n) {
        if (n === 1) {
          return 0;
        }

        return 1;
      }

    };

    if (Array.isArray(locales)) {
      locales.forEach(_classPrivateFieldLooseBase(this, _apply)[_apply], this);
    } else {
      _classPrivateFieldLooseBase(this, _apply)[_apply](locales);
    }
  }

  /**
   * Public translate method
   *
   * @param {string} key
   * @param {object} options with values that will be used later to replace placeholders in string
   * @returns {string} translated (and interpolated)
   */
  translate(key, options) {
    return this.translateArray(key, options).join('');
  }
  /**
   * Get a translation and return the translated and interpolated parts as an array.
   *
   * @param {string} key
   * @param {object} options with values that will be used to replace placeholders
   * @returns {Array} The translated and interpolated parts, in order.
   */


  translateArray(key, options) {
    if (!has(this.locale.strings, key)) {
      throw new Error(`missing string: ${key}`);
    }

    const string = this.locale.strings[key];
    const hasPluralForms = typeof string === 'object';

    if (hasPluralForms) {
      if (options && typeof options.smart_count !== 'undefined') {
        const plural = this.locale.pluralize(options.smart_count);
        return interpolate(string[plural], options);
      }

      throw new Error('Attempted to use a string with plural forms, but no value was given for %{smart_count}');
    }

    return interpolate(string, options);
  }

});

function _apply2(locale) {
  if (!(locale != null && locale.strings)) {
    return;
  }

  const prevLocale = this.locale;
  this.locale = { ...prevLocale,
    strings: { ...prevLocale.strings,
      ...locale.strings
    }
  };
  this.locale.pluralize = locale.pluralize || prevLocale.pluralize;
}
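// Illustrative note (added by the editor, not part of the upstream Uppy source): with a locale such as
// strings: { files_chosen: { 0: '%{smart_count} file chosen', 1: '%{smart_count} files chosen' } },
// translate('files_chosen', { smart_count: 3 }) picks plural form 1 via pluralize(3) and interpolates
// to '3 files chosen'; translateArray() returns the raw parts, e.g. [3, ' files chosen'].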
},{"./hasProperty":43}],29:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
const {
|
||
createAbortError
|
||
} = require('./AbortController');
|
||
/**
|
||
* Return a Promise that resolves after `ms` milliseconds.
|
||
*
|
||
* @param {number} ms - Number of milliseconds to wait.
|
||
* @param {{ signal?: AbortSignal }} [opts] - An abort signal that can be used to cancel the delay early.
|
||
* @returns {Promise<void>} A Promise that resolves after the given amount of `ms`.
|
||
*/
|
||
|
||
|
||
module.exports = function delay(ms, opts) {
|
||
return new Promise((resolve, reject) => {
|
||
var _opts$signal, _opts$signal2;
|
||
|
||
if (opts != null && (_opts$signal = opts.signal) != null && _opts$signal.aborted) {
|
||
return reject(createAbortError());
|
||
}
|
||
|
||
const timeout = setTimeout(() => {
|
||
cleanup(); // eslint-disable-line no-use-before-define
|
||
|
||
resolve();
|
||
}, ms);
|
||
|
||
function onabort() {
|
||
clearTimeout(timeout);
|
||
cleanup(); // eslint-disable-line no-use-before-define
|
||
|
||
reject(createAbortError());
|
||
}
|
||
|
||
opts == null ? void 0 : (_opts$signal2 = opts.signal) == null ? void 0 : _opts$signal2.addEventListener('abort', onabort);
|
||
|
||
function cleanup() {
|
||
var _opts$signal3;
|
||
|
||
opts == null ? void 0 : (_opts$signal3 = opts.signal) == null ? void 0 : _opts$signal3.removeEventListener('abort', onabort);
|
||
}
|
||
|
||
return undefined;
|
||
});
|
||
};
|
||
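// Illustrative note (added by the editor, not part of the upstream Uppy source): delay(500) resolves
// after roughly 500 ms; when an already-aborted signal is passed, e.g. delay(500, { signal }), the
// promise rejects immediately with the AbortError produced by createAbortError().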
},{"./AbortController":23}],30:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
const throttle = require('lodash.throttle');
|
||
|
||
function emitSocketProgress(uploader, progressData, file) {
|
||
const {
|
||
progress,
|
||
bytesUploaded,
|
||
bytesTotal
|
||
} = progressData;
|
||
|
||
if (progress) {
|
||
uploader.uppy.log(`Upload progress: ${progress}`);
|
||
uploader.uppy.emit('upload-progress', file, {
|
||
uploader,
|
||
bytesUploaded,
|
||
bytesTotal
|
||
});
|
||
}
|
||
}
|
||
|
||
module.exports = throttle(emitSocketProgress, 300, {
|
||
leading: true,
|
||
trailing: true
|
||
});
|
||
},{"lodash.throttle":50}],31:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
const NetworkError = require('./NetworkError');
|
||
/**
|
||
* Wrapper around window.fetch that throws a NetworkError when appropriate
|
||
*/
|
||
|
||
|
||
module.exports = function fetchWithNetworkError(...options) {
|
||
return fetch(...options).catch(err => {
|
||
if (err.name === 'AbortError') {
|
||
throw err;
|
||
} else {
|
||
throw new NetworkError(err);
|
||
}
|
||
});
|
||
};
|
||
},{"./NetworkError":25}],32:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
const isDOMElement = require('./isDOMElement');
|
||
/**
|
||
* Find a DOM element.
|
||
*
|
||
* @param {Node|string} element
|
||
* @returns {Node|null}
|
||
*/
|
||
|
||
|
||
module.exports = function findDOMElement(element, context = document) {
|
||
if (typeof element === 'string') {
|
||
return context.querySelector(element);
|
||
}
|
||
|
||
if (isDOMElement(element)) {
|
||
return element;
|
||
}
|
||
|
||
return null;
|
||
};
|
||
},{"./isDOMElement":44}],33:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
function encodeCharacter(character) {
|
||
return character.charCodeAt(0).toString(32);
|
||
}
|
||
|
||
function encodeFilename(name) {
|
||
let suffix = '';
|
||
return name.replace(/[^A-Z0-9]/ig, character => {
|
||
suffix += `-${encodeCharacter(character)}`;
|
||
return '/';
|
||
}) + suffix;
|
||
}
|
||
/**
|
||
* Takes a file object and turns it into fileID, by converting file.name to lowercase,
|
||
* removing extra characters and adding type, size and lastModified
|
||
*
|
||
* @param {object} file
|
||
* @returns {string} the fileID
|
||
*/
|
||
|
||
|
||
module.exports = function generateFileID(file) {
|
||
// It's tempting to do `[items].filter(Boolean).join('-')` here, but that
|
||
// is slower! simple string concatenation is fast
|
||
let id = 'uppy';
|
||
|
||
if (typeof file.name === 'string') {
|
||
id += `-${encodeFilename(file.name.toLowerCase())}`;
|
||
}
|
||
|
||
if (file.type !== undefined) {
|
||
id += `-${file.type}`;
|
||
}
|
||
|
||
if (file.meta && typeof file.meta.relativePath === 'string') {
|
||
id += `-${encodeFilename(file.meta.relativePath.toLowerCase())}`;
|
||
}
|
||
|
||
if (file.data.size !== undefined) {
|
||
id += `-${file.data.size}`;
|
||
}
|
||
|
||
if (file.data.lastModified !== undefined) {
|
||
id += `-${file.data.lastModified}`;
|
||
}
|
||
|
||
return id;
|
||
};
|
||
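// Illustrative note (added by the editor, not part of the upstream Uppy source): the resulting ID
// concatenates 'uppy', the encoded lowercased name, the MIME type, the encoded relativePath (when
// present), the size and lastModified, e.g. roughly 'uppy-photo/jpg-1e-image/jpeg-1024-1634000000000'.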
},{}],34:[function(require,module,exports){
"use strict";

const webkitGetAsEntryApi = require('./utils/webkitGetAsEntryApi/index');

const fallbackApi = require('./utils/fallbackApi');
/**
 * Returns a promise that resolves to the array of dropped files (if a folder is
 * dropped, and browser supports folder parsing - promise resolves to the flat
 * array of all files in all directories).
 * Each file has .relativePath prop appended to it (e.g. "/docs/Prague/ticket_from_prague_to_ufa.pdf")
 * if browser supports it. Otherwise it's undefined.
 *
 * @param {DataTransfer} dataTransfer
 * @param {Function} logDropError - a function that's called every time some
 * folder or some file error out (e.g. because of the folder name being too long
 * on Windows). Notice that resulting promise will always be resolved anyway.
 *
 * @returns {Promise} - Array<File>
 */


module.exports = function getDroppedFiles(dataTransfer, {
  logDropError = () => {}
} = {}) {
  var _dataTransfer$items;

  // Get all files from all subdirs. Works (at least) in Chrome, Mozilla, and Safari
  if ((_dataTransfer$items = dataTransfer.items) != null && _dataTransfer$items[0] && 'webkitGetAsEntry' in dataTransfer.items[0]) {
    return webkitGetAsEntryApi(dataTransfer, logDropError); // Otherwise just return all first-order files
  }

  return fallbackApi(dataTransfer);
};
},{"./utils/fallbackApi":35,"./utils/webkitGetAsEntryApi/index":38}],35:[function(require,module,exports){
"use strict";

const toArray = require('../../toArray'); // .files fallback, should be implemented in any browser


module.exports = function fallbackApi(dataTransfer) {
  const files = toArray(dataTransfer.files);
  return Promise.resolve(files);
};
},{"../../toArray":48}],36:[function(require,module,exports){
"use strict";

/**
 * Recursive function, calls the original callback() when the directory is entirely parsed.
 *
 * @param {FileSystemDirectoryReader} directoryReader
 * @param {Array} oldEntries
 * @param {Function} logDropError
 * @param {Function} callback - called with ([ all files and directories in that directoryReader ])
 */
module.exports = function getFilesAndDirectoriesFromDirectory(directoryReader, oldEntries, logDropError, {
  onSuccess
}) {
  directoryReader.readEntries(entries => {
    const newEntries = [...oldEntries, ...entries]; // According to the FileSystem API spec, getFilesAndDirectoriesFromDirectory()
    // must be called until it calls the onSuccess with an empty array.

    if (entries.length) {
      setTimeout(() => {
        getFilesAndDirectoriesFromDirectory(directoryReader, newEntries, logDropError, {
          onSuccess
        });
      }, 0); // Done iterating this particular directory
    } else {
      onSuccess(newEntries);
    }
  }, // Make sure we resolve on error anyway, it's fine if only one directory couldn't be parsed!
  error => {
    logDropError(error);
    onSuccess(oldEntries);
  });
};
},{}],37:[function(require,module,exports){
"use strict";

/**
 * Get the relative path from the FileEntry#fullPath, because File#webkitRelativePath is always '', at least onDrop.
 *
 * @param {FileEntry} fileEntry
 *
 * @returns {string|null} - if file is not in a folder - return null (this is to
 * be consistent with .relativePath-s of files selected from My Device). If file
 * is in a folder - return its fullPath, e.g. '/simpsons/hi.jpeg'.
 */
module.exports = function getRelativePath(fileEntry) {
  // fileEntry.fullPath - "/simpsons/hi.jpeg" or undefined (for browsers that don't support it)
  // fileEntry.name - "hi.jpeg"
  if (!fileEntry.fullPath || fileEntry.fullPath === `/${fileEntry.name}`) {
    return null;
  }

  return fileEntry.fullPath;
};
},{}],38:[function(require,module,exports){
"use strict";

const toArray = require('../../../toArray');

const getRelativePath = require('./getRelativePath');

const getFilesAndDirectoriesFromDirectory = require('./getFilesAndDirectoriesFromDirectory');

module.exports = function webkitGetAsEntryApi(dataTransfer, logDropError) {
  const files = [];
  const rootPromises = [];
  /**
   * Returns a resolved promise, when :files array is enhanced
   *
   * @param {(FileSystemFileEntry|FileSystemDirectoryEntry)} entry
   * @returns {Promise} - empty promise that resolves when :files is enhanced with a file
   */

  const createPromiseToAddFileOrParseDirectory = entry => new Promise(resolve => {
    // This is a base call
    if (entry.isFile) {
      // Creates a new File object which can be used to read the file.
      entry.file(file => {
        // eslint-disable-next-line no-param-reassign
        file.relativePath = getRelativePath(entry);
        files.push(file);
        resolve();
      }, // Make sure we resolve on error anyway, it's fine if only one file couldn't be read!
      error => {
        logDropError(error);
        resolve();
      }); // This is a recursive call
    } else if (entry.isDirectory) {
      const directoryReader = entry.createReader();
      getFilesAndDirectoriesFromDirectory(directoryReader, [], logDropError, {
        onSuccess: entries => resolve(Promise.all(entries.map(createPromiseToAddFileOrParseDirectory)))
      });
    }
  }); // For each dropped item, - make sure it's a file/directory, and start deepening in!


  toArray(dataTransfer.items).forEach(item => {
    const entry = item.webkitGetAsEntry(); // :entry can be null when we drop the url e.g.

    if (entry) {
      rootPromises.push(createPromiseToAddFileOrParseDirectory(entry));
    }
  });
  return Promise.all(rootPromises).then(() => files);
};
},{"../../../toArray":48,"./getFilesAndDirectoriesFromDirectory":36,"./getRelativePath":37}],39:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
/**
|
||
* Takes a full filename string and returns an object {name, extension}
|
||
*
|
||
* @param {string} fullFileName
|
||
* @returns {object} {name, extension}
|
||
*/
|
||
module.exports = function getFileNameAndExtension(fullFileName) {
|
||
const lastDot = fullFileName.lastIndexOf('.'); // these count as no extension: "no-dot", "trailing-dot."
|
||
|
||
if (lastDot === -1 || lastDot === fullFileName.length - 1) {
|
||
return {
|
||
name: fullFileName,
|
||
extension: undefined
|
||
};
|
||
}
|
||
|
||
return {
|
||
name: fullFileName.slice(0, lastDot),
|
||
extension: fullFileName.slice(lastDot + 1)
|
||
};
|
||
};
|
||
},{}],40:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
const getFileNameAndExtension = require('./getFileNameAndExtension');
|
||
|
||
const mimeTypes = require('./mimeTypes');
|
||
|
||
module.exports = function getFileType(file) {
|
||
var _getFileNameAndExtens;
|
||
|
||
if (file.type) return file.type;
|
||
const fileExtension = file.name ? (_getFileNameAndExtens = getFileNameAndExtension(file.name).extension) == null ? void 0 : _getFileNameAndExtens.toLowerCase() : null;
|
||
|
||
if (fileExtension && fileExtension in mimeTypes) {
|
||
// else, see if we can map extension to a mime type
|
||
return mimeTypes[fileExtension];
|
||
} // if all fails, fall back to a generic byte stream type
|
||
|
||
|
||
return 'application/octet-stream';
|
||
};
|
||
},{"./getFileNameAndExtension":39,"./mimeTypes":46}],41:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
module.exports = function getSocketHost(url) {
|
||
// get the host domain
|
||
const regex = /^(?:https?:\/\/|\/\/)?(?:[^@\n]+@)?(?:www\.)?([^\n]+)/i;
|
||
const host = regex.exec(url)[1];
|
||
const socketProtocol = /^http:\/\//i.test(url) ? 'ws' : 'wss';
|
||
return `${socketProtocol}://${host}`;
|
||
};
|
||
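// Illustrative note (added by the editor, not part of the upstream Uppy source): for a hypothetical
// Companion URL like 'https://companion.example.com/api/123' this returns
// 'wss://companion.example.com/api/123'; plain 'http://' URLs map to 'ws://'.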
},{}],42:[function(require,module,exports){
"use strict";

/**
 * Adds zero to strings shorter than two characters.
 *
 * @param {number} number
 * @returns {string}
 */
function pad(number) {
  return number < 10 ? `0${number}` : number.toString();
}
/**
 * Returns a timestamp in the format of `hours:minutes:seconds`
 */


module.exports = function getTimeStamp() {
  const date = new Date();
  const hours = pad(date.getHours());
  const minutes = pad(date.getMinutes());
  const seconds = pad(date.getSeconds());
  return `${hours}:${minutes}:${seconds}`;
};
},{}],43:[function(require,module,exports){
"use strict";

module.exports = function has(object, key) {
  return Object.prototype.hasOwnProperty.call(object, key);
};
},{}],44:[function(require,module,exports){
"use strict";

/**
 * Check if an object is a DOM element. Duck-typing based on `nodeType`.
 *
 * @param {*} obj
 */
module.exports = function isDOMElement(obj) {
  return (obj == null ? void 0 : obj.nodeType) === Node.ELEMENT_NODE;
};
},{}],45:[function(require,module,exports){
"use strict";

function isNetworkError(xhr) {
  if (!xhr) {
    return false;
  }

  return xhr.readyState !== 0 && xhr.readyState !== 4 || xhr.status === 0;
}

module.exports = isNetworkError;
},{}],46:[function(require,module,exports){
"use strict";

// ___Why not add the mime-types package?
// It's 19.7kB gzipped, and we only need mime types for well-known extensions (for file previews).
// ___Where to take new extensions from?
// https://github.com/jshttp/mime-db/blob/master/db.json
module.exports = {
  md: 'text/markdown',
  markdown: 'text/markdown',
  mp4: 'video/mp4',
  mp3: 'audio/mp3',
  svg: 'image/svg+xml',
  jpg: 'image/jpeg',
  png: 'image/png',
  gif: 'image/gif',
  heic: 'image/heic',
  heif: 'image/heif',
  yaml: 'text/yaml',
  yml: 'text/yaml',
  csv: 'text/csv',
  tsv: 'text/tab-separated-values',
  tab: 'text/tab-separated-values',
  avi: 'video/x-msvideo',
  mks: 'video/x-matroska',
  mkv: 'video/x-matroska',
  mov: 'video/quicktime',
  doc: 'application/msword',
  docm: 'application/vnd.ms-word.document.macroenabled.12',
  docx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
  dot: 'application/msword',
  dotm: 'application/vnd.ms-word.template.macroenabled.12',
  dotx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.template',
  xla: 'application/vnd.ms-excel',
  xlam: 'application/vnd.ms-excel.addin.macroenabled.12',
  xlc: 'application/vnd.ms-excel',
  xlf: 'application/x-xliff+xml',
  xlm: 'application/vnd.ms-excel',
  xls: 'application/vnd.ms-excel',
  xlsb: 'application/vnd.ms-excel.sheet.binary.macroenabled.12',
  xlsm: 'application/vnd.ms-excel.sheet.macroenabled.12',
  xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
  xlt: 'application/vnd.ms-excel',
  xltm: 'application/vnd.ms-excel.template.macroenabled.12',
  xltx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.template',
  xlw: 'application/vnd.ms-excel',
  txt: 'text/plain',
  text: 'text/plain',
  conf: 'text/plain',
  log: 'text/plain',
  pdf: 'application/pdf',
  zip: 'application/zip',
  '7z': 'application/x-7z-compressed',
  rar: 'application/x-rar-compressed',
  tar: 'application/x-tar',
  gz: 'application/gzip',
  dmg: 'application/x-apple-diskimage'
};
},{}],47:[function(require,module,exports){
"use strict";

module.exports = function settle(promises) {
  const resolutions = [];
  const rejections = [];

  function resolved(value) {
    resolutions.push(value);
  }

  function rejected(error) {
    rejections.push(error);
  }

  const wait = Promise.all(promises.map(promise => promise.then(resolved, rejected)));
  return wait.then(() => {
    return {
      successful: resolutions,
      failed: rejections
    };
  });
};
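// Illustrative note (added by the editor, not part of the upstream Uppy source):
// settle([Promise.resolve('a'), Promise.reject(new Error('boom'))]) resolves to
// { successful: ['a'], failed: [Error: boom] }; it never rejects itself.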
},{}],48:[function(require,module,exports){
"use strict";

/**
 * Converts list into array
 */
module.exports = Array.from;
},{}],49:[function(require,module,exports){
|
||
"use strict";
|
||
|
||
var _class, _temp;
|
||
|
||
const BasePlugin = require('@uppy/core/lib/BasePlugin');
|
||
|
||
const {
|
||
nanoid
|
||
} = require('nanoid');
|
||
|
||
const {
|
||
Provider,
|
||
RequestClient,
|
||
Socket
|
||
} = require('@uppy/companion-client');
|
||
|
||
const emitSocketProgress = require('@uppy/utils/lib/emitSocketProgress');
|
||
|
||
const getSocketHost = require('@uppy/utils/lib/getSocketHost');
|
||
|
||
const settle = require('@uppy/utils/lib/settle');
|
||
|
||
const EventTracker = require('@uppy/utils/lib/EventTracker');
|
||
|
||
const ProgressTimeout = require('@uppy/utils/lib/ProgressTimeout');
|
||
|
||
const {
|
||
RateLimitedQueue,
|
||
internalRateLimitedQueue
|
||
} = require('@uppy/utils/lib/RateLimitedQueue');
|
||
|
||
const NetworkError = require('@uppy/utils/lib/NetworkError');
|
||
|
||
const isNetworkError = require('@uppy/utils/lib/isNetworkError');
|
||
|
||
function buildResponseError(xhr, err) {
|
||
let error = err; // No error message
|
||
|
||
if (!error) error = new Error('Upload error'); // Got an error message string
|
||
|
||
if (typeof error === 'string') error = new Error(error); // Got something else
|
||
|
||
if (!(error instanceof Error)) {
|
||
error = Object.assign(new Error('Upload error'), {
|
||
data: error
|
||
});
|
||
}
|
||
|
||
if (isNetworkError(xhr)) {
|
||
error = new NetworkError(error, xhr);
|
||
return error;
|
||
}
|
||
|
||
error.request = xhr;
|
||
return error;
|
||
}
|
||
/**
|
||
* Set `data.type` in the blob to `file.meta.type`,
|
||
* because we might have detected a more accurate file type in Uppy
|
||
* https://stackoverflow.com/a/50875615
|
||
*
|
||
* @param {object} file File object with `data`, `size` and `meta` properties
|
||
* @returns {object} blob updated with the new `type` set from `file.meta.type`
|
||
*/
|
||
|
||
|
||
function setTypeInBlob(file) {
|
||
const dataWithUpdatedType = file.data.slice(0, file.data.size, file.meta.type);
|
||
return dataWithUpdatedType;
|
||
}
|
||
|
||
module.exports = (_temp = _class = class XHRUpload extends BasePlugin {
|
||
// eslint-disable-next-line global-require
|
||
constructor(uppy, opts) {
|
||
super(uppy, opts);
|
||
this.type = 'uploader';
|
||
this.id = this.opts.id || 'XHRUpload';
|
||
this.title = 'XHRUpload';
|
||
this.defaultLocale = {
|
||
strings: {
|
||
timedOut: 'Upload stalled for %{seconds} seconds, aborting.'
|
||
}
|
||
}; // Default options
|
||
|
||
const defaultOptions = {
|
||
formData: true,
|
||
fieldName: opts.bundle ? 'files[]' : 'file',
|
||
method: 'post',
|
||
metaFields: null,
|
||
responseUrlFieldName: 'url',
|
||
bundle: false,
|
||
headers: {},
|
||
timeout: 30 * 1000,
|
||
limit: 5,
|
||
withCredentials: false,
|
||
responseType: '',
|
||
|
||
/**
|
||
* @typedef respObj
|
||
* @property {string} responseText
|
||
* @property {number} status
|
||
* @property {string} statusText
|
||
* @property {object.<string, string>} headers
|
||
*
|
||
* @param {string} responseText the response body string
|
||
* @param {XMLHttpRequest | respObj} response the response object (XHR or similar)
|
||
*/
|
||
getResponseData(responseText) {
|
||
let parsedResponse = {};
|
||
|
||
try {
|
||
parsedResponse = JSON.parse(responseText);
|
||
} catch (err) {
|
||
uppy.log(err);
|
||
}
|
||
|
||
return parsedResponse;
|
||
},
|
||
|
||
/**
|
||
*
|
||
* @param {string} responseText the response body string
|
||
* @param {XMLHttpRequest | respObj} response the response object (XHR or similar)
|
||
*/
|
||
getResponseError(_, response) {
|
||
let error = new Error('Upload error');
|
||
|
||
if (isNetworkError(response)) {
|
||
error = new NetworkError(error, response);
|
||
}
|
||
|
||
return error;
|
||
},
|
||
|
||
/**
|
||
* Check if the response from the upload endpoint indicates that the upload was successful.
|
||
*
|
||
* @param {number} status the response status code
|
||
*/
|
||
validateStatus(status) {
|
||
return status >= 200 && status < 300;
|
||
}
|
||
|
||
};
|
||
this.opts = { ...defaultOptions,
|
||
...opts
|
||
};
|
||
this.i18nInit();
|
||
this.handleUpload = this.handleUpload.bind(this); // Simultaneous upload limiting is shared across all uploads with this plugin.
|
||
|
||
if (internalRateLimitedQueue in this.opts) {
|
||
this.requests = this.opts[internalRateLimitedQueue];
|
||
} else {
|
||
this.requests = new RateLimitedQueue(this.opts.limit);
|
||
}
|
||
|
||
if (this.opts.bundle && !this.opts.formData) {
|
||
throw new Error('`opts.formData` must be true when `opts.bundle` is enabled.');
|
||
}
|
||
|
||
this.uploaderEvents = Object.create(null);
|
||
}
|
||
|
||
getOptions(file) {
|
||
const overrides = this.uppy.getState().xhrUpload;
|
||
const {
|
||
headers
|
||
} = this.opts;
|
||
const opts = { ...this.opts,
|
||
...(overrides || {}),
|
||
...(file.xhrUpload || {}),
|
||
headers: {}
|
||
}; // Support for `headers` as a function, only in the XHRUpload settings.
|
||
// Options set by other plugins in Uppy state or on the files themselves are still merged in afterward.
|
||
//
|
||
// ```js
|
||
// headers: (file) => ({ expires: file.meta.expires })
|
||
// ```
|
||
|
||
if (typeof headers === 'function') {
|
||
opts.headers = headers(file);
|
||
} else {
|
||
Object.assign(opts.headers, this.opts.headers);
|
||
}
|
||
|
||
if (overrides) {
|
||
Object.assign(opts.headers, overrides.headers);
|
||
}
|
||
|
||
if (file.xhrUpload) {
|
||
Object.assign(opts.headers, file.xhrUpload.headers);
|
||
}
|
||
|
||
return opts;
|
||
} // eslint-disable-next-line class-methods-use-this
|
||
|
||
|
||
addMetadata(formData, meta, opts) {
|
||
const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields : Object.keys(meta); // Send along all fields by default.
|
||
|
||
metaFields.forEach(item => {
|
||
formData.append(item, meta[item]);
|
||
});
|
||
}
|
||
|
||
createFormDataUpload(file, opts) {
|
||
const formPost = new FormData();
|
||
this.addMetadata(formPost, file.meta, opts);
|
||
const dataWithUpdatedType = setTypeInBlob(file);
|
||
|
||
if (file.name) {
|
||
formPost.append(opts.fieldName, dataWithUpdatedType, file.meta.name);
|
||
} else {
|
||
formPost.append(opts.fieldName, dataWithUpdatedType);
|
||
}
|
||
|
||
return formPost;
|
||
}
|
||
|
||
createBundledUpload(files, opts) {
|
||
const formPost = new FormData();
|
||
const {
|
||
meta
|
||
} = this.uppy.getState();
|
||
this.addMetadata(formPost, meta, opts);
|
||
files.forEach(file => {
|
||
const options = this.getOptions(file);
|
||
const dataWithUpdatedType = setTypeInBlob(file);
|
||
|
||
if (file.name) {
|
||
formPost.append(options.fieldName, dataWithUpdatedType, file.name);
|
||
} else {
|
||
formPost.append(options.fieldName, dataWithUpdatedType);
|
||
}
|
||
});
|
||
return formPost;
|
||
}
|
||
|
||
upload(file, current, total) {
|
||
const opts = this.getOptions(file);
|
||
this.uppy.log(`uploading ${current} of ${total}`);
|
||
return new Promise((resolve, reject) => {
|
||
this.uppy.emit('upload-started', file);
|
||
const data = opts.formData ? this.createFormDataUpload(file, opts) : file.data;
|
||
const xhr = new XMLHttpRequest();
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
const timer = new ProgressTimeout(opts.timeout, () => {
|
||
xhr.abort();
|
||
queuedRequest.done();
|
||
const error = new Error(this.i18n('timedOut', {
|
||
seconds: Math.ceil(opts.timeout / 1000)
|
||
}));
|
||
this.uppy.emit('upload-error', file, error);
|
||
reject(error);
|
||
});
|
||
const id = nanoid();
|
||
xhr.upload.addEventListener('loadstart', () => {
|
||
this.uppy.log(`[XHRUpload] ${id} started`);
|
||
});
|
||
xhr.upload.addEventListener('progress', ev => {
|
||
this.uppy.log(`[XHRUpload] ${id} progress: ${ev.loaded} / ${ev.total}`); // Begin checking for timeouts when progress starts, instead of loading,
|
||
// to avoid timing out requests on browser concurrency queue
|
||
|
||
timer.progress();
|
||
|
||
if (ev.lengthComputable) {
|
||
this.uppy.emit('upload-progress', file, {
|
||
uploader: this,
|
||
bytesUploaded: ev.loaded,
|
||
bytesTotal: ev.total
|
||
});
|
||
}
|
||
});
|
||
xhr.addEventListener('load', ev => {
|
||
this.uppy.log(`[XHRUpload] ${id} finished`);
|
||
timer.done();
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
if (opts.validateStatus(ev.target.status, xhr.responseText, xhr)) {
|
||
const body = opts.getResponseData(xhr.responseText, xhr);
|
||
const uploadURL = body[opts.responseUrlFieldName];
|
||
const uploadResp = {
|
||
status: ev.target.status,
|
||
body,
|
||
uploadURL
|
||
};
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
|
||
if (uploadURL) {
|
||
this.uppy.log(`Download ${file.name} from ${uploadURL}`);
|
||
}
|
||
|
||
return resolve(file);
|
||
}
|
||
|
||
const body = opts.getResponseData(xhr.responseText, xhr);
|
||
const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr));
|
||
const response = {
|
||
status: ev.target.status,
|
||
body
|
||
};
|
||
this.uppy.emit('upload-error', file, error, response);
|
||
return reject(error);
|
||
});
|
||
xhr.addEventListener('error', () => {
|
||
this.uppy.log(`[XHRUpload] ${id} errored`);
|
||
timer.done();
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
const error = buildResponseError(xhr, opts.getResponseError(xhr.responseText, xhr));
|
||
this.uppy.emit('upload-error', file, error);
|
||
return reject(error);
|
||
});
|
||
xhr.open(opts.method.toUpperCase(), opts.endpoint, true); // IE10 does not allow setting `withCredentials` and `responseType`
|
||
// before `open()` is called.
|
||
|
||
xhr.withCredentials = opts.withCredentials;
|
||
|
||
if (opts.responseType !== '') {
|
||
xhr.responseType = opts.responseType;
|
||
}
|
||
|
||
const queuedRequest = this.requests.run(() => {
|
||
this.uppy.emit('upload-started', file); // When using an authentication system like JWT, the bearer token goes as a header. This
|
||
// header needs to be fresh each time the token is refreshed so computing and setting the
|
||
// headers just before the upload starts enables this kind of authentication to work properly.
|
||
// Otherwise, half-way through the list of uploads the token could be stale and the upload would fail.
|
||
|
||
const currentOpts = this.getOptions(file);
|
||
Object.keys(currentOpts.headers).forEach(header => {
|
||
xhr.setRequestHeader(header, currentOpts.headers[header]);
|
||
});
|
||
xhr.send(data);
|
||
return () => {
|
||
timer.done();
|
||
xhr.abort();
|
||
};
|
||
});
|
||
this.onFileRemove(file.id, () => {
|
||
queuedRequest.abort();
|
||
reject(new Error('File removed'));
|
||
});
|
||
this.onCancelAll(file.id, () => {
|
||
queuedRequest.abort();
|
||
reject(new Error('Upload cancelled'));
|
||
});
|
||
});
|
||
}
|
||
|
||
uploadRemote(file) {
|
||
const opts = this.getOptions(file);
|
||
return new Promise((resolve, reject) => {
|
||
const fields = {};
|
||
const metaFields = Array.isArray(opts.metaFields) ? opts.metaFields // Send along all fields by default.
|
||
: Object.keys(file.meta);
|
||
metaFields.forEach(name => {
|
||
fields[name] = file.meta[name];
|
||
});
|
||
const Client = file.remote.providerOptions.provider ? Provider : RequestClient;
|
||
const client = new Client(this.uppy, file.remote.providerOptions);
|
||
client.post(file.remote.url, { ...file.remote.body,
|
||
endpoint: opts.endpoint,
|
||
size: file.data.size,
|
||
fieldname: opts.fieldName,
|
||
metadata: fields,
|
||
httpMethod: opts.method,
|
||
useFormData: opts.formData,
|
||
headers: opts.headers
|
||
}).then(res => {
|
||
const {
|
||
token
|
||
} = res;
|
||
const host = getSocketHost(file.remote.companionUrl);
|
||
const socket = new Socket({
|
||
target: `${host}/api/${token}`,
|
||
autoOpen: false
|
||
});
|
||
this.uploaderEvents[file.id] = new EventTracker(this.uppy);
|
||
this.onFileRemove(file.id, () => {
|
||
socket.send('pause', {});
|
||
queuedRequest.abort();
|
||
resolve(`upload ${file.id} was removed`);
|
||
});
|
||
this.onCancelAll(file.id, () => {
|
||
socket.send('pause', {});
|
||
queuedRequest.abort();
|
||
resolve(`upload ${file.id} was canceled`);
|
||
});
|
||
this.onRetry(file.id, () => {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
});
|
||
this.onRetryAll(file.id, () => {
|
||
socket.send('pause', {});
|
||
socket.send('resume', {});
|
||
});
|
||
socket.on('progress', progressData => emitSocketProgress(this, progressData, file));
|
||
socket.on('success', data => {
|
||
const body = opts.getResponseData(data.response.responseText, data.response);
|
||
const uploadURL = body[opts.responseUrlFieldName];
|
||
const uploadResp = {
|
||
status: data.response.status,
|
||
body,
|
||
uploadURL
|
||
};
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
return resolve();
|
||
});
|
||
socket.on('error', errData => {
|
||
const resp = errData.response;
|
||
const error = resp ? opts.getResponseError(resp.responseText, resp) : Object.assign(new Error(errData.error.message), {
|
||
cause: errData.error
|
||
});
|
||
this.uppy.emit('upload-error', file, error);
|
||
queuedRequest.done();
|
||
|
||
if (this.uploaderEvents[file.id]) {
|
||
this.uploaderEvents[file.id].remove();
|
||
this.uploaderEvents[file.id] = null;
|
||
}
|
||
|
||
reject(error);
|
||
});
|
||
const queuedRequest = this.requests.run(() => {
|
||
socket.open();
|
||
|
||
if (file.isPaused) {
|
||
socket.send('pause', {});
|
||
}
|
||
|
||
return () => socket.close();
|
||
});
|
||
}).catch(err => {
|
||
this.uppy.emit('upload-error', file, err);
|
||
reject(err);
|
||
});
|
||
});
|
||
}
|
||
|
||
uploadBundle(files) {
|
||
return new Promise((resolve, reject) => {
|
||
const {
|
||
endpoint
|
||
} = this.opts;
|
||
const {
|
||
method
|
||
} = this.opts;
|
||
const optsFromState = this.uppy.getState().xhrUpload;
|
||
const formData = this.createBundledUpload(files, { ...this.opts,
|
||
...(optsFromState || {})
|
||
});
|
||
const xhr = new XMLHttpRequest();
|
||
const timer = new ProgressTimeout(this.opts.timeout, () => {
|
||
xhr.abort();
|
||
const error = new Error(this.i18n('timedOut', {
|
||
seconds: Math.ceil(this.opts.timeout / 1000)
|
||
}));
|
||
emitError(error);
|
||
reject(error);
|
||
});
|
||
|
||
const emitError = error => {
|
||
files.forEach(file => {
|
||
this.uppy.emit('upload-error', file, error);
|
||
});
|
||
};
|
||
|
||
xhr.upload.addEventListener('loadstart', () => {
|
||
this.uppy.log('[XHRUpload] started uploading bundle');
|
||
timer.progress();
|
||
});
|
||
xhr.upload.addEventListener('progress', ev => {
|
||
timer.progress();
|
||
if (!ev.lengthComputable) return;
|
||
files.forEach(file => {
|
||
this.uppy.emit('upload-progress', file, {
|
||
uploader: this,
|
||
bytesUploaded: ev.loaded / ev.total * file.size,
|
||
bytesTotal: file.size
|
||
});
|
||
});
|
||
});
|
||
xhr.addEventListener('load', ev => {
|
||
timer.done();
|
||
|
||
if (this.opts.validateStatus(ev.target.status, xhr.responseText, xhr)) {
|
||
const body = this.opts.getResponseData(xhr.responseText, xhr);
|
||
const uploadResp = {
|
||
status: ev.target.status,
|
||
body
|
||
};
|
||
files.forEach(file => {
|
||
this.uppy.emit('upload-success', file, uploadResp);
|
||
});
|
||
return resolve();
|
||
}
|
||
|
||
const error = this.opts.getResponseError(xhr.responseText, xhr) || new Error('Upload error');
|
||
error.request = xhr;
|
||
emitError(error);
|
||
return reject(error);
|
||
});
|
||
xhr.addEventListener('error', () => {
|
||
timer.done();
|
||
const error = this.opts.getResponseError(xhr.responseText, xhr) || new Error('Upload error');
|
||
emitError(error);
|
||
return reject(error);
|
||
});
|
||
this.uppy.on('cancel-all', () => {
|
||
timer.done();
|
||
xhr.abort();
|
||
});
|
||
xhr.open(method.toUpperCase(), endpoint, true); // IE10 does not allow setting `withCredentials` and `responseType`
|
||
// before `open()` is called.
|
||
|
||
xhr.withCredentials = this.opts.withCredentials;
|
||
|
||
if (this.opts.responseType !== '') {
|
||
xhr.responseType = this.opts.responseType;
|
||
}
|
||
|
||
Object.keys(this.opts.headers).forEach(header => {
|
||
xhr.setRequestHeader(header, this.opts.headers[header]);
|
||
});
|
||
xhr.send(formData);
|
||
files.forEach(file => {
|
||
this.uppy.emit('upload-started', file);
|
||
});
|
||
});
|
||
}
|
||
|
||
uploadFiles(files) {
|
||
const promises = files.map((file, i) => {
|
||
const current = parseInt(i, 10) + 1;
|
||
const total = files.length;
|
||
|
||
if (file.error) {
|
||
return Promise.reject(new Error(file.error));
|
||
}
|
||
|
||
if (file.isRemote) {
|
||
return this.uploadRemote(file, current, total);
|
||
}
|
||
|
||
return this.upload(file, current, total);
|
||
});
|
||
return settle(promises);
|
||
}
|
||
|
||
onFileRemove(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('file-removed', file => {
|
||
if (fileID === file.id) cb(file.id);
|
||
});
|
||
}
|
||
|
||
onRetry(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('upload-retry', targetFileID => {
|
||
if (fileID === targetFileID) {
|
||
cb();
|
||
}
|
||
});
|
||
}
|
||
|
||
onRetryAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('retry-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
onCancelAll(fileID, cb) {
|
||
this.uploaderEvents[fileID].on('cancel-all', () => {
|
||
if (!this.uppy.getFile(fileID)) return;
|
||
cb();
|
||
});
|
||
}
|
||
|
||
handleUpload(fileIDs) {
|
||
if (fileIDs.length === 0) {
|
||
this.uppy.log('[XHRUpload] No files to upload!');
|
||
return Promise.resolve();
|
||
} // No limit configured by the user, and no RateLimitedQueue passed in by a "parent" plugin
|
||
// (basically just AwsS3) using the internal symbol
|
||
|
||
|
||
if (this.opts.limit === 0 && !this.opts[internalRateLimitedQueue]) {
|
||
this.uppy.log('[XHRUpload] When uploading multiple files at once, consider setting the `limit` option (to `10` for example), to limit the number of concurrent uploads, which helps prevent memory and network issues: https://uppy.io/docs/xhr-upload/#limit-0', 'warning');
|
||
}
|
||
|
||
this.uppy.log('[XHRUpload] Uploading...');
|
||
const files = fileIDs.map(fileID => this.uppy.getFile(fileID));
|
||
|
||
if (this.opts.bundle) {
|
||
// if bundle: true, we don’t support remote uploads
|
||
const isSomeFileRemote = files.some(file => file.isRemote);
|
||
|
||
if (isSomeFileRemote) {
|
||
throw new Error('Can’t upload remote files when the `bundle: true` option is set');
|
||
}
|
||
|
||
if (typeof this.opts.headers === 'function') {
|
||
throw new TypeError('`headers` may not be a function when the `bundle: true` option is set');
|
||
}
|
||
|
||
return this.uploadBundle(files);
|
||
}
|
||
|
||
return this.uploadFiles(files).then(() => null);
|
||
}
|
||
|
||
install() {
|
||
if (this.opts.bundle) {
|
||
const {
|
||
capabilities
|
||
} = this.uppy.getState();
|
||
this.uppy.setState({
|
||
capabilities: { ...capabilities,
|
||
individualCancellation: false
|
||
}
|
||
});
|
||
}
|
||
|
||
this.uppy.addUploader(this.handleUpload);
|
||
}
|
||
|
||
uninstall() {
|
||
if (this.opts.bundle) {
|
||
const {
|
||
capabilities
|
||
} = this.uppy.getState();
|
||
this.uppy.setState({
|
||
capabilities: { ...capabilities,
|
||
individualCancellation: true
|
||
}
|
||
});
|
||
}
|
||
|
||
this.uppy.removeUploader(this.handleUpload);
|
||
}
|
||
|
||
}, _class.VERSION = "2.0.5", _temp);
|
||
},{"@uppy/companion-client":12,"@uppy/core/lib/BasePlugin":14,"@uppy/utils/lib/EventTracker":24,"@uppy/utils/lib/NetworkError":25,"@uppy/utils/lib/ProgressTimeout":26,"@uppy/utils/lib/RateLimitedQueue":27,"@uppy/utils/lib/emitSocketProgress":30,"@uppy/utils/lib/getSocketHost":41,"@uppy/utils/lib/isNetworkError":45,"@uppy/utils/lib/settle":47,"nanoid":53}],50:[function(require,module,exports){
|
||
(function (global){(function (){
|
||
/**
|
||
* lodash (Custom Build) <https://lodash.com/>
|
||
* Build: `lodash modularize exports="npm" -o ./`
|
||
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
||
* Released under MIT license <https://lodash.com/license>
|
||
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
||
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
||
*/
|
||
|
||
/** Used as the `TypeError` message for "Functions" methods. */
|
||
var FUNC_ERROR_TEXT = 'Expected a function';
|
||
|
||
/** Used as references for various `Number` constants. */
|
||
var NAN = 0 / 0;
|
||
|
||
/** `Object#toString` result references. */
|
||
var symbolTag = '[object Symbol]';
|
||
|
||
/** Used to match leading and trailing whitespace. */
|
||
var reTrim = /^\s+|\s+$/g;
|
||
|
||
/** Used to detect bad signed hexadecimal string values. */
|
||
var reIsBadHex = /^[-+]0x[0-9a-f]+$/i;
|
||
|
||
/** Used to detect binary string values. */
|
||
var reIsBinary = /^0b[01]+$/i;
|
||
|
||
/** Used to detect octal string values. */
|
||
var reIsOctal = /^0o[0-7]+$/i;
|
||
|
||
/** Built-in method references without a dependency on `root`. */
|
||
var freeParseInt = parseInt;
|
||
|
||
/** Detect free variable `global` from Node.js. */
|
||
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
|
||
|
||
/** Detect free variable `self`. */
|
||
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
||
|
||
/** Used as a reference to the global object. */
|
||
var root = freeGlobal || freeSelf || Function('return this')();
|
||
|
||
/** Used for built-in method references. */
|
||
var objectProto = Object.prototype;
|
||
|
||
/**
|
||
* Used to resolve the
|
||
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
||
* of values.
|
||
*/
|
||
var objectToString = objectProto.toString;
|
||
|
||
/* Built-in method references for those with the same name as other `lodash` methods. */
|
||
var nativeMax = Math.max,
|
||
nativeMin = Math.min;
|
||
|
||
/**
|
||
* Gets the timestamp of the number of milliseconds that have elapsed since
|
||
* the Unix epoch (1 January 1970 00:00:00 UTC).
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 2.4.0
|
||
* @category Date
|
||
* @returns {number} Returns the timestamp.
|
||
* @example
|
||
*
|
||
* _.defer(function(stamp) {
|
||
* console.log(_.now() - stamp);
|
||
* }, _.now());
|
||
* // => Logs the number of milliseconds it took for the deferred invocation.
|
||
*/
|
||
var now = function() {
|
||
return root.Date.now();
|
||
};
|
||
|
||
/**
|
||
* Creates a debounced function that delays invoking `func` until after `wait`
|
||
* milliseconds have elapsed since the last time the debounced function was
|
||
* invoked. The debounced function comes with a `cancel` method to cancel
|
||
* delayed `func` invocations and a `flush` method to immediately invoke them.
|
||
* Provide `options` to indicate whether `func` should be invoked on the
|
||
* leading and/or trailing edge of the `wait` timeout. The `func` is invoked
|
||
* with the last arguments provided to the debounced function. Subsequent
|
||
* calls to the debounced function return the result of the last `func`
|
||
* invocation.
|
||
*
|
||
* **Note:** If `leading` and `trailing` options are `true`, `func` is
|
||
* invoked on the trailing edge of the timeout only if the debounced function
|
||
* is invoked more than once during the `wait` timeout.
|
||
*
|
||
* If `wait` is `0` and `leading` is `false`, `func` invocation is deferred
|
||
* until to the next tick, similar to `setTimeout` with a timeout of `0`.
|
||
*
|
||
* See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/)
|
||
* for details over the differences between `_.debounce` and `_.throttle`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Function
|
||
* @param {Function} func The function to debounce.
|
||
* @param {number} [wait=0] The number of milliseconds to delay.
|
||
* @param {Object} [options={}] The options object.
|
||
* @param {boolean} [options.leading=false]
|
||
* Specify invoking on the leading edge of the timeout.
|
||
* @param {number} [options.maxWait]
|
||
* The maximum time `func` is allowed to be delayed before it's invoked.
|
||
* @param {boolean} [options.trailing=true]
|
||
* Specify invoking on the trailing edge of the timeout.
|
||
* @returns {Function} Returns the new debounced function.
|
||
* @example
|
||
*
|
||
* // Avoid costly calculations while the window size is in flux.
|
||
* jQuery(window).on('resize', _.debounce(calculateLayout, 150));
|
||
*
|
||
* // Invoke `sendMail` when clicked, debouncing subsequent calls.
|
||
* jQuery(element).on('click', _.debounce(sendMail, 300, {
|
||
* 'leading': true,
|
||
* 'trailing': false
|
||
* }));
|
||
*
|
||
* // Ensure `batchLog` is invoked once after 1 second of debounced calls.
|
||
* var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 });
|
||
* var source = new EventSource('/stream');
|
||
* jQuery(source).on('message', debounced);
|
||
*
|
||
* // Cancel the trailing debounced invocation.
|
||
* jQuery(window).on('popstate', debounced.cancel);
|
||
*/
|
||
function debounce(func, wait, options) {
|
||
var lastArgs,
|
||
lastThis,
|
||
maxWait,
|
||
result,
|
||
timerId,
|
||
lastCallTime,
|
||
lastInvokeTime = 0,
|
||
leading = false,
|
||
maxing = false,
|
||
trailing = true;
|
||
|
||
if (typeof func != 'function') {
|
||
throw new TypeError(FUNC_ERROR_TEXT);
|
||
}
|
||
wait = toNumber(wait) || 0;
|
||
if (isObject(options)) {
|
||
leading = !!options.leading;
|
||
maxing = 'maxWait' in options;
|
||
maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait;
|
||
trailing = 'trailing' in options ? !!options.trailing : trailing;
|
||
}
|
||
|
||
function invokeFunc(time) {
|
||
var args = lastArgs,
|
||
thisArg = lastThis;
|
||
|
||
lastArgs = lastThis = undefined;
|
||
lastInvokeTime = time;
|
||
result = func.apply(thisArg, args);
|
||
return result;
|
||
}
|
||
|
||
function leadingEdge(time) {
|
||
// Reset any `maxWait` timer.
|
||
lastInvokeTime = time;
|
||
// Start the timer for the trailing edge.
|
||
timerId = setTimeout(timerExpired, wait);
|
||
// Invoke the leading edge.
|
||
return leading ? invokeFunc(time) : result;
|
||
}
|
||
|
||
function remainingWait(time) {
|
||
var timeSinceLastCall = time - lastCallTime,
|
||
timeSinceLastInvoke = time - lastInvokeTime,
|
||
result = wait - timeSinceLastCall;
|
||
|
||
return maxing ? nativeMin(result, maxWait - timeSinceLastInvoke) : result;
|
||
}
|
||
|
||
function shouldInvoke(time) {
|
||
var timeSinceLastCall = time - lastCallTime,
|
||
timeSinceLastInvoke = time - lastInvokeTime;
|
||
|
||
// Either this is the first call, activity has stopped and we're at the
|
||
// trailing edge, the system time has gone backwards and we're treating
|
||
// it as the trailing edge, or we've hit the `maxWait` limit.
|
||
return (lastCallTime === undefined || (timeSinceLastCall >= wait) ||
|
||
(timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait));
|
||
}
|
||
|
||
function timerExpired() {
|
||
var time = now();
|
||
if (shouldInvoke(time)) {
|
||
return trailingEdge(time);
|
||
}
|
||
// Restart the timer.
|
||
timerId = setTimeout(timerExpired, remainingWait(time));
|
||
}
|
||
|
||
function trailingEdge(time) {
|
||
timerId = undefined;
|
||
|
||
// Only invoke if we have `lastArgs` which means `func` has been
|
||
// debounced at least once.
|
||
if (trailing && lastArgs) {
|
||
return invokeFunc(time);
|
||
}
|
||
lastArgs = lastThis = undefined;
|
||
return result;
|
||
}
|
||
|
||
function cancel() {
|
||
if (timerId !== undefined) {
|
||
clearTimeout(timerId);
|
||
}
|
||
lastInvokeTime = 0;
|
||
lastArgs = lastCallTime = lastThis = timerId = undefined;
|
||
}
|
||
|
||
function flush() {
|
||
return timerId === undefined ? result : trailingEdge(now());
|
||
}
|
||
|
||
function debounced() {
|
||
var time = now(),
|
||
isInvoking = shouldInvoke(time);
|
||
|
||
lastArgs = arguments;
|
||
lastThis = this;
|
||
lastCallTime = time;
|
||
|
||
if (isInvoking) {
|
||
if (timerId === undefined) {
|
||
return leadingEdge(lastCallTime);
|
||
}
|
||
if (maxing) {
|
||
// Handle invocations in a tight loop.
|
||
timerId = setTimeout(timerExpired, wait);
|
||
return invokeFunc(lastCallTime);
|
||
}
|
||
}
|
||
if (timerId === undefined) {
|
||
timerId = setTimeout(timerExpired, wait);
|
||
}
|
||
return result;
|
||
}
|
||
debounced.cancel = cancel;
|
||
debounced.flush = flush;
|
||
return debounced;
|
||
}
|
||
|
||
/**
|
||
* Creates a throttled function that only invokes `func` at most once per
|
||
* every `wait` milliseconds. The throttled function comes with a `cancel`
|
||
* method to cancel delayed `func` invocations and a `flush` method to
|
||
* immediately invoke them. Provide `options` to indicate whether `func`
|
||
* should be invoked on the leading and/or trailing edge of the `wait`
|
||
* timeout. The `func` is invoked with the last arguments provided to the
|
||
* throttled function. Subsequent calls to the throttled function return the
|
||
* result of the last `func` invocation.
|
||
*
|
||
* **Note:** If `leading` and `trailing` options are `true`, `func` is
|
||
* invoked on the trailing edge of the timeout only if the throttled function
|
||
* is invoked more than once during the `wait` timeout.
|
||
*
|
||
* If `wait` is `0` and `leading` is `false`, `func` invocation is deferred
|
||
* until to the next tick, similar to `setTimeout` with a timeout of `0`.
|
||
*
|
||
* See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/)
|
||
* for details over the differences between `_.throttle` and `_.debounce`.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Function
|
||
* @param {Function} func The function to throttle.
|
||
* @param {number} [wait=0] The number of milliseconds to throttle invocations to.
|
||
* @param {Object} [options={}] The options object.
|
||
* @param {boolean} [options.leading=true]
|
||
* Specify invoking on the leading edge of the timeout.
|
||
* @param {boolean} [options.trailing=true]
|
||
* Specify invoking on the trailing edge of the timeout.
|
||
* @returns {Function} Returns the new throttled function.
|
||
* @example
|
||
*
|
||
* // Avoid excessively updating the position while scrolling.
|
||
* jQuery(window).on('scroll', _.throttle(updatePosition, 100));
|
||
*
|
||
* // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes.
|
||
* var throttled = _.throttle(renewToken, 300000, { 'trailing': false });
|
||
* jQuery(element).on('click', throttled);
|
||
*
|
||
* // Cancel the trailing throttled invocation.
|
||
* jQuery(window).on('popstate', throttled.cancel);
|
||
*/
|
||
function throttle(func, wait, options) {
|
||
var leading = true,
|
||
trailing = true;
|
||
|
||
if (typeof func != 'function') {
|
||
throw new TypeError(FUNC_ERROR_TEXT);
|
||
}
|
||
if (isObject(options)) {
|
||
leading = 'leading' in options ? !!options.leading : leading;
|
||
trailing = 'trailing' in options ? !!options.trailing : trailing;
|
||
}
|
||
return debounce(func, wait, {
|
||
'leading': leading,
|
||
'maxWait': wait,
|
||
'trailing': trailing
|
||
});
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is the
|
||
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
||
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 0.1.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObject({});
|
||
* // => true
|
||
*
|
||
* _.isObject([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObject(_.noop);
|
||
* // => true
|
||
*
|
||
* _.isObject(null);
|
||
* // => false
|
||
*/
|
||
function isObject(value) {
|
||
var type = typeof value;
|
||
return !!value && (type == 'object' || type == 'function');
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
||
* and has a `typeof` result of "object".
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
||
* @example
|
||
*
|
||
* _.isObjectLike({});
|
||
* // => true
|
||
*
|
||
* _.isObjectLike([1, 2, 3]);
|
||
* // => true
|
||
*
|
||
* _.isObjectLike(_.noop);
|
||
* // => false
|
||
*
|
||
* _.isObjectLike(null);
|
||
* // => false
|
||
*/
|
||
function isObjectLike(value) {
|
||
return !!value && typeof value == 'object';
|
||
}
|
||
|
||
/**
|
||
* Checks if `value` is classified as a `Symbol` primitive or object.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to check.
|
||
* @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
|
||
* @example
|
||
*
|
||
* _.isSymbol(Symbol.iterator);
|
||
* // => true
|
||
*
|
||
* _.isSymbol('abc');
|
||
* // => false
|
||
*/
|
||
function isSymbol(value) {
|
||
return typeof value == 'symbol' ||
|
||
(isObjectLike(value) && objectToString.call(value) == symbolTag);
|
||
}
|
||
|
||
/**
|
||
* Converts `value` to a number.
|
||
*
|
||
* @static
|
||
* @memberOf _
|
||
* @since 4.0.0
|
||
* @category Lang
|
||
* @param {*} value The value to process.
|
||
* @returns {number} Returns the number.
|
||
* @example
|
||
*
|
||
* _.toNumber(3.2);
|
||
* // => 3.2
|
||
*
|
||
* _.toNumber(Number.MIN_VALUE);
|
||
* // => 5e-324
|
||
*
|
||
* _.toNumber(Infinity);
|
||
* // => Infinity
|
||
*
|
||
* _.toNumber('3.2');
|
||
* // => 3.2
|
||
*/
|
||
function toNumber(value) {
|
||
if (typeof value == 'number') {
|
||
return value;
|
||
}
|
||
if (isSymbol(value)) {
|
||
return NAN;
|
||
}
|
||
if (isObject(value)) {
|
||
var other = typeof value.valueOf == 'function' ? value.valueOf() : value;
|
||
value = isObject(other) ? (other + '') : other;
|
||
}
|
||
if (typeof value != 'string') {
|
||
return value === 0 ? value : +value;
|
||
}
|
||
value = value.replace(reTrim, '');
|
||
var isBinary = reIsBinary.test(value);
|
||
return (isBinary || reIsOctal.test(value))
|
||
? freeParseInt(value.slice(2), isBinary ? 2 : 8)
|
||
: (reIsBadHex.test(value) ? NAN : +value);
|
||
}
|
||
|
||
module.exports = throttle;
|
||
|
||
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||
},{}],51:[function(require,module,exports){
|
||
var wildcard = require('wildcard');
|
||
var reMimePartSplit = /[\/\+\.]/;
|
||
|
||
/**
|
||
# mime-match
|
||
|
||
A simple function to checker whether a target mime type matches a mime-type
|
||
pattern (e.g. image/jpeg matches image/jpeg OR image/*).
|
||
|
||
## Example Usage
|
||
|
||
<<< example.js
|
||
|
||
**/
|
||
module.exports = function(target, pattern) {
|
||
function test(pattern) {
|
||
var result = wildcard(pattern, target, reMimePartSplit);
|
||
|
||
// ensure that we have a valid mime type (should have two parts)
|
||
return result && result.length >= 2;
|
||
}
|
||
|
||
return pattern ? test(pattern.split(';')[0]) : test;
|
||
};
|
||
|
||
},{"wildcard":57}],52:[function(require,module,exports){
/**
 * Create an event emitter with namespaces
 * @name createNamespaceEmitter
 * @example
 * var emitter = require('./index')()
 *
 * emitter.on('*', function () {
 *   console.log('all events emitted', this.event)
 * })
 *
 * emitter.on('example', function () {
 *   console.log('example event emitted')
 * })
 */
module.exports = function createNamespaceEmitter () {
  var emitter = {}
  var _fns = emitter._fns = {}

  /**
   * Emit an event. Optionally namespace the event. Handlers are fired in the order in which they were added, with exact matches taking precedence. Separate the namespace and event with a `:`.
   * @name emit
   * @param {String} event – the name of the event, with optional namespace
   * @param {...*} data – up to 6 arguments that are passed to the event listener
   * @example
   * emitter.emit('example')
   * emitter.emit('demo:test')
   * emitter.emit('data', { example: true}, 'a string', 1)
   */
  emitter.emit = function emit (event, arg1, arg2, arg3, arg4, arg5, arg6) {
    var toEmit = getListeners(event)

    if (toEmit.length) {
      emitAll(event, toEmit, [arg1, arg2, arg3, arg4, arg5, arg6])
    }
  }

  /**
   * Create an event listener.
   * @name on
   * @param {String} event
   * @param {Function} fn
   * @example
   * emitter.on('example', function () {})
   * emitter.on('demo', function () {})
   */
  emitter.on = function on (event, fn) {
    if (!_fns[event]) {
      _fns[event] = []
    }

    _fns[event].push(fn)
  }

  /**
   * Create an event listener that fires once.
   * @name once
   * @param {String} event
   * @param {Function} fn
   * @example
   * emitter.once('example', function () {})
   * emitter.once('demo', function () {})
   */
  emitter.once = function once (event, fn) {
    function one () {
      fn.apply(this, arguments)
      emitter.off(event, one)
    }
    this.on(event, one)
  }

  /**
   * Stop listening to an event. Stop all listeners on an event by only passing the event name. Stop a single listener by passing that event handler as a callback.
   * You must be explicit about what will be unsubscribed: `emitter.off('demo')` will unsubscribe an `emitter.on('demo')` listener,
   * `emitter.off('demo:example')` will unsubscribe an `emitter.on('demo:example')` listener.
   * @name off
   * @param {String} event
   * @param {Function} [fn] – the specific handler
   * @example
   * emitter.off('example')
   * emitter.off('demo', function () {})
   */
  emitter.off = function off (event, fn) {
    var keep = []

    if (event && fn) {
      var fns = this._fns[event]
      var i = 0
      var l = fns ? fns.length : 0

      for (i; i < l; i++) {
        if (fns[i] !== fn) {
          keep.push(fns[i])
        }
      }
    }

    keep.length ? this._fns[event] = keep : delete this._fns[event]
  }

  function getListeners (e) {
    var out = _fns[e] ? _fns[e] : []
    var idx = e.indexOf(':')
    var args = (idx === -1) ? [e] : [e.substring(0, idx), e.substring(idx + 1)]

    var keys = Object.keys(_fns)
    var i = 0
    var l = keys.length

    for (i; i < l; i++) {
      var key = keys[i]
      if (key === '*') {
        out = out.concat(_fns[key])
      }

      if (args.length === 2 && args[0] === key) {
        out = out.concat(_fns[key])
        break
      }
    }

    return out
  }

  function emitAll (e, fns, args) {
    var i = 0
    var l = fns.length

    for (i; i < l; i++) {
      if (!fns[i]) break
      fns[i].event = e
      fns[i].apply(fns[i], args)
    }
  }

  return emitter
}
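
// Hedged usage sketch (comment only; variable names are illustrative). Per
// getListeners above, emitting a namespaced event also fires listeners
// registered for the bare namespace and for '*':
//
//   var events = require('namespace-emitter')()
//   events.on('upload', function () { /* also fires for 'upload:progress' */ })
//   events.on('upload:progress', function (pct) { console.log(pct) })
//   events.emit('upload:progress', 42)   // runs both listeners above
//   events.off('upload:progress')        // removes only the exact-match listeners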
},{}],53:[function(require,module,exports){
(function (process){(function (){
// This file replaces `index.js` in bundlers like webpack or Rollup,
// according to `browser` config in `package.json`.

let { urlAlphabet } = require('./url-alphabet/index.cjs')

if (process.env.NODE_ENV !== 'production') {
  // All bundlers will remove this block in the production bundle.
  if (
    typeof navigator !== 'undefined' &&
    navigator.product === 'ReactNative' &&
    typeof crypto === 'undefined'
  ) {
    throw new Error(
      'React Native does not have a built-in secure random generator. ' +
        'If you don’t need unpredictable IDs use `nanoid/non-secure`. ' +
        'For secure IDs, import `react-native-get-random-values` ' +
        'before Nano ID.'
    )
  }
  if (typeof msCrypto !== 'undefined' && typeof crypto === 'undefined') {
    throw new Error(
      'Import file with `if (!window.crypto) window.crypto = window.msCrypto`' +
        ' before importing Nano ID to fix IE 11 support'
    )
  }
  if (typeof crypto === 'undefined') {
    throw new Error(
      'Your browser does not have secure random generator. ' +
        'If you don’t need unpredictable IDs, you can use nanoid/non-secure.'
    )
  }
}

let random = bytes => crypto.getRandomValues(new Uint8Array(bytes))

let customRandom = (alphabet, size, getRandom) => {
  // First, a bitmask is necessary to generate the ID. The bitmask makes bytes
  // values closer to the alphabet size. The bitmask calculates the closest
  // `2^31 - 1` number, which exceeds the alphabet size.
  // For example, the bitmask for the alphabet size 30 is 31 (00011111).
  // `Math.clz32` is not used, because it is not available in browsers.
  let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1
  // Though, the bitmask solution is not perfect since the bytes exceeding
  // the alphabet size are refused. Therefore, to reliably generate the ID,
  // the random bytes redundancy has to be satisfied.

  // Note: every hardware random generator call is performance expensive,
  // because the system call for entropy collection takes a lot of time.
  // So, to avoid additional system calls, extra bytes are requested in advance.

  // Next, a step determines how many random bytes to generate.
  // The number of random bytes gets decided upon the ID size, mask,
  // alphabet size, and magic number 1.6 (using 1.6 peaks at performance
  // according to benchmarks).

  // `-~f => Math.ceil(f)` if f is a float
  // `-~i => i + 1` if i is an integer
  let step = -~((1.6 * mask * size) / alphabet.length)
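  // Worked example (derived from the two lines above): for the 64-symbol URL
  // alphabet and the default size of 21, mask = (2 << 5) - 1 = 63 and
  // step = -~((1.6 * 63 * 21) / 64) = -~33.075 = 34 random bytes per attempt.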

  return () => {
    let id = ''
    while (true) {
      let bytes = getRandom(step)
      // A compact alternative for `for (var i = 0; i < step; i++)`.
      let j = step
      while (j--) {
        // Adding `|| ''` refuses a random byte that exceeds the alphabet size.
        id += alphabet[bytes[j] & mask] || ''
        if (id.length === size) return id
      }
    }
  }
}

let customAlphabet = (alphabet, size) => customRandom(alphabet, size, random)

let nanoid = (size = 21) => {
  let id = ''
  let bytes = crypto.getRandomValues(new Uint8Array(size))

  // A compact alternative for `for (var i = 0; i < step; i++)`.
  while (size--) {
    // It is incorrect to use bytes exceeding the alphabet size.
    // The following mask reduces the random byte in the 0-255 value
    // range to the 0-63 value range. Therefore, adding hacks, such
    // as empty string fallback or magic numbers, is unnecessary because
    // the bitmask trims bytes down to the alphabet size.
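    // Concretely, the branches below map byte values 0-35 to '0'-'9' and
    // 'a'-'z', 36-61 to 'A'-'Z', 62 to '_', and 63 to '-'.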
    let byte = bytes[size] & 63
    if (byte < 36) {
      // `0-9a-z`
      id += byte.toString(36)
    } else if (byte < 62) {
      // `A-Z`
      id += (byte - 26).toString(36).toUpperCase()
    } else if (byte < 63) {
      id += '_'
    } else {
      id += '-'
    }
  }
  return id
}

module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random }
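
// A minimal usage sketch (comment only; the variable names are illustrative):
//
//   const { nanoid, customAlphabet } = require('nanoid')
//   nanoid()                                 // => 21-char ID drawn from A-Za-z0-9_-
//   const hexId = customAlphabet('0123456789abcdef', 8)
//   hexId()                                  // => 8-char lowercase hex ID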

}).call(this)}).call(this,require('_process'))
},{"./url-alphabet/index.cjs":54,"_process":56}],54:[function(require,module,exports){
// This alphabet uses `A-Za-z0-9_-` symbols. The genetic algorithm helped
// optimize the gzip compression for this alphabet.
let urlAlphabet =
  'ModuleSymbhasOwnPr-0123456789ABCDEFGHNRVfgctiUvz_KqYTJkLxpZXIjQW'

module.exports = { urlAlphabet }

},{}],55:[function(require,module,exports){
var n,l,u,t,i,o,r,f,e={},c=[],s=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i;function a(n,l){for(var u in l)n[u]=l[u];return n}function p(n){var l=n.parentNode;l&&l.removeChild(n)}function v(l,u,t){var i,o,r,f={};for(r in u)"key"==r?i=u[r]:"ref"==r?o=u[r]:f[r]=u[r];if(arguments.length>2&&(f.children=arguments.length>3?n.call(arguments,2):t),"function"==typeof l&&null!=l.defaultProps)for(r in l.defaultProps)void 0===f[r]&&(f[r]=l.defaultProps[r]);return h(l,f,i,o,null)}function h(n,t,i,o,r){var f={type:n,props:t,key:i,ref:o,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,__h:null,constructor:void 0,__v:null==r?++u:r};return null!=l.vnode&&l.vnode(f),f}function y(n){return n.children}function d(n,l){this.props=n,this.context=l}function _(n,l){if(null==l)return n.__?_(n.__,n.__.__k.indexOf(n)+1):null;for(var u;l<n.__k.length;l++)if(null!=(u=n.__k[l])&&null!=u.__e)return u.__e;return"function"==typeof n.type?_(n):null}function k(n){var l,u;if(null!=(n=n.__)&&null!=n.__c){for(n.__e=n.__c.base=null,l=0;l<n.__k.length;l++)if(null!=(u=n.__k[l])&&null!=u.__e){n.__e=n.__c.base=u.__e;break}return k(n)}}function x(n){(!n.__d&&(n.__d=!0)&&i.push(n)&&!b.__r++||r!==l.debounceRendering)&&((r=l.debounceRendering)||o)(b)}function b(){for(var n;b.__r=i.length;)n=i.sort(function(n,l){return n.__v.__b-l.__v.__b}),i=[],n.some(function(n){var l,u,t,i,o,r;n.__d&&(o=(i=(l=n).__v).__e,(r=l.__P)&&(u=[],(t=a({},i)).__v=i.__v+1,I(r,i,t,l.__n,void 0!==r.ownerSVGElement,null!=i.__h?[o]:null,u,null==o?_(i):o,i.__h),T(u,i),i.__e!=o&&k(i)))})}function m(n,l,u,t,i,o,r,f,s,a){var p,v,d,k,x,b,m,A=t&&t.__k||c,P=A.length;for(u.__k=[],p=0;p<l.length;p++)if(null!=(k=u.__k[p]=null==(k=l[p])||"boolean"==typeof k?null:"string"==typeof k||"number"==typeof k||"bigint"==typeof k?h(null,k,null,null,k):Array.isArray(k)?h(y,{children:k},null,null,null):k.__b>0?h(k.type,k.props,k.key,null,k.__v):k)){if(k.__=u,k.__b=u.__b+1,null===(d=A[p])||d&&k.key==d.key&&k.type===d.type)A[p]=void 0;else for(v=0;v<P;v++){if((d=A[v])&&k.key==d.key&&k.type===d.type){A[v]=void 0;break}d=null}I(n,k,d=d||e,i,o,r,f,s,a),x=k.__e,(v=k.ref)&&d.ref!=v&&(m||(m=[]),d.ref&&m.push(d.ref,null,k),m.push(v,k.__c||x,k)),null!=x?(null==b&&(b=x),"function"==typeof k.type&&null!=k.__k&&k.__k===d.__k?k.__d=s=g(k,s,n):s=w(n,k,d,A,x,s),a||"option"!==u.type?"function"==typeof u.type&&(u.__d=s):n.value=""):s&&d.__e==s&&s.parentNode!=n&&(s=_(d))}for(u.__e=b,p=P;p--;)null!=A[p]&&("function"==typeof u.type&&null!=A[p].__e&&A[p].__e==u.__d&&(u.__d=_(t,p+1)),L(A[p],A[p]));if(m)for(p=0;p<m.length;p++)z(m[p],m[++p],m[++p])}function g(n,l,u){var t,i;for(t=0;t<n.__k.length;t++)(i=n.__k[t])&&(i.__=n,l="function"==typeof i.type?g(i,l,u):w(u,i,i,n.__k,i.__e,l));return l}function w(n,l,u,t,i,o){var r,f,e;if(void 0!==l.__d)r=l.__d,l.__d=void 0;else if(null==u||i!=o||null==i.parentNode)n:if(null==o||o.parentNode!==n)n.appendChild(i),r=null;else{for(f=o,e=0;(f=f.nextSibling)&&e<t.length;e+=2)if(f==i)break n;n.insertBefore(i,o),r=o}return void 0!==r?r:i.nextSibling}function A(n,l,u,t,i){var o;for(o in u)"children"===o||"key"===o||o in l||C(n,o,null,u[o],t);for(o in l)i&&"function"!=typeof l[o]||"children"===o||"key"===o||"value"===o||"checked"===o||u[o]===l[o]||C(n,o,l[o],u[o],t)}function P(n,l,u){"-"===l[0]?n.setProperty(l,u):n[l]=null==u?"":"number"!=typeof u||s.test(l)?u:u+"px"}function C(n,l,u,t,i){var o;n:if("style"===l)if("string"==typeof u)n.style.cssText=u;else{if("string"==typeof t&&(n.style.cssText=t=""),t)for(l in t)u&&l in u||P(n.style,l,"");if(u)for(l in 
u)t&&u[l]===t[l]||P(n.style,l,u[l])}else if("o"===l[0]&&"n"===l[1])o=l!==(l=l.replace(/Capture$/,"")),l=l.toLowerCase()in n?l.toLowerCase().slice(2):l.slice(2),n.l||(n.l={}),n.l[l+o]=u,u?t||n.addEventListener(l,o?H:$,o):n.removeEventListener(l,o?H:$,o);else if("dangerouslySetInnerHTML"!==l){if(i)l=l.replace(/xlink[H:h]/,"h").replace(/sName$/,"s");else if("href"!==l&&"list"!==l&&"form"!==l&&"tabIndex"!==l&&"download"!==l&&l in n)try{n[l]=null==u?"":u;break n}catch(n){}"function"==typeof u||(null!=u&&(!1!==u||"a"===l[0]&&"r"===l[1])?n.setAttribute(l,u):n.removeAttribute(l))}}function $(n){this.l[n.type+!1](l.event?l.event(n):n)}function H(n){this.l[n.type+!0](l.event?l.event(n):n)}function I(n,u,t,i,o,r,f,e,c){var s,p,v,h,_,k,x,b,g,w,A,P=u.type;if(void 0!==u.constructor)return null;null!=t.__h&&(c=t.__h,e=u.__e=t.__e,u.__h=null,r=[e]),(s=l.__b)&&s(u);try{n:if("function"==typeof P){if(b=u.props,g=(s=P.contextType)&&i[s.__c],w=s?g?g.props.value:s.__:i,t.__c?x=(p=u.__c=t.__c).__=p.__E:("prototype"in P&&P.prototype.render?u.__c=p=new P(b,w):(u.__c=p=new d(b,w),p.constructor=P,p.render=M),g&&g.sub(p),p.props=b,p.state||(p.state={}),p.context=w,p.__n=i,v=p.__d=!0,p.__h=[]),null==p.__s&&(p.__s=p.state),null!=P.getDerivedStateFromProps&&(p.__s==p.state&&(p.__s=a({},p.__s)),a(p.__s,P.getDerivedStateFromProps(b,p.__s))),h=p.props,_=p.state,v)null==P.getDerivedStateFromProps&&null!=p.componentWillMount&&p.componentWillMount(),null!=p.componentDidMount&&p.__h.push(p.componentDidMount);else{if(null==P.getDerivedStateFromProps&&b!==h&&null!=p.componentWillReceiveProps&&p.componentWillReceiveProps(b,w),!p.__e&&null!=p.shouldComponentUpdate&&!1===p.shouldComponentUpdate(b,p.__s,w)||u.__v===t.__v){p.props=b,p.state=p.__s,u.__v!==t.__v&&(p.__d=!1),p.__v=u,u.__e=t.__e,u.__k=t.__k,u.__k.forEach(function(n){n&&(n.__=u)}),p.__h.length&&f.push(p);break n}null!=p.componentWillUpdate&&p.componentWillUpdate(b,p.__s,w),null!=p.componentDidUpdate&&p.__h.push(function(){p.componentDidUpdate(h,_,k)})}p.context=w,p.props=b,p.state=p.__s,(s=l.__r)&&s(u),p.__d=!1,p.__v=u,p.__P=n,s=p.render(p.props,p.state,p.context),p.state=p.__s,null!=p.getChildContext&&(i=a(a({},i),p.getChildContext())),v||null==p.getSnapshotBeforeUpdate||(k=p.getSnapshotBeforeUpdate(h,_)),A=null!=s&&s.type===y&&null==s.key?s.props.children:s,m(n,Array.isArray(A)?A:[A],u,t,i,o,r,f,e,c),p.base=u.__e,u.__h=null,p.__h.length&&f.push(p),x&&(p.__E=p.__=null),p.__e=!1}else null==r&&u.__v===t.__v?(u.__k=t.__k,u.__e=t.__e):u.__e=j(t.__e,u,t,i,o,r,f,c);(s=l.diffed)&&s(u)}catch(n){u.__v=null,(c||null!=r)&&(u.__e=e,u.__h=!!c,r[r.indexOf(e)]=null),l.__e(n,u,t)}}function T(n,u){l.__c&&l.__c(u,n),n.some(function(u){try{n=u.__h,u.__h=[],n.some(function(n){n.call(u)})}catch(n){l.__e(n,u.__v)}})}function j(l,u,t,i,o,r,f,c){var s,a,v,h=t.props,y=u.props,d=u.type,k=0;if("svg"===d&&(o=!0),null!=r)for(;k<r.length;k++)if((s=r[k])&&(s===l||(d?s.localName==d:3==s.nodeType))){l=s,r[k]=null;break}if(null==l){if(null===d)return document.createTextNode(y);l=o?document.createElementNS("http://www.w3.org/2000/svg",d):document.createElement(d,y.is&&y),r=null,c=!1}if(null===d)h===y||c&&l.data===y||(l.data=y);else{if(r=r&&n.call(l.childNodes),a=(h=t.props||e).dangerouslySetInnerHTML,v=y.dangerouslySetInnerHTML,!c){if(null!=r)for(h={},k=0;k<l.attributes.length;k++)h[l.attributes[k].name]=l.attributes[k].value;(v||a)&&(v&&(a&&v.__html==a.__html||v.__html===l.innerHTML)||(l.innerHTML=v&&v.__html||""))}if(A(l,y,h,o,c),v)u.__k=[];else 
if(k=u.props.children,m(l,Array.isArray(k)?k:[k],u,t,i,o&&"foreignObject"!==d,r,f,r?r[0]:t.__k&&_(t,0),c),null!=r)for(k=r.length;k--;)null!=r[k]&&p(r[k]);c||("value"in y&&void 0!==(k=y.value)&&(k!==l.value||"progress"===d&&!k)&&C(l,"value",k,h.value,!1),"checked"in y&&void 0!==(k=y.checked)&&k!==l.checked&&C(l,"checked",k,h.checked,!1))}return l}function z(n,u,t){try{"function"==typeof n?n(u):n.current=u}catch(n){l.__e(n,t)}}function L(n,u,t){var i,o;if(l.unmount&&l.unmount(n),(i=n.ref)&&(i.current&&i.current!==n.__e||z(i,null,u)),null!=(i=n.__c)){if(i.componentWillUnmount)try{i.componentWillUnmount()}catch(n){l.__e(n,u)}i.base=i.__P=null}if(i=n.__k)for(o=0;o<i.length;o++)i[o]&&L(i[o],u,"function"!=typeof n.type);t||null==n.__e||p(n.__e),n.__e=n.__d=void 0}function M(n,l,u){return this.constructor(n,u)}function N(u,t,i){var o,r,f;l.__&&l.__(u,t),r=(o="function"==typeof i)?null:i&&i.__k||t.__k,f=[],I(t,u=(!o&&i||t).__k=v(y,null,[u]),r||e,e,void 0!==t.ownerSVGElement,!o&&i?[i]:r?null:t.firstChild?n.call(t.childNodes):null,f,!o&&i?i:r?r.__e:t.firstChild,o),T(f,u)}n=c.slice,l={__e:function(n,l){for(var u,t,i;l=l.__;)if((u=l.__c)&&!u.__)try{if((t=u.constructor)&&null!=t.getDerivedStateFromError&&(u.setState(t.getDerivedStateFromError(n)),i=u.__d),null!=u.componentDidCatch&&(u.componentDidCatch(n),i=u.__d),i)return u.__E=u}catch(l){n=l}throw n}},u=0,t=function(n){return null!=n&&void 0===n.constructor},d.prototype.setState=function(n,l){var u;u=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=a({},this.state),"function"==typeof n&&(n=n(a({},u),this.props)),n&&a(u,n),null!=n&&this.__v&&(l&&this.__h.push(l),x(this))},d.prototype.forceUpdate=function(n){this.__v&&(this.__e=!0,n&&this.__h.push(n),x(this))},d.prototype.render=y,i=[],o="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,b.__r=0,f=0,exports.render=N,exports.hydrate=function n(l,u){N(l,u,n)},exports.createElement=v,exports.h=v,exports.Fragment=y,exports.createRef=function(){return{current:null}},exports.isValidElement=t,exports.Component=d,exports.cloneElement=function(l,u,t){var i,o,r,f=a({},l.props);for(r in u)"key"==r?i=u[r]:"ref"==r?o=u[r]:f[r]=u[r];return arguments.length>2&&(f.children=arguments.length>3?n.call(arguments,2):t),h(l.type,f,i||l.key,o||l.ref,null)},exports.createContext=function(n,l){var u={__c:l="__cC"+f++,__:n,Consumer:function(n,l){return n.children(l)},Provider:function(n){var u,t;return this.getChildContext||(u=[],(t={})[l]=this,this.getChildContext=function(){return t},this.shouldComponentUpdate=function(n){this.props.value!==n.value&&u.some(x)},this.sub=function(n){u.push(n);var l=n.componentWillUnmount;n.componentWillUnmount=function(){u.splice(u.indexOf(n),1),l&&l.call(n)}}),n.children}};return u.Provider.__=u.Consumer.contextType=u},exports.toChildArray=function n(l,u){return u=u||[],null==l||"boolean"==typeof l||(Array.isArray(l)?l.some(function(l){n(l,u)}):u.push(l)),u},exports.options=l;

},{}],56:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};

// cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.

var cachedSetTimeout;
var cachedClearTimeout;

function defaultSetTimout() {
  throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout () {
  throw new Error('clearTimeout has not been defined');
}
(function () {
  try {
    if (typeof setTimeout === 'function') {
      cachedSetTimeout = setTimeout;
    } else {
      cachedSetTimeout = defaultSetTimout;
    }
  } catch (e) {
    cachedSetTimeout = defaultSetTimout;
  }
  try {
    if (typeof clearTimeout === 'function') {
      cachedClearTimeout = clearTimeout;
    } else {
      cachedClearTimeout = defaultClearTimeout;
    }
  } catch (e) {
    cachedClearTimeout = defaultClearTimeout;
  }
} ())
function runTimeout(fun) {
  if (cachedSetTimeout === setTimeout) {
    // normal environments in sane situations
    return setTimeout(fun, 0);
  }
  // if setTimeout wasn't available but was later defined
  if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
    cachedSetTimeout = setTimeout;
    return setTimeout(fun, 0);
  }
  try {
    // when somebody has screwed with setTimeout but no I.E. madness
    return cachedSetTimeout(fun, 0);
  } catch(e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
      return cachedSetTimeout.call(null, fun, 0);
    } catch(e){
      // same as above but when it's a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error
      return cachedSetTimeout.call(this, fun, 0);
    }
  }
}
function runClearTimeout(marker) {
  if (cachedClearTimeout === clearTimeout) {
    // normal environments in sane situations
    return clearTimeout(marker);
  }
  // if clearTimeout wasn't available but was later defined
  if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
    cachedClearTimeout = clearTimeout;
    return clearTimeout(marker);
  }
  try {
    // when somebody has screwed with setTimeout but no I.E. madness
    return cachedClearTimeout(marker);
  } catch (e){
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
      return cachedClearTimeout.call(null, marker);
    } catch (e){
      // same as above but when it's a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error.
      // Some versions of I.E. have different rules for clearTimeout vs setTimeout
      return cachedClearTimeout.call(this, marker);
    }
  }
}
var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;

function cleanUpNextTick() {
  if (!draining || !currentQueue) {
    return;
  }
  draining = false;
  if (currentQueue.length) {
    queue = currentQueue.concat(queue);
  } else {
    queueIndex = -1;
  }
  if (queue.length) {
    drainQueue();
  }
}

function drainQueue() {
  if (draining) {
    return;
  }
  var timeout = runTimeout(cleanUpNextTick);
  draining = true;

  var len = queue.length;
  while(len) {
    currentQueue = queue;
    queue = [];
    while (++queueIndex < len) {
      if (currentQueue) {
        currentQueue[queueIndex].run();
      }
    }
    queueIndex = -1;
    len = queue.length;
  }
  currentQueue = null;
  draining = false;
  runClearTimeout(timeout);
}

process.nextTick = function (fun) {
  var args = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var i = 1; i < arguments.length; i++) {
      args[i - 1] = arguments[i];
    }
  }
  queue.push(new Item(fun, args));
  if (queue.length === 1 && !draining) {
    runTimeout(drainQueue);
  }
};
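
// Hedged usage sketch: the API mirrors Node's process.nextTick, but here the
// callback is scheduled through runTimeout (effectively setTimeout(fn, 0));
// extra arguments are forwarded to the callback:
//
//   process.nextTick(function (msg) { console.log(msg) }, 'queued')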

// v8 likes predictable objects
function Item(fun, array) {
  this.fun = fun;
  this.array = array;
}
Item.prototype.run = function () {
  this.fun.apply(null, this.array);
};
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};

function noop() {}

process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;

process.listeners = function (name) { return [] }

process.binding = function (name) {
  throw new Error('process.binding is not supported');
};

process.cwd = function () { return '/' };
process.chdir = function (dir) {
  throw new Error('process.chdir is not supported');
};
process.umask = function() { return 0; };

},{}],57:[function(require,module,exports){
/* jshint node: true */
'use strict';

/**
  # wildcard

  Very simple wildcard matching, which is designed to provide the same
  functionality that is found in the
  [eve](https://github.com/adobe-webplatform/eve) eventing library.

  ## Usage

  It works with strings:

  <<< examples/strings.js

  Arrays:

  <<< examples/arrays.js

  Objects (matching against keys):

  <<< examples/objects.js

  While the library works in Node, if you are looking for file-based
  wildcard matching then you should have a look at:

  <https://github.com/isaacs/node-glob>
**/

function WildcardMatcher(text, separator) {
  this.text = text = text || '';
  this.hasWild = ~text.indexOf('*');
  this.separator = separator;
  this.parts = text.split(separator);
}

WildcardMatcher.prototype.match = function(input) {
  var matches = true;
  var parts = this.parts;
  var ii;
  var partsCount = parts.length;
  var testParts;

  if (typeof input == 'string' || input instanceof String) {
    if (!this.hasWild && this.text != input) {
      matches = false;
    } else {
      testParts = (input || '').split(this.separator);
      for (ii = 0; matches && ii < partsCount; ii++) {
        if (parts[ii] === '*') {
          continue;
        } else if (ii < testParts.length) {
          matches = parts[ii] === testParts[ii];
        } else {
          matches = false;
        }
      }

      // If matches, then return the component parts
      matches = matches && testParts;
    }
  }
  else if (typeof input.splice == 'function') {
    matches = [];

    for (ii = input.length; ii--; ) {
      if (this.match(input[ii])) {
        matches[matches.length] = input[ii];
      }
    }
  }
  else if (typeof input == 'object') {
    matches = {};

    for (var key in input) {
      if (this.match(key)) {
        matches[key] = input[key];
      }
    }
  }

  return matches;
};

module.exports = function(text, test, separator) {
  var matcher = new WildcardMatcher(text, separator || /[\/\.]/);
  if (typeof test != 'undefined') {
    return matcher.match(test);
  }

  return matcher;
};
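
// Hedged usage sketch (comment only; results derived from the matcher above):
//
//   wildcard('a.b.*', 'a.b.c')      // => ['a', 'b', 'c'] (truthy)
//   wildcard('a.b.*', 'a.c.d')      // => false
//   var matcher = wildcard('image/*')
//   matcher.match('image/png')      // => ['image', 'png']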
},{}],58:[function(require,module,exports){
// We need a custom build of Uppy because we do not use webpack for
// our JS modules/build. The only way to get what you want from Uppy
// is to use the webpack modules or to include the entire Uppy project
// including all plugins in a single JS file. This way we can just
// use the plugins we actually want.
window.Uppy = {}
Uppy.Core = require('@uppy/core')
Uppy.XHRUpload = require('@uppy/xhr-upload')
Uppy.AwsS3 = require('@uppy/aws-s3')
Uppy.AwsS3Multipart = require('@uppy/aws-s3-multipart')
Uppy.DropTarget = require('@uppy/drop-target')
Uppy.Utils = {
  delay: require('@uppy/utils/lib/delay'),
  EventTracker: require('@uppy/utils/lib/EventTracker'),
  AbortControllerLib: require('@uppy/utils/lib/AbortController')
}
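
// Hedged sketch of how application code might consume these globals, assuming
// Uppy.Core is the Uppy constructor exported by @uppy/core (the endpoint and
// options below are illustrative, not part of this bundle):
//
//   const uppy = new Uppy.Core({ autoProceed: false })
//   uppy.use(Uppy.DropTarget, { target: document.body })
//   uppy.use(Uppy.XHRUpload, { endpoint: "/example-upload-endpoint" })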
},{"@uppy/aws-s3":5,"@uppy/aws-s3-multipart":3,"@uppy/core":18,"@uppy/drop-target":21,"@uppy/utils/lib/AbortController":23,"@uppy/utils/lib/EventTracker":24,"@uppy/utils/lib/delay":29,"@uppy/xhr-upload":49}]},{},[58]);