FEATURE: Make Discourse work offline with WorkboxJS (#7870)
This commit is contained in:
parent 839916aa49
commit 1221d34284
@@ -9,9 +9,7 @@ export default {
     const isSupported = isSecured && "serviceWorker" in navigator;
 
     if (isSupported) {
-      const isApple = !!navigator.platform.match(/(Mac|iPhone|iPod|iPad)/i);
-
-      if (Discourse.ServiceWorkerURL && !isApple) {
+      if (Discourse.ServiceWorkerURL) {
         navigator.serviceWorker.getRegistrations().then(registrations => {
           for (let registration of registrations) {
             if (
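The hunk above removes the Apple-platform guard, so the worker is now registered on any secure browser that advertises service worker support. For orientation, a minimal sketch of the register/unregister flow such an initializer performs, assuming the Discourse.ServiceWorkerURL global shown in the diff; the unregister condition is truncated in the hunk, so the check below is illustrative rather than the exact Discourse code:

if ("serviceWorker" in navigator && Discourse.ServiceWorkerURL) {
  // Clean out registrations left behind by older worker scripts
  // (illustrative condition; the real one is cut off in the hunk above).
  navigator.serviceWorker.getRegistrations().then(registrations => {
    for (let registration of registrations) {
      if (
        registration.active &&
        !registration.active.scriptURL.includes(Discourse.ServiceWorkerURL)
      ) {
        registration.unregister();
      }
    }
  });

  // Register (or re-register) the current worker; the real initializer resolves
  // the URL against the site's base path.
  navigator.serviceWorker
    .register(Discourse.ServiceWorkerURL)
    .catch(error => console.info(`Failed to register service worker: ${error}`));
}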
@@ -140,7 +140,7 @@ export function ajax() {
     }
 
     if (args.type === "GET" && args.cache !== true) {
-      args.cache = false;
+      args.cache = true; // Disable JQuery cache busting param, which was created to deal with IE8
     }
 
     ajaxObj = $.ajax(Discourse.getURL(url), args);
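Setting args.cache = true here tells jQuery not to append its `_=<timestamp>` cache-busting query parameter to GET requests, so URLs stay stable and the service worker's URL-keyed cache can actually match them. A small illustrative comparison (not from the commit):

// jQuery default (cache: false): every GET gets a unique URL and can never hit the SW cache.
//   GET /latest.json?_=1563500000000
// With cache: true the URL is left untouched, so workbox's NetworkFirst cache can serve it offline.
//   GET /latest.json
$.ajax(Discourse.getURL("/latest.json"), { type: "GET", cache: true });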
@@ -1,117 +1,25 @@
-'use strict';
+importScripts("<%= ::UrlHelper.absolute("/javascripts/workbox/workbox-sw.js") %>");
 
-// Special offline and fetch interception is restricted to Android only
-// we have had a large amount of pain supporting this on Firefox / Safari
-// it is only strongly required on Android, when PWA gets better on iOS
-// we can unlock it there as well, for Desktop we can consider unlocking it
-// if we start supporting offline browsing for laptops
-if (/(android)/i.test(navigator.userAgent)) {
+workbox.setConfig({
+  modulePathPrefix: "<%= ::UrlHelper.absolute("/javascripts/workbox") %>"
+});
 
-  // Incrementing CACHE_VERSION will kick off the install event and force previously cached
-  // resources to be cached again.
-  const CACHE_VERSION = 1;
+const cacheVersion = "1";
 
-  const CURRENT_CACHES = {
-    offline: 'offline-v' + CACHE_VERSION
-  };
-
-  const OFFLINE_URL = 'offline.html';
-
-  const createCacheBustedRequest = function(url) {
-    var headers = new Headers({
-      'Discourse-Track-View': '0'
-    });
-
-    var request = new Request(url, {cache: 'reload', headers: headers});
-    // See https://fetch.spec.whatwg.org/#concept-request-mode
-    // This is not yet supported in Chrome as of M48, so we need to explicitly check to see
-    // if the cache: 'reload' option had any effect.
-    if ('cache' in request) {
-      return request;
-    }
-
-    // If {cache: 'reload'} didn't have any effect, append a cache-busting URL parameter instead.
-    var bustedUrl = new URL(url, self.location.href);
-    bustedUrl.search += (bustedUrl.search ? '&' : '') + 'cachebust=' + Date.now();
-    return new Request(bustedUrl, {headers: headers});
-  }
-
-  self.addEventListener('install', function(event) {
-    event.waitUntil(
-      // We can't use cache.add() here, since we want OFFLINE_URL to be the cache key, but
-      // the actual URL we end up requesting might include a cache-busting parameter.
-      fetch(createCacheBustedRequest(OFFLINE_URL)).then(function(response) {
-        return caches.open(CURRENT_CACHES.offline).then(function(cache) {
-          return cache.put(OFFLINE_URL, response);
-        });
-      }).then(function(cache) {
-        self.skipWaiting();
-      })
-    );
-  });
-
-  self.addEventListener('activate', function(event) {
-    // Delete all caches that aren't named in CURRENT_CACHES.
-    // While there is only one cache in this example, the same logic will handle the case where
-    // there are multiple versioned caches.
-    var expectedCacheNames = Object.keys(CURRENT_CACHES).map(function(key) {
-      return CURRENT_CACHES[key];
-    });
-
-    event.waitUntil(
-      caches.keys().then(function(cacheNames) {
-        return Promise.all(
-          cacheNames.map(function(cacheName) {
-            if (expectedCacheNames.indexOf(cacheName) === -1) {
-              // If this cache name isn't present in the array of "expected" cache names,
-              // then delete it.
-              return caches.delete(cacheName);
-            }
-          })
-        );
-      }).then(function() {
-        self.clients.claim()
-      })
-    );
-  });
-
-  self.addEventListener('fetch', function(event) {
-    // Bypass service workers if this is a url with a token param
-    if(/\?.*token/i.test(event.request.url)) {
-      return;
-    }
-    // We only want to call event.respondWith() if this is a navigation request
-    // for an HTML page.
-    // request.mode of 'navigate' is unfortunately not supported in Chrome
-    // versions older than 49, so we need to include a less precise fallback,
-    // which checks for a GET request with an Accept: text/html header.
-    if (event.request.mode === 'navigate' ||
-        (event.request.method === 'GET' &&
-         event.request.headers.get('accept').includes('text/html'))) {
-      event.respondWith(
-        fetch(event.request).catch(function(error) {
-          // The catch is only triggered if fetch() throws an exception, which will most likely
-          // happen due to the server being unreachable.
-          // If fetch() returns a valid HTTP response with an response code in the 4xx or 5xx
-          // range, the catch() will NOT be called. If you need custom handling for 4xx or 5xx
-          // errors, see https://github.com/GoogleChrome/samples/tree/gh-pages/service-worker/fallback-response
-          if (!navigator.onLine) {
-            return caches.match(OFFLINE_URL);
-          } else {
-            throw new Error(error);
-          }
-        })
-      );
-    }
-
-    // If our if() condition is false, then this fetch handler won't intercept the request.
-    // If there are any other fetch handlers registered, they will get a chance to call
-    // event.respondWith(). If no fetch handlers call event.respondWith(), the request will be
-    // handled by the browser as if there were no service worker involvement.
-  });
-
-}
+// Cache all GET requests, so Discourse can be used while offline
+workbox.routing.registerRoute(
+  new RegExp('.*?'), // Matches all, GET is implicit
+  new workbox.strategies.NetworkFirst({ // This will only use the cache when a network request fails
+    cacheName: "discourse-" + cacheVersion,
+    plugins: [
+      new workbox.expiration.Plugin({
+        maxAgeSeconds: 7* 24 * 60 * 60, // 7 days
+      }),
+    ],
+  })
+);
 
 const idleThresholdTime = 1000 * 10; // 10 seconds
 var lastAction = -1;
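The registerRoute call above is the heart of the feature: every GET request is tried over the network first, successful responses are copied into the versioned "discourse-1" cache, and the cached copy is served only when the network fails, with entries expiring after seven days. A rough hand-rolled equivalent of that NetworkFirst behaviour, shown only to clarify what the Workbox strategy does; the commit itself relies on workbox.strategies.NetworkFirst rather than this code:

self.addEventListener("fetch", event => {
  if (event.request.method !== "GET") return;

  event.respondWith(
    fetch(event.request)
      .then(response => {
        // Network succeeded: store a copy, keyed by URL, then return the live response.
        const copy = response.clone();
        caches.open("discourse-1").then(cache => cache.put(event.request, copy));
        return response;
      })
      // Network failed (for example, offline): fall back to the last cached copy, if any.
      .catch(() => caches.match(event.request))
  );
});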
@@ -90,6 +90,26 @@ task 'javascript:update' do
   }, {
     # TODO: drop when we eventually drop IE11, this will land in iOS in version 13
     source: 'intersection-observer/intersection-observer.js'
+  }, {
+    source: 'workbox-sw/build/.',
+    destination: 'workbox',
+    public: true
+  }, {
+    source: 'workbox-routing/build/.',
+    destination: 'workbox',
+    public: true
+  }, {
+    source: 'workbox-core/build/.',
+    destination: 'workbox',
+    public: true
+  }, {
+    source: 'workbox-strategies/build/.',
+    destination: 'workbox',
+    public: true
+  }, {
+    source: 'workbox-expiration/build/.',
+    destination: 'workbox',
+    public: true
   }
 ]
@@ -30,7 +30,12 @@
   "mousetrap": "https://github.com/discourse/mousetrap#firefox-alt-key",
   "pikaday": "1.8.0",
   "resumablejs": "1.1.0",
-  "spectrum-colorpicker": "1.8.0"
+  "spectrum-colorpicker": "1.8.0",
+  "workbox-core": "^4.3.1",
+  "workbox-expiration": "^4.3.1",
+  "workbox-routing": "^4.3.1",
+  "workbox-strategies": "^4.3.1",
+  "workbox-sw": "^4.3.1"
 },
 "devDependencies": {
   "@arkweid/lefthook": "^0.5.6",
public/javascripts/workbox/workbox-core.dev.js (new file, 1712 lines; diff suppressed because it is too large)
public/javascripts/workbox/workbox-core.dev.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-core.prod.js (new file, 2 lines; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-core.prod.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-expiration.dev.js (new file, 652 lines)
@@ -0,0 +1,652 @@
[Unminified Workbox v4.3.1 expiration module: CacheTimestampsModel (an IndexedDB store of per-URL timestamps), CacheExpiration (expireEntries, updateTimestamp, isURLExpired, delete) and the expiration Plugin, whose cachedResponseWillBeUsed and cacheDidUpdate lifecycle callbacks prune stale or excess entries whenever a cache is read or written. The minified build of the same module follows as workbox-expiration.prod.js.]
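This expiration module is what enforces the seven-day limit configured in the service worker template above. A short sketch of its two entry points as exposed on workbox.expiration in this 4.3.1 build; the option and method names come from the module itself, while the cache names, URLs and numbers are only illustrative:

// 1. As a strategy plugin: prunes old or excess entries whenever the cache is read or written.
const expirationPlugin = new workbox.expiration.Plugin({
  maxAgeSeconds: 7 * 24 * 60 * 60, // drop entries older than 7 days
  maxEntries: 500,                 // optionally also cap the number of entries
  purgeOnQuotaError: true          // optionally wipe this cache if the origin exceeds its storage quota
});

// 2. Used directly for manual control over a named cache.
const expiration = new workbox.expiration.CacheExpiration("discourse-1", {
  maxAgeSeconds: 7 * 24 * 60 * 60
});
expiration.updateTimestamp("/latest.json")  // record "last used now" for a URL
  .then(() => expiration.expireEntries());  // then delete anything past the configured limits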
public/javascripts/workbox/workbox-expiration.dev.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-expiration.prod.js (new file, 2 lines)
@@ -0,0 +1,2 @@
this.workbox=this.workbox||{},this.workbox.expiration=function(t,e,s,i,a,n){"use strict";try{self["workbox:expiration:4.3.1"]&&_()}catch(t){}const h="workbox-expiration",c="cache-entries",r=t=>{const e=new URL(t,location);return e.hash="",e.href};class o{constructor(t){this.t=t,this.s=new e.DBWrapper(h,1,{onupgradeneeded:t=>this.i(t)})}i(t){const e=t.target.result.createObjectStore(c,{keyPath:"id"});e.createIndex("cacheName","cacheName",{unique:!1}),e.createIndex("timestamp","timestamp",{unique:!1}),s.deleteDatabase(this.t)}async setTimestamp(t,e){t=r(t),await this.s.put(c,{url:t,timestamp:e,cacheName:this.t,id:this.h(t)})}async getTimestamp(t){return(await this.s.get(c,this.h(t))).timestamp}async expireEntries(t,e){const s=await this.s.transaction(c,"readwrite",(s,i)=>{const a=s.objectStore(c),n=[];let h=0;a.index("timestamp").openCursor(null,"prev").onsuccess=(({target:s})=>{const a=s.result;if(a){const s=a.value;s.cacheName===this.t&&(t&&s.timestamp<t||e&&h>=e?n.push(a.value):h++),a.continue()}else i(n)})}),i=[];for(const t of s)await this.s.delete(c,t.id),i.push(t.url);return i}h(t){return this.t+"|"+r(t)}}class u{constructor(t,e={}){this.o=!1,this.u=!1,this.l=e.maxEntries,this.p=e.maxAgeSeconds,this.t=t,this.m=new o(t)}async expireEntries(){if(this.o)return void(this.u=!0);this.o=!0;const t=this.p?Date.now()-1e3*this.p:void 0,e=await this.m.expireEntries(t,this.l),s=await caches.open(this.t);for(const t of e)await s.delete(t);this.o=!1,this.u&&(this.u=!1,this.expireEntries())}async updateTimestamp(t){await this.m.setTimestamp(t,Date.now())}async isURLExpired(t){return await this.m.getTimestamp(t)<Date.now()-1e3*this.p}async delete(){this.u=!1,await this.m.expireEntries(1/0)}}return t.CacheExpiration=u,t.Plugin=class{constructor(t={}){this.D=t,this.p=t.maxAgeSeconds,this.g=new Map,t.purgeOnQuotaError&&n.registerQuotaErrorCallback(()=>this.deleteCacheAndMetadata())}k(t){if(t===a.cacheNames.getRuntimeName())throw new i.WorkboxError("expire-custom-caches-only");let e=this.g.get(t);return e||(e=new u(t,this.D),this.g.set(t,e)),e}cachedResponseWillBeUsed({event:t,request:e,cacheName:s,cachedResponse:i}){if(!i)return null;let a=this.N(i);const n=this.k(s);n.expireEntries();const h=n.updateTimestamp(e.url);if(t)try{t.waitUntil(h)}catch(t){}return a?i:null}N(t){if(!this.p)return!0;const e=this._(t);return null===e||e>=Date.now()-1e3*this.p}_(t){if(!t.headers.has("date"))return null;const e=t.headers.get("date"),s=new Date(e).getTime();return isNaN(s)?null:s}async cacheDidUpdate({cacheName:t,request:e}){const s=this.k(t);await s.updateTimestamp(e.url),await s.expireEntries()}async deleteCacheAndMetadata(){for(const[t,e]of this.g)await caches.delete(t),await e.delete();this.g=new Map}},t}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private,workbox.core);
//# sourceMappingURL=workbox-expiration.prod.js.map
File diff suppressed because one or more lines are too long
public/javascripts/workbox/workbox-routing.dev.js (new file, 1020 lines; diff suppressed because it is too large)
public/javascripts/workbox/workbox-routing.dev.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-routing.prod.js (new file, 2 lines)
@@ -0,0 +1,2 @@
this.workbox=this.workbox||{},this.workbox.routing=function(t,e,r){"use strict";try{self["workbox:routing:4.3.1"]&&_()}catch(t){}const s="GET",n=t=>t&&"object"==typeof t?t:{handle:t};class o{constructor(t,e,r){this.handler=n(e),this.match=t,this.method=r||s}}class i extends o{constructor(t,{whitelist:e=[/./],blacklist:r=[]}={}){super(t=>this.t(t),t),this.s=e,this.o=r}t({url:t,request:e}){if("navigate"!==e.mode)return!1;const r=t.pathname+t.search;for(const t of this.o)if(t.test(r))return!1;return!!this.s.some(t=>t.test(r))}}class u extends o{constructor(t,e,r){super(({url:e})=>{const r=t.exec(e.href);return r?e.origin!==location.origin&&0!==r.index?null:r.slice(1):null},e,r)}}class c{constructor(){this.i=new Map}get routes(){return this.i}addFetchListener(){self.addEventListener("fetch",t=>{const{request:e}=t,r=this.handleRequest({request:e,event:t});r&&t.respondWith(r)})}addCacheListener(){self.addEventListener("message",async t=>{if(t.data&&"CACHE_URLS"===t.data.type){const{payload:e}=t.data,r=Promise.all(e.urlsToCache.map(t=>{"string"==typeof t&&(t=[t]);const e=new Request(...t);return this.handleRequest({request:e})}));t.waitUntil(r),t.ports&&t.ports[0]&&(await r,t.ports[0].postMessage(!0))}})}handleRequest({request:t,event:e}){const r=new URL(t.url,location);if(!r.protocol.startsWith("http"))return;let s,{params:n,route:o}=this.findMatchingRoute({url:r,request:t,event:e}),i=o&&o.handler;if(!i&&this.u&&(i=this.u),i){try{s=i.handle({url:r,request:t,event:e,params:n})}catch(t){s=Promise.reject(t)}return s&&this.h&&(s=s.catch(t=>this.h.handle({url:r,event:e,err:t}))),s}}findMatchingRoute({url:t,request:e,event:r}){const s=this.i.get(e.method)||[];for(const n of s){let s,o=n.match({url:t,request:e,event:r});if(o)return Array.isArray(o)&&o.length>0?s=o:o.constructor===Object&&Object.keys(o).length>0&&(s=o),{route:n,params:s}}return{}}setDefaultHandler(t){this.u=n(t)}setCatchHandler(t){this.h=n(t)}registerRoute(t){this.i.has(t.method)||this.i.set(t.method,[]),this.i.get(t.method).push(t)}unregisterRoute(t){if(!this.i.has(t.method))throw new r.WorkboxError("unregister-route-but-not-found-with-method",{method:t.method});const e=this.i.get(t.method).indexOf(t);if(!(e>-1))throw new r.WorkboxError("unregister-route-route-not-registered");this.i.get(t.method).splice(e,1)}}let a;const h=()=>(a||((a=new c).addFetchListener(),a.addCacheListener()),a);return t.NavigationRoute=i,t.RegExpRoute=u,t.registerNavigationRoute=((t,r={})=>{const s=e.cacheNames.getPrecacheName(r.cacheName),n=new i(async()=>{try{const e=await caches.match(t,{cacheName:s});if(e)return e;throw new Error(`The cache ${s} did not have an entry for `+`${t}.`)}catch(e){return fetch(t)}},{whitelist:r.whitelist,blacklist:r.blacklist});return h().registerRoute(n),n}),t.registerRoute=((t,e,s="GET")=>{let n;if("string"==typeof t){const r=new URL(t,location);n=new o(({url:t})=>t.href===r.href,e,s)}else if(t instanceof RegExp)n=new u(t,e,s);else if("function"==typeof t)n=new o(t,e,s);else{if(!(t instanceof o))throw new r.WorkboxError("unsupported-route-type",{moduleName:"workbox-routing",funcName:"registerRoute",paramName:"capture"});n=t}return h().registerRoute(n),n}),t.Route=o,t.Router=c,t.setCatchHandler=(t=>{h().setCatchHandler(t)}),t.setDefaultHandler=(t=>{h().setDefaultHandler(t)}),t}({},workbox.core._private,workbox.core._private);
//# sourceMappingURL=workbox-routing.prod.js.map
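This routing build supplies the workbox.routing.registerRoute call used by the Discourse worker. Its source accepts a string, RegExp, matcher function or Route instance as the capture argument, and also exposes setDefaultHandler and setCatchHandler; a brief sketch follows (the /uploads/ route and offline.html fallback are made-up examples, not part of the commit):

// RegExp capture, exactly as the Discourse service worker uses it:
workbox.routing.registerRoute(new RegExp('.*?'), new workbox.strategies.NetworkFirst());

// Matcher function plus a plain handler function:
workbox.routing.registerRoute(
  ({ url }) => url.pathname.startsWith("/uploads/"),
  ({ request }) => fetch(request)
);

// Fallbacks for unmatched requests and for handlers that throw:
workbox.routing.setDefaultHandler(({ request }) => fetch(request));
workbox.routing.setCatchHandler(() => caches.match("offline.html"));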
public/javascripts/workbox/workbox-routing.prod.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-strategies.dev.js (new file, 1138 lines; diff suppressed because it is too large)
public/javascripts/workbox/workbox-strategies.dev.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
public/javascripts/workbox/workbox-strategies.prod.js (new file, 2 lines)
@@ -0,0 +1,2 @@
this.workbox=this.workbox||{},this.workbox.strategies=function(e,t,s,n,r){"use strict";try{self["workbox:strategies:4.3.1"]&&_()}catch(e){}class i{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));let n,i=await s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(!i)try{i=await this.u(t,e)}catch(e){n=e}if(!i)throw new r.WorkboxError("no-response",{url:t.url,error:n});return i}async u(e,t){const r=await n.fetchWrapper.fetch({request:e,event:t,fetchOptions:this.i,plugins:this.s}),i=r.clone(),h=s.cacheWrapper.put({cacheName:this.t,request:e,response:i,event:t,plugins:this.s});if(t)try{t.waitUntil(h)}catch(e){}return r}}class h{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));const n=await s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(!n)throw new r.WorkboxError("no-response",{url:t.url});return n}}const u={cacheWillUpdate:({response:e})=>200===e.status||0===e.status?e:null};class a{constructor(e={}){if(this.t=t.cacheNames.getRuntimeName(e.cacheName),e.plugins){let t=e.plugins.some(e=>!!e.cacheWillUpdate);this.s=t?e.plugins:[u,...e.plugins]}else this.s=[u];this.o=e.networkTimeoutSeconds,this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){const s=[];"string"==typeof t&&(t=new Request(t));const n=[];let i;if(this.o){const{id:r,promise:h}=this.l({request:t,event:e,logs:s});i=r,n.push(h)}const h=this.q({timeoutId:i,request:t,event:e,logs:s});n.push(h);let u=await Promise.race(n);if(u||(u=await h),!u)throw new r.WorkboxError("no-response",{url:t.url});return u}l({request:e,logs:t,event:s}){let n;return{promise:new Promise(t=>{n=setTimeout(async()=>{t(await this.p({request:e,event:s}))},1e3*this.o)}),id:n}}async q({timeoutId:e,request:t,logs:r,event:i}){let h,u;try{u=await n.fetchWrapper.fetch({request:t,event:i,fetchOptions:this.i,plugins:this.s})}catch(e){h=e}if(e&&clearTimeout(e),h||!u)u=await this.p({request:t,event:i});else{const e=u.clone(),n=s.cacheWrapper.put({cacheName:this.t,request:t,response:e,event:i,plugins:this.s});if(i)try{i.waitUntil(n)}catch(e){}}return u}p({event:e,request:t}){return s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s})}}class c{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.i=e.fetchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){let s,i;"string"==typeof t&&(t=new Request(t));try{i=await n.fetchWrapper.fetch({request:t,event:e,fetchOptions:this.i,plugins:this.s})}catch(e){s=e}if(!i)throw new r.WorkboxError("no-response",{url:t.url,error:s});return i}}class o{constructor(e={}){if(this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],e.plugins){let t=e.plugins.some(e=>!!e.cacheWillUpdate);this.s=t?e.plugins:[u,...e.plugins]}else 
this.s=[u];this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));const n=this.u({request:t,event:e});let i,h=await s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(h){if(e)try{e.waitUntil(n)}catch(i){}}else try{h=await n}catch(e){i=e}if(!h)throw new r.WorkboxError("no-response",{url:t.url,error:i});return h}async u({request:e,event:t}){const r=await n.fetchWrapper.fetch({request:e,event:t,fetchOptions:this.i,plugins:this.s}),i=s.cacheWrapper.put({cacheName:this.t,request:e,response:r.clone(),event:t,plugins:this.s});if(t)try{t.waitUntil(i)}catch(e){}return r}}const l={cacheFirst:i,cacheOnly:h,networkFirst:a,networkOnly:c,staleWhileRevalidate:o},q=e=>{const t=l[e];return e=>new t(e)},w=q("cacheFirst"),p=q("cacheOnly"),v=q("networkFirst"),y=q("networkOnly"),m=q("staleWhileRevalidate");return e.CacheFirst=i,e.CacheOnly=h,e.NetworkFirst=a,e.NetworkOnly=c,e.StaleWhileRevalidate=o,e.cacheFirst=w,e.cacheOnly=p,e.networkFirst=v,e.networkOnly=y,e.staleWhileRevalidate=m,e}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private);
//# sourceMappingURL=workbox-strategies.prod.js.map
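Besides NetworkFirst, this strategies build exports CacheFirst, CacheOnly, NetworkOnly and StaleWhileRevalidate, all taking the same cacheName / plugins / fetchOptions / matchOptions constructor options. A hedged sketch of swapping in a different strategy for one route (illustrative only; Discourse itself registers a single NetworkFirst route):

// Example: serve already-cached scripts immediately and refresh them in the background.
workbox.routing.registerRoute(
  new RegExp('/javascripts/'),
  new workbox.strategies.StaleWhileRevalidate({ cacheName: "discourse-assets" })
);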
File diff suppressed because one or more lines are too long
public/javascripts/workbox/workbox-sw.js (new file, 2 lines)
@@ -0,0 +1,2 @@
!function(){"use strict";try{self["workbox:sw:4.3.1"]&&_()}catch(t){}const t="https://storage.googleapis.com/workbox-cdn/releases/4.3.1",e={backgroundSync:"background-sync",broadcastUpdate:"broadcast-update",cacheableResponse:"cacheable-response",core:"core",expiration:"expiration",googleAnalytics:"offline-ga",navigationPreload:"navigation-preload",precaching:"precaching",rangeRequests:"range-requests",routing:"routing",strategies:"strategies",streams:"streams"};self.workbox=new class{constructor(){return this.v={},this.t={debug:"localhost"===self.location.hostname,modulePathPrefix:null,modulePathCb:null},this.s=this.t.debug?"dev":"prod",this.o=!1,new Proxy(this,{get(t,s){if(t[s])return t[s];const o=e[s];return o&&t.loadModule(`workbox-${o}`),t[s]}})}setConfig(t={}){if(this.o)throw new Error("Config must be set before accessing workbox.* modules");Object.assign(this.t,t),this.s=this.t.debug?"dev":"prod"}loadModule(t){const e=this.i(t);try{importScripts(e),this.o=!0}catch(s){throw console.error(`Unable to import module '${t}' from '${e}'.`),s}}i(e){if(this.t.modulePathCb)return this.t.modulePathCb(e,this.t.debug);let s=[t];const o=`${e}.${this.s}.js`,r=this.t.modulePathPrefix;return r&&""===(s=r.split("/"))[s.length-1]&&s.splice(s.length-1,1),s.push(o),s.join("/")}}}();
//# sourceMappingURL=workbox-sw.js.map
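workbox-sw.js is the small loader behind the workbox global: the first time a property such as workbox.routing or workbox.strategies is touched, it importScripts() the matching module, and setConfig() must run before that first access (the loader throws "Config must be set before accessing workbox.* modules" otherwise), which is why the Discourse template calls setConfig right after importing it. A minimal sketch of the same bootstrap, using the /javascripts/workbox path that the Rakefile task publishes:

importScripts("/javascripts/workbox/workbox-sw.js");

// Point the loader at the locally vendored builds instead of its Google CDN default.
workbox.setConfig({ modulePathPrefix: "/javascripts/workbox" });

// First access lazily loads workbox-routing.prod.js and workbox-strategies.prod.js
// (the .dev.js builds are used when the worker runs on localhost).
workbox.routing.registerRoute(new RegExp('.*?'), new workbox.strategies.NetworkFirst());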
public/javascripts/workbox/workbox-sw.js.map (new file, 1 line; diff suppressed because one or more lines are too long)
yarn.lock (31 lines changed)
@@ -2584,6 +2584,37 @@ wordwrap@~1.0.0:
   resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
   integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=
 
+workbox-core@^4.3.1:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-4.3.1.tgz#005d2c6a06a171437afd6ca2904a5727ecd73be6"
+  integrity sha512-I3C9jlLmMKPxAC1t0ExCq+QoAMd0vAAHULEgRZ7kieCdUd919n53WC0AfvokHNwqRhGn+tIIj7vcb5duCjs2Kg==
+
+workbox-expiration@^4.3.1:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-4.3.1.tgz#d790433562029e56837f341d7f553c4a78ebe921"
+  integrity sha512-vsJLhgQsQouv9m0rpbXubT5jw0jMQdjpkum0uT+d9tTwhXcEZks7qLfQ9dGSaufTD2eimxbUOJfWLbNQpIDMPw==
+  dependencies:
+    workbox-core "^4.3.1"
+
+workbox-routing@^4.3.1:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-4.3.1.tgz#a675841af623e0bb0c67ce4ed8e724ac0bed0cda"
+  integrity sha512-FkbtrODA4Imsi0p7TW9u9MXuQ5P4pVs1sWHK4dJMMChVROsbEltuE79fBoIk/BCztvOJ7yUpErMKa4z3uQLX+g==
+  dependencies:
+    workbox-core "^4.3.1"
+
+workbox-strategies@^4.3.1:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-4.3.1.tgz#d2be03c4ef214c115e1ab29c9c759c9fe3e9e646"
+  integrity sha512-F/+E57BmVG8dX6dCCopBlkDvvhg/zj6VDs0PigYwSN23L8hseSRwljrceU2WzTvk/+BSYICsWmRq5qHS2UYzhw==
+  dependencies:
+    workbox-core "^4.3.1"
+
+workbox-sw@^4.3.1:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-4.3.1.tgz#df69e395c479ef4d14499372bcd84c0f5e246164"
+  integrity sha512-0jXdusCL2uC5gM3yYFT6QMBzKfBr2XTk0g5TPAV4y8IZDyVNDyj1a8uSXy3/XrvkVTmQvLN4O5k3JawGReXr9w==
+
 wrappy@1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"