import * as plugins from './plugins.js';
import * as interfaces from './env.js';
import { logger } from './logging.js';
import { ServiceWorker } from './classes.serviceworker.js';

export class CacheManager {
  public losslessServiceWorkerRef: ServiceWorker;

  public usedCacheNames = {
    runtimeCacheName: 'runtime',
  };

  constructor(losslessServiceWorkerRefArg: ServiceWorker) {
    this.losslessServiceWorkerRef = losslessServiceWorkerRefArg;
    this._setupCache();
  }

  private _setupCache = () => {
    const createMatchRequest = (requestArg: Request) => {
      // lets create a matchRequest
      let matchRequest: Request;
      if (
        requestArg.url.startsWith(this.losslessServiceWorkerRef.serviceWindowRef.location.origin)
      ) {
        // internal request
        matchRequest = requestArg;
      } else {
        matchRequest = new Request(requestArg.url, {
          ...requestArg.clone(),
          mode: 'cors',
        });
      }
      return matchRequest;
    };

    /**
     * creates a 500 response
     */
    const create500Response = async (requestArg: Request, responseArg: Response) => {
      return new Response(
        `
serviceworker running, but status 500
serviceworker is unable to fetch this request
Here is some info about the request/response pair:

requestUrl: ${requestArg.url}
responseType: ${responseArg.type}
responseBody: ${await responseArg.clone().text()}
`,
        {
          headers: {
            'Content-Type': 'text/html',
          },
          status: 500,
        }
      );
    };

    // Intercept fetches so that local resources and selected CDN assets are served from the runtime cache.
    this.losslessServiceWorkerRef.serviceWindowRef.addEventListener('fetch', async (fetchEventArg: any) => {
      // Let's block scopes we don't want to be passing through the serviceworker
      const originalRequest: Request = fetchEventArg.request;
      const parsedUrl = new URL(originalRequest.url);
      if (
        parsedUrl.hostname.includes('paddle.com') ||
        parsedUrl.hostname.includes('paypal.com') ||
        parsedUrl.hostname.includes('reception.lossless.one') ||
        parsedUrl.pathname.startsWith('/socket.io') ||
        originalRequest.url.startsWith('https://umami.')
      ) {
        logger.log('note', `serviceworker not active for ${parsedUrl.toString()}`);
        return;
      }

      // lets continue for the rest
      const done = plugins.smartpromise.defer();
      fetchEventArg.respondWith(done.promise);

      if (
        (originalRequest.method === 'GET' &&
          originalRequest.url.startsWith(this.losslessServiceWorkerRef.serviceWindowRef.location.origin) &&
          !originalRequest.url.includes('/api/') &&
          !originalRequest.url.includes('smartserve/reloadcheck')) ||
        originalRequest.url.includes('https://assetbroker.') ||
        originalRequest.url.includes('https://unpkg.com') ||
        originalRequest.url.includes('https://fonts.googleapis.com') ||
        originalRequest.url.includes('https://fonts.gstatic.com')
      ) {
        // lets see if things need to be updated
        // not waiting here
        this.losslessServiceWorkerRef.updateManager.checkUpdate(this);

        // this code block is executed for local requests
        const matchRequest = createMatchRequest(originalRequest);
        const cachedResponse = await caches.match(matchRequest);
        if (cachedResponse) {
          logger.log('ok', `CACHED: found cached response for ${matchRequest.url}`);
          done.resolve(cachedResponse);
          return;
        }

        // in case there is no cached response
        logger.log('info', `NOTYETCACHED: trying to cache ${matchRequest.url}`);
        const newResponse: Response = await fetch(matchRequest).catch(async (err) => {
          return await create500Response(matchRequest, new Response(err.message));
        });

        // fill cache
        // Put a copy of the response in the runtime cache.
        if (newResponse.status > 299 || newResponse.type === 'opaque') {
          logger.log(
            'error',
            `NOTCACHED: can't cache response for ${matchRequest.url} due to status ${newResponse.status} and type ${newResponse.type}`
          );
          done.resolve(await create500Response(matchRequest, newResponse));
        } else {
          const cache = await caches.open(this.usedCacheNames.runtimeCacheName);
          const responseToPutToCache = newResponse.clone();
          // copy all headers except the caching directives, then force revalidation on the cached copy
          const headers = new Headers();
          responseToPutToCache.headers.forEach((value, key) => {
            const lowerKey = key.toLowerCase();
            if (lowerKey !== 'cache-control' && lowerKey !== 'expires' && lowerKey !== 'pragma') {
              headers.set(key, value);
            }
          });
          headers.set('Cache-Control', 'no-cache, no-store, must-revalidate');
          headers.set('Pragma', 'no-cache');
          headers.set('Expires', '0');
          await cache.put(
            matchRequest,
            new Response(responseToPutToCache.body, {
              status: responseToPutToCache.status,
              statusText: responseToPutToCache.statusText,
              headers,
            })
          );
          logger.log('ok', `NOWCACHED: cached response for ${matchRequest.url} for subsequent requests!`);
          done.resolve(newResponse);
        }
      } else {
        // this code block is executed for remote requests
        logger.log(
          'ok',
          `NOTCACHED: not caching any responses for ${originalRequest.url}. Fetching from origin now...`
        );
        done.resolve(
          await fetch(originalRequest).catch(async (err) => {
            return await create500Response(originalRequest, new Response(err.message));
          })
        );
      }
    });
  };

  /**
   * cleans all caches
   * should only be run when running a new service worker
   * @param reasonArg
   */
  public cleanCaches = async (reasonArg = 'no reason given') => {
    logger.log('info', `MAJOR CACHEEVENT: cleaning caches now! Reason: ${reasonArg}`);
    const cacheNames = await caches.keys();
    const deletePromises = cacheNames.map((cacheToDelete) => {
      const deletePromise = caches.delete(cacheToDelete);
      deletePromise.then(() => {
        logger.log('ok', `Deleted cache ${cacheToDelete}`);
      });
      return deletePromise;
    });
    await Promise.all(deletePromises);
  };

  /**
   * revalidate cache
   */
  public async revalidateCache() {
    const runtimeCache = await caches.open(this.usedCacheNames.runtimeCacheName);
    const cacheKeys = await runtimeCache.keys();
    for (const requestArg of cacheKeys) {
      const cachedResponse = await runtimeCache.match(requestArg);
      // lets get a new response for comparison
      const clonedRequest = requestArg.clone();
      const response = await plugins.smartpromise.timeoutWrap(fetch(clonedRequest), 1000);
      if (response && response.status >= 200 && response.status < 300) {
        await runtimeCache.delete(requestArg);
        await runtimeCache.put(requestArg, response);
      }
    }
  }
}
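
// Usage sketch (illustrative assumption, not part of this module): the CacheManager is meant to be
// constructed by the ServiceWorker class from './classes.serviceworker.js', which hands in a
// reference to itself; the constructor then registers the 'fetch' handler above. The wiring below
// assumes a no-argument ServiceWorker constructor and a file name for this module - both hypothetical.
//
//   import { ServiceWorker } from './classes.serviceworker.js';
//   import { CacheManager } from './classes.cachemanager.js'; // hypothetical file name
//
//   const serviceWorkerRef = new ServiceWorker();             // assumed constructor signature
//   const cacheManager = new CacheManager(serviceWorkerRef);  // sets up the fetch listener
//
//   // when a new service worker version activates:
//   // await cacheManager.cleanCaches('new serviceworker version');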