@ -17,13 +17,17 @@ export class CacheManager {
private _setupCache = ( ) = > {
const createMatchRequest = ( requestArg : Request ) = > {
// Create a matchRequest.
let matchRequest : Request ;
if ( requestArg . url . startsWith ( this . losslessServiceWorkerRef . serviceWindowRef . location . origin ) ) {
if (
requestArg . url . startsWith (
this . losslessServiceWorkerRef . serviceWindowRef . location . origin
)
) {
// internal request
matchRequest = requestArg ;
} else {
// For external requests, create a new Request with CORS settings.
matchRequest = new Request ( requestArg . url , {
method : requestArg.method ,
headers : requestArg.headers ,
@ -34,9 +38,9 @@ export class CacheManager {
}
return matchRequest ;
} ;
/**
 * Creates a 500 response.
 */
const create500Response = async ( requestArg : Request , responseArg : Response ) = > {
return new Response (
@ -75,27 +79,27 @@ export class CacheManager {
}
) ;
} ;
// A list of local resources we always want to be cached.
this . losslessServiceWorkerRef . serviceWindowRef . addEventListener ( 'fetch' , async ( fetchEventArg : any ) = > {
// Block scopes we don't want the serviceworker to handle.
const originalRequest : Request = fetchEventArg . request ;
const parsedUrl = new URL ( originalRequest . url ) ;
if (
parsedUrl . hostname . includes ( 'paddle.com' )
|| parsedUrl . hostname . includes ( 'paypal.com' )
|| parsedUrl . hostname . includes ( 'reception.lossless.one' )
|| parsedUrl . pathname . startsWith ( '/socket.io' )
|| originalRequest . url . startsWith ( 'https://umami.' )
parsedUrl . hostname . includes ( 'paddle.com' ) ||
parsedUrl . hostname . includes ( 'paypal.com' ) ||
parsedUrl . hostname . includes ( 'reception.lossless.one' ) ||
parsedUrl . pathname . startsWith ( '/socket.io' ) ||
originalRequest . url . startsWith ( 'https://umami.' )
) {
logger . log ( 'note' , ` serviceworker not active for ${ parsedUrl . toString ( ) } ` ) ;
logger . log ( 'note' , ` serviceworker not active for ${ parsedUrl . toString ( ) } ` ) ;
return ;
}
// Continue for the rest.
const done = plugins . smartpromise . defer < Response > ( ) ;
fetchEventArg . respondWith ( done . promise ) ;
if (
( originalRequest . method === 'GET' &&
( originalRequest . url . startsWith ( this . losslessServiceWorkerRef . serviceWindowRef . location . origin ) &&
@ -106,12 +110,10 @@ export class CacheManager {
originalRequest . url . includes ( 'https://fonts.googleapis.com' ) ||
originalRequest . url . includes ( 'https://fonts.gstatic.com' )
) {
// Check for updates asynchronously (not awaited).
this . losslessServiceWorkerRef . updateManager . checkUpdate ( this ) ;
// Handle local or approved remote requests.
const matchRequest = createMatchRequest ( originalRequest ) ;
const cachedResponse = await caches . match ( matchRequest ) ;
if ( cachedResponse ) {
@ -119,15 +121,14 @@ export class CacheManager {
done . resolve ( cachedResponse ) ;
return ;
}
// No cached response found; try to fetch and cache.
logger . log ( 'info' , ` NOTYETCACHED: trying to cache ${ matchRequest . url } ` ) ;
const newResponse : Response = await fetch ( matchRequest ) . catch ( async err = > {
return await create500Response ( matchRequest , new Response ( err . message ) ) ;
} ) ;
// Put a copy of the response in the runtime cache.
// If status > 299 or opaque response, don't cache.
if ( newResponse . status > 299 || newResponse . type === 'opaque' ) {
logger . log (
'error' ,
@ -139,9 +140,10 @@ export class CacheManager {
} else {
const cache = await caches . open ( this . usedCacheNames . runtimeCacheName ) ;
const responseToPutToCache = newResponse . clone ( ) ;
// Create new headers preserving all except caching-related headers.
const headers = new Headers ( ) ;
responseToPutToCache . headers . forEach ( ( value , key ) = > {
// Preserve all headers except caching headers
if ( ! [
'Cache-Control' ,
'cache-control' ,
@ -153,8 +155,8 @@ export class CacheManager {
headers . set ( key , value ) ;
}
} ) ;
// Ensure CORS headers are present in the cached response.
if ( ! headers . has ( 'Access-Control-Allow-Origin' ) ) {
headers . set ( 'Access-Control-Allow-Origin' , '*' ) ;
}
@ -164,28 +166,29 @@ export class CacheManager {
if ( ! headers . has ( 'Access-Control-Allow-Headers' ) ) {
headers . set ( 'Access-Control-Allow-Headers' , 'Content-Type' ) ;
}
// Prevent browser caching while allowing service worker caching.
headers . set ( 'Cache-Control' , 'no-store, no-cache, must-revalidate, proxy-revalidate' ) ;
headers . set ( 'Pragma' , 'no-cache' ) ;
headers . set ( 'Expires' , '0' ) ;
headers . set ( 'Surrogate-Control' , 'no-store' ) ;
await cache . put ( matchRequest , new Response ( responseToPutToCache . body , {
. . . responseToPutToCache ,
// IMPORTANT: Read the entire response body as a blob so that
// Safari does not have issues with a locked stream when caching.
const bodyBlob = await responseToPutToCache . blob ( ) ;
const newCachedResponse = new Response ( bodyBlob , {
status : responseToPutToCache.status ,
statusText : responseToPutToCache.statusText ,
headers
} ) ) ;
logger . log (
'ok' ,
` NOWCACHED: cached response for ${ matchRequest . url } for subsequent requests! `
) ;
} ) ;
await cache . put ( matchRequest , newCachedResponse ) ;
logger . log ( 'ok' , ` NOWCACHED: cached response for ${ matchRequest . url } for subsequent requests! ` ) ;
done . resolve ( newResponse ) ;
}
} else {
// For remote requests that don't qualify for caching.
logger . log (
'ok' ,
` NOTCACHED: not caching any responses for ${
originalRequest . url
} . Fetching from origin now... `
` NOTCACHED: not caching any responses for ${ originalRequest . url } . Fetching from origin now... `
) ;
done . resolve (
await fetch ( originalRequest ) . catch ( async err = > {
@ -197,39 +200,35 @@ export class CacheManager {
}
/**
 * Cleans all caches.
 * Should only be run when running a new service worker.
 * @param reasonArg - human-readable reason for the cleanup, included in the log output
 */
public cleanCaches = async (reasonArg = 'no reason given') => {
  logger.log('info', `MAJOR CACHEEVENT: cleaning caches now! Reason: ${reasonArg}`);
  const cacheNames = await caches.keys();
  // Kick off deletion of every cache and log each completion.
  const deletePromises = cacheNames.map((cacheToDelete) => {
    const deletePromise = caches.delete(cacheToDelete);
    deletePromise.then(() => {
      logger.log('ok', `Deleted cache ${cacheToDelete}`);
    });
    return deletePromise;
  });
  // Wait until all caches are gone before resolving.
  await Promise.all(deletePromises);
}
/**
 * Revalidates the runtime cache.
 */
public async revalidateCache() {
const runtimeCache = await caches . open ( this . usedCacheNames . runtimeCacheName ) ;
const cacheKeys = await runtimeCache . keys ( ) ;
for ( const requestArg of cacheKeys ) {
// Get the cached response.
const cachedResponse = runtimeCache . match ( requestArg ) ;
// Fetch a new response for comparison.
const clonedRequest = requestArg . clone ( ) ;
const response = await plugins . smartpromise . timeoutWrap ( fetch ( clonedRequest ) , 5000 ) ; // Increased timeout for better mobile compatibility
if ( response && response . status >= 200 && response . status < 300 ) {
@ -238,4 +237,4 @@ export class CacheManager {
}
}
}
}
}