diff --git a/public/scripts/network.js b/public/scripts/network.js
index 53c2c67..50bf4b8 100644
--- a/public/scripts/network.js
+++ b/public/scripts/network.js
@@ -27,23 +27,38 @@ class ServerConnection {
     }
 
     _getConfig() {
+        console.log("Loading config...")
         return new Promise((resolve, reject) => {
             let xhr = new XMLHttpRequest();
-            xhr.open('GET', 'config', true);
             xhr.addEventListener("load", () => {
                 if (xhr.status === 200) {
                     // Config received
                     let config = JSON.parse(xhr.responseText);
+                    console.log("Config loaded:", config)
                     this._config = config;
                     Events.fire('config', config);
                     resolve()
-                } else if (xhr.status !== 200) {
-                    // Handle errors
-                    console.error('Error:', xhr.status, xhr.statusText);
-                    reject();
+                } else if (xhr.status < 200 || xhr.status >= 300) {
+                    retry(xhr);
                 }
-            });
-            xhr.send();
+            })
+
+            xhr.addEventListener("error", _ => {
+                retry(xhr);
+            });
+
+            function retry(request) {
+                setTimeout(function () {
+                    openAndSend(request)
+                }, 1000)
+            }
+
+            function openAndSend() {
+                xhr.open('GET', 'config');
+                xhr.send();
+            }
+
+            openAndSend(xhr);
         })
     }
 
diff --git a/public/service-worker.js b/public/service-worker.js
index 752cb46..f87e879 100644
--- a/public/service-worker.js
+++ b/public/service-worker.js
@@ -1,7 +1,7 @@
 const cacheVersion = 'v1.9.4';
 const cacheTitle = `pairdrop-cache-${cacheVersion}`;
 const forceFetch = false; // FOR DEVELOPMENT: Set to true to always update assets instead of using cached versions
-const urlsToCache = [
+const relativePathsToCache = [
     './',
     'index.html',
     'manifest.json',
@@ -36,28 +36,44 @@ const urlsToCache = [
     'lang/ru.json',
     'lang/zh-CN.json'
 ];
+const relativePathsNotToCache = [
+    'config'
+]
 
 self.addEventListener('install', function(event) {
     // Perform install steps
     event.waitUntil(
         caches.open(cacheTitle)
             .then(function(cache) {
-                return cache.addAll(urlsToCache).then(_ => {
-                    console.log('All files cached.');
-                });
+                return cache
+                    .addAll(relativePathsToCache)
+                    .then(_ => {
+                        console.log('All files cached.');
+                    });
             })
     );
 });
 
 // fetch the resource from the network
 const fromNetwork = (request, timeout) =>
-    new Promise((fulfill, reject) => {
+    new Promise((resolve, reject) => {
         const timeoutId = setTimeout(reject, timeout);
-        fetch(request).then(response => {
-            clearTimeout(timeoutId);
-            fulfill(response);
-            update(request).then(_ => console.log("Cache successfully updated for", request.url));
-        }, reject);
+        fetch(request)
+            .then(response => {
+                clearTimeout(timeoutId);
+                resolve(response);
+
+                if (doNotCacheRequest(request)) return;
+
+                update(request)
+                    .then(() => console.log("Cache successfully updated for", request.url))
+                    .catch(reason => console.log("Cache could not be updated for", request.url, "Reason:", reason));
+            })
+            .catch(error => {
+                // Handle any errors that occurred during the fetch
+                console.error(`Could not fetch ${request.url}. Are you online?`);
+                reject(error);
+            });
     });
 
 // fetch the resource from the browser cache
@@ -68,17 +84,32 @@ const fromCache = request =>
             cache.match(request)
         );
 
+const rootUrl = location.href.substring(0, location.href.length - "service-worker.js".length);
+const rootUrlLength = rootUrl.length;
+
+const doNotCacheRequest = request => {
+    const requestRelativePath = request.url.substring(rootUrlLength);
+    return relativePathsNotToCache.indexOf(requestRelativePath) !== -1
+};
+
 // cache the current page to make it available for offline
-const update = request =>
+const update = request => new Promise((resolve, reject) => {
+    if (doNotCacheRequest(request)) {
+        reject("Url is specifically prevented from being cached in the serviceworker.");
+        return;
+    }
     caches
         .open(cacheTitle)
         .then(cache =>
-            fetch(request)
-                .then(async response => {
-                    await cache.put(request, response);
+            fetch(request, {cache: "no-store"})
+                .then(response => {
+                    cache
+                        .put(request, response)
+                        .then(() => resolve());
                 })
-                .catch(_ => console.log(`Cache could not be updated. ${request.url}`))
+                .catch(reason => reject(reason))
         );
+});
 
 // general strategy when making a request (eg if online try to fetch it
 // from cache, if something fails fetch from network. Update cache everytime files are fetched.