diff options
| author | (quasar) nebula <qznebula@protonmail.com> | 2026-02-22 12:45:10 -0400 |
|---|---|---|
| committer | (quasar) nebula <qznebula@protonmail.com> | 2026-02-22 12:47:30 -0400 |
| commit | 4a3b66c4adbb7c8e79114aab76bf2ff29baed04c (patch) | |
| tree | 1a32c7d8e086a79e9f4091a43adab94dadfedc72 /src/static/js | |
| parent | 83d037e83fc5f7b8af3bb8b74b59e7accf0a2a42 (diff) | |
[preview] client: search-worker: get way more defensive preview
The TODO I'm dropping here is probably a note Gio left... which we have *definitely* seen plenty of times before.
Diffstat (limited to 'src/static/js')
| -rw-r--r-- | src/static/js/search-worker.js | 112 |
1 file changed, 103 insertions, 9 deletions
diff --git a/src/static/js/search-worker.js b/src/static/js/search-worker.js index f6b24386..b79df3d4 100644 --- a/src/static/js/search-worker.js +++ b/src/static/js/search-worker.js @@ -32,7 +32,7 @@ postStatus('alive'); Promise.all([ loadDependencies(), loadDatabase(), -]).then(main) +]).then(() => main()) .then( () => { postStatus('ready'); @@ -182,7 +182,7 @@ function fetchIndexes(keysNeedingFetch) { }))); } -async function main() { +async function main(fromRetry = false) { const prepareIndexDataPromise = prepareIndexData(); indexes = @@ -194,17 +194,50 @@ async function main() { const {indexData, idbIndexData} = await prepareIndexDataPromise; + const understoodKeys = Object.keys(searchSpec); + const unexpectedKeysFromCache = + Object.keys(idbIndexData) + .filter(key => !understoodKeys.includes(key)); + + // This step is largely "unnecessary" because the rest of the code pays + // attention to which keys are understood anyway, but we delete unexpected + // keys from the index anyway, to trim stored data that isn't being used. + if (idb && !empty(unexpectedKeysFromCache)) { + for (const key of unexpectedKeysFromCache) { + console.warn(`Unexpected search index in cache, deleting: ${key}`); + } + + const transaction = + idb.transaction(['indexes'], 'readwrite'); + + const store = + transaction.objectStore('indexes'); + + for (const [key] of unexpectedKeysFromCache) { + try { + await promisifyIDBRequest(store.delete(key)); + } catch (error) { + console.warn(`Error deleting ${key} from internal search cache`); + console.warn(error); + continue; + } + } + } + const keysNeedingFetch = (idbIndexData ? 
Object.keys(indexData) + .filter(key => understoodKeys.includes(key)) .filter(key => indexData[key].md5 !== idbIndexData[key]?.md5) - : Object.keys(indexData)); + : Object.keys(indexData) + .filter(key => understoodKeys.includes(key))); const keysFromCache = Object.keys(indexData) - .filter(key => !keysNeedingFetch.includes(key)) + .filter(key => understoodKeys.includes(key)) + .filter(key => !keysNeedingFetch.includes(key)); const cacheArrayBufferPromises = keysFromCache @@ -233,10 +266,20 @@ async function main() { } function importIndexes(keys, jsons) { + const succeeded = []; + const failed = []; + stitchArrays({key: keys, json: jsons}) .forEach(({key, json}) => { - importIndex(key, json); + try { + importIndex(key, json); + succeeded.push([key, null]); + } catch (caughtError) { + failed.push([key, caughtError]); + } }); + + return {succeeded, failed}; } if (idb) { @@ -244,6 +287,8 @@ async function main() { console.debug(`Fetching indexes anew:`, keysNeedingFetch); } + let signalRetryNeeded = false; + await Promise.all([ async () => { const cacheArrayBuffers = @@ -253,7 +298,34 @@ async function main() { cacheArrayBuffers .map(arrayBufferToJSON); - importIndexes(keysFromCache, cacheJSONs); + const importResults = + importIndexes(keysFromCache, cacheJSONs); + + if (empty(importResults.failed)) return; + if (!idb) return; + + const transaction = + idb.transaction(['indexes'], 'readwrite'); + + const store = + transaction.objectStore('indexes'); + + for (const [key, error] of importResults.failed) { + console.warn(`Failed to import search index from cache: ${key}`); + console.warn(error); + } + + for (const [key] of importResults.failed) { + try { + await promisifyIDBRequest(store.delete(key)); + } catch (error) { + console.warn(`Error deleting ${key} from internal search cache`); + console.warn(error); + continue; + } + } + + signalRetryNeeded = true; }, async () => { @@ -264,7 +336,21 @@ async function main() { fetchArrayBuffers .map(arrayBufferToJSON); - 
importIndexes(keysNeedingFetch, fetchJSONs); + const importResults = + importIndexes(keysNeedingFetch, fetchJSONs); + + if (empty(importResults.failed)) return; + + for (const [key, error] of importResults.failed) { + console.warn(`Failed to import search index from fetch: ${key}`); + console.warn(error); + } + + console.warn( + `Trying again would just mean fetching this same data, ` + + `so this is needs outside intervention.`); + + throw new Error(`Failed to load search data from fresh fetch`); }, async () => { @@ -299,11 +385,19 @@ async function main() { } }, ].map(fn => fn())); + + if (signalRetryNeeded) { + if (fromRetry) { + console.error(`Already retried, this is probably a logic / code flow error.`); + throw new Error(`Failed to load good search data even on a retry`); + } else { + console.warn(`Trying to load search data again, hopefully from fresh conditions`); + return main(true); + } + } } function importIndex(indexKey, indexData) { - // If this fails, it's because an outdated index was cached. - // TODO: If this fails, try again once with a cache busting url. for (const [key, value] of Object.entries(indexData)) { indexes[indexKey].import(key, JSON.stringify(value)); } |