From 0f12714a6d4e89c3665093ba6be2676ef122a06c Mon Sep 17 00:00:00 2001
From: "(quasar) nebula"
Date: Sun, 19 May 2024 22:53:19 -0300
Subject: data: reportDuplicateDirectories -> reportDirectoryErrors

And report null directories.
---
 src/data/checks.js | 35 ++++++++++++++++++++++++++---------
 src/data/yaml.js   |  4 ++--
 src/upd8.js        | 14 +++++++-------
 3 files changed, 35 insertions(+), 18 deletions(-)

diff --git a/src/data/checks.js b/src/data/checks.js
index fe4528e8..f3741a19 100644
--- a/src/data/checks.js
+++ b/src/data/checks.js
@@ -24,13 +24,21 @@ function inspect(value, opts = {}) {
   return nodeInspect(value, {colors: ENABLE_COLOR, ...opts});
 }

-// Warn about directories which are reused across more than one of the same type
-// of Thing. Directories are the unique identifier for most data objects across
-// the wiki, so we have to make sure they aren't duplicated!
-export function reportDuplicateDirectories(wikiData, {
+// Warn about problems to do with directories.
+//
+// * Duplicate directories: these are the unique identifier for referencable
+//   data objects across the wiki, so duplicates introduce ambiguity where it
+//   can't fit.
+//
+// * Missing directories: in almost all cases directories can be computed,
+//   but in particularly brutal internal cases, it might not be possible, and
+//   a thing's directory is just null. This leaves it unable to be referenced.
+//
+export function reportDirectoryErrors(wikiData, {
   getAllFindSpecs,
 }) {
   const duplicateSets = [];
+  const missingDirectoryThings = new Set();

   for (const findSpec of Object.values(getAllFindSpecs())) {
     if (!findSpec.bindTo) continue;
@@ -52,6 +60,11 @@ export function reportDuplicateDirectories(wikiData, {
           : [thing.directory]);

       for (const directory of directories) {
+        if (directory === null || directory === undefined) {
+          missingDirectoryThings.add(thing);
+          continue;
+        }
+
         if (directory in directoryPlaces) {
           directoryPlaces[directory].push(thing);
           duplicateDirectories.add(directory);
@@ -61,8 +74,6 @@ export function reportDuplicateDirectories(wikiData, {
       }
     }

-    if (empty(duplicateDirectories)) continue;
-
     const sortedDuplicateDirectories =
       Array.from(duplicateDirectories)
         .sort((a, b) => {
@@ -77,8 +88,6 @@ export function reportDuplicateDirectories(wikiData, {
     }
   }

-  if (empty(duplicateSets)) return;
-
   // Multiple find functions may effectively have duplicates across the same
   // things. These only need to be reported once, because resolving one of them
   // will resolve the rest, so cut out duplicate sets before reporting.
@@ -109,12 +118,20 @@ export function reportDuplicateDirectories(wikiData, {
     deduplicateDuplicateSets.push(set);
   }

-  withAggregate({message: `Duplicate directories found`}, ({push}) => {
+  withAggregate({message: `Directory errors detected`}, ({push}) => {
     for (const {directory, places} of deduplicateDuplicateSets) {
       push(new Error(
         `Duplicate directory ${colors.green(`"${directory}"`)}:\n` +
         places.map(thing => ` - ` + inspect(thing)).join('\n')));
     }
+
+    if (!empty(missingDirectoryThings)) {
+      push(new Error(
+        `Couldn't figure out an implicit directory for:\n` +
+        Array.from(missingDirectoryThings)
+          .map(thing => `- ` + inspect(thing))
+          .join('\n')));
+    }
   });
 }

diff --git a/src/data/yaml.js b/src/data/yaml.js
index bd0b55dc..c9ce5329 100644
--- a/src/data/yaml.js
+++ b/src/data/yaml.js
@@ -23,7 +23,7 @@ import {
 import {
   filterReferenceErrors,
   reportContentTextErrors,
-  reportDuplicateDirectories,
+  reportDirectoryErrors,
 } from '#data-checks';

 import {
@@ -1188,7 +1188,7 @@ export async function quickLoadAllFromYAML(dataPath, {
   linkWikiDataArrays(wikiData);

   try {
-    reportDuplicateDirectories(wikiData, {getAllFindSpecs});
+    reportDirectoryErrors(wikiData, {getAllFindSpecs});
     logInfo`No duplicate directories found. (complete data)`;
   } catch (error) {
     showAggregate(error);
diff --git a/src/upd8.js b/src/upd8.js
index 3e90f988..f35f9e5f 100755
--- a/src/upd8.js
+++ b/src/upd8.js
@@ -65,7 +65,7 @@ import {

 import {
   filterReferenceErrors,
-  reportDuplicateDirectories,
+  reportDirectoryErrors,
   reportContentTextErrors,
 } from '#data-checks';

@@ -143,8 +143,8 @@ async function main() {
     precacheCommonData:
       {...defaultStepStatus, name: `precache common data`},

-    reportDuplicateDirectories:
-      {...defaultStepStatus, name: `report duplicate directories`},
+    reportDirectoryErrors:
+      {...defaultStepStatus, name: `report directory errors`},

     filterReferenceErrors:
       {...defaultStepStatus, name: `filter reference errors`},
@@ -1343,17 +1343,17 @@ async function main() {
   // Filter out any things with duplicate directories throughout the data,
   // warning about them too.

-  Object.assign(stepStatusSummary.reportDuplicateDirectories, {
+  Object.assign(stepStatusSummary.reportDirectoryErrors, {
     status: STATUS_STARTED_NOT_DONE,
     timeStart: Date.now(),
   });

   try {
-    reportDuplicateDirectories(wikiData, {getAllFindSpecs});
+    reportDirectoryErrors(wikiData, {getAllFindSpecs});
     logInfo`No duplicate directories found - nice!`;
     paragraph = false;

-    Object.assign(stepStatusSummary.reportDuplicateDirectories, {
+    Object.assign(stepStatusSummary.reportDirectoryErrors, {
       status: STATUS_DONE_CLEAN,
       timeEnd: Date.now(),
     });
@@ -1369,7 +1369,7 @@ async function main() {
       console.log('');
       paragraph = true;

-      Object.assign(stepStatusSummary.reportDuplicateDirectories, {
+      Object.assign(stepStatusSummary.reportDirectoryErrors, {
         status: STATUS_FATAL_ERROR,
         annotation: `duplicate directories found`,
         timeEnd: Date.now(),
-- 
cgit 1.3.0-6-gf8a5
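
For quick reference, a minimal sketch of how the renamed check is consumed after
this patch. The try/catch shape mirrors the call sites in src/data/yaml.js and
src/upd8.js above; the '#find' and '#sugar' import paths, and a wikiData object
assumed to be loaded elsewhere, are illustrative assumptions, not part of the
patch.

    // Sketch only. Assumes wikiData has already been loaded (for example by the
    // YAML loading step shown in the diff); import paths other than
    // '#data-checks' are guesses.
    import {reportDirectoryErrors} from '#data-checks';
    import {getAllFindSpecs} from '#find';   // assumed path
    import {showAggregate} from '#sugar';    // assumed path

    try {
      // Throws a single aggregate ("Directory errors detected") collecting one
      // error per duplicate directory, plus one combined error listing every
      // thing whose directory is null.
      reportDirectoryErrors(wikiData, {getAllFindSpecs});
    } catch (error) {
      showAggregate(error);
    }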