« get me outta code hell

yaml: filterDuplicateDirectories -> reportDuplicateDirectories - hsmusic-wiki - HSMusic - static wiki software cataloguing collaborative creation
about summary refs log tree commit diff
diff options
context:
space:
mode:
author    (quasar) nebula <qznebula@protonmail.com>  2024-01-30 13:50:31 -0400
committer (quasar) nebula <qznebula@protonmail.com>  2024-01-30 14:30:00 -0400
commit    a73cdbd50766267c2aa20b607424fd8cd0cf1964 (patch)
tree      52be2c199b1dbde81d14e680a6cec5e4dc3899a0
parent    7963d414a73f3031ff52d4076f8c2cfe68826150 (diff)
yaml: filterDuplicateDirectories -> reportDuplicateDirectories
-rw-r--r--  src/data/yaml.js  33
-rwxr-xr-x  src/upd8.js       15
2 files changed, 10 insertions, 38 deletions
diff --git a/src/data/yaml.js b/src/data/yaml.js
index e47da51c..10797861 100644
--- a/src/data/yaml.js
+++ b/src/data/yaml.js
@@ -1007,11 +1007,8 @@ export function sortWikiDataArrays(wikiData) {
 
 // Warn about directories which are reused across more than one of the same type
 // of Thing. Directories are the unique identifier for most data objects across
-// the wiki, so we have to make sure they aren't duplicated!  This also
-// altogether filters out instances of things with duplicate directories (so if
-// two tracks share the directory "megalovania", they'll both be skipped for the
-// build, for example).
-export function filterDuplicateDirectories(wikiData) {
+// the wiki, so we have to make sure they aren't duplicated!
+export function reportDuplicateDirectories(wikiData) {
   const deduplicateSpec = [
     'albumData',
     'artTagData',
@@ -1058,32 +1055,10 @@ export function filterDuplicateDirectories(wikiData) {
             places.map(thing => ` - ` + inspect(thing)).join('\n'));
         });
       }
-
-      const allDuplicatedThings = Object.values(directoryPlaces)
-        .filter((arr) => arr.length > 1)
-        .flat();
-
-      const filteredThings = thingData
-        .filter((thing) => !allDuplicatedThings.includes(thing));
-
-      wikiData[thingDataProp] = filteredThings;
     });
   }
 
-  // TODO: This code closes the aggregate but it generally gets closed again
-  // by the caller. This works but it might be weird to assume closing an
-  // aggregate twice is okay, maybe there's a better solution? Expose a new
-  // function on aggregates for checking if it *would* error?
-  // (i.e: errors.length > 0)
-  try {
-    aggregate.close();
-  } catch (error) {
-    // Duplicate entries were found and filtered out, resulting in altered
-    // wikiData arrays. These must be re-linked so objects receive the new
-    // data.
-    linkWikiDataArrays(wikiData);
-  }
-  return aggregate;
+  aggregate.close();
 }
 
 // Warn about references across data which don't match anything.  This involves
@@ -1413,7 +1388,7 @@ export async function quickLoadAllFromYAML(dataPath, {
   linkWikiDataArrays(wikiData);
 
   try {
-    filterDuplicateDirectories(wikiData).close();
+    reportDuplicateDirectories(wikiData).close();
     logInfo`No duplicate directories found. (complete data)`;
   } catch (error) {
     showAggregate(error);
diff --git a/src/upd8.js b/src/upd8.js
index 4d057b2f..eaf6b435 100755
--- a/src/upd8.js
+++ b/src/upd8.js
@@ -68,10 +68,10 @@ import genThumbs, {
 } from '#thumbs';
 
 import {
-  filterDuplicateDirectories,
   filterReferenceErrors,
   linkWikiDataArrays,
   loadAndProcessDataDocuments,
+  reportDuplicateDirectories,
   sortWikiDataArrays,
 } from '#yaml';
 
@@ -131,7 +131,7 @@ async function main() {
     precacheCommonData:
       {...defaultStepStatus, name: `precache common data`},
 
-    filterDuplicateDirectories:
+    reportDuplicateDirectories:
       {...defaultStepStatus, name: `filter duplicate directories`},
 
     filterReferenceErrors:
@@ -1110,19 +1110,16 @@ async function main() {
   // Filter out any things with duplicate directories throughout the data,
   // warning about them too.
 
-  Object.assign(stepStatusSummary.filterDuplicateDirectories, {
+  Object.assign(stepStatusSummary.reportDuplicateDirectories, {
     status: STATUS_STARTED_NOT_DONE,
     timeStart: Date.now(),
   });
 
-  const filterDuplicateDirectoriesAggregate =
-    filterDuplicateDirectories(wikiData);
-
   try {
-    filterDuplicateDirectoriesAggregate.close();
+    reportDuplicateDirectories(wikiData);
     logInfo`No duplicate directories found - nice!`;
 
-    Object.assign(stepStatusSummary.filterDuplicateDirectories, {
+    Object.assign(stepStatusSummary.reportDuplicateDirectories, {
       status: STATUS_DONE_CLEAN,
       timeEnd: Date.now(),
     });
@@ -1134,7 +1131,7 @@ async function main() {
     logWarn`correct, the build can't continue. Specify unique 'Directory' fields in`;
     logWarn`some or all of these data entries to resolve the errors.`;
 
-    Object.assign(stepStatusSummary.filterDuplicateDirectories, {
+    Object.assign(stepStatusSummary.reportDuplicateDirectories, {
       status: STATUS_FATAL_ERROR,
       annotation: `duplicate directories found`,
       timeEnd: Date.now(),