
hsmusic-wiki - HSMusic - static wiki software cataloguing collaborative creation
Diffstat (limited to 'src/data/yaml.js')
-rw-r--r--  src/data/yaml.js  1978
1 file changed, 853 insertions, 1125 deletions
diff --git a/src/data/yaml.js b/src/data/yaml.js
index 32cf729..86f3014 100644
--- a/src/data/yaml.js
+++ b/src/data/yaml.js
@@ -1,595 +1,494 @@
 // yaml.js - specification for HSMusic YAML data file format and utilities for
-// loading and processing YAML files and documents
+// loading, processing, and validating YAML files and documents
 
-import * as path from 'path';
-import yaml from 'js-yaml';
+import {readFile, stat} from 'node:fs/promises';
+import * as path from 'node:path';
+import {inspect as nodeInspect} from 'node:util';
 
-import { readFile } from 'fs/promises';
-import { inspect as nodeInspect } from 'util';
+import yaml from 'js-yaml';
 
-import {
-    Album,
-    Artist,
-    ArtTag,
-    Flash,
-    FlashAct,
-    Group,
-    GroupCategory,
-    HomepageLayout,
-    HomepageLayoutAlbumsRow,
-    HomepageLayoutRow,
-    NewsEntry,
-    StaticPage,
-    Thing,
-    Track,
-    TrackGroup,
-    WikiInfo,
-} from './things.js';
+import {colors, ENABLE_COLOR, logInfo, logWarn} from '#cli';
+import {sortByName} from '#sort';
+import {atOffset, empty, filterProperties, typeAppearance, withEntries}
+  from '#sugar';
+import Thing from '#thing';
+import thingConstructors from '#things';
 
 import {
-    color,
-    ENABLE_COLOR,
-    logInfo,
-    logWarn,
-} from '../util/cli.js';
+  filterReferenceErrors,
+  reportContentTextErrors,
+  reportDuplicateDirectories,
+} from '#data-checks';
 
 import {
-    decorateErrorWithIndex,
-    mapAggregate,
-    openAggregate,
-    showAggregate,
-    withAggregate,
-} from '../util/sugar.js';
-
-import {
-    sortByDate,
-    sortByName,
-} from '../util/wiki-data.js';
-
-import find, { bindFind } from '../util/find.js';
-import { findFiles } from '../util/io.js';
-
-// --> General supporting stuff
-
-function inspect(value) {
-    return nodeInspect(value, {colors: ENABLE_COLOR});
+  annotateErrorWithFile,
+  decorateErrorWithIndex,
+  decorateErrorWithAnnotation,
+  openAggregate,
+  showAggregate,
+  withAggregate,
+} from '#aggregate';
+
+function inspect(value, opts = {}) {
+  return nodeInspect(value, {colors: ENABLE_COLOR, ...opts});
 }
 
-// --> YAML data repository structure constants
-
-export const WIKI_INFO_FILE = 'wiki-info.yaml';
-export const BUILD_DIRECTIVE_DATA_FILE = 'build-directives.yaml';
-export const HOMEPAGE_LAYOUT_DATA_FILE = 'homepage.yaml';
-export const ARTIST_DATA_FILE = 'artists.yaml';
-export const FLASH_DATA_FILE = 'flashes.yaml';
-export const NEWS_DATA_FILE = 'news.yaml';
-export const ART_TAG_DATA_FILE = 'tags.yaml';
-export const GROUP_DATA_FILE = 'groups.yaml';
-export const STATIC_PAGE_DATA_FILE = 'static-pages.yaml';
-
-export const DATA_ALBUM_DIRECTORY = 'album';
-
-// --> Document processing functions
-
 // General function for inputting a single document (usually loaded from YAML)
 // and outputting an instance of a provided Thing subclass.
 //
 // makeProcessDocument is a factory function: the returned function will take a
 // document and apply the configuration passed to makeProcessDocument in order
 // to construct a Thing subclass.
-function makeProcessDocument(thingClass, {
-    // Optional early step for transforming field values before providing them
-    // to the Thing's update() method. This is useful when the input format
-    // (i.e. values in the document) differ from the format the actual Thing
-    // expects.
-    //
-    // Each key and value are a field name (not an update() property) and a
-    // function which takes the value for that field and returns the value which
-    // will be passed on to update().
-    fieldTransformations = {},
-
-    // Mapping of Thing.update() source properties to field names.
-    //
-    // Note this is property -> field, not field -> property. This is a
-    // shorthand convenience because properties are generally typical
-    // camel-cased JS properties, while fields may contain whitespace and be
-    // more easily represented as quoted strings.
-    propertyFieldMapping,
-
-    // Completely ignored fields. These won't throw an unknown field error if
-    // they're present in a document, but they won't be used for Thing property
-    // generation, either. Useful for stuff that's present in data files but not
-    // yet implemented as part of a Thing's data model!
-    ignoredFields = []
+//
+function makeProcessDocument(thingConstructor, {
+  // The bulk of configuration happens here in the spec's `fields` property.
+  // Each key is a field that's expected on the source document; fields that
+  // don't match one of these keys will cause an error. Values are object
+  // entries describing what to do with the field.
+  //
+  // A field entry's `property` tells what property the value for this field
+  // will be put into, on the respective Thing (subclass) instance.
+  //
+  // A field entry's `transform` optionally allows converting the raw value in
+  // YAML into some other format before setting it on the Thing instance.
+  //
+  // If a field entry has `ignore: true`, it will be completely skipped by the
+  // YAML parser - it won't be validated, read, or loaded into data objects.
+  // This is mainly useful for fields that are purely annotational or are
+  // currently placeholders.
+  //
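+  // As a purely illustrative example (these field names are made up), a
+  // field spec might look like:
+  //
+  //   fields: {
+  //     'Directory': {property: 'directory'},
+  //     'Date': {property: 'date', transform: parseDate},
+  //     'Review Points': {ignore: true},
+  //   }
+  //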
+  fields: fieldSpecs = {},
+
+  // List of fields which are invalid when coexisting in a document.
+  // Data objects are generally lenient about which properties go together,
+  // allowing properties to be set independently of each other instead of
+  // complaining about invalid or unused-data cases. But it's still useful
+  // to see these kinds of errors when actually validating YAML files!
+  //
+  // Each item of this array should itself be an object with a descriptive
+  // message and a list of fields. Of those fields, none should ever coexist
+  // with any other. For example:
+  //
+  //   [
+  //     {message: '...', fields: ['A', 'B', 'C']},
+  //     {message: '...', fields: ['C', 'D']},
+  //   ]
+  //
+  // ...means A can't coexist with B or C, B can't coexist with A or C, and
+  // C can't coexist with A, B, or D - but it's okay for D to coexist with
+  // A or B.
+  //
+  invalidFieldCombinations = [],
 }) {
-    if (!propertyFieldMapping) {
-        throw new Error(`Expected propertyFieldMapping to be provided`);
-    }
-
-    const knownFields = Object.values(propertyFieldMapping);
-
-    // Invert the property-field mapping, since it'll come in handy for
-    // assigning update() source values later.
-    const fieldPropertyMapping = Object.fromEntries(
-        (Object.entries(propertyFieldMapping)
-            .map(([ property, field ]) => [field, property])));
-
-    const decorateErrorWithName = fn => {
-        const nameField = propertyFieldMapping['name'];
-        if (!nameField) return fn;
-
-        return document => {
-            try {
-                return fn(document);
-            } catch (error) {
-                const name = document[nameField];
-                error.message = (name
-                    ? `(name: ${inspect(name)}) ${error.message}`
-                    : `(${color.dim(`no name found`)}) ${error.message}`);
-                throw error;
-            }
-        };
+  if (!thingConstructor) {
+    throw new Error(`Missing Thing class`);
+  }
+
+  if (!fieldSpecs) {
+    throw new Error(`Expected fields to be provided`);
+  }
+
+  const knownFields = Object.keys(fieldSpecs);
+
+  const ignoredFields =
+    Object.entries(fieldSpecs)
+      .filter(([, {ignore}]) => ignore)
+      .map(([field]) => field);
+
+  const propertyToField =
+    withEntries(fieldSpecs, entries => entries
+      .map(([field, {property}]) => [property, field]));
+
+  // TODO: Is this function even necessary??
+  // Aren't we doing basically the same work in the function it's decorating???
+  const decorateErrorWithName = (fn) => {
+    const nameField = propertyToField.name;
+    if (!nameField) return fn;
+
+    return (document) => {
+      try {
+        return fn(document);
+      } catch (error) {
+        const name = document[nameField];
+        error.message = name
+          ? `(name: ${inspect(name)}) ${error.message}`
+          : `(${colors.dim(`no name found`)}) ${error.message}`;
+        throw error;
+      }
     };
-
-    return decorateErrorWithName(document => {
-        const documentEntries = Object.entries(document)
-            .filter(([ field ]) => !ignoredFields.includes(field));
-
-        const unknownFields = documentEntries
-            .map(([ field ]) => field)
-            .filter(field => !knownFields.includes(field));
-
-        if (unknownFields.length) {
-            throw new makeProcessDocument.UnknownFieldsError(unknownFields);
-        }
-
-        const fieldValues = {};
-
-        for (const [ field, value ] of documentEntries) {
-            if (Object.hasOwn(fieldTransformations, field)) {
-                fieldValues[field] = fieldTransformations[field](value);
-            } else {
-                fieldValues[field] = value;
-            }
-        }
-
-        const sourceProperties = {};
-
-        for (const [ field, value ] of Object.entries(fieldValues)) {
-            const property = fieldPropertyMapping[field];
-            sourceProperties[property] = value;
-        }
-
-        const thing = Reflect.construct(thingClass, []);
-
-        withAggregate({message: `Errors applying ${color.green(thingClass.name)} properties`}, ({ call }) => {
-            for (const [ property, value ] of Object.entries(sourceProperties)) {
-                call(() => (thing[property] = value));
-            }
-        });
-
-        return thing;
+  };
+
+  return decorateErrorWithName((document) => {
+    const nameField = propertyToField.name;
+    const namePart =
+      (nameField
+        ? (document[nameField]
+          ? ` named ${colors.green(`"${document[nameField]}"`)}`
+          : ` (name field, "${nameField}", not specified)`)
+        : ``);
+
+    const constructorPart =
+      (thingConstructor[Thing.friendlyName]
+        ? thingConstructor[Thing.friendlyName]
+     : thingConstructor.name
+        ? thingConstructor.name
+        : `document`);
+
+    const aggregate = openAggregate({
+      message: `Errors processing ${constructorPart}` + namePart,
     });
-}
-
-makeProcessDocument.UnknownFieldsError = class UnknownFieldsError extends Error {
-    constructor(fields) {
-        super(`Unknown fields present: ${fields.join(', ')}`);
-        this.fields = fields;
-    }
-};
 
-export const processAlbumDocument = makeProcessDocument(Album, {
-    fieldTransformations: {
-        'Artists': parseContributors,
-        'Cover Artists': parseContributors,
-        'Default Track Cover Artists': parseContributors,
-        'Wallpaper Artists': parseContributors,
-        'Banner Artists': parseContributors,
+    const documentEntries = Object.entries(document)
+      .filter(([field]) => !ignoredFields.includes(field));
 
-        'Date': value => new Date(value),
-        'Date Added': value => new Date(value),
-        'Cover Art Date': value => new Date(value),
-        'Default Track Cover Art Date': value => new Date(value),
+    const skippedFields = new Set();
 
-        'Banner Dimensions': parseDimensions,
+    const unknownFields = documentEntries
+      .map(([field]) => field)
+      .filter((field) => !knownFields.includes(field));
 
-        'Additional Files': parseAdditionalFiles,
-    },
+    if (!empty(unknownFields)) {
+      aggregate.push(new UnknownFieldsError(unknownFields));
 
-    propertyFieldMapping: {
-        name: 'Album',
-
-        color: 'Color',
-        directory: 'Directory',
-        urls: 'URLs',
-
-        artistContribsByRef: 'Artists',
-        coverArtistContribsByRef: 'Cover Artists',
-        trackCoverArtistContribsByRef: 'Default Track Cover Artists',
-
-        coverArtFileExtension: 'Cover Art File Extension',
-        trackCoverArtFileExtension: 'Track Art File Extension',
+      for (const field of unknownFields) {
+        skippedFields.add(field);
+      }
+    }
 
-        wallpaperArtistContribsByRef: 'Wallpaper Artists',
-        wallpaperStyle: 'Wallpaper Style',
-        wallpaperFileExtension: 'Wallpaper File Extension',
+    const presentFields = Object.keys(document);
 
-        bannerArtistContribsByRef: 'Banner Artists',
-        bannerStyle: 'Banner Style',
-        bannerFileExtension: 'Banner File Extension',
-        bannerDimensions: 'Banner Dimensions',
+    const fieldCombinationErrors = [];
 
-        date: 'Date',
-        trackArtDate: 'Default Track Cover Art Date',
-        coverArtDate: 'Cover Art Date',
-        dateAddedToWiki: 'Date Added',
+    for (const {message, fields} of invalidFieldCombinations) {
+      const fieldsPresent =
+        presentFields.filter(field => fields.includes(field));
 
-        hasCoverArt: 'Has Cover Art',
-        hasTrackArt: 'Has Track Art',
-        hasTrackNumbers: 'Has Track Numbers',
-        isMajorRelease: 'Major Release',
-        isListedOnHomepage: 'Listed on Homepage',
+      if (fieldsPresent.length >= 2) {
+        const filteredDocument =
+          filterProperties(
+            document,
+            fieldsPresent,
+            {preserveOriginalOrder: true});
 
-        groupsByRef: 'Groups',
-        artTagsByRef: 'Art Tags',
-        commentary: 'Commentary',
+        fieldCombinationErrors.push(
+          new FieldCombinationError(filteredDocument, message));
 
-        additionalFiles: 'Additional Files',
+        for (const field of Object.keys(filteredDocument)) {
+          skippedFields.add(field);
+        }
+      }
     }
-});
-
-export const processTrackGroupDocument = makeProcessDocument(TrackGroup, {
-    fieldTransformations: {
-        'Date Originally Released': value => new Date(value),
-    },
 
-    propertyFieldMapping: {
-        name: 'Group',
-        color: 'Color',
-        dateOriginallyReleased: 'Date Originally Released',
+    if (!empty(fieldCombinationErrors)) {
+      aggregate.push(new FieldCombinationAggregateError(fieldCombinationErrors));
     }
-});
-
-export const processTrackDocument = makeProcessDocument(Track, {
-    fieldTransformations: {
-        'Duration': getDurationInSeconds,
 
-        'Date First Released': value => new Date(value),
-        'Cover Art Date': value => new Date(value),
+    const fieldValues = {};
 
-        'Artists': parseContributors,
-        'Contributors': parseContributors,
-        'Cover Artists': parseContributors,
+    for (const [field, documentValue] of documentEntries) {
+      if (skippedFields.has(field)) continue;
 
-        'Additional Files': parseAdditionalFiles,
-    },
+      // This variable would like to certify itself as "not into capitalism".
+      let propertyValue =
+        (fieldSpecs[field].transform
+          ? fieldSpecs[field].transform(documentValue)
+          : documentValue);
 
-    propertyFieldMapping: {
-        name: 'Track',
+      // Completely blank items in a YAML list are read as null.
+      // They're handy to have around when filling out a document and shouldn't
+      // be considered an error (or data at all).
+      if (Array.isArray(propertyValue)) {
+        const wasEmpty = empty(propertyValue);
 
-        directory: 'Directory',
-        duration: 'Duration',
-        urls: 'URLs',
+        propertyValue =
+          propertyValue.filter(item => item !== null);
 
-        coverArtDate: 'Cover Art Date',
-        coverArtFileExtension: 'Cover Art File Extension',
-        dateFirstReleased: 'Date First Released',
-        hasCoverArt: 'Has Cover Art',
-        hasURLs: 'Has URLs',
+        const isEmpty = empty(propertyValue);
 
-        referencedTracksByRef: 'Referenced Tracks',
-        artistContribsByRef: 'Artists',
-        contributorContribsByRef: 'Contributors',
-        coverArtistContribsByRef: 'Cover Artists',
-        artTagsByRef: 'Art Tags',
-        originalReleaseTrackByRef: 'Originally Released As',
-
-        commentary: 'Commentary',
-        lyrics: 'Lyrics',
-
-        additionalFiles: 'Additional Files',
-    },
+        // Don't set arrays which are empty as a result of the above filter.
+        // Arrays which were originally empty, i.e. `Field: []`, are still
+        // valid data, but if it's just an array not containing any filled out
+        // items, it should be treated as a placeholder and skipped over.
+        if (isEmpty && !wasEmpty) {
+          propertyValue = null;
+        }
+      }
 
-    ignoredFields: ['Sampled Tracks']
-});
+      fieldValues[field] = propertyValue;
+    }
 
-export const processArtistDocument = makeProcessDocument(Artist, {
-    propertyFieldMapping: {
-        name: 'Artist',
+    const thing = Reflect.construct(thingConstructor, []);
 
-        directory: 'Directory',
-        urls: 'URLs',
-        hasAvatar: 'Has Avatar',
-        avatarFileExtension: 'Avatar File Extension',
+    const fieldValueErrors = [];
 
-        aliasNames: 'Aliases',
+    for (const [field, value] of Object.entries(fieldValues)) {
+      const {property} = fieldSpecs[field];
 
-        contextNotes: 'Context Notes'
-    },
+      try {
+        thing[property] = value;
+      } catch (caughtError) {
+        skippedFields.add(field);
+        fieldValueErrors.push(new FieldValueError(
+          field, value, {cause: caughtError}));
+      }
+    }
 
-    ignoredFields: ['Dead URLs']
-});
+    if (!empty(fieldValueErrors)) {
+      aggregate.push(new FieldValueAggregateError(
+        fieldValueErrors, thingConstructor));
+    }
 
-export const processFlashDocument = makeProcessDocument(Flash, {
-    fieldTransformations: {
-        'Date': value => new Date(value),
+    if (skippedFields.size >= 1) {
+      aggregate.push(
+        new SkippedFieldsSummaryError(
+          filterProperties(
+            document,
+            Array.from(skippedFields),
+            {preserveOriginalOrder: true})));
+    }
 
-        'Contributors': parseContributors,
-    },
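+    // Note the aggregate is returned open, not closed - the caller is
+    // responsible for closing it (and thus surfacing any errors collected
+    // above).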
+    return {thing, aggregate};
+  });
+}
 
-    propertyFieldMapping: {
-        name: 'Flash',
+export class UnknownFieldsError extends Error {
+  constructor(fields) {
+    super(`Unknown fields ignored: ${fields.map(field => colors.red(field)).join(', ')}`);
+    this.fields = fields;
+  }
+}
 
-        directory: 'Directory',
-        page: 'Page',
-        date: 'Date',
-        coverArtFileExtension: 'Cover Art File Extension',
+export class FieldCombinationAggregateError extends AggregateError {
+  constructor(errors) {
+    super(errors, `Invalid field combinations - all involved fields ignored`);
+  }
+}
 
-        featuredTracksByRef: 'Featured Tracks',
-        contributorContribsByRef: 'Contributors',
-        urls: 'URLs'
-    },
-});
+export class FieldCombinationError extends Error {
+  constructor(fields, message) {
+    const fieldNames = Object.keys(fields);
+
+    const fieldNamesText =
+      fieldNames
+        .map(field => colors.red(field))
+        .join(', ');
+
+    const mainMessage = `Don't combine ${fieldNamesText}`;
+
+    const causeMessage =
+      (typeof message === 'function'
+        ? message(fields)
+     : typeof message === 'string'
+        ? message
+        : null);
+
+    super(mainMessage, {
+      cause:
+        (causeMessage
+          ? new Error(causeMessage)
+          : null),
+    });
 
-export const processFlashActDocument = makeProcessDocument(FlashAct, {
-    propertyFieldMapping: {
-        name: 'Act',
-        color: 'Color',
-        anchor: 'Anchor',
-        jump: 'Jump',
-        jumpColor: 'Jump Color'
-    }
-});
-
-export const processNewsEntryDocument = makeProcessDocument(NewsEntry, {
-    fieldTransformations: {
-        'Date': value => new Date(value)
-    },
-
-    propertyFieldMapping: {
-        name: 'Name',
-        directory: 'Directory',
-        date: 'Date',
-        content: 'Content',
-    }
-});
-
-export const processArtTagDocument = makeProcessDocument(ArtTag, {
-    propertyFieldMapping: {
-        name: 'Tag',
-        directory: 'Directory',
-        color: 'Color',
-        isContentWarning: 'Is CW'
-    }
-});
-
-export const processGroupDocument = makeProcessDocument(Group, {
-    propertyFieldMapping: {
-        name: 'Group',
-        directory: 'Directory',
-        description: 'Description',
-        urls: 'URLs',
-    }
-});
+    this.fields = fields;
+  }
+}
 
-export const processGroupCategoryDocument = makeProcessDocument(GroupCategory, {
-    propertyFieldMapping: {
-        name: 'Category',
-        color: 'Color',
-    }
-});
+export class FieldValueAggregateError extends AggregateError {
+  [Symbol.for('hsmusic.aggregate.translucent')] = true;
 
-export const processStaticPageDocument = makeProcessDocument(StaticPage, {
-    propertyFieldMapping: {
-        name: 'Name',
-        nameShort: 'Short Name',
-        directory: 'Directory',
+  constructor(errors, thingConstructor) {
+    const constructorText =
+      colors.green(thingConstructor.name);
 
-        content: 'Content',
-        stylesheet: 'Style',
+    super(
+      errors,
+      `Errors processing field values for ${constructorText}`);
+  }
+}
 
-        showInNavigationBar: 'Show in Navigation Bar'
-    }
-});
-
-export const processWikiInfoDocument = makeProcessDocument(WikiInfo, {
-    propertyFieldMapping: {
-        name: 'Name',
-        nameShort: 'Short Name',
-        color: 'Color',
-        description: 'Description',
-        footerContent: 'Footer Content',
-        defaultLanguage: 'Default Language',
-        canonicalBase: 'Canonical Base',
-        divideTrackListsByGroupsByRef: 'Divide Track Lists By Groups',
-        enableFlashesAndGames: 'Enable Flashes & Games',
-        enableListings: 'Enable Listings',
-        enableNews: 'Enable News',
-        enableArtTagUI: 'Enable Art Tag UI',
-        enableGroupUI: 'Enable Group UI',
-    }
-});
+export class FieldValueError extends Error {
+  constructor(field, value, options) {
+    const fieldText =
+      colors.green(`"${field}"`);
 
-export const processHomepageLayoutDocument = makeProcessDocument(HomepageLayout, {
-    propertyFieldMapping: {
-        sidebarContent: 'Sidebar Content'
-    },
+    const valueText =
+      inspect(value, {maxStringLength: 40});
 
-    ignoredFields: ['Homepage']
-});
+    super(
+      `Failed to set ${fieldText} field to ${valueText}`,
+      options);
+  }
+}
 
-export function makeProcessHomepageLayoutRowDocument(rowClass, spec) {
-    return makeProcessDocument(rowClass, {
-        ...spec,
+export class SkippedFieldsSummaryError extends Error {
+  constructor(filteredDocument) {
+    const entries = Object.entries(filteredDocument);
+
+    const lines =
+      entries.map(([field, value]) =>
+        ` - ${field}: ` +
+        inspect(value, {maxStringLength: 70})
+          .split('\n')
+          .map((line, index) => index === 0 ? line : `   ${line}`)
+          .join('\n'));
+
+    const numFieldsText =
+      (entries.length === 1
+        ? `1 field`
+        : `${entries.length} fields`);
+
+    super(
+      colors.bright(colors.yellow(`Altogether, skipped ${numFieldsText}:\n`)) +
+      lines.join('\n') + '\n' +
+      colors.bright(colors.yellow(`See above errors for details.`)));
+  }
+}
 
-        propertyFieldMapping: {
-            name: 'Row',
-            color: 'Color',
-            type: 'Type',
-            ...spec.propertyFieldMapping,
-        }
-    });
+export function parseDate(date) {
+  return new Date(date);
 }
 
-export const homepageLayoutRowTypeProcessMapping = {
-    albums: makeProcessHomepageLayoutRowDocument(HomepageLayoutAlbumsRow, {
-        propertyFieldMapping: {
-            sourceGroupByRef: 'Group',
-            countAlbumsFromGroup: 'Count',
-            sourceAlbumsByRef: 'Albums',
-            actionLinks: 'Actions'
-        }
-    })
-};
+export function parseDuration(string) {
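+  // Durations are written as "MM:SS" or "H:MM:SS" and parsed into a total
+  // number of seconds - e.g. "4:20" becomes 260, and "1:10:25" becomes 4225.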
+  if (typeof string !== 'string') {
+    return string;
+  }
+
+  const parts = string.split(':').map((n) => parseInt(n));
+  if (parts.length === 3) {
+    return parts[0] * 3600 + parts[1] * 60 + parts[2];
+  } else if (parts.length === 2) {
+    return parts[0] * 60 + parts[1];
+  } else {
+    return 0;
+  }
+}
 
-export function processHomepageLayoutRowDocument(document) {
-    const type = document['Type'];
+export function parseAdditionalFiles(array) {
+  if (!Array.isArray(array)) {
+    // Error will be caught when validating against whatever this value is
+    return array;
+  }
+
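+  // Each item is expected to have 'Title' and 'Files' fields, plus an
+  // optional 'Description'.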
+  return array.map((item) => ({
+    title: item['Title'],
+    description: item['Description'] ?? null,
+    files: item['Files'],
+  }));
+}
 
-    const match = Object.entries(homepageLayoutRowTypeProcessMapping)
-        .find(([ key ]) => key === type);
+export const extractAccentRegex =
+  /^(?<main>.*?)(?: \((?<accent>.*)\))?$/;
 
-    if (!match) {
-        throw new TypeError(`No processDocument function for row type ${type}!`);
-    }
+export const extractPrefixAccentRegex =
+  /^(?:\((?<accent>.*)\) )?(?<main>.*?)$/;
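+
+// For example, extractAccentRegex splits "Some Name (detail)" into the groups
+// main = "Some Name" and accent = "detail"; extractPrefixAccentRegex handles
+// the prefix form, "(detail) Some Name".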
 
-    return match[1](document);
-}
+export function parseContributors(contributionStrings) {
+  // If this isn't something we can parse, just return it as-is.
+  // The Thing object's validators will handle the data error better
+  // than we're able to here.
+  if (!Array.isArray(contributionStrings)) {
+    return contributionStrings;
+  }
 
-// --> Utilities shared across document parsing functions
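+  // Each entry is either a plain string, "Who" or "Who (What)", or an object
+  // with 'Who' and (optionally) 'What' fields; both forms are turned into
+  // {who, what} objects here.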
+  return contributionStrings.map(item => {
+    if (typeof item === 'object' && item['Who'])
+      return {who: item['Who'], what: item['What'] ?? null};
 
-export function getDurationInSeconds(string) {
-    if (typeof string === 'number') {
-        return string;
-    }
+    if (typeof item !== 'string') return item;
 
-    if (typeof string !== 'string') {
-        throw new TypeError(`Expected a string or number, got ${string}`);
-    }
+    const match = item.match(extractAccentRegex);
+    if (!match) return item;
 
-    const parts = string.split(':').map(n => parseInt(n))
-    if (parts.length === 3) {
-        return parts[0] * 3600 + parts[1] * 60 + parts[2]
-    } else if (parts.length === 2) {
-        return parts[0] * 60 + parts[1]
-    } else {
-        return 0
-    }
+    return {
+      who: match.groups.main,
+      what: match.groups.accent ?? null,
+    };
+  });
 }
 
-export function parseAdditionalFiles(array) {
-    if (!array) return null;
-    if (!Array.isArray(array)) {
-        // Error will be caught when validating against whatever this value is
-        return array;
-    }
+export function parseAdditionalNames(additionalNameStrings) {
+  if (!Array.isArray(additionalNameStrings)) {
+    return additionalNameStrings;
+  }
 
-    return array.map(item => ({
-        title: item['Title'],
-        description: item['Description'] ?? null,
-        files: item['Files']
-    }));
-}
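+  // Entries are either plain strings, "Name" or "Name (annotation)", or
+  // objects with 'Name' and (optionally) 'Annotation' fields.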
+  return additionalNameStrings.map(item => {
+    if (typeof item === 'object' && item['Name'])
+      return {name: item['Name'], annotation: item['Annotation'] ?? null};
 
-export function parseCommentary(text) {
-    if (text) {
-        const lines = String(text).split('\n');
-        if (!lines[0].replace(/<\/b>/g, '').includes(':</i>')) {
-            return {error: `An entry is missing commentary citation: "${lines[0].slice(0, 40)}..."`};
-        }
-        return text;
-    } else {
-        return null;
-    }
-}
+    if (typeof item !== 'string') return item;
 
-export function parseContributors(contributors) {
-    if (!contributors) {
-        return null;
-    }
+    const match = item.match(extractAccentRegex);
+    if (!match) return item;
 
-    if (contributors.length === 1 && contributors[0].startsWith('<i>')) {
-        const arr = [];
-        arr.textContent = contributors[0];
-        return arr;
-    }
+    return {
+      name: match.groups.main,
+      annotation: match.groups.accent ?? null,
+    };
+  });
+}
 
-    contributors = contributors.map(contrib => {
-        // 8asically, the format is "Who (What)", or just "Who". 8e sure to
-        // keep in mind that "what" doesn't necessarily have a value!
-        const match = contrib.match(/^(.*?)( \((.*)\))?$/);
-        if (!match) {
-            return contrib;
-        }
-        const who = match[1];
-        const what = match[3] || null;
-        return {who, what};
-    });
+export function parseDimensions(string) {
+  // It's technically possible to pass an array like [30, 40] through here.
+  // That's not really an issue because if it isn't of the appropriate shape,
+  // the Thing object's validators will handle the error.
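+  //
+  // Accepted formats look like "1280x720", "1280, 720", or "1280 * 720",
+  // and are parsed into an array of two numbers, e.g. [1280, 720].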
+  if (typeof string !== 'string') {
+    return string;
+  }
 
-    const badContributor = contributors.find(val => typeof val === 'string');
-    if (badContributor) {
-        return {error: `An entry has an incorrectly formatted contributor, "${badContributor}".`};
-    }
+  const parts = string.split(/[x,* ]+/g);
 
-    if (contributors.length === 1 && contributors[0].who === 'none') {
-        return null;
-    }
+  if (parts.length !== 2) {
+    throw new Error(`Invalid dimensions: ${string} (expected "width & height")`);
+  }
 
-    return contributors;
-}
+  const nums = parts.map((part) => Number(part.trim()));
 
-function parseDimensions(string) {
-    if (!string) {
-        return null;
-    }
+  if (nums.includes(NaN)) {
+    throw new Error(`Invalid dimensions: ${string} (couldn't parse as numbers)`);
+  }
 
-    const parts = string.split(/[x,* ]+/g);
-    if (parts.length !== 2) throw new Error(`Invalid dimensions: ${string} (expected width & height)`);
-    const nums = parts.map(part => Number(part.trim()));
-    if (nums.includes(NaN)) throw new Error(`Invalid dimensions: ${string} (couldn't parse as numbers)`);
-    return nums;
+  return nums;
 }
 
-// --> Data repository loading functions and descriptors
-
 // documentModes: Symbols indicating sets of behavior for loading and processing
 // data files.
 export const documentModes = {
-    // onePerFile: One document per file. Expects files array (or function) and
-    // processDocument function. Obviously, each specified data file should only
-    // contain one YAML document (an error will be thrown otherwise). Calls save
-    // with an array of processed documents (wiki objects).
-    onePerFile: Symbol('Document mode: onePerFile'),
-
-    // headerAndEntries: One or more documents per file; the first document is
-    // treated as a "header" and represents data which pertains to all following
-    // "entry" documents. Expects files array (or function) and
-    // processHeaderDocument and processEntryDocument functions. Calls save with
-    // an array of {header, entries} objects.
-    //
-    // Please note that the final results loaded from each file may be "missing"
-    // data objects corresponding to entry documents if the processEntryDocument
-    // function throws on any entries, resulting in partial data provided to
-    // save() - errors will be caught and thrown in the final buildSteps
-    // aggregate. However, if the processHeaderDocument function fails, all
-    // following documents in the same file will be ignored as well (i.e. an
-    // entire file will be excempt from the save() function's input).
-    headerAndEntries: Symbol('Document mode: headerAndEntries'),
-
-    // allInOne: One or more documents, all contained in one file. Expects file
-    // string (or function) and processDocument function. Calls save with an
-    // array of processed documents (wiki objects).
-    allInOne: Symbol('Document mode: allInOne'),
-
-    // oneDocumentTotal: Just a single document, represented in one file.
-    // Expects file string (or function) and processDocument function. Calls
-    // save with the single processed wiki document (data object).
-    //
-    // Please note that if the single document fails to process, the save()
-    // function won't be called at all, generally resulting in an altogether
-    // missing property from the global wikiData object. This should be caught
-    // and handled externally.
-    oneDocumentTotal: Symbol('Document mode: oneDocumentTotal'),
+  // onePerFile: One document per file. Expects files array (or function) and
+  // processDocument function. Obviously, each specified data file should only
+  // contain one YAML document (an error will be thrown otherwise). Calls save
+  // with an array of processed documents (wiki objects).
+  onePerFile: Symbol('Document mode: onePerFile'),
+
+  // headerAndEntries: One or more documents per file; the first document is
+  // treated as a "header" and represents data which pertains to all following
+  // "entry" documents. Expects files array (or function) and
+  // processHeaderDocument and processEntryDocument functions. Calls save with
+  // an array of {header, entries} objects.
+  //
+  // Please note that the final results loaded from each file may be "missing"
+  // data objects corresponding to entry documents if the processEntryDocument
+  // function throws on any entries, resulting in partial data provided to
+  // save() - errors will be caught and thrown in the final buildSteps
+  // aggregate. However, if the processHeaderDocument function fails, all
+  // following documents in the same file will be ignored as well (i.e. an
+  // entire file will be exempt from the save() function's input).
+  headerAndEntries: Symbol('Document mode: headerAndEntries'),
+
+  // allInOne: One or more documents, all contained in one file. Expects file
+  // string (or function) and processDocument function. Calls save with an
+  // array of processed documents (wiki objects).
+  allInOne: Symbol('Document mode: allInOne'),
+
+  // oneDocumentTotal: Just a single document, represented in one file.
+  // Expects file string (or function) and processDocument function. Calls
+  // save with the single processed wiki document (data object).
+  //
+  // Please note that if the single document fails to process, the save()
+  // function won't be called at all, generally resulting in an altogether
+  // missing property from the global wikiData object. This should be caught
+  // and handled externally.
+  oneDocumentTotal: Symbol('Document mode: oneDocumentTotal'),
 };
 
 // dataSteps: Top-level array of "steps" for loading YAML document files.
@@ -624,670 +523,490 @@ export const documentModes = {
 //   them to each other, setting additional properties, etc). Input argument
 //   format depends on documentMode.
 //
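+// A (hypothetical) step provided by a Thing class might look like:
+//
+//   {
+//     title: `Process widgets file`,
+//     file: 'widgets.yaml',
+//     documentMode: documentModes.allInOne,
+//     documentThing: Widget,
+//     save: (results) => ({widgetData: results}),
+//   }
+//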
-export const dataSteps = [
-    {
-        title: `Process wiki info file`,
-        file: WIKI_INFO_FILE,
+export const getDataSteps = () => {
+  const steps = [];
 
-        documentMode: documentModes.oneDocumentTotal,
-        processDocument: processWikiInfoDocument,
+  for (const thingConstructor of Object.values(thingConstructors)) {
+    const getSpecFn = thingConstructor[Thing.getYamlLoadingSpec];
+    if (!getSpecFn) continue;
 
-        save(wikiInfo) {
-            if (!wikiInfo) {
-                return;
-            }
+    steps.push(getSpecFn({
+      documentModes,
+      thingConstructors,
+    }));
+  }
 
-            return {wikiInfo};
+  sortByName(steps, {getName: step => step.title});
+
+  return steps;
+};
+
+export async function loadAndProcessDataDocuments({dataPath}) {
+  const processDataAggregate = openAggregate({
+    message: `Errors processing data files`,
+  });
+  const wikiDataResult = {};
+
+  function decorateErrorWithFile(fn) {
+    return decorateErrorWithAnnotation(fn,
+      (caughtError, firstArg) =>
+        annotateErrorWithFile(
+          caughtError,
+          path.relative(
+            dataPath,
+            (typeof firstArg === 'object'
+              ? firstArg.file
+              : firstArg))));
+  }
+
+  function asyncDecorateErrorWithFile(fn) {
+    return decorateErrorWithFile(fn).async;
+  }
+
+  for (const dataStep of getDataSteps()) {
+    await processDataAggregate.nestAsync(
+      {
+        message: `Errors during data step: ${colors.bright(dataStep.title)}`,
+        translucent: true,
+      },
+      async ({call, callAsync, map, mapAsync, push}) => {
+        const {documentMode} = dataStep;
+
+        if (!Object.values(documentModes).includes(documentMode)) {
+          throw new Error(`Invalid documentMode: ${documentMode.toString()}`);
         }
-    },
-
-    {
-        title: `Process album files`,
-        files: async dataPath => (
-            (await findFiles(path.join(dataPath, DATA_ALBUM_DIRECTORY), {
-                filter: f => path.extname(f) === '.yaml',
-                joinParentDirectory: false
-            })).map(file => path.join(DATA_ALBUM_DIRECTORY, file))),
-
-        documentMode: documentModes.headerAndEntries,
-        processHeaderDocument: processAlbumDocument,
-        processEntryDocument(document) {
-            return ('Group' in document
-                ? processTrackGroupDocument(document)
-                : processTrackDocument(document));
-        },
-
-        save(results) {
-            const albumData = [];
-            const trackData = [];
-
-            for (const { header: album, entries } of results) {
-                // We can't mutate an array once it's set as a property
-                // value, so prepare the tracks and track groups that will
-                // show up in a track list all the way before actually
-                // applying them.
-                const trackGroups = [];
-                let currentTracksByRef = null;
-                let currentTrackGroup = null;
-
-                const albumRef = Thing.getReference(album);
-
-                function closeCurrentTrackGroup() {
-                    if (currentTracksByRef) {
-                        let trackGroup;
-
-                        if (currentTrackGroup) {
-                            trackGroup = currentTrackGroup;
-                        } else {
-                            trackGroup = new TrackGroup();
-                            trackGroup.name = `Default Track Group`;
-                            trackGroup.isDefaultTrackGroup = true;
-                        }
-
-                        trackGroup.album = album;
-                        trackGroup.tracksByRef = currentTracksByRef;
-                        trackGroups.push(trackGroup);
-                    }
-                }
 
-                for (const entry of entries) {
-                    if (entry instanceof TrackGroup) {
-                        closeCurrentTrackGroup();
-                        currentTracksByRef = [];
-                        currentTrackGroup = entry;
-                        continue;
-                    }
+        // Hear me out, it's been like 1200 years since I wrote the rest of
+        // this beautifully error-containing code and I don't know how to
+        // integrate this nicely. So I'm just returning the result and the
+        // error that should be thrown. Yes, we're back in callback hell,
+        // just without the callbacks. Thank you.
+        const filterBlankDocuments = documents => {
+          const aggregate = openAggregate({
+            message: `Found blank documents - check for extra '${colors.cyan(`---`)}'`,
+          });
+
+          const filteredDocuments =
+            documents
+              .filter(doc => doc !== null);
+
+          if (filteredDocuments.length !== documents.length) {
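+            // Gather the indexes of the blank documents into contiguous
+            // [start, end] ranges - for example, blanks at indexes 2, 3, 4
+            // and 7 become the ranges [2, 4] and [7, 7].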
+            const blankIndexRangeInfo =
+              documents
+                .map((doc, index) => [doc, index])
+                .filter(([doc]) => doc === null)
+                .map(([doc, index]) => index)
+                .reduce((accumulator, index) => {
+                  if (accumulator.length === 0) {
+                    return [[index, index]];
+                  }
+                  const current = accumulator.at(-1);
+                  const rest = accumulator.slice(0, -1);
+                  if (current[1] === index - 1) {
+                    return rest.concat([[current[0], index]]);
+                  } else {
+                    return accumulator.concat([[index, index]]);
+                  }
+                }, [])
+                .map(([start, end]) => ({
+                  start,
+                  end,
+                  count: end - start + 1,
+                  previous: atOffset(documents, start, -1),
+                  next: atOffset(documents, end, +1),
+                }));
+
+            for (const {start, end, count, previous, next} of blankIndexRangeInfo) {
+              const parts = [];
+
+              if (count === 1) {
+                const range = `#${start + 1}`;
+                parts.push(`${count} document (${colors.yellow(range)}), `);
+              } else {
+                const range = `#${start + 1}-${end + 1}`;
+                parts.push(`${count} documents (${colors.yellow(range)}), `);
+              }
+
+              if (previous === null) {
+                parts.push(`at start of file`);
+              } else if (next === null) {
+                parts.push(`at end of file`);
+              } else {
+                const previousDescription = Object.entries(previous).at(0).join(': ');
+                const nextDescription = Object.entries(next).at(0).join(': ');
+                parts.push(`between "${colors.cyan(previousDescription)}" and "${colors.cyan(nextDescription)}"`);
+              }
+
+              aggregate.push(new Error(parts.join('')));
+            }
+          }
 
-                    trackData.push(entry);
+          return {documents: filteredDocuments, aggregate};
+        };
 
-                    entry.dataSourceAlbumByRef = albumRef;
+        const processDocument = (document, thingClassOrFn) => {
+          const thingClass =
+            (thingClassOrFn.prototype instanceof Thing
+              ? thingClassOrFn
+              : thingClassOrFn(document));
 
-                    const trackRef = Thing.getReference(entry);
-                    if (currentTracksByRef) {
-                        currentTracksByRef.push(trackRef);
-                    } else {
-                        currentTracksByRef = [trackRef];
-                    }
-                }
+          if (typeof thingClass !== 'function') {
+            throw new Error(`Expected a thing class, got ${typeAppearance(thingClass)}`);
+          }
 
-                closeCurrentTrackGroup();
+          if (!(thingClass.prototype instanceof Thing)) {
+            throw new Error(`Expected a thing class, got ${thingClass.name}`);
+          }
 
-                album.trackGroups = trackGroups;
-                albumData.push(album);
-            }
+          const spec = thingClass[Thing.yamlDocumentSpec];
 
-            return {albumData, trackData};
-        }
-    },
-
-    {
-        title: `Process artists file`,
-        file: ARTIST_DATA_FILE,
-
-        documentMode: documentModes.allInOne,
-        processDocument: processArtistDocument,
-
-        save(results) {
-            const artistData = results;
-
-            const artistAliasData = results.flatMap(artist => {
-                const origRef = Thing.getReference(artist);
-                return (artist.aliasNames?.map(name => {
-                    const alias = new Artist();
-                    alias.name = name;
-                    alias.isAlias = true;
-                    alias.aliasedArtistRef = origRef;
-                    alias.artistData = artistData;
-                    return alias;
-                }) ?? []);
-            });
-
-            return {artistData, artistAliasData};
-        }
-    },
-
-    // TODO: WD.wikiInfo.enableFlashesAndGames &&
-    {
-        title: `Process flashes file`,
-        file: FLASH_DATA_FILE,
-
-        documentMode: documentModes.allInOne,
-        processDocument(document) {
-            return ('Act' in document
-                ? processFlashActDocument(document)
-                : processFlashDocument(document));
-        },
-
-        save(results) {
-            let flashAct;
-            let flashesByRef = [];
-
-            if (results[0] && !(results[0] instanceof FlashAct)) {
-                throw new Error(`Expected an act at top of flash data file`);
-            }
+          if (!spec) {
+            throw new Error(`Class "${thingClass.name}" doesn't specify Thing.yamlDocumentSpec`);
+          }
 
-            for (const thing of results) {
-                if (thing instanceof FlashAct) {
-                    if (flashAct) {
-                        Object.assign(flashAct, {flashesByRef});
-                    }
+          // TODO: Making a function to only call it just like that is
+          // obviously pretty jank! It should be created once per data step.
+          const fn = makeProcessDocument(thingClass, spec);
+          return fn(document);
+        };
 
-                    flashAct = thing;
-                    flashesByRef = [];
+        if (
+          documentMode === documentModes.allInOne ||
+          documentMode === documentModes.oneDocumentTotal
+        ) {
+          if (!dataStep.file) {
+            throw new Error(`Expected 'file' property for ${documentMode.toString()}`);
+          }
+
+          const file = path.join(
+            dataPath,
+            typeof dataStep.file === 'function'
+              ? await callAsync(dataStep.file, dataPath)
+              : dataStep.file);
+
+          const statResult = await callAsync(() =>
+            stat(file).then(
+              () => true,
+              error => {
+                if (error.code === 'ENOENT') {
+                  return false;
                 } else {
-                    flashesByRef.push(Thing.getReference(thing));
+                  throw error;
                 }
-            }
+              }));
 
-            if (flashAct) {
-                Object.assign(flashAct, {flashesByRef});
-            }
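+          // A data file which doesn't exist at all isn't necessarily an
+          // error - just call save() with an empty result (an empty array or
+          // object, depending on the document mode) and move on.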
+          if (statResult === false) {
+            const saveResult = call(dataStep.save, {
+              [documentModes.allInOne]: [],
+              [documentModes.oneDocumentTotal]: {},
+            }[documentMode]);
 
-            const flashData = results.filter(x => x instanceof Flash);
-            const flashActData = results.filter(x => x instanceof FlashAct);
+            if (!saveResult) return;
 
-            return {flashData, flashActData};
-        }
-    },
+            Object.assign(wikiDataResult, saveResult);
 
-    {
-        title: `Process groups file`,
-        file: GROUP_DATA_FILE,
+            return;
+          }
 
-        documentMode: documentModes.allInOne,
-        processDocument(document) {
-            return ('Category' in document
-                ? processGroupCategoryDocument(document)
-                : processGroupDocument(document));
-        },
+          const readResult = await callAsync(readFile, file, 'utf-8');
 
-        save(results) {
-            let groupCategory;
-            let groupsByRef = [];
+          if (!readResult) {
+            return;
+          }
 
-            if (results[0] && !(results[0] instanceof GroupCategory)) {
-                throw new Error(`Expected a category at top of group data file`);
-            }
+          let processResults;
 
-            for (const thing of results) {
-                if (thing instanceof GroupCategory) {
-                    if (groupCategory) {
-                        Object.assign(groupCategory, {groupsByRef});
-                    }
+          switch (documentMode) {
+            case documentModes.oneDocumentTotal: {
+              const yamlResult = call(yaml.load, readResult);
 
-                    groupCategory = thing;
-                    groupsByRef = [];
-                } else {
-                    groupsByRef.push(Thing.getReference(thing));
-                }
-            }
+              if (!yamlResult) {
+                processResults = null;
+                break;
+              }
 
-            if (groupCategory) {
-                Object.assign(groupCategory, {groupsByRef});
-            }
+              const {thing, aggregate} =
+                processDocument(yamlResult, dataStep.documentThing);
 
-            const groupData = results.filter(x => x instanceof Group);
-            const groupCategoryData = results.filter(x => x instanceof GroupCategory);
+              processResults = thing;
 
-            return {groupData, groupCategoryData};
-        }
-    },
+              call(() => aggregate.close());
 
-    {
-        title: `Process homepage layout file`,
-        files: [HOMEPAGE_LAYOUT_DATA_FILE],
+              break;
+            }
 
-        documentMode: documentModes.headerAndEntries,
-        processHeaderDocument: processHomepageLayoutDocument,
-        processEntryDocument: processHomepageLayoutRowDocument,
+            case documentModes.allInOne: {
+              const yamlResults = call(yaml.loadAll, readResult);
 
-        save(results) {
-            if (!results[0]) {
+              if (!yamlResults) {
+                processResults = [];
                 return;
-            }
+              }
 
-            const { header: homepageLayout, entries: rows } = results[0];
-            Object.assign(homepageLayout, {rows});
-            return {homepageLayout};
-        }
-    },
+              const {documents, aggregate: filterAggregate} =
+                filterBlankDocuments(yamlResults);
 
-    // TODO: WD.wikiInfo.enableNews &&
-    {
-        title: `Process news data file`,
-        file: NEWS_DATA_FILE,
+              call(filterAggregate.close);
 
-        documentMode: documentModes.allInOne,
-        processDocument: processNewsEntryDocument,
+              processResults = [];
 
-        save(newsData) {
-            sortByDate(newsData);
-            newsData.reverse();
+              map(documents, decorateErrorWithIndex(document => {
+                const {thing, aggregate} =
+                  processDocument(document, dataStep.documentThing);
 
-            return {newsData};
-        }
-    },
+                processResults.push(thing);
+                aggregate.close();
+              }), {message: `Errors processing documents`});
 
-    {
-        title: `Process art tags file`,
-        file: ART_TAG_DATA_FILE,
+              break;
+            }
+          }
 
-        documentMode: documentModes.allInOne,
-        processDocument: processArtTagDocument,
+          if (!processResults) return;
 
-        save(artTagData) {
-            artTagData.sort(sortByName);
+          const saveResult = call(dataStep.save, processResults);
 
-            return {artTagData};
-        }
-    },
+          if (!saveResult) return;
 
-    {
-        title: `Process static pages file`,
-        file: STATIC_PAGE_DATA_FILE,
+          Object.assign(wikiDataResult, saveResult);
 
-        documentMode: documentModes.allInOne,
-        processDocument: processStaticPageDocument,
+          return;
+        }
 
-        save(staticPageData) {
-            return {staticPageData};
+        if (!dataStep.files) {
+          throw new Error(`Expected 'files' property for ${documentMode.toString()}`);
         }
-    },
-];
 
-export async function loadAndProcessDataDocuments({
-    dataPath,
-}) {
-    const processDataAggregate = openAggregate({message: `Errors processing data files`});
-    const wikiDataResult = {};
+        const filesFromDataStep =
+          (typeof dataStep.files === 'function'
+            ? await callAsync(() =>
+                dataStep.files(dataPath).then(
+                  files => files,
+                  error => {
+                    if (error.code === 'ENOENT') {
+                      return [];
+                    } else {
+                      throw error;
+                    }
+                  }))
+            : dataStep.files);
 
-    function decorateErrorWithFile(fn) {
-        return (x, index, array) => {
+        const filesUnderDataPath =
+          filesFromDataStep
+            .map(file => path.join(dataPath, file));
+
+        const yamlResults = [];
+
+        await mapAsync(filesUnderDataPath, {message: `Errors loading data files`},
+          asyncDecorateErrorWithFile(async file => {
+            let contents;
             try {
-                return fn(x, index, array);
-            } catch (error) {
-                error.message += (
-                    (error.message.includes('\n') ? '\n' : ' ') +
-                    `(file: ${color.bright(color.blue(path.relative(dataPath, x.file)))})`
-                );
-                throw error;
+              contents = await readFile(file, 'utf-8');
+            } catch (caughtError) {
+              throw new Error(`Failed to read data file`, {cause: caughtError});
             }
-        };
-    }
 
-    for (const dataStep of dataSteps) {
-        await processDataAggregate.nestAsync(
-            {message: `Errors during data step: ${dataStep.title}`},
-            async ({call, callAsync, map, mapAsync, nest}) => {
-                const { documentMode } = dataStep;
+            let documents;
+            try {
+              documents = yaml.loadAll(contents);
+            } catch (caughtError) {
+              throw new Error(`Failed to parse valid YAML`, {cause: caughtError});
+            }
 
-                if (!(Object.values(documentModes).includes(documentMode))) {
-                    throw new Error(`Invalid documentMode: ${documentMode.toString()}`);
-                }
+            const {documents: filteredDocuments, aggregate: filterAggregate} =
+              filterBlankDocuments(documents);
 
-                if (documentMode === documentModes.allInOne || documentMode === documentModes.oneDocumentTotal) {
-                    if (!dataStep.file) {
-                        throw new Error(`Expected 'file' property for ${documentMode.toString()}`);
-                    }
+            try {
+              filterAggregate.close();
+            } catch (caughtError) {
+              // Blank documents aren't a critical error, they're just something
+              // that should be noted - the (filtered) documents still get pushed.
+              const pathToFile = path.relative(dataPath, file);
+              annotateErrorWithFile(caughtError, pathToFile);
+              push(caughtError);
+            }
 
-                    const file = path.join(dataPath,
-                        (typeof dataStep.file === 'function'
-                            ? await callAsync(dataStep.file, dataPath)
-                            : dataStep.file));
+            yamlResults.push({file, documents: filteredDocuments});
+          }));
 
-                    const readResult = await callAsync(readFile, file, 'utf-8');
+        const processResults = [];
 
-                    if (!readResult) {
-                        return;
-                    }
+        switch (documentMode) {
+          case documentModes.headerAndEntries:
+            map(yamlResults, {message: `Errors processing documents in data files`, translucent: true},
+              decorateErrorWithFile(({documents}) => {
+                const headerDocument = documents[0];
+                const entryDocuments = documents.slice(1).filter(Boolean);
 
-                    const yamlResult = (documentMode === documentModes.oneDocumentTotal
-                        ? call(yaml.load, readResult)
-                        : call(yaml.loadAll, readResult));
+                if (!headerDocument)
+                  throw new Error(`Missing header document (empty file or erroneously starting with "---"?)`);
 
-                    if (!yamlResult) {
-                        return;
-                    }
+                withAggregate({message: `Errors processing documents`}, ({push}) => {
+                  const {thing: headerObject, aggregate: headerAggregate} =
+                    processDocument(headerDocument, dataStep.headerDocumentThing);
 
-                    let processResults;
+                  try {
+                    headerAggregate.close();
+                  } catch (caughtError) {
+                    caughtError.message = `(${colors.yellow(`header`)}) ${caughtError.message}`;
+                    push(caughtError);
+                  }
 
-                    if (documentMode === documentModes.oneDocumentTotal) {
-                        nest({message: `Errors processing document`}, ({ call }) => {
-                            processResults = call(dataStep.processDocument, yamlResult);
-                        });
-                    } else {
-                        const { result, aggregate } = mapAggregate(
-                            yamlResult,
-                            decorateErrorWithIndex(dataStep.processDocument),
-                            {message: `Errors processing documents`}
-                        );
-                        processResults = result;
-                        call(aggregate.close);
-                    }
+                  const entryObjects = [];
 
-                    if (!processResults) return;
+                  for (let index = 0; index < entryDocuments.length; index++) {
+                    const entryDocument = entryDocuments[index];
 
-                    const saveResult = call(dataStep.save, processResults);
+                    const {thing: entryObject, aggregate: entryAggregate} =
+                      processDocument(entryDocument, dataStep.entryDocumentThing);
 
-                    if (!saveResult) return;
+                    entryObjects.push(entryObject);
 
-                    Object.assign(wikiDataResult, saveResult);
+                    try {
+                      entryAggregate.close();
+                    } catch (caughtError) {
+                      caughtError.message = `(${colors.yellow(`entry #${index + 1}`)}) ${caughtError.message}`;
+                      push(caughtError);
+                    }
+                  }
 
-                    return;
-                }
+                  processResults.push({
+                    header: headerObject,
+                    entries: entryObjects,
+                  });
+                });
+              }));
+            break;
 
-                if (!dataStep.files) {
-                    throw new Error(`Expected 'files' property for ${documentMode.toString()}`);
-                }
+          case documentModes.onePerFile:
+            map(yamlResults, {message: `Errors processing data files as valid documents`},
+              decorateErrorWithFile(({documents}) => {
+                if (documents.length > 1)
+                  throw new Error(`Only expected one document to be present per file, got ${documents.length} here`);
 
-                const files = (
-                    (typeof dataStep.files === 'function'
-                        ? await callAsync(dataStep.files, dataPath)
-                        : dataStep.files)
-                    .map(file => path.join(dataPath, file)));
-
-                const readResults = await mapAsync(
-                    files,
-                    file => (readFile(file, 'utf-8')
-                        .then(contents => ({file, contents}))),
-                    {message: `Errors reading data files`});
-
-                const yamlResults = map(
-                    readResults,
-                    decorateErrorWithFile(
-                        ({ file, contents }) => ({file, documents: yaml.loadAll(contents)})),
-                    {message: `Errors parsing data files as valid YAML`});
-
-                let processResults;
-
-                if (documentMode === documentModes.headerAndEntries) {
-                    nest({message: `Errors processing data files as valid documents`}, ({ call, map }) => {
-                        processResults = [];
-
-                        yamlResults.forEach(({ file, documents }) => {
-                            const [ headerDocument, ...entryDocuments ] = documents;
-
-                            const header = call(
-                                decorateErrorWithFile(
-                                    ({ document }) => dataStep.processHeaderDocument(document)),
-                                {file, document: headerDocument});
-
-                            // Don't continue processing files whose header
-                            // document is invalid - the entire file is exempt
-                            // from data in this case.
-                            if (!header) {
-                                return;
-                            }
-
-                            const entries = map(
-                                entryDocuments.map(document => ({file, document})),
-                                decorateErrorWithFile(
-                                    decorateErrorWithIndex(
-                                        ({ document }) => dataStep.processEntryDocument(document))),
-                                {message: `Errors processing entry documents`});
-
-                            // Entries may be incomplete (i.e. any errored
-                            // documents won't have a processed output
-                            // represented here) - this is intentional! By
-                            // principle, partial output is preferred over
-                            // erroring an entire file.
-                            processResults.push({header, entries});
-                        });
-                    });
-                }
+                if (empty(documents) || !documents[0])
+                  throw new Error(`Expected a document, this file is empty`);
 
-                if (documentMode === documentModes.onePerFile) {
-                    nest({message: `Errors processing data files as valid documents`}, ({ call, map }) => {
-                        processResults = [];
-
-                        yamlResults.forEach(({ file, documents }) => {
-                            if (documents.length > 1) {
-                                call(decorateErrorWithFile(() => {
-                                    throw new Error(`Only expected one document to be present per file`);
-                                }));
-                                return;
-                            }
-
-                            const result = call(
-                                decorateErrorWithFile(
-                                    ({ document }) => dataStep.processDocument(document)),
-                                {file, document: documents[0]});
-
-                            if (!result) {
-                                return;
-                            }
-
-                            processResults.push(result);
-                        });
-                    });
-                }
+                const {thing, aggregate} =
+                  processDocument(documents[0], dataStep.documentThing);
 
-                const saveResult = call(dataStep.save, processResults);
+                processResults.push(thing);
+                aggregate.close();
+              }));
+            break;
+        }
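
To make the headerAndEntries branch concrete, here is a minimal standalone sketch of the per-file shape it expects and produces; the field names are hypothetical, and the real code runs each document through processDocument() rather than inlining it like this.

import yaml from 'js-yaml';

// One YAML stream per file: the first document describes the file as a whole,
// and every following document is an entry belonging to it.
const documents = yaml.loadAll(
  `Name: Example header\n` +
  `---\n` +
  `Name: First entry\n` +
  `---\n` +
  `Name: Second entry\n`);

const headerDocument = documents[0];
const entryDocuments = documents.slice(1).filter(Boolean);

// Each file contributes one result of this shape to processResults
// (with processed Thing objects in place of the raw documents here).
const result = {header: headerDocument, entries: entryDocuments};
console.log(result.entries.length); // 2
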
 
-                if (!saveResult) return;
+        const saveResult = call(dataStep.save, processResults);
 
-                Object.assign(wikiDataResult, saveResult);
-            });
-    }
+        if (!saveResult) return;
 
-    return {
-        aggregate: processDataAggregate,
-        result: wikiDataResult
-    };
+        Object.assign(wikiDataResult, saveResult);
+      }
+    );
+  }
+
+  return {
+    aggregate: processDataAggregate,
+    result: wikiDataResult,
+  };
 }
 
 // Data linking! Basically, provide (portions of) wikiData to the Things which
 // require it - they'll expose dynamically computed properties as a result (many
-// of which are required for page HTML generation).
+// of which are required for page HTML generation and other expected behavior).
 export function linkWikiDataArrays(wikiData) {
-    function assignWikiData(things, ...keys) {
-        for (let i = 0; i < things.length; i++) {
-            for (let j = 0; j < keys.length; j++) {
-                const key = keys[j];
-                things[i][key] = wikiData[key];
-            }
-        }
+  const linkWikiDataSpec = new Map([
+    [wikiData.albumData, [
+      'artTagData',
+      'artistData',
+      'groupData',
+    ]],
+
+    [wikiData.artTagData, [
+      'albumData',
+      'trackData',
+    ]],
+
+    [wikiData.artistData, [
+      'albumData',
+      'artistData',
+      'flashData',
+      'trackData',
+    ]],
+
+    [wikiData.flashData, [
+      'artistData',
+      'flashActData',
+      'trackData',
+    ]],
+
+    [wikiData.flashActData, [
+      'flashData',
+      'flashSideData',
+    ]],
+
+    [wikiData.flashSideData, [
+      'flashActData',
+    ]],
+
+    [wikiData.groupData, [
+      'albumData',
+      'groupCategoryData',
+    ]],
+
+    [wikiData.groupCategoryData, [
+      'groupData',
+    ]],
+
+    [wikiData.homepageLayout?.rows, [
+      'albumData',
+      'groupData',
+    ]],
+
+    [wikiData.trackData, [
+      'albumData',
+      'artTagData',
+      'artistData',
+      'flashData',
+      'trackData',
+    ]],
+
+    [[wikiData.wikiInfo], [
+      'groupData',
+    ]],
+  ]);
+
+  for (const [things, keys] of linkWikiDataSpec.entries()) {
+    if (things === undefined) continue;
+    for (const thing of things) {
+      if (thing === undefined) continue;
+      for (const key of keys) {
+        if (!(key in wikiData)) continue;
+        thing[key] = wikiData[key];
+      }
     }
-
-    const WD = wikiData;
-
-    assignWikiData([WD.wikiInfo], 'groupData');
-
-    assignWikiData(WD.albumData, 'artistData', 'artTagData', 'groupData', 'trackData');
-    WD.albumData.forEach(album => assignWikiData(album.trackGroups, 'trackData'));
-
-    assignWikiData(WD.trackData, 'albumData', 'artistData', 'artTagData', 'flashData', 'trackData');
-    assignWikiData(WD.artistData, 'albumData', 'artistData', 'flashData', 'trackData');
-    assignWikiData(WD.groupData, 'albumData', 'groupCategoryData');
-    assignWikiData(WD.groupCategoryData, 'groupData');
-    assignWikiData(WD.flashData, 'artistData', 'flashActData', 'trackData');
-    assignWikiData(WD.flashActData, 'flashData');
-    assignWikiData(WD.artTagData, 'albumData', 'trackData');
-    assignWikiData(WD.homepageLayout.rows, 'albumData', 'groupData');
+  }
 }
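
As a rough, standalone illustration of what the linking spec above boils down to (the objects here are simplified stand-ins, not real Thing instances):

const wikiData = {
  albumData: [{name: 'Example Album'}],
  groupData: [{name: 'Example Group'}],
};

// The wikiData.albumData entry of the spec reduces, for one of its keys, to
// copying a shared array reference onto each album:
for (const album of wikiData.albumData) {
  album.groupData = wikiData.groupData;
}

// Every album now sees the same groupData array, which is what lets its
// dynamically computed properties resolve group references later on.
console.log(wikiData.albumData[0].groupData === wikiData.groupData); // true
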
 
 export function sortWikiDataArrays(wikiData) {
-    Object.assign(wikiData, {
-        albumData: sortByDate(wikiData.albumData.slice()),
-        trackData: sortByDate(wikiData.trackData.slice())
-    });
-
-    // Re-link data arrays, so that every object has the new, sorted versions.
-    // Note that the sorting step deliberately creates new arrays (mutating
-    // slices instead of the original arrays) - this is so that the object
-    // caching system understands that it's working with a new ordering.
-    // We still need to actually provide those updated arrays over again!
-    linkWikiDataArrays(wikiData);
-}
-
-// Warn about directories which are reused across more than one of the same type
-// of Thing. Directories are the unique identifier for most data objects across
-// the wiki, so we have to make sure they aren't duplicated!  This also
-// altogether filters out instances of things with duplicate directories (so if
-// two tracks share the directory "megalovania", they'll both be skipped for the
-// build, for example).
-export function filterDuplicateDirectories(wikiData) {
-    const deduplicateSpec = [
-        'albumData',
-        'artTagData',
-        'flashData',
-        'groupData',
-        'newsData',
-        'trackData',
-    ];
-
-    const aggregate = openAggregate({message: `Duplicate directories found`});
-    for (const thingDataProp of deduplicateSpec) {
-        const thingData = wikiData[thingDataProp];
-        aggregate.nest({message: `Duplicate directories found in ${color.green('wikiData.' + thingDataProp)}`}, ({ call }) => {
-            const directoryPlaces = Object.create(null);
-            const duplicateDirectories = [];
-            for (const thing of thingData) {
-                const { directory } = thing;
-                if (directory in directoryPlaces) {
-                    directoryPlaces[directory].push(thing);
-                    duplicateDirectories.push(directory);
-                } else {
-                    directoryPlaces[directory] = [thing];
-                }
-            }
-            if (!duplicateDirectories.length) return;
-            duplicateDirectories.sort((a, b) => {
-                const aL = a.toLowerCase();
-                const bL = b.toLowerCase();
-                return aL < bL ? -1 : aL > bL ? 1 : 0;
-            });
-            for (const directory of duplicateDirectories) {
-                const places = directoryPlaces[directory];
-                call(() => {
-                    throw new Error(`Duplicate directory ${color.green(directory)}:\n` +
-                        places.map(thing => ` - ` + inspect(thing)).join('\n'));
-                });
-            }
-            const allDuplicatedThings = Object.values(directoryPlaces).filter(arr => arr.length > 1).flat();
-            const filteredThings = thingData.filter(thing => !allDuplicatedThings.includes(thing));
-            wikiData[thingDataProp] = filteredThings;
-        });
-    }
-
-    // TODO: This code closes the aggregate but it generally gets closed again
-    // by the caller. This works but it might be weird to assume closing an
-    // aggregate twice is okay, maybe there's a better solution? Expose a new
-    // function on aggregates for checking if it *would* error?
-    // (i.e: errors.length > 0)
-    try {
-        aggregate.close();
-    } catch (error) {
-        // Duplicate entries were found and filtered out, resulting in altered
-        // wikiData arrays. These must be re-linked so objects receive the new
-        // data.
-        linkWikiDataArrays(wikiData);
-    }
-    return aggregate;
-}
-
-// Warn about references across data which don't match anything.  This involves
-// using the find() functions on all references, setting it to 'error' mode, and
-// collecting everything in a structured log (which gets logged if there are
-// any errors). At the same time, we remove errored references from the thing's
-// data array.
-export function filterReferenceErrors(wikiData) {
-    const referenceSpec = [
-        ['wikiInfo', {
-            divideTrackListsByGroupsByRef: 'group',
-        }],
-
-        ['albumData', {
-            artistContribsByRef: '_contrib',
-            coverArtistContribsByRef: '_contrib',
-            trackCoverArtistContribsByRef: '_contrib',
-            wallpaperArtistContribsByRef: '_contrib',
-            bannerArtistContribsByRef: '_contrib',
-            groupsByRef: 'group',
-            artTagsByRef: 'artTag',
-        }],
-
-        ['trackData', {
-            artistContribsByRef: '_contrib',
-            contributorContribsByRef: '_contrib',
-            coverArtistContribsByRef: '_contrib',
-            referencedTracksByRef: 'track',
-            artTagsByRef: 'artTag',
-            originalReleaseTrackByRef: 'track',
-        }],
-
-        ['groupCategoryData', {
-            groupsByRef: 'group',
-        }],
-
-        ['homepageLayout.rows', {
-            sourceGroupsByRef: 'group',
-            sourceAlbumsByRef: 'album',
-        }],
-
-        ['flashData', {
-            contributorContribsByRef: '_contrib',
-            featuredTracksByRef: 'track',
-        }],
-
-        ['flashActData', {
-            flashesByRef: 'flash',
-        }],
-    ];
-
-    function getNestedProp(obj, key) {
-        const recursive = (o, k) => (k.length === 1
-            ? o[k[0]]
-            : recursive(o[k[0]], k.slice(1)));
-        const keys = key.split(/(?<=(?<!\\)(?:\\\\)*)\./);
-        return recursive(obj, keys);
-    }
-
-    const aggregate = openAggregate({message: `Errors validating between-thing references in data`});
-    const boundFind = bindFind(wikiData, {mode: 'error'});
-    for (const [ thingDataProp, propSpec ] of referenceSpec) {
-        const thingData = getNestedProp(wikiData, thingDataProp);
-        aggregate.nest({message: `Reference errors in ${color.green('wikiData.' + thingDataProp)}`}, ({ nest }) => {
-            const things = Array.isArray(thingData) ? thingData : [thingData];
-            for (const thing of things) {
-                nest({message: `Reference errors in ${inspect(thing)}`}, ({ filter }) => {
-                    for (const [ property, findFnKey ] of Object.entries(propSpec)) {
-                        if (!thing[property]) continue;
-                        if (findFnKey === '_contrib') {
-                            thing[property] = filter(thing[property],
-                                decorateErrorWithIndex(({ who }) => {
-                                    const alias = find.artist(who, wikiData.artistAliasData, {mode: 'quiet'});
-                                    if (alias) {
-                                        const original = find.artist(alias.aliasedArtistRef, wikiData.artistData, {mode: 'quiet'});
-                                        throw new Error(`Reference ${color.red(who)} is to an alias, should be ${color.green(original.name)}`);
-                                    }
-                                    return boundFind.artist(who);
-                                }),
-                                {message: `Reference errors in contributions ${color.green(property)} (${color.green('find.artist')})`});
-                            continue;
-                        }
-                        const findFn = boundFind[findFnKey];
-                        const value = thing[property];
-                        if (Array.isArray(value)) {
-                            thing[property] = filter(value, decorateErrorWithIndex(findFn),
-                                {message: `Reference errors in property ${color.green(property)} (${color.green('find.' + findFnKey)})`});
-                        } else {
-                            nest({message: `Reference error in property ${color.green(property)} (${color.green('find.' + findFnKey)})`}, ({ call }) => {
-                                try {
-                                    call(findFn, value);
-                                } catch (error) {
-                                    thing[property] = null;
-                                    throw error;
-                                }
-                            });
-                        }
-                    }
-                });
-            }
-        });
-    }
-
-    return aggregate;
+  for (const [key, value] of Object.entries(wikiData)) {
+    if (!Array.isArray(value)) continue;
+    wikiData[key] = value.slice();
+  }
+
+  const steps = getDataSteps();
+
+  for (const step of steps) {
+    if (!step.sort) continue;
+    step.sort(wikiData);
+  }
+
+  // Re-link data arrays, so that every object has the new, sorted versions.
+  // Note that the sorting step deliberately creates new arrays (mutating
+  // slices instead of the original arrays) - this is so that the object
+  // caching system understands that it's working with a new ordering.
+  // We still need to actually provide those updated arrays over again!
+  linkWikiDataArrays(wikiData);
 }
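
A small standalone sketch of the identity change the comment above is describing; the sample data and comparator are made up, and only the slice-then-sort pattern reflects the code:

const original = [{name: 'b'}, {name: 'a'}];

// Sorting a slice leaves the original array untouched and yields a brand-new
// array object, so identity-based caching can tell the ordering changed.
const sorted = original.slice().sort((a, b) => a.name.localeCompare(b.name));
console.log(sorted === original); // false
console.log(sorted[0].name);      // 'a'

// That new array then has to be assigned back onto wikiData and handed out
// again to every object - hence the linkWikiDataArrays() call afterwards.
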
 
 // Utility function for loading all wiki data from the provided YAML data
@@ -1297,47 +1016,56 @@ export function filterReferenceErrors(wikiData) {
 // where reporting info about data loading isn't as relevant as during the
 // main wiki build process.
 export async function quickLoadAllFromYAML(dataPath, {
-    showAggregate: customShowAggregate = showAggregate,
-} = {}) {
-    const showAggregate = customShowAggregate;
+  bindFind,
+  getAllFindSpecs,
 
-    let wikiData;
-
-    {
-        const { aggregate, result } = await loadAndProcessDataDocuments({
-            dataPath,
-        });
+  showAggregate: customShowAggregate = showAggregate,
+}) {
+  const showAggregate = customShowAggregate;
 
-        wikiData = result;
+  let wikiData;
 
-        try {
-            aggregate.close();
-            logInfo`Loaded data without errors. (complete data)`;
-        } catch (error) {
-            showAggregate(error);
-            logWarn`Loaded data with errors. (partial data)`;
-        }
-    }
+  {
+    const {aggregate, result} = await loadAndProcessDataDocuments({dataPath});
 
-    linkWikiDataArrays(wikiData);
+    wikiData = result;
 
     try {
-        filterDuplicateDirectories(wikiData).close();
-        logInfo`No duplicate directories found. (complete data)`;
+      aggregate.close();
+      logInfo`Loaded data without errors. (complete data)`;
     } catch (error) {
-        showAggregate(error);
-        logWarn`Duplicate directories found. (partial data)`;
+      showAggregate(error);
+      logWarn`Loaded data with errors. (partial data)`;
     }
-
-    try {
-        filterReferenceErrors(wikiData).close();
-        logInfo`No reference errors found. (complete data)`;
-    } catch (error) {
-        showAggregate(error);
-        logWarn`Duplicate directories found. (partial data)`;
-    }
-
-    sortWikiDataArrays(wikiData);
-
-    return wikiData;
+  }
+
+  linkWikiDataArrays(wikiData);
+
+  try {
+    reportDuplicateDirectories(wikiData, {getAllFindSpecs});
+    logInfo`No duplicate directories found. (complete data)`;
+  } catch (error) {
+    showAggregate(error);
+    logWarn`Duplicate directories found. (partial data)`;
+  }
+
+  try {
+    filterReferenceErrors(wikiData, {bindFind}).close();
+    logInfo`No reference errors found. (complete data)`;
+  } catch (error) {
+    showAggregate(error);
+    logWarn`Reference errors found. (partial data)`;
+  }
+
+  try {
+    reportContentTextErrors(wikiData, {bindFind});
+    logInfo`No content text errors found.`;
+  } catch (error) {
+    showAggregate(error);
+    logWarn`Content text errors found.`;
+  }
+
+  sortWikiDataArrays(wikiData);
+
+  return wikiData;
 }
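
A hypothetical usage sketch for quickLoadAllFromYAML; the '#find' and '#yaml' import paths and the data directory are assumptions for illustration, not taken from this diff:

import {bindFind, getAllFindSpecs} from '#find'; // assumed module path
import {quickLoadAllFromYAML} from '#yaml';      // assumed module path

// Loads, links, checks, and sorts everything in one call; errors are shown
// via showAggregate and logged, and partial data may still be returned.
const wikiData = await quickLoadAllFromYAML('/path/to/hsmusic-data', {
  bindFind,
  getAllFindSpecs,
});

console.log(wikiData.albumData.length);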