Diffstat (limited to 'src/data/yaml.js')
| -rw-r--r-- | src/data/yaml.js | 578 |
1 files changed, 432 insertions, 146 deletions
diff --git a/src/data/yaml.js b/src/data/yaml.js
index 07dbe882..4e6f4502 100644
--- a/src/data/yaml.js
+++ b/src/data/yaml.js
@@ -8,9 +8,11 @@ import {inspect as nodeInspect} from 'node:util';
 import yaml from 'js-yaml';
 
 import {colors, ENABLE_COLOR, logInfo, logWarn} from '#cli';
+import {parseContentNodes, splitContentNodesAround} from '#replacer';
 import {sortByName} from '#sort';
 import Thing from '#thing';
 import thingConstructors from '#things';
+import {matchContentEntries, multipleLyricsDetectionRegex} from '#wiki-data';
 
 import {
   aggregateThrows,
@@ -42,6 +44,32 @@ function inspect(value, opts = {}) {
   return nodeInspect(value, {colors: ENABLE_COLOR, ...opts});
 }
 
+function pushWikiData(a, b) {
+  for (const key of Object.keys(b)) {
+    if (Object.hasOwn(a, key)) {
+      if (Array.isArray(a[key])) {
+        if (Array.isArray(b[key])) {
+          a[key].push(...b[key]);
+        } else {
+          throw new Error(`${key} already present, expected array of items to push`);
+        }
+      } else {
+        if (Array.isArray(a[key])) {
+          throw new Error(`${key} already present and not an array, refusing to overwrite`);
+        } else {
+          throw new Error(`${key} already present, refusing to overwrite`);
+        }
+      }
+    } else {
+      if (Array.isArray(b[key])) {
+        a[key] = [...b[key]];
+      } else {
+        a[key] = b[key];
+      }
+    }
+  }
+}
+
 // General function for inputting a single document (usually loaded from YAML)
 // and outputting an instance of a provided Thing subclass.
 //
@@ -85,7 +113,7 @@ function makeProcessDocument(thingConstructor, {
   //   ]
   //
   // ...means A can't coexist with B or C, B can't coexist with A or C, and
-  // C can't coexist iwth A, B, or D - but it's okay for D to coexist with
+  // C can't coexist with A, B, or D - but it's okay for D to coexist with
   // A or B.
   //
   invalidFieldCombinations = [],
@@ -159,6 +187,16 @@ function makeProcessDocument(thingConstructor, {
 
     const thing = Reflect.construct(thingConstructor, []);
 
+    const wikiData = {};
+    const flat = [thing];
+    if (thingConstructor[Thing.wikiData]) {
+      if (thingConstructor[Thing.oneInstancePerWiki]) {
+        wikiData[thingConstructor[Thing.wikiData]] = thing;
+      } else {
+        wikiData[thingConstructor[Thing.wikiData]] = [thing];
+      }
+    }
+
     const documentEntries = Object.entries(document)
       .filter(([field]) => !ignoredFields.includes(field));
 
@@ -180,9 +218,22 @@ function makeProcessDocument(thingConstructor, {
 
     const fieldCombinationErrors = [];
 
-    for (const {message, fields} of invalidFieldCombinations) {
+    for (const {message, fields: fieldsSpec} of invalidFieldCombinations) {
       const fieldsPresent =
-        presentFields.filter(field => fields.includes(field));
+        fieldsSpec.flatMap(fieldSpec => {
+          if (Array.isArray(fieldSpec)) {
+            const [field, match] = fieldSpec;
+            if (!presentFields.includes(field)) return [];
+            if (typeof match === 'function') {
+              return match(document[field]) ? [field] : [];
+            } else {
+              return document[field] === match ? [field] : [];
+            }
+          }
+
+          const field = fieldSpec;
+          return presentFields.includes(field) ? [field] : [];
+        });
 
       if (fieldsPresent.length >= 2) {
         const filteredDocument =
@@ -192,7 +243,10 @@
             {preserveOriginalOrder: true});
 
         fieldCombinationErrors.push(
-          new FieldCombinationError(filteredDocument, message));
+          new FieldCombinationError(
+            filteredDocument,
+            fieldsSpec,
+            message));
 
         for (const field of Object.keys(filteredDocument)) {
           skippedFields.add(field);
@@ -248,7 +302,9 @@ function makeProcessDocument(thingConstructor, {
 
       // This variable would like to certify itself as "not into capitalism".
       let propertyValue =
-        (fieldSpecs[field].transform
+        (documentValue === null
+          ? null
+          : fieldSpecs[field].transform
           ? fieldSpecs[field].transform(documentValue, transformUtilities)
           : documentValue);
 
@@ -292,26 +348,29 @@ function makeProcessDocument(thingConstructor, {
     const followSubdocSetup = setup => {
       let error = null;
 
-      let subthing;
+      let result;
 
       try {
-        const result = bouncer(setup.data, setup.documentType);
-        subthing = result.thing;
-        result.aggregate.close();
+        let aggregate;
+        ({result, aggregate} = bouncer(setup.data, setup.documentType));
+        aggregate.close();
       } catch (caughtError) {
         error = caughtError;
       }
 
-      if (subthing) {
+      if (result.thing) {
        if (setup.bindInto) {
-          subthing[setup.bindInto] = thing;
+          result.thing[setup.bindInto] = thing;
        }
 
        if (setup.provide) {
-          Object.assign(subthing, setup.provide);
+          Object.assign(result.thing, setup.provide);
        }
      }
 
-      return {error, subthing};
+      pushWikiData(wikiData, result.wikiData);
+      flat.push(...result.flat);
+
+      return {error, subthing: result.thing};
     };
 
     for (const [field, layout] of Object.entries(subdocLayouts)) {
@@ -394,7 +453,14 @@ function makeProcessDocument(thingConstructor, {
             {preserveOriginalOrder: true})));
     }
 
-    return {thing, aggregate};
+    return {
+      aggregate,
+      result: {
+        thing,
+        flat,
+        wikiData,
+      },
+    };
   });
 }
@@ -414,19 +480,36 @@ export class FieldCombinationAggregateError extends AggregateError {
 }
 
 export class FieldCombinationError extends Error {
-  constructor(fields, message) {
-    const fieldNames = Object.keys(fields);
+  constructor(filteredDocument, fieldsSpec, message) {
+    const fieldNames = Object.keys(filteredDocument);
 
     const fieldNamesText =
       fieldNames
-        .map(field => colors.red(field))
+        .map(field => {
+          if (fieldsSpec.includes(field)) {
+            return colors.red(field);
+          }
+
+          const match =
+            fieldsSpec
+              .find(fieldSpec =>
+                Array.isArray(fieldSpec) &&
+                fieldSpec[0] === field)
+              .at(1);
+
+          if (typeof match === 'function') {
+            return colors.red(`${field}: ${filteredDocument[field]}`);
+          } else {
+            return colors.red(`${field}: ${match}`);
+          }
+        })
         .join(', ');
 
     const mainMessage = `Don't combine ${fieldNamesText}`;
 
     const causeMessage =
       (typeof message === 'function'
-        ? message(fields)
+        ? message(filteredDocument)
         : typeof message === 'string'
           ? message
           : null);
@@ -438,7 +521,7 @@ export class FieldCombinationError extends Error {
           : null),
     });
 
-    this.fields = fields;
+    this.fields = fieldNames;
   }
 }
@@ -609,49 +692,39 @@ export function parseContributors(entries) {
   });
 }
 
-export function parseAdditionalFiles(entries) {
+export function parseAdditionalFiles(entries, {subdoc, AdditionalFile}) {
   return parseArrayEntries(entries, item => {
     if (typeof item !== 'object') return item;
 
-    return {
-      title: item['Title'],
-      description: item['Description'] ?? null,
-      files: item['Files'],
-    };
+    return subdoc(AdditionalFile, item, {bindInto: 'thing'});
   });
 }
 
-export function parseAdditionalNames(entries) {
+export function parseAdditionalNames(entries, {subdoc, AdditionalName}) {
   return parseArrayEntries(entries, item => {
-    if (typeof item === 'object' && typeof item['Name'] === 'string')
-      return {
-        name: item['Name'],
-        annotation: item['Annotation'] ?? null,
-      };
+    if (typeof item === 'object') {
+      return subdoc(AdditionalName, item, {bindInto: 'thing'});
+    }
 
     if (typeof item !== 'string') return item;
 
     const match = item.match(extractAccentRegex);
     if (!match) return item;
 
-    return {
-      name: match.groups.main,
-      annotation: match.groups.accent ?? null,
+    const document = {
+      ['Name']: match.groups.main,
+      ['Annotation']: match.groups.accent ?? null,
     };
+
+    return subdoc(AdditionalName, document, {bindInto: 'thing'});
   });
 }
 
-export function parseSerieses(entries) {
+export function parseSerieses(entries, {subdoc, Series}) {
   return parseArrayEntries(entries, item => {
     if (typeof item !== 'object') return item;
 
-    return {
-      name: item['Name'],
-      description: item['Description'] ?? null,
-      albums: item['Albums'] ?? null,
-
-      showAlbumArtists: item['Show Album Artists'] ?? null,
-    };
+    return subdoc(Series, item, {bindInto: 'group'});
   });
 }
@@ -791,18 +864,24 @@ export function parseAnnotatedReferences(entries, {
 
 export function parseArtwork({
   single = false,
-  dimensionsFromThingProperty,
-  fileExtensionFromThingProperty,
-  dateFromThingProperty,
-  artistContribsFromThingProperty,
-  artistContribsArtistProperty,
+  thingProperty = null,
+  dimensionsFromThingProperty = null,
+  fileExtensionFromThingProperty = null,
+  dateFromThingProperty = null,
+  artistContribsFromThingProperty = null,
+  artistContribsArtistProperty = null,
+  artTagsFromThingProperty = null,
+  referencedArtworksFromThingProperty = null,
 }) {
   const provide = {
+    thingProperty,
     dimensionsFromThingProperty,
     fileExtensionFromThingProperty,
    dateFromThingProperty,
    artistContribsFromThingProperty,
    artistContribsArtistProperty,
+    artTagsFromThingProperty,
+    referencedArtworksFromThingProperty,
   };
 
   const parseSingleEntry = (entry, {subdoc, Artwork}) =>
@@ -820,6 +899,159 @@
 
   return transform;
 }
 
+export function parseContentEntriesFromSourceText(thingClass, sourceText, {subdoc}) {
+  function map(matchEntry) {
+    let artistText = null, artistReferences = null;
+
+    const artistTextNodes =
+      Array.from(
+        splitContentNodesAround(
+          parseContentNodes(matchEntry.artistText),
+          /\|/g));
+
+    const separatorIndices =
+      artistTextNodes
+        .filter(node => node.type === 'separator')
+        .map(node => artistTextNodes.indexOf(node));
+
+    if (empty(separatorIndices)) {
+      if (artistTextNodes.length === 1 && artistTextNodes[0].type === 'text') {
+        artistReferences = matchEntry.artistText;
+      } else {
+        artistText = matchEntry.artistText;
+      }
+    } else {
+      const firstSeparatorIndex =
+        separatorIndices.at(0);
+
+      const secondSeparatorIndex =
+        separatorIndices.at(1) ??
+        artistTextNodes.length;
+
+      artistReferences =
+        matchEntry.artistText.slice(
+          artistTextNodes.at(0).i,
+          artistTextNodes.at(firstSeparatorIndex - 1).iEnd);
+
+      artistText =
+        matchEntry.artistText.slice(
+          artistTextNodes.at(firstSeparatorIndex).iEnd,
+          artistTextNodes.at(secondSeparatorIndex - 1).iEnd);
+    }
+
+    if (artistReferences) {
+      artistReferences =
+        artistReferences
+          .split(',')
+          .map(ref => ref.trim());
+    }
+
+    return {
+      'Artists':
+        artistReferences,
+
+      'Artist Text':
+        artistText,
+
+      'Annotation':
+        matchEntry.annotation,
+
+      'Date':
+        matchEntry.date,
+
+      'Second Date':
+        matchEntry.secondDate,
+
+      'Date Kind':
+        matchEntry.dateKind,
+
+      'Access Date':
+        matchEntry.accessDate,
+
+      'Access Kind':
+        matchEntry.accessKind,
+
+      'Body':
+        matchEntry.body,
+    };
+  }
+
+  const documents =
+    matchContentEntries(sourceText)
+      .map(matchEntry =>
+        withEntries(
+          map(matchEntry),
+          entries => entries
+            .filter(([key, value]) =>
+              value !== undefined &&
+              value !== null)));
+
+  const subdocs =
+    documents.map(document =>
+      subdoc(thingClass, document, {bindInto: 'thing'}));
+
+  return subdocs;
+}
+
+export function parseContentEntries(thingClass, value, {subdoc}) {
+  if (typeof value === 'string') {
+    return parseContentEntriesFromSourceText(thingClass, value, {subdoc});
+  } else if (Array.isArray(value)) {
+    return value.map(doc => subdoc(thingClass, doc, {bindInto: 'thing'}));
+  } else {
+    return value;
+  }
+}
+
+export function parseCommentary(value, {subdoc, CommentaryEntry}) {
+  return parseContentEntries(CommentaryEntry, value, {subdoc});
+}
+
+export function parseCreditingSources(value, {subdoc, CreditingSourcesEntry}) {
+  return parseContentEntries(CreditingSourcesEntry, value, {subdoc});
+}
+
+export function parseReferencingSources(value, {subdoc, ReferencingSourcesEntry}) {
+  return parseContentEntries(ReferencingSourcesEntry, value, {subdoc});
+}
+
+export function parseLyrics(value, {subdoc, LyricsEntry}) {
+  if (
+    typeof value === 'string' &&
+    !multipleLyricsDetectionRegex.test(value)
+  ) {
+    const document = {'Body': value};
+
+    return [subdoc(LyricsEntry, document, {bindInto: 'thing'})];
+  }
+
+  return parseContentEntries(LyricsEntry, value, {subdoc});
+}
+
+export function parseArtistAliases(value, {subdoc, Artist}) {
+  return parseArrayEntries(value, item => {
+    const config = {
+      bindInto: 'aliasedArtist',
+      provide: {isAlias: true},
+    };
+
+    if (typeof item === 'string') {
+      return subdoc(Artist, {'Artist': item}, config);
+    } else if (typeof item === 'object' && !Array.isArray(item)) {
+      if (item['Name']) {
+        const clone = {...item};
+        clone['Artist'] = item['Name'];
+        delete clone['Name'];
+        return subdoc(Artist, clone, config);
+      } else {
+        return subdoc(Artist, item, config);
+      }
+    } else {
+      return item;
+    }
+  });
+}
+
 // documentModes: Symbols indicating sets of behavior for loading and processing
 // data files.
 export const documentModes = {
@@ -849,6 +1081,12 @@
   // array of processed documents (wiki objects).
   allInOne: Symbol('Document mode: allInOne'),
 
+  // allTogether: One or more documens, spread across any number of files.
+  // Expects files array (or function) and processDocument function.
+  // Calls save with an array of processed documents (wiki objects) - this is
+  // a flat array, *not* an array of the documents processed from *each* file.
+  allTogether: Symbol('Document mode: allTogether'),
+
  // oneDocumentTotal: Just a single document, represented in one file.
  // Expects file string (or function) and processDocument function. Calls
  // save with the single processed wiki document (data object).
@@ -895,7 +1133,7 @@ export const documentModes = {
 export function getAllDataSteps() {
   try {
     thingConstructors;
-  } catch (error) {
+  } catch {
     throw new Error(`Thing constructors aren't ready yet, can't get all data steps`);
   }
@@ -959,6 +1197,7 @@ export async function getFilesFromDataStep(dataStep, {dataPath}) {
       }
     }
 
+    case documentModes.allTogether:
     case documentModes.headerAndEntries:
     case documentModes.onePerFile: {
       if (!dataStep.files) {
@@ -1114,27 +1353,37 @@ export function processThingsFromDataStep(documents, dataStep) {
   const {documentMode} = dataStep;
 
   switch (documentMode) {
-    case documentModes.allInOne: {
-      const result = [];
+    case documentModes.allInOne:
+    case documentModes.allTogether: {
+      const things = [];
+      const flat = [];
+      const wikiData = {};
 
       const aggregate = openAggregate({message: `Errors processing documents`});
 
       documents.forEach(
         decorateErrorWithIndex((document, index) => {
-          const {thing, aggregate: subAggregate} =
+          const {result, aggregate: subAggregate} =
             processDocument(document, dataStep.documentThing);
 
-          thing[Thing.yamlSourceDocument] = document;
-          thing[Thing.yamlSourceDocumentPlacement] =
+          result.thing[Thing.yamlSourceDocument] = document;
+          result.thing[Thing.yamlSourceDocumentPlacement] =
            [documentModes.allInOne, index];
 
-          result.push(thing);
+          things.push(result.thing);
+          flat.push(...result.flat);
+          pushWikiData(wikiData, result.wikiData);
+
          aggregate.call(subAggregate.close);
        }));
 
       return {
         aggregate,
-        result,
-        things: result,
+        result: {
+          network: things,
+          flat: things,
+          file: things,
+          wikiData,
+        },
       };
     }
@@ -1142,17 +1391,21 @@
       if (documents.length > 1)
         throw new Error(`Only expected one document to be present, got ${documents.length}`);
 
-      const {thing, aggregate} =
+      const {result, aggregate} =
         processDocument(documents[0], dataStep.documentThing);
 
-      thing[Thing.yamlSourceDocument] = documents[0];
-      thing[Thing.yamlSourceDocumentPlacement] =
+      result.thing[Thing.yamlSourceDocument] = documents[0];
+      result.thing[Thing.yamlSourceDocumentPlacement] =
        [documentModes.oneDocumentTotal];
 
      return {
        aggregate,
-        result: thing,
-        things: [thing],
+        result: {
+          network: result.thing,
+          flat: result.flat,
+          file: [result.thing],
+          wikiData: result.wikiData,
+        },
      };
    }
@@ -1164,14 +1417,17 @@
         throw new Error(`Missing header document (empty file or erroneously starting with "---"?)`);
 
       const aggregate = openAggregate({message: `Errors processing documents`});
+      const wikiData = {};
 
-      const {thing: headerThing, aggregate: headerAggregate} =
+      const {result: headerResult, aggregate: headerAggregate} =
        processDocument(headerDocument, dataStep.headerDocumentThing);
 
-      headerThing[Thing.yamlSourceDocument] = headerDocument;
-      headerThing[Thing.yamlSourceDocumentPlacement] =
+      headerResult.thing[Thing.yamlSourceDocument] = headerDocument;
+      headerResult.thing[Thing.yamlSourceDocumentPlacement] =
        [documentModes.headerAndEntries, 'header'];
 
+      pushWikiData(wikiData, headerResult.wikiData);
+
      try {
        headerAggregate.close();
      } catch (caughtError) {
@@ -1179,17 +1435,18 @@
        aggregate.push(caughtError);
      }
 
-      const entryThings = [];
+      const entryResults = [];
 
      for (const [index, entryDocument] of entryDocuments.entries()) {
-        const {thing: entryThing, aggregate: entryAggregate} =
+        const {result: entryResult, aggregate: entryAggregate} =
          processDocument(entryDocument, dataStep.entryDocumentThing);
 
-        entryThing[Thing.yamlSourceDocument] = entryDocument;
-        entryThing[Thing.yamlSourceDocumentPlacement] =
+        entryResult.thing[Thing.yamlSourceDocument] = entryDocument;
+        entryResult.thing[Thing.yamlSourceDocumentPlacement] =
          [documentModes.headerAndEntries, 'entry', index];
 
-        entryThings.push(entryThing);
+        entryResults.push(entryResult);
+        pushWikiData(wikiData, entryResult.wikiData);
 
        try {
          entryAggregate.close();
@@ -1202,10 +1459,16 @@
      return {
        aggregate,
        result: {
-          header: headerThing,
-          entries: entryThings,
+          network: {
+            header: headerResult.thing,
+            entries: entryResults.map(result => result.thing),
+          },
+
+          flat: headerResult.flat.concat(entryResults.flatMap(result => result.flat)),
+          file: [headerResult.thing, ...entryResults.map(result => result.thing)],
+
+          wikiData,
        },
-        things: [headerThing, ...entryThings],
      };
    }
@@ -1216,17 +1479,21 @@
      if (empty(documents) || !documents[0])
        throw new Error(`Expected a document, this file is empty`);
 
-      const {thing, aggregate} =
+      const {result, aggregate} =
        processDocument(documents[0], dataStep.documentThing);
 
-      thing[Thing.yamlSourceDocument] = documents[0];
-      thing[Thing.yamlSourceDocumentPlacement] =
+      result.thing[Thing.yamlSourceDocument] = documents[0];
+      result.thing[Thing.yamlSourceDocumentPlacement] =
        [documentModes.onePerFile];
 
      return {
        aggregate,
-        result: thing,
-        things: [thing],
+        result: {
+          network: result.thing,
+          flat: result.flat,
+          file: [result.thing],
+          wikiData: result.wikiData,
+        },
      };
    }
@@ -1327,10 +1594,10 @@ export async function processThingsFromDataSteps(documentLists, fileLists, dataS
     file: files,
     documents: documentLists,
   }).map(({file, documents}) => {
-    const {result, aggregate, things} =
+    const {result, aggregate} =
       processThingsFromDataStep(documents, dataStep);
 
-    for (const thing of things) {
+    for (const thing of result.file) {
       thing[Thing.yamlSourceFilename] =
         path.relative(dataPath, file)
           .split(path.sep)
@@ -1357,41 +1624,35 @@ export async function processThingsFromDataSteps(documentLists, fileLists, dataS
       translucent: true,
     }).contain(await fileListPromise));
 
-  const thingLists =
+  const results =
     aggregate
       .receive(await Promise.all(dataStepPromises));
 
-  return {aggregate, result: thingLists};
+  return {aggregate, result: results};
 }
 
-// Flattens a list of *lists* of things for a given data step (each list
-// corresponding to one YAML file) into results to be saved on the final
-// wikiData object, routing thing lists into the step's save() function.
-export function saveThingsFromDataStep(thingLists, dataStep) {
+// Runs a data step's connect() function, if present, with representations
+// of the results from the YAML files, called "networks" - one network and
+// one call to .connect() per YAML file - in order to form data connections
+// (direct links) between related objects within a file.
+export function connectThingsFromDataStep(results, dataStep) {
   const {documentMode} = dataStep;
 
   switch (documentMode) {
-    case documentModes.allInOne: {
-      const things =
-        (empty(thingLists)
-          ? []
-          : thingLists[0]);
-
-      return dataStep.save(things);
+    case documentModes.oneDocumentTotal:
+    case documentModes.onePerFile: {
+      // These results are never connected.
+      return;
    }
 
-    case documentModes.oneDocumentTotal: {
-      const thing =
-        (empty(thingLists)
-          ? {}
-          : thingLists[0]);
-
-      return dataStep.save(thing);
-    }
+    case documentModes.allInOne:
+    case documentModes.allTogether:
+    case documentModes.headerAndEntries: {
+      for (const result of results) {
+        dataStep.connect?.(result.network);
+      }
 
-    case documentModes.headerAndEntries:
-    case documentModes.onePerFile: {
-      return dataStep.save(thingLists);
+      break;
    }
 
    default:
@@ -1399,60 +1660,71 @@
   }
 }
 
-// Flattens a list of *lists* of things for each data step (each list
-// corresponding to one YAML file) into the final wikiData object,
-// routing thing lists into each step's save() function.
-export function saveThingsFromDataSteps(thingLists, dataSteps) {
+export function connectThingsFromDataSteps(processThingResultLists, dataSteps) {
   const aggregate = openAggregate({
-    message: `Errors finalizing things from data files`,
+    message: `Errors connecting things from data files`,
     translucent: true,
   });
 
-  const wikiData = {};
-
   stitchArrays({
     dataStep: dataSteps,
-    thingLists: thingLists,
-  }).map(({dataStep, thingLists}) => {
+    processThingResults: processThingResultLists,
+  }).forEach(({dataStep, processThingResults}) => {
     try {
-      return saveThingsFromDataStep(thingLists, dataStep);
+      connectThingsFromDataStep(processThingResults, dataStep);
    } catch (caughtError) {
      const error = new Error(
-        `Error finalizing things for data step: ${colors.bright(dataStep.title)}`,
+        `Error connecting things for data step: ${colors.bright(dataStep.title)}`,
        {cause: caughtError});
 
      error[Symbol.for('hsmusic.aggregate.translucent')] = true;
 
      aggregate.push(error);
-
-      return null;
    }
-  })
-    .filter(Boolean)
-    .forEach(saveResult => {
-      for (const [saveKey, saveValue] of Object.entries(saveResult)) {
-        if (Object.hasOwn(wikiData, saveKey)) {
-          if (Array.isArray(wikiData[saveKey])) {
-            if (Array.isArray(saveValue)) {
-              wikiData[saveKey].push(...saveValue);
-            } else {
-              throw new Error(`${saveKey} already present, expected array of items to push`);
-            }
-          } else {
-            if (Array.isArray(saveValue)) {
-              throw new Error(`${saveKey} already present and not an array, refusing to overwrite`);
-            } else {
-              throw new Error(`${saveKey} already present, refusing to overwrite`);
-            }
-          }
-        } else {
-          wikiData[saveKey] = saveValue;
+  });
+
+  return {result: null, aggregate};
+}
+
+export function makeWikiDataFromDataSteps(processThingResultLists, _dataSteps) {
+  const wikiData = {};
+
+  let found = false;
+  for (const result of processThingResultLists.flat(2)) {
+    pushWikiData(wikiData, result.wikiData);
+  }
+
+  const scanForConstituted =
+    processThingResultLists.flat(2).flatMap(result => result.flat);
+
+  const exists = new Set(scanForConstituted);
+
+  while (scanForConstituted.length) {
+    const scanningThing = scanForConstituted.pop();
+
+    for (const key of scanningThing.constructor[Thing.constitutibleProperties] ?? []) {
+      const maybeConstitutedThings =
+        (Array.isArray(scanningThing[key])
+          ? scanningThing[key]
+          : scanningThing[key]
+            ? [scanningThing[key]]
+            : []);
+
+      for (const thing of maybeConstitutedThings) {
+        if (exists.has(thing)) continue;
+        exists.add(thing);
+
+        if (thing.constructor[Thing.wikiData]) {
+          pushWikiData(wikiData, {[thing.constructor[Thing.wikiData]]: [thing]});
        }
+
+        scanForConstituted.push(thing);
      }
-    });
+    }
+  }
 
-  return {aggregate, result: wikiData};
+  return wikiData;
 }
@@ -1465,13 +1737,15 @@ export async function loadAndProcessDataDocuments(dataSteps, {dataPath}) {
     aggregate.receive(
       await loadYAMLDocumentsFromDataSteps(dataSteps, {dataPath}));
 
-  const thingLists =
+  const processThingResultLists =
     aggregate.receive(
       await processThingsFromDataSteps(documentLists, fileLists, dataSteps, {dataPath}));
 
+  aggregate.receive(
+    connectThingsFromDataSteps(processThingResultLists, dataSteps));
+
   const wikiData =
-    aggregate.receive(
-      saveThingsFromDataSteps(thingLists, dataSteps));
+    makeWikiDataFromDataSteps(processThingResultLists, dataSteps);
 
   return {aggregate, result: wikiData};
 }
@@ -1495,6 +1769,10 @@ export function linkWikiDataArrays(wikiData, {bindFind, bindReverse}) {
 
     ['artworkData', ['artworkData']],
 
+    ['commentaryData', [/* find */]],
+
+    ['creditingSourceData', [/* find */]],
+
     ['flashData', [
       'wikiInfo',
     ]],
@@ -1509,9 +1787,14 @@ export function linkWikiDataArrays(wikiData, {bindFind, bindReverse}) {
 
     ['homepageLayout.sections.rows', [/* find */]],
 
+    ['lyricsData', [/* find */]],
+
+    ['referencingSourceData', [/* find */]],
+
+    ['seriesData', [/* find */]],
+
     ['trackData', [
       'artworkData',
-      'trackData',
       'wikiInfo',
     ]],
@@ -1777,14 +2060,16 @@ export function flattenThingLayoutToDocumentOrder(layout) {
 }
 
 export function* splitDocumentsInYAMLSourceText(sourceText) {
-  const dividerRegex = /^-{3,}\n?/gm;
+  // Not multiline!
+  const dividerRegex = /(?:\r\n|\n|^)-{3,}(?:\r\n|\n|$)/g;
+
   let previousDivider = '';
 
   while (true) {
     const {lastIndex} = dividerRegex;
     const match = dividerRegex.exec(sourceText);
 
     if (match) {
-      const nextDivider = match[0].trim();
+      const nextDivider = match[0];
 
       yield {
        previousDivider,
@@ -1795,11 +2080,12 @@ export function* splitDocumentsInYAMLSourceText(sourceText) {
       previousDivider = nextDivider;
     } else {
       const nextDivider = '';
+      const lineBreak = previousDivider.match(/\r?\n/)?.[0] ?? '';
 
       yield {
         previousDivider,
         nextDivider,
-        text: sourceText.slice(lastIndex).replace(/(?<!\n)$/, '\n'),
+        text: sourceText.slice(lastIndex).replace(/(?<!\n)$/, lineBreak),
       };
 
       return;
@@ -1825,7 +2111,7 @@ export function recombineDocumentsIntoYAMLSourceText(documents) {
 
   for (const document of documents) {
     if (sourceText) {
-      sourceText += divider + '\n';
+      sourceText += divider;
    }
 
    sourceText += document.text;
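
A quick illustration of the merge rule that the new pushWikiData helper introduces, and that processThingsFromDataStep and makeWikiDataFromDataSteps rely on to fold per-document wikiData objects into one: array values accumulate, while a key that's already present (or that isn't an array on both sides) is refused. The sketch below is illustrative only, not code from the patch - it condenses the patch's three distinct error messages into one, and the albumData/trackData keys are just sample wikiData keys chosen for demonstration.

// Condensed sketch of pushWikiData's merge behavior; see the function in
// the diff above for the exact per-case error messages.
function pushWikiData(a, b) {
  for (const key of Object.keys(b)) {
    if (Object.hasOwn(a, key)) {
      if (Array.isArray(a[key]) && Array.isArray(b[key])) {
        // Arrays from separate documents accumulate onto one list.
        a[key].push(...b[key]);
      } else {
        throw new Error(`${key} already present, refusing to overwrite`);
      }
    } else {
      // First time a key is seen: copy arrays, assign everything else.
      a[key] = Array.isArray(b[key]) ? [...b[key]] : b[key];
    }
  }
}

// Hypothetical per-document results being folded into a single wikiData object.
const wikiData = {};
pushWikiData(wikiData, {albumData: [{name: 'Album A'}]});
pushWikiData(wikiData, {albumData: [{name: 'Album B'}], trackData: [{name: 'Track 1'}]});

console.log(wikiData.albumData.length); // 2
console.log(wikiData.trackData.length); // 1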