From c047b8c57d4e5012578c072420d2b73dd5b59c4c Mon Sep 17 00:00:00 2001
From: "(quasar) nebula"
Date: Sat, 14 Aug 2021 00:11:39 -0300
Subject: handy combine-album.js utility

this isn't exposed via the mtui command so like, just run it directly
with node right now lol

(this commit also makes "." parse in timestamp positions)
---
 crawlers.js | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

(limited to 'crawlers.js')

diff --git a/crawlers.js b/crawlers.js
index 3f6e391..6af615d 100644
--- a/crawlers.js
+++ b/crawlers.js
@@ -11,6 +11,15 @@ const { promisify } = require('util')
 const readDir = promisify(fs.readdir)
 const stat = promisify(fs.stat)
 
+const musicExtensions = [
+  'ogg', 'oga',
+  'wav', 'mp3', 'm4a', 'aac', 'flac', 'opus',
+  'mp4', 'mov', 'mkv',
+  'mod'
+]
+
+module.exports.musicExtensions = musicExtensions
+
 // Each value is a function with these additional properties:
 // * crawlerName: The name of the crawler, such as "crawl-http". Used by
 //   getCrawlerByName.
@@ -229,12 +238,7 @@ function getHTMLLinks(text) {
 }
 */
 
-function crawlLocal(dirPath, extensions = [
-  'ogg', 'oga',
-  'wav', 'mp3', 'm4a', 'aac', 'flac', 'opus',
-  'mp4', 'mov', 'mkv',
-  'mod'
-], isTop = true) {
+function crawlLocal(dirPath, extensions = musicExtensions, isTop = true) {
   // If the passed path is a file:// URL, try to decode it:
   try {
     const url = new URL(dirPath)
--
cgit 1.3.0-6-gf8a5

From 43f1a1dd1b44065663a797603012394c52a9baea Mon Sep 17 00:00:00 2001
From: "(quasar) nebula"
Date: Sat, 13 May 2023 13:31:58 -0300
Subject: use ESM module syntax & update tui-lib

Exciting update! This doesn't make any substantial changes exactly but
does update the most quickly-archaic parts of older Node code.
---
 crawlers.js | 46 +++++++++++++++++++---------------------------
 1 file changed, 19 insertions(+), 27 deletions(-)

(limited to 'crawlers.js')

diff --git a/crawlers.js b/crawlers.js
index 6af615d..b2f13fd 100644
--- a/crawlers.js
+++ b/crawlers.js
@@ -1,25 +1,21 @@
-const fs = require('fs')
-const path = require('path')
-const expandHomeDir = require('expand-home-dir')
-const fetch = require('node-fetch')
-const url = require('url')
-const { downloadPlaylistFromOptionValue, promisifyProcess } = require('./general-util')
-const { spawn } = require('child_process')
-const { orderBy } = require('natural-orderby')
-
-const { promisify } = require('util')
-const readDir = promisify(fs.readdir)
-const stat = promisify(fs.stat)
-
-const musicExtensions = [
+import {spawn} from 'node:child_process'
+import {readdir, stat} from 'node:fs/promises'
+import url from 'node:url'
+import path from 'node:path'
+
+import {orderBy} from 'natural-orderby'
+import expandHomeDir from 'expand-home-dir'
+// import fetch from 'node-fetch'
+
+import {downloadPlaylistFromOptionValue, promisifyProcess} from './general-util.js'
+
+export const musicExtensions = [
   'ogg', 'oga',
   'wav', 'mp3', 'm4a', 'aac', 'flac', 'opus',
   'mp4', 'mov', 'mkv',
   'mod'
 ]
 
-module.exports.musicExtensions = musicExtensions
-
 // Each value is a function with these additional properties:
 // * crawlerName: The name of the crawler, such as "crawl-http". Used by
 //   getCrawlerByName.
@@ -30,7 +26,7 @@ module.exports.musicExtensions = musicExtensions
 const allCrawlers = {}
 
 /* TODO: Removed cheerio, so crawl-http no longer works.
-function crawlHTTP(absURL, opts = {}, internals = {}) {
+export function crawlHTTP(absURL, opts = {}, internals = {}) {
   // Recursively crawls a given URL, following every link to a deeper path and
   // recording all links in a tree (in the same format playlists use). Makes
   // multiple attempts to download failed paths.
@@ -251,7 +247,7 @@ function crawlLocal(dirPath, extensions = musicExtensions, isTop = true) {
     dirPath = expandHomeDir(dirPath)
   }
 
-  return readDir(dirPath).then(items => {
+  return readdir(dirPath).then(items => {
     items = orderBy(items)
 
     return Promise.all(items.map(item => {
@@ -278,7 +274,7 @@ function crawlLocal(dirPath, extensions = musicExtensions, isTop = true) {
           return {name: item, url: itemURL}
         }
       }
-    }, statErr => null)
+    }, _statErr => null)
     }))
   }, err => {
     if (err.code === 'ENOENT') {
@@ -325,7 +321,7 @@ crawlLocal.isAppropriateForArg = function(arg) {
 
 allCrawlers.crawlLocal = crawlLocal
 
-async function crawlYouTube(url) {
+export async function crawlYouTube(url) {
   const ytdl = spawn('youtube-dl', [
     '-j', // Output as JSON
     '--flat-playlist',
@@ -385,7 +381,7 @@ crawlYouTube.isAppropriateForArg = function(arg) {
 
 allCrawlers.crawlYouTube = crawlYouTube
 
-async function openFile(input) {
+export async function openFile(input) {
   return JSON.parse(await downloadPlaylistFromOptionValue(input))
 }
 
@@ -398,14 +394,10 @@ openFile.isAppropriateForArg = function(arg) {
 
 allCrawlers.openFile = openFile
 
-// Actual module.exports stuff:
-
-Object.assign(module.exports, allCrawlers)
-
-module.exports.getCrawlerByName = function(name) {
+export function getCrawlerByName(name) {
   return Object.values(allCrawlers).find(fn => fn.crawlerName === name)
 }
 
-module.exports.getAllCrawlersForArg = function(arg) {
+export function getAllCrawlersForArg(arg) {
   return Object.values(allCrawlers).filter(fn => fn.isAppropriateForArg(arg))
 }
--
cgit 1.3.0-6-gf8a5

From c3425f516dfabe15c71b37faa9fa27ec55612900 Mon Sep 17 00:00:00 2001
From: "(quasar) nebula"
Date: Sat, 13 May 2023 18:09:51 -0300
Subject: skip .DS_Store, .git in crawl-local

---
 crawlers.js | 11 +++++++++++
 1 file changed, 11 insertions(+)

(limited to 'crawlers.js')

diff --git a/crawlers.js b/crawlers.js
index b2f13fd..8197095 100644
--- a/crawlers.js
+++ b/crawlers.js
@@ -16,6 +16,11 @@ export const musicExtensions = [
   'mod'
 ]
 
+export const skipNames = [
+  '.DS_Store',
+  '.git',
+]
+
 // Each value is a function with these additional properties:
 // * crawlerName: The name of the crawler, such as "crawl-http". Used by
 //   getCrawlerByName.
@@ -251,6 +256,12 @@ function crawlLocal(dirPath, extensions = musicExtensions, isTop = true) {
     items = orderBy(items)
 
     return Promise.all(items.map(item => {
+      // There are a few files which are just never what we're looking for.
+      // We skip including or searching under these altogether.
+      if (skipNames.includes(item)) {
+        return null
+      }
+
       const itemPath = path.join(dirPath, item)
       const itemURL = url.pathToFileURL(itemPath).href
 
--
cgit 1.3.0-6-gf8a5
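
After these three patches, crawlers.js exposes named ESM exports (musicExtensions, skipNames, getCrawlerByName, getAllCrawlersForArg, and the individual crawlers). Below is a minimal usage sketch, not code from the repository: the script name and the './music' fallback path are made up, and crawl-local is reached through getAllCrawlersForArg rather than imported directly, since crawlLocal itself isn't a named export in these diffs.

// crawl-example.js — hypothetical driver script; run as an ES module:
//   node crawl-example.js ./music
import {getAllCrawlersForArg} from './crawlers.js'

const arg = process.argv[2] ?? './music'

// Ask each registered crawler whether it considers itself appropriate
// for this argument; a plain directory path (or a file:// URL) should
// match crawl-local.
const crawlers = getAllCrawlersForArg(arg)

if (crawlers.length === 0) {
  console.error(`No crawler is appropriate for: ${arg}`)
  process.exit(1)
}

// A crawler resolves to a tree in the same format playlists use; after
// the third patch, entries named in skipNames (.DS_Store, .git) are
// never included or searched under by crawl-local.
const playlist = await crawlers[0](arg)
console.log(JSON.stringify(playlist, null, 2))

Registering each crawler in allCrawlers and selecting by isAppropriateForArg keeps callers free of per-source special cases: a new source type only needs to define crawlerName and isAppropriateForArg to participate in getCrawlerByName and getAllCrawlersForArg.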