mtui - Music Text User Interface - user-friendly command line music player
author     Florrie <towerofnix@gmail.com>  2018-12-22 13:19:30 -0400
committer  Florrie <towerofnix@gmail.com>  2018-12-22 13:19:30 -0400
commit     eb13543e95052b2d1a0c6cd3376c9655579527bc
tree       84c58a040332a4ac707bb9b0f54378d5eefde283
parent     08a5e7805cee34ae50d958dddd262e6954afcf23

Make mtui work in webpack
-rw-r--r--  crawlers.js        185
-rw-r--r--  downloaders.js     178
-rw-r--r--  general-util.js     62
-rw-r--r--  players.js           4
-rw-r--r--  playlist-utils.js    8
m---------  tui-lib              0
-rw-r--r--  ui.js               13
7 files changed, 23 insertions(+), 427 deletions(-)
diff --git a/crawlers.js b/crawlers.js
index feeedf2..4c96c85 100644
--- a/crawlers.js
+++ b/crawlers.js
@@ -1,15 +1,5 @@
-const fs = require('fs')
 const path = require('path')
-const naturalSort = require('node-natural-sort')
-const expandHomeDir = require('expand-home-dir')
-const fetch = require('node-fetch')
-const url = require('url')
 const { downloadPlaylistFromOptionValue, promisifyProcess } = require('./general-util')
-const { spawn } = require('child_process')
-
-const { promisify } = require('util')
-const readDir = promisify(fs.readdir)
-const stat = promisify(fs.stat)
 
 // Each value is a function with these additional properties:
 // * crawlerName: The name of the crawler, such as "crawl-http". Used by
@@ -26,7 +16,6 @@ function sortIgnoreCase(sortFunction) {
   }
 }
 
-/* TODO: Removed cheerio, so crawl-http no longer works.
 function crawlHTTP(absURL, opts = {}, internals = {}) {
   // Recursively crawls a given URL, following every link to a deeper path and
   // recording all links in a tree (in the same format playlists use). Makes
@@ -60,7 +49,7 @@ function crawlHTTP(absURL, opts = {}, internals = {}) {
     }
   }
 
-  const absURLObj = new url.URL(absURL)
+  const absURLObj = new URL(absURL)
 
   return fetch(absURL)
     .then(
@@ -86,8 +75,8 @@ function crawlHTTP(absURL, opts = {}, internals = {}) {
 
           name = name.trim()
 
-          const urlObj = new url.URL(href, absURL + '/')
-          const linkURL = url.format(urlObj)
+          const urlObj = new URL(href, absURL + '/')
+          const linkURL = urlObj.toString()
 
           if (internals.allURLs.includes(linkURL)) {
             verboseLog("[Ignored] Already done this URL: " + linkURL)
@@ -183,6 +172,15 @@ function crawlHTTP(absURL, opts = {}, internals = {}) {
     })
 }
 
+function getHTMLLinks(text) {
+  // Never parse HTML with a regex!
+  const doc = (new DOMParser()).parseFromString(text, 'text/html')
+
+  return Array.from(doc.getElementsByTagName('a')).map(el => {
+    return [el.innerText, el.getAttribute('href')]
+  })
+}
+
 crawlHTTP.crawlerName = 'crawl-http'
 
 crawlHTTP.isAppropriateForArg = function(arg) {
@@ -216,165 +214,6 @@ crawlHTTP.isAppropriateForArg = function(arg) {
 
 allCrawlers.crawlHTTP = crawlHTTP
 
-function getHTMLLinks(text) {
-  // Never parse HTML with a regex!
-  // const $ = cheerio.load(text)
-
-  return $('a').get().map(el => {
-    const $el = $(el)
-    return [$el.text(), $el.attr('href')]
-  })
-}
-*/
-
-function crawlLocal(dirPath, extensions = [
-  'ogg', 'oga',
-  'wav', 'mp3', 'mp4', 'm4a', 'aac',
-  'mod'
-], isTop = true) {
-  // If the passed path is a file:// URL, try to decode it:
-  try {
-    const url = new URL(dirPath)
-    if (url.protocol === 'file:') {
-      dirPath = decodeURIComponent(url.pathname)
-    }
-  } catch (error) {
-    // If it's not a URL, it's (assumedly) an ordinary path ("/path/to/the directory").
-    // In this case we'll expand any ~ in the path (e.g. ~/Music -> /home/.../Music).
-    dirPath = expandHomeDir(dirPath)
-  }
-
-  return readDir(dirPath).then(items => {
-    items.sort(sortIgnoreCase(naturalSort()))
-
-    return Promise.all(items.map(item => {
-      const itemPath = path.join(dirPath, item)
-
-      return stat(itemPath).then(stats => {
-        if (stats.isDirectory()) {
-          return crawlLocal(itemPath, extensions, false)
-            .then(group => Object.assign({name: item}, group))
-        } else if (stats.isFile()) {
-          // Extname returns a string starting with a dot; we don't want the
-          // dot, so we slice it off of the front.
-          const ext = path.extname(item).slice(1)
-
-          if (extensions.includes(ext)) {
-            // The name of the track doesn't include the file extension; a user
-            // probably wouldn't add the file extensions to a hand-written
-            // playlist, or want them in an auto-generated one.
-            const basename = path.basename(item, path.extname(item))
-
-            const track = {name: basename, downloaderArg: itemPath}
-            return track
-          } else {
-            return null
-          }
-        }
-      }, statErr => null)
-    }))
-  }, err => {
-    if (err.code === 'ENOENT') {
-      if (isTop) {
-        throw 'That directory path does not exist!'
-      } else {
-        return []
-      }
-    } else if (err.code === 'EACCES') {
-      if (isTop) {
-        throw 'You do not have permission to open that directory.'
-      } else {
-        return []
-      }
-    } else {
-      throw err
-    }
-  }).then(items => items.filter(Boolean))
-    .then(filteredItems => ({items: filteredItems}))
-}
-
-crawlLocal.crawlerName = 'crawl-local'
-
-crawlLocal.isAppropriateForArg = function(arg) {
-  // When the passed argument is a valid URL, it is only used for file://
-  // URLs:
-  try {
-    const url = new URL(arg)
-    if (url.protocol !== 'file:') {
-      return false
-    }
-  } catch (error) {}
-
-  // If the passed argument ends with .json, it is probably not a directory.
-  if (path.extname(arg) === '.json') {
-    return false
-  }
-
-  return true
-}
-
-allCrawlers.crawlLocal = crawlLocal
-
-async function crawlYouTube(url) {
-  const ytdl = spawn('youtube-dl', [
-    '-j', // Output as JSON
-    '--flat-playlist',
-    url
-  ])
-
-  const items = []
-
-  ytdl.stdout.on('data', data => {
-    const lines = data.toString().trim().split('\n')
-
-    items.push(...lines.map(JSON.parse))
-  })
-
-  // Pass false so it doesn't show logging.
-  try {
-    await promisifyProcess(ytdl, false)
-  } catch (error) {
-    // Yeow.
-    throw 'Youtube-dl failed.'
-  }
-
-  return {
-    name: 'A YouTube playlist',
-    items: items.map(item => {
-      return {
-        name: item.title,
-        downloaderArg: 'https://youtube.com/watch?v=' + item.id
-      }
-    })
-  }
-}
-
-crawlYouTube.crawlerName = 'crawl-youtube'
-
-crawlYouTube.isAppropriateForArg = function(arg) {
-  // It is definitely not used for arguments that are not URLs:
-  let url
-  try {
-    url = new URL(arg)
-  } catch (error) {
-    return false
-  }
-
-  // It is only used for URLs on the YouTube domain:
-  if (!(url.hostname === 'youtube.com' || url.hostname === 'www.youtube.com')) {
-    return false
-  }
-
-  // It is only used for playlist pages:
-  if (url.pathname !== '/playlist') {
-    return false
-  }
-
-  return true
-}
-
-allCrawlers.crawlYouTube = crawlYouTube
-
 async function openFile(input) {
   return JSON.parse(await downloadPlaylistFromOptionValue(input))
 }
diff --git a/downloaders.js b/downloaders.js
index 4b4750c..f5df4d2 100644
--- a/downloaders.js
+++ b/downloaders.js
@@ -1,95 +1,3 @@
-const { promisifyProcess } = require('./general-util')
-const { promisify } = require('util')
-const { spawn } = require('child_process')
-const { Base64 } = require('js-base64')
-const mkdirp = promisify(require('mkdirp'))
-const fs = require('fs')
-const fetch = require('node-fetch')
-const tempy = require('tempy')
-const path = require('path')
-const sanitize = require('sanitize-filename')
-
-const writeFile = promisify(fs.writeFile)
-const rename = promisify(fs.rename)
-const stat = promisify(fs.stat)
-const readdir = promisify(fs.readdir)
-const symlink = promisify(fs.symlink)
-
-const copyFile = (source, target) => {
-  // Stolen from https://stackoverflow.com/a/30405105/4633828
-  const rd = fs.createReadStream(source)
-  const wr = fs.createWriteStream(target)
-  return new Promise((resolve, reject) => {
-    rd.on('error', reject)
-    wr.on('error', reject)
-    wr.on('finish', resolve)
-    rd.pipe(wr)
-  }).catch(function(error) {
-    rd.destroy()
-    wr.end()
-    throw error
-  })
-}
-
-const cachify = (identifier, baseFunction) => {
-  return async arg => {
-    // If there was no argument passed (or it aws empty), nothing will work..
-    if (!arg) {
-      throw new TypeError('Expected a downloader argument')
-    }
-
-    // Determine where the final file will end up. This is just a directory -
-    // the file's own name is determined by the downloader.
-    const cacheDir = downloaders.rootCacheDir + '/' + identifier
-    const finalDirectory = cacheDir + '/' + Base64.encode(arg)
-
-    // Check if that directory only exists. If it does, return the file in it,
-    // because it being there means we've already downloaded it at some point
-    // in the past.
-    let exists
-    try {
-      await stat(finalDirectory)
-      exists = true
-    } catch (error) {
-      // ENOENT means the folder doesn't exist, which is one of the potential
-      // expected outputs, so do nothing and let the download continue.
-      if (error.code === 'ENOENT') {
-        exists = false
-      }
-      // Otherwise, there was some unexpected error, so throw it:
-      else {
-        throw error
-      }
-    }
-
-    // If the directory exists, return the file in it. Downloaders always
-    // return only one file, so it's expected that the directory will only
-    // contain a single file. We ignore any other files. Note we also allow
-    // the download to continue if there aren't any files in the directory -
-    // that would mean that the file (but not the directory) was unexpectedly
-    // deleted.
-    if (exists) {
-      const files = await readdir(finalDirectory)
-      if (files.length >= 1) {
-        return finalDirectory + '/' + files[0]
-      }
-    }
-
-    // The "temporary" output, aka the download location. Generally in a
-    // temporary location as returned by tempy.
-    const tempFile = await baseFunction(arg)
-
-    // Then move the download to the final location. First we need to make the
-    // folder exist, then we move the file.
-    const finalFile = finalDirectory + '/' + path.basename(tempFile)
-    await mkdirp(finalDirectory)
-    await rename(tempFile, finalFile)
-
-    // And return.
-    return finalFile
-  }
-}
-
 const removeFileProtocol = arg => {
   const fileProto = 'file://'
   if (arg.startsWith(fileProto)) {
@@ -100,91 +8,17 @@ const removeFileProtocol = arg => {
 }
 
 const downloaders = {
-  extension: 'mp3', // Generally target file extension, used by youtube-dl
-
-  // TODO: Cross-platform stuff
-  rootCacheDir: process.env.HOME + '/.mtui/downloads',
-
-  http: cachify('http', arg => {
-    const out = (
-      tempy.directory() + '/' +
-      sanitize(decodeURIComponent(path.basename(arg))))
-
+  fetch: arg => {
     return fetch(arg)
-      .then(response => response.buffer())
-      .then(buffer => writeFile(out, buffer))
-      .then(() => out)
-  }),
-
-  youtubedl: cachify('youtubedl', arg => {
-    const outDir = tempy.directory()
-    const outFile = outDir + '/%(id)s-%(uploader)s-%(title)s.%(ext)s'
-
-    const opts = [
-      '--quiet',
-      '--no-warnings',
-      '--extract-audio',
-      '--audio-format', downloaders.extension,
-      '--output', outFile,
-      arg
-    ]
-
-    return promisifyProcess(spawn('youtube-dl', opts))
-      .then(() => readdir(outDir))
-      .then(files => outDir + '/' + files[0])
-  }),
-
-  local: cachify('local', arg => {
-    // Usually we'd just return the given argument in a local
-    // downloader, which is efficient, since there's no need to
-    // copy a file from one place on the hard drive to another.
-    // But reading from a separate drive (e.g. a USB stick or a
-    // CD) can take a lot longer than reading directly from the
-    // computer's own drive, so this downloader copies the file
-    // to a temporary file on the computer's drive.
-    // Ideally, we'd be able to check whether a file is on the
-    // computer's main drive mount or not before going through
-    // the steps to copy, but I'm not sure if there's a way to
-    // do that (and it's even less likely there'd be a cross-
-    // platform way).
-
-    // It's possible the downloader argument starts with the "file://"
-    // protocol string; in that case we'll want to snip it off and URL-
-    // decode the string.
-    arg = removeFileProtocol(arg)
-
-    // TODO: Is it necessary to sanitize here?
-    // Haha, the answer to "should I sanitize" is probably always YES..
-    const base = path.basename(arg, path.extname(arg))
-    const out = tempy.directory() + '/' + sanitize(base) + path.extname(arg)
-
-    return copyFile(arg, out)
-      .then(() => out)
-  }),
-
-  locallink: cachify('locallink', arg => {
-    // Like the local downloader, but creates a symbolic link to the argument.
-
-    arg = removeFileProtocol(arg)
-    const base = path.basename(arg, path.extname(arg))
-    const out = tempy.directory() + '/' + sanitize(base) + path.extname(arg)
-
-    return symlink(path.resolve(arg), out)
-      .then(() => out)
-  }),
-
-  echo: arg => arg,
+      .then(response => response.blob())
+      .then(blob => URL.createObjectURL(blob))
+  },
 
   getDownloaderFor: arg => {
     if (arg.startsWith('http://') || arg.startsWith('https://')) {
-      if (arg.includes('youtube.com')) {
-        return downloaders.youtubedl
-      } else {
-        return downloaders.http
-      }
+      return downloaders.http
     } else {
-      // return downloaders.local
-      return downloaders.locallink
+      return null
     }
   }
 }
diff --git a/general-util.js b/general-util.js
index 0b9f081..abe2399 100644
--- a/general-util.js
+++ b/general-util.js
@@ -1,72 +1,14 @@
-const { spawn } = require('child_process')
-const { promisify } = require('util')
-const fetch = require('node-fetch')
-const fs = require('fs')
-const npmCommandExists = require('command-exists')
-
-const readFile = promisify(fs.readFile)
-
-module.exports.promisifyProcess = function(proc, showLogging = true) {
-  // Takes a process (from the child_process module) and returns a promise
-  // that resolves when the process exits (or rejects, if the exit code is
-  // non-zero).
-
-  return new Promise((resolve, reject) => {
-    if (showLogging) {
-      proc.stdout.pipe(process.stdout)
-      proc.stderr.pipe(process.stderr)
-    }
-
-    proc.on('exit', code => {
-      if (code === 0) {
-        resolve()
-      } else {
-        reject(code)
-      }
-    })
-  })
-}
-
 module.exports.commandExists = async function(command) {
-  // When the command-exists module sees that a given command doesn't exist, it
-  // throws an error instead of returning false, which is not what we want.
-
-  try {
-    return await npmCommandExists(command)
-  } catch(err) {
-    return false
-  }
-}
-
-module.exports.killProcess = async function(proc) {
-  // Windows is stupid and doesn't like it when we try to kill processes.
-  // So instead we use taskkill! https://stackoverflow.com/a/28163919/4633828
-
-  if (await module.exports.commandExists('taskkill')) {
-    await module.exports.promisifyProcess(
-      spawn('taskkill', ['/pid', proc.pid, '/f', '/t']),
-      false
-    )
-  } else {
-    proc.kill()
-  }
+  return false
 }
 
 function downloadPlaylistFromURL(url) {
   return fetch(url).then(res => res.text())
 }
 
-function downloadPlaylistFromLocalPath(path) {
-  return readFile(path).then(buf => buf.toString())
-}
-
 module.exports.downloadPlaylistFromOptionValue = function(arg) {
   // TODO: Verify things!
-  if (arg.startsWith('http://') || arg.startsWith('https://')) {
-    return downloadPlaylistFromURL(arg)
-  } else {
-    return downloadPlaylistFromLocalPath(arg)
-  }
+  return downloadPlaylistFromURL(arg)
 }
 
 module.exports.shuffleArray = function(array) {
diff --git a/players.js b/players.js
index be5205f..1eacf7d 100644
--- a/players.js
+++ b/players.js
@@ -1,7 +1,7 @@
 // stolen from http-music
 
-const { spawn } = require('child_process')
-const FIFO = require('fifo-js')
+// const { spawn } = require('child_process')
+// const FIFO = require('fifo-js')
 const EventEmitter = require('events')
 const { commandExists, killProcess } = require('./general-util')
 
diff --git a/playlist-utils.js b/playlist-utils.js
index 4367fb0..0e2d6f0 100644
--- a/playlist-utils.js
+++ b/playlist-utils.js
@@ -1,11 +1,3 @@
-'use strict'
-
-const path = require('path')
-const fs = require('fs')
-
-const { promisify } = require('util')
-const unlink = promisify(fs.unlink)
-
 const { shuffleArray } = require('./general-util')
 
 const parentSymbol = Symbol('Parent group')
diff --git a/tui-lib b/tui-lib
deleted file mode 160000
-Subproject d8331b98aad2e29b23e88901049fc3c91489a2c
diff --git a/ui.js b/ui.js
index dfb75ae..fc38401 100644
--- a/ui.js
+++ b/ui.js
@@ -27,11 +27,7 @@ const {
     telchars: telc,
     unichars: unic,
   }
-} = require('./tui-lib')
-
-const fs = require('fs')
-const { promisify } = require('util')
-const writeFile = promisify(fs.writeFile)
+} = require('tui-lib')
 
 class AppElement extends FocusElement {
   constructor() {
@@ -733,13 +729,6 @@ class AppElement extends FocusElement {
         this.queueListingElement.selectAndShow(item)
       }
 
-      await Promise.all([
-        writeFile(this.rootDirectory + '/current-track.txt',
-          getItemPathString(item)),
-        writeFile(this.rootDirectory + '/current-track.json',
-          JSON.stringify(item, null, 2))
-      ])
-
       try {
         await this.player.playFile(downloadFile)
       } finally {