From e4bccdfdef777747a541693796fe598491498aed Mon Sep 17 00:00:00 2001
From: Florrie
Date: Fri, 29 Jun 2018 19:44:44 -0300
Subject: Remove cheerio, disable crawl-http

---
 crawlers.js | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

(limited to 'crawlers.js')

diff --git a/crawlers.js b/crawlers.js
index caf3c0e..5a4987a 100644
--- a/crawlers.js
+++ b/crawlers.js
@@ -2,7 +2,6 @@ const fs = require('fs')
 const path = require('path')
 const naturalSort = require('node-natural-sort')
 const fetch = require('node-fetch')
-const cheerio = require('cheerio')
 const url = require('url')
 const { downloadPlaylistFromOptionValue, promisifyProcess } = require('./general-util')
 const { spawn } = require('child_process')
@@ -26,6 +25,7 @@ function sortIgnoreCase(sortFunction) {
   }
 }
 
+/* TODO: Removed cheerio, so crawl-http no longer works.
 function crawlHTTP(absURL, opts = {}, internals = {}) {
   // Recursively crawls a given URL, following every link to a deeper path and
   // recording all links in a tree (in the same format playlists use). Makes
@@ -217,13 +217,14 @@
 allCrawlers.crawlHTTP = crawlHTTP
 
 function getHTMLLinks(text) {
   // Never parse HTML with a regex!
-  const $ = cheerio.load(text)
+  // const $ = cheerio.load(text)
   return $('a').get().map(el => {
     const $el = $(el)
     return [$el.text(), $el.attr('href')]
   })
 }
+*/
 
 function crawlLocal(dirPath, extensions = [
   'ogg', 'oga',
--
cgit 1.3.0-6-gf8a5
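
For context, the commented-out getHTMLLinks relies on cheerio's $ selector, so crawl-http cannot be re-enabled without restoring that dependency. A minimal standalone sketch of the helper, assuming cheerio is added back to the project's dependencies (not part of the patch itself):

    // Sketch only: requires re-adding cheerio as a dependency.
    const cheerio = require('cheerio')

    // Collect [link text, href] pairs for every <a> element in an HTML string.
    function getHTMLLinks(text) {
      const $ = cheerio.load(text)
      return $('a').get().map(el => {
        const $el = $(el)
        return [$el.text(), $el.attr('href')]
      })
    }

    // Example: getHTMLLinks('<a href="/track.mp3">Track</a>')
    //   => [['Track', '/track.mp3']]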