asciidisco.com/build/tasks/fetch_content.js

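// Build task that gathers all content for the site: Ghost blog posts,
// local static assets, the public GitHub repos of user asciidisco, and
// third-party markdown pages listed in `config.mdcontents.pages`.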
const fs = require('fs')
const path = require('path')
const Stream = require('stream').Transform
const {URL} = require('url')
const {promisify} = require('util')
const hasha = require('hasha')
const filewalker = require('filewalker')
const chalk = require('chalk')
const {getPosts} = require('./content/ghost')
const {httpsGetAsync, httpGetAsync, parseJSONData} = require('../lib/request')
const log = console.log
const GITHUB_API_HOST = 'api.github.com'
const GITHUB_API_REPOS = '/users/asciidisco/repos'
const ASSET_EXTENSIONS = ['js', 'mjs', 'css', 'png', 'svg', 'jpg', 'jpeg', 'gif', 'webp', 'hbs', 'woff', 'woff2', 'md']
const EXCLUDED_FOLDERS = ['node_modules/', 'build']
const readFileAsync = promisify(fs.readFile)
/* Third party markdown */
const getThirdPartyMarkdown = async config => {
  const pagesPromises = config.mdcontents.pages.map(page => {
    return new Promise(async (resolve, reject) => {
      const pageUrl = new URL(page.url)
      const pageReqOptions = {
        host: pageUrl.host,
        path: pageUrl.pathname,
        headers: {'User-Agent': 'Node'}
      }
      // Pick the client that matches the page's protocol
      const fetcher = pageUrl.protocol === 'http:' ? httpGetAsync : httpsGetAsync
      // Accumulate the raw response body in a pass-through stream
      const data = new Stream()
      const res = await fetcher(pageReqOptions)
      res.on('error', reject)
      res.on('data', chunk => data.push(chunk))
      res.on('end', () => resolve(data.read()))
    })
  })
  return Promise.all(pagesPromises)
}

/* GitHub Repos */
const getRepos = config => {
  return new Promise(async (resolve, reject) => {
    const reposReqOptions = {
      host: GITHUB_API_HOST,
      path: GITHUB_API_REPOS,
      headers: {'User-Agent': 'Node'}
    }
    const data = new Stream()
    const res = await httpsGetAsync(reposReqOptions)
    res.on('data', chunk => data.push(chunk))
    res.on('end', parseJSONData.bind(null, data, resolve, reject))
  })
}

/* Local assets */
const getAssets = directory => {
  return new Promise((resolve, reject) => {
    let assets = []
    const types = ASSET_EXTENSIONS
    const excludes = EXCLUDED_FOLDERS.map(dir => path.resolve(path.join(directory, dir)))
    filewalker(directory)
      .on('file', dir => {
        const src = path.join(directory, dir)
        // Skip files inside excluded folders and files with unlisted extensions
        const exclude = excludes.filter(ex => src.indexOf(ex) >= 0).length
        const ext = path.extname(dir).replace('.', '')
        if (!exclude && types.includes(ext)) assets.push({src})
      })
      .on('done', resolve.bind(null, assets))
      .on('error', reject)
      .walk()
  })
}

const getAssetContents = async config => {
  const directory = config.paths.base
  const files = await getAssets(directory)
  // Read every asset and attach its raw contents plus a content hash
  const assets = files.map(async file => {
    const buf = await readFileAsync(file.src)
    return {...file, original: buf, hash: hasha(buf)}
  })
  return Promise.all(assets)
}

module.exports = async config => {
  // get ghost posts
  log(' ', chalk.green('>>'), 'Fetching ghost posts')
  const posts = await getPosts(config)
  // get local assets
  log(' ', chalk.green('>>'), 'Fetching local contents')
  const localAssets = await getAssetContents(config)
  // get github repos
  log(' ', chalk.green('>>'), 'Fetching github repos')
  const repos = await getRepos(config)
  // get foreign markdown content
  log(' ', chalk.green('>>'), 'Fetching third party markdown')
  const thirdPartyMarkdown = await getThirdPartyMarkdown(config)
  // NOTE: for now the fetched content is only logged; the task resolves with an empty object
  console.log({posts, localAssets, repos, thirdPartyMarkdown})
  return {}
}
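
// A minimal usage sketch (hypothetical caller, not part of this file), assuming a
// config shaped like the one this task reads from:
//
//   const fetchContent = require('./build/tasks/fetch_content')
//   const config = {
//     paths: {base: '.'},
//     mdcontents: {pages: [{url: 'https://example.com/README.md'}]}
//   }
//   fetchContent(config).then(content => console.log('done', content))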