first commit

Commit 1fc239fd54 by s.golasch, 2023-08-01 13:49:46 +02:00
20238 changed files with 3112246 additions and 0 deletions

build/tasks/content/ghost.js Normal file
@@ -0,0 +1,119 @@
const path = require('path')
const https = require('https')
const Stream = require('stream').Transform
const hasha = require('hasha')
const md = require('markdown-it')()
const {httpsGetAsync, parseJSONData} = require('../../lib/request')
const GHOST_API_ROOT = '/ghost/api/v0.1/'
// Retrieve ghost access token
const getToken = config => {
return new Promise((resolve, reject) => {
const tokenReqData = `grant_type=password&username=${config.ghost.username}&password=${config.ghost.password}&client_id=${config.ghost.id}&client_secret=${config.ghost.secret}`
const tokenReqOptions = {
host: config.ghost.host,
path: `${GHOST_API_ROOT}authentication/token`,
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': Buffer.byteLength(tokenReqData, 'utf8')}}
const collectToken = https.request(tokenReqOptions, res => {
const data = new Stream()
res.on('data', chunk => data.push(chunk))
res.on('end', parseJSONData.bind(null, data, resolve, reject))
})
collectToken.write(tokenReqData)
collectToken.end()
collectToken.on('error', error => reject(error))
})
}
const download = (image, config) => {
  return new Promise(async (resolve, reject) => {
    try {
      const src = `https://${config.ghost.host}${image}`
      const dest = path.join(config.basepath, config.image_path, image)
      const data = new Stream()
      const res = await httpsGetAsync(src)
      res.on('data', chunk => data.push(chunk))
      res.on('end', () => {
        const buf = data.read()
        resolve({src, dest, hash: hasha(buf), original: buf, optimized: null, responsive: []})
      })
    } catch (err) {
      reject(err)
    }
  })
}
// download images
const downloadImages = async (posts, config) => {
let images = []
posts.posts.forEach(post => {
let postImages = []
if (post.image) postImages.push(post.image)
// parse inline images
md.parse(post.markdown).forEach(token => {
if (token.type === 'image') postImages.push(token.attrs[0][1])
if (Array.isArray(token.children)) {
token.children.forEach(child => {
if (child.type === 'image') postImages.push(child.attrs[0][1])
})
}
})
images.push(Promise.all(postImages.map(image => download(image, config))))
})
return Promise.all(images)
}
const mapPostPages = async (posts, images, config) => {
// the header image was pushed first in downloadImages, so take it from the front of the list
const postsWithMappedImages = posts.posts.map((post, idx) => Object.assign({}, post, {image: images[idx].shift()}, {images: images[idx]}))
const postsWithMappedFields = postsWithMappedImages.map(post => {
return {
id: post.id,
uuid: post.uuid,
title: post.title,
slug: post.slug,
url: post.url,
article: !post.page,
status: post.status,
language: post.language,
visibility: post.visibility,
meta_title: post.meta_title || post.title,
meta_description: post.meta_description,
created_at: new Date(post.created_at),
created_by: post.created_by,
updated_at: new Date(post.updated_at),
updated_by: post.updated_by,
published_at: new Date(post.published_at),
published_by: post.published_by,
tags: post.tags,
author: post.author,
image: post.image,
images: post.images,
markdown: post.markdown
}
})
return postsWithMappedFields
}
// fetch ghost posts
const getPosts = async config => {
const loadPosts = () => {
return new Promise(async (resolve, reject) => {
const token = await getToken(config)
const postsReqOptions = {
host: config.ghost.host,
path: `${GHOST_API_ROOT}posts/?status=all&staticPages=all&include=tags&limit=30`,
headers: {
'authorization': `Bearer ${token.access_token}`,
'accept': 'application/json'}}
const data = new Stream()
const res = await httpsGetAsync(postsReqOptions)
res.on('data', chunk => data.push(chunk))
res.on('end', parseJSONData.bind(null, data, resolve, reject))
})
}
const posts = await loadPosts()
const images = await downloadImages(posts, config)
return mapPostPages(posts, images, config)
}
module.exports.getPosts = getPosts
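
A note on usage: getPosts reads everything from one config object. A minimal sketch of the shape it consumes (field names taken from the code above, all values placeholders):

const {getPosts} = require('./build/tasks/content/ghost')
const config = {
  basepath: '/path/to/project',      // hypothetical project root
  image_path: 'img/',                // where downloaded images are mapped to
  ghost: {
    host: 'blog.example.com',        // placeholder Ghost host
    username: 'user@example.com',    // placeholder credentials for the v0.1 password grant
    password: 'secret',
    id: 'ghost-admin',               // client_id / client_secret pair
    secret: 'xyz'
  }
}
getPosts(config).then(posts => console.log(posts.length, 'posts fetched'))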

build/tasks/css.js Normal file
@@ -0,0 +1,68 @@
const fs = require('fs')
const path = require('path')
const crypto = require('crypto')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const mkdirp = require('mkdirp')
const postcss = require('postcss')
const chalk = require('chalk')
const log = console.log
const plugins = [
require('postcss-import')(),
require('autoprefixer')(),
require('css-mqpacker')(),
require('postcss-unique-selectors')(),
require('postcss-merge-selectors')(),
require('postcss-minify-selectors')(),
require('postcss-merge-longhand')(),
require('postcss-merge-idents')(),
require('postcss-convert-values')(),
require('postcss-colormin')(),
require('postcss-discard-unused')(),
require('postcss-minify-font-values')(),
require('postcss-discard-duplicates')(),
require('postcss-discard-empty')(),
require('postcss-ordered-values')(),
require('cssnano')({preset: 'default'}),
require('postcss-csso')(),
]
// compute the file paths (keep plain .css files, drop directories in the process)
const get_css_files = config => new Promise((resolve, reject) => {
  fs.readdir(path.join(config.basepath, config.css_path), (err, files) => {
    if (err) return reject(err)
    resolve(files
      .map(file => path.join(config.basepath, config.css_path, file))
      .filter(file => fs.statSync(file).isFile())
      .filter(file => path.extname(file) === '.css'))
  })
})
// read file contents
const read_css_files = (config, files) => Promise.all(files.map(file => new Promise((resolve, reject) => {
  fs.readFile(file, 'utf-8', (err, css) => err ? reject(err) : resolve({
    file,
    css,
    destination: path.join(config.basepath, config.dist_path, file.replace(config.basepath, ''))
  }))
})))
// process files
const process_css_files = (config, files) => Promise.all(files.map(file => postcss(plugins).process(file.css, {from: file.file})))
// write files
const write_css_files = (config, files, result) => {
return new Promise(async (resolve, reject) => {
try {
// create directories if not present
await Promise.all(files.map(file => new Promise((res, rej) => mkdirp(path.dirname(file.destination), err => err ? rej(err) : res()))))
// write uncompressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => fs.writeFile(file.destination, result[idx].css, err => err ? rej(err) : res()))))
// write zopfli compressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => zopfli.gzip(Buffer.from(result[idx].css), (error, output) => fs.writeFile(file.destination + '.gz', output, err => (err || error) ? rej(err || error) : res())))))
// write brotli compressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => brotli.compress(Buffer.from(result[idx].css), (error, output) => fs.writeFile(file.destination + '.br', output, err => (err || error) ? rej(err || error) : res())))))
resolve()
} catch (err) {
reject(err)
}
})
}
// compute result metadata (paths, hashes, HTML include snippets) for downstream tasks
const compute_result = (config, content, processed) => content.map((file, idx) => {
  const hash = crypto.createHash('md5').update(processed[idx].css).digest('hex')
  return {
    include: '/' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    name: path.basename(file.file).replace(path.join(config.basepath, config.css_path), ''),
    file: file.destination,
    original: file.file,
    contents: processed[idx].css,
    html: '<link rel="stylesheet" href="' + '/' + config.css_path + file.destination.replace(path.join(config.basepath, config.dist_path, config.css_path), '').replace('.css', '') + '.' + hash.substr(0, 6) + '.css"/>',
    hashed_include: '/' + hash.substr(0, 6) + '.' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    hash
  }
})
module.exports = async (config, results) => {
log(' ', chalk.green('>>'), 'Loading css files')
const files = await get_css_files(config)
log(' ', chalk.green('>>'), 'Reading css files')
const content = await read_css_files(config, files)
log(' ', chalk.green('>>'), 'Processing css files')
const processed = await process_css_files(config, content)
log(' ', chalk.green('>>'), 'Writing css files')
await write_css_files(config, content, processed)
return compute_result(config, content, processed)
}
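
For orientation, a minimal invocation sketch of this task (config keys read off the code above; paths are placeholders):

const css = require('./build/tasks/css')
const config = {basepath: '/path/to/project', css_path: 'css/', dist_path: 'dist/'}
css(config).then(results => results.forEach(r => console.log(r.name, r.hash)))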

build/tasks/deploy.js Normal file
@@ -0,0 +1,84 @@
const path = require('path')
const Client = require('ftp')
const filewalker = require('filewalker')
const chalk = require('chalk')
const log = console.log;
/* FTP functions */
// create client instance
const client = new Client()
// connect to server (resolve on ready, reject on connection errors)
const connect = options => new Promise((resolve, reject) => client.on('ready', resolve).on('error', reject).connect(options))
// get list of files/dirs in root directory
const list = () => new Promise((resolve, reject) => client.list((err, list) => err ? reject(err) : resolve(list)))
// recursively create a directory structure on the server
const mkdir = name => new Promise((resolve, reject) => client.mkdir(name, true, err => err ? reject(err) : resolve(name)))
// recursively remove a directory structure from the server
const rmdir = name => new Promise((resolve, reject) => client.rmdir(name, true, err => err ? reject(err) : resolve(name)))
// remove a file from the server
const remove_file = name => new Promise((resolve, reject) => client.delete(name, err => err ? reject(err) : resolve(name)))
// transfer a file to the server (binary mode by default)
const transfer_file = (local, remote) => new Promise((resolve, reject) => client.put(local, remote, err => err ? reject(err) : resolve([local, remote])))
// close the connection
const close = () => client.end()
/* Path matching functions */
// find local folders & files that should be transferred
const find_paths = (basepath, dist_path) => {
return new Promise((resolve, reject) => {
let paths = []
filewalker(path.join(basepath, dist_path))
.on('file', file => file.includes('.htaccess') ? false : paths.push([path.join(basepath, dist_path, file), file]))
.on('done', () => resolve(paths))
.walk()
})
}
// get a list of directories that need to be created on the server
const get_dirs_to_create = files => files.map(file => path.dirname(file[1])).filter((item, i, ar) => ar.indexOf(item) === i).filter(item => item !== '.' && item !== '..')
// get a list of directories that should be deleted on the server (sorted newest first by numeric name, keeping the two most recent deployments)
const get_dirs_to_delete = paths => paths
  .filter(path => path.type === 'd' && path.name !== '.' && path.name !== '..')
  .sort((a, b) => parseInt(b.name) - parseInt(a.name))
  .filter((_, idx, arr) => arr.length < 3 ? false : idx >= 2)
  .map(path => path.name)
/* Runner */
module.exports = config => {
// get config vars
const root_dir = config.root_dir
const basepath = config.basepath
const dist_path = config.dist_path
const options = config.ftp_options
// run async deployment
return new Promise(async (resolve, reject) => {
try {
// make a list of files to be transferred to the server
const files_to_transfer = await find_paths(basepath, dist_path)
// make a list of subfolders to be created
const dirs_to_create = get_dirs_to_create(files_to_transfer)
// connect to the server
log(' ', chalk.green('>>'), 'Connecting to server')
await connect(options)
// fetch the list of existing folders & delete the oldest ones and their .htaccess backups
log(' ', chalk.green('>>'), 'Deleting oldest deployments')
const existing_folders = await list()
const folders_to_delete = get_dirs_to_delete(existing_folders)
let delete_promises = []
folders_to_delete.forEach(dir => delete_promises.push(rmdir(dir)))
folders_to_delete.forEach(dir => delete_promises.push(remove_file(dir + '.htaccess')))
await Promise.all(delete_promises)
// create the new directories & transfer the files
log(' ', chalk.green('>>'), 'Uploading assets')
let create_promises = []
dirs_to_create.forEach(dir => create_promises.push(mkdir(path.join(root_dir, dir))))
files_to_transfer.forEach(file => create_promises.push(transfer_file(file[0], path.join(root_dir, file[1]))))
await Promise.all(create_promises)
// transfer .htaccess file
log(' ', chalk.green('>>'), 'Uploading .htaccess changes')
await transfer_file(path.join(basepath, dist_path, '.htaccess'), '.htaccess')
await transfer_file(path.join(basepath, dist_path, '.htaccess'), root_dir + '.htaccess')
// close the connection
log(' ', chalk.green('>>'), 'Closing connection')
close()
resolve(root_dir)
} catch(err) {
reject(err)
}
})
}
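
A sketch of the config this task expects; the ftp_options object is handed straight to the ftp package's connect(), so host/user/password are the usual fields (all values placeholders):

const deploy = require('./build/tasks/deploy')
deploy({
  root_dir: '1501587600/',           // hypothetical timestamped deployment folder on the server
  basepath: '/path/to/project',
  dist_path: 'dist/',
  ftp_options: {host: 'ftp.example.com', user: 'deploy', password: 'secret'}
}).then(dir => console.log('deployed to', dir))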

build/tasks/feed.js Normal file
@@ -0,0 +1,79 @@
const fs = require('fs')
const path = require('path')
const Feed = require('feed')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const chalk = require('chalk')
const log = console.log;
const types = {
rss: 'feed.xml',
atom: 'feed.atom',
json: 'feed.json',
}
/* Generate base feed instance */
const generate_base_feed = config => {
const feed = new Feed(config.rss)
config.rss.categories.forEach(cat => feed.addCategory(cat))
return feed
}
/* Generate feed items from ghost blog posts */
const generate_feed_items = (config, contents, feed) => {
contents.content.ghost.posts.forEach(post => {
if (post.visibility === 'public') {
feed.addItem({
title: post.title,
description: post.meta_description,
content: post.html,
link: `${config.rss.id}/articles/${post.slug}.html`, // path.join would mangle the protocol's double slash
id: post.uuid,
date: new Date(post.published_at),
author: [config.rss.author]
})
}
})
return feed
}
// Write plain feed file to dist
const write_feed_file = (config, type, feed) => new Promise((resolve, reject) => fs.writeFile(path.join(config.basepath, config.dist_path, types[type]), feed, err => err ? reject(err) : resolve(feed)))
// Create brotli & zopfli compressed versions and write them to dist
const compress_feed_file = (config, type, feed) => {
return new Promise(async (resolve, reject) => {
const destination = path.join(config.basepath, config.dist_path, types[type])
let write_promises = []
try {
// write zopfli compressed file
write_promises.push(new Promise((res, rej) => zopfli.gzip(Buffer.from(feed), (error, output) => fs.writeFile(destination + '.gz', output, err => (err || error) ? rej(err || error) : res()))))
// write brotli compressed file
write_promises.push(new Promise((res, rej) => brotli.compress(Buffer.from(feed), (error, output) => fs.writeFile(destination + '.br', output, err => (err || error) ? rej(err || error) : res()))))
await Promise.all(write_promises)
resolve()
} catch(err) {
reject(err)
}
})
}
module.exports = (config, results) => {
return new Promise(async (resolve, reject) => {
try {
log(' ', chalk.green('>>'), 'Building RSS/Atom/JSON feed')
let feed = generate_base_feed(config)
feed = generate_feed_items(config, results, feed)
log(' ', chalk.green('>>'), 'Writing feed files')
const rss = await write_feed_file(config, 'rss', feed.rss2())
const atom = await write_feed_file(config, 'atom', feed.atom1())
const json = await write_feed_file(config, 'json', feed.json1())
log(' ', chalk.green('>>'), 'Compressing feed files')
await compress_feed_file(config, 'rss', rss)
await compress_feed_file(config, 'atom', atom)
await compress_feed_file(config, 'json', json)
resolve({rss, atom, json})
} catch (err) {
reject(err)
}
})
}
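
The feed task reads everything from config.rss; a rough sketch of the expected shape (the constructor options follow the feed package, the categories/author fields are consumed by the code above, all values placeholders):

config.rss = {
  title: 'Example Blog',             // standard feed options passed to new Feed()
  description: 'placeholder feed description',
  id: 'https://example.com',         // also used as the link base for feed items
  link: 'https://example.com',
  author: {name: 'Author Name', email: 'author@example.com'},
  categories: ['code', 'web']        // registered via feed.addCategory()
}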


@@ -0,0 +1,98 @@
const fs = require('fs')
const path = require('path')
const Stream = require('stream').Transform
const {URL} = require('url')
const {promisify} = require('util')
const hasha = require('hasha')
const filewalker = require('filewalker')
const chalk = require('chalk')
const {getPosts} = require('./content/ghost')
const {httpsGetAsync, httpGetAsync, parseJSONData} = require('../lib/request')
const log = console.log
const GITHUB_API_HOST = 'api.github.com'
const GITHUB_API_REPOS = '/users/asciidisco/repos'
const ASSET_EXTENSIONS = ['js', 'mjs', 'css', 'png', 'svg', 'jpg', 'jpeg', 'gif', 'webp', 'hbs', 'woff', 'woff2', 'md']
const EXCLUDED_FOLDERS = ['node_modules/', 'build']
const readFileAsync = promisify(fs.readFile)
/* Third party markdown */
const getThirdPartyMarkdown = async config => {
const pagesPromises = config.mdcontents.pages.map(page => {
return new Promise(async (resolve, reject) => {
const pageUrl = new URL(page.url)
const pageReqOptions = {
host: pageUrl.host,
path: pageUrl.pathname,
headers: {'User-Agent': 'Node'}}
const fetcher = pageUrl.protocol === 'http:' ? httpGetAsync : httpsGetAsync
const data = new Stream()
const res = await fetcher(pageReqOptions)
res.on('data', chunk => data.push(chunk))
res.on('end', () => resolve(data.read()))
})
})
return Promise.all(pagesPromises)
}
/* GitHub Repos */
const getRepos = config => {
return new Promise(async (resolve, reject) => {
const reposReqOptions = {
host: GITHUB_API_HOST,
path: GITHUB_API_REPOS,
headers: {'User-Agent': 'Node'}}
const data = new Stream()
const res = await httpsGetAsync(reposReqOptions)
res.on('data', chunk => data.push(chunk))
res.on('end', parseJSONData.bind(null, data, resolve, reject))
})
}
/* Local assets */
const getAssets = directory => {
return new Promise((resolve, reject) => {
let assets = []
const types = ASSET_EXTENSIONS
const excludes = EXCLUDED_FOLDERS.map(dir => path.resolve(path.join(directory, dir)))
filewalker(directory)
.on('file', dir => {
const src = path.join(directory, dir)
const exclude = excludes.filter(ex => src.indexOf(ex) >= 0).length
const ext = path.extname(dir).replace('.', '')
if (!exclude && types.includes(ext)) assets.push({src})
})
.on('done', resolve.bind(null, assets))
.on('error', reject)
.walk()
})
}
const getAssetContents = async config => {
const directory = config.paths.base
const files = await getAssets(directory)
const assets = files.map(async file => {
const buf = await readFileAsync(file.src)
return {...file, original: buf, hash: hasha(buf)}
})
return Promise.all(assets)
}
module.exports = async config => {
// get ghost posts
log(' ', chalk.green('>>'), 'Fetching ghost posts')
const posts = await getPosts(config)
// get local assets
log(' ', chalk.green('>>'), 'Fetching local contents')
const localAssets = await getAssetContents(config)
// get github repos
log(' ', chalk.green('>>'), 'Fetching github repos')
const repos = await getRepos(config)
// get foreign markdown content
log(' ', chalk.green('>>'), 'Fetching third party markdown')
const thirdPartyMarkdown = await getThirdPartyMarkdown(config)
console.log({posts, localAssets, repos, thirdPartyMarkdown})
return {}
}
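
A sketch of the config slices this aggregator touches (keys from the code above, values placeholders):

const config = {
  paths: {base: '/path/to/project'},   // walked by getAssets for local asset files
  mdcontents: {pages: [{url: 'https://example.com/README.md'}]}, // third-party markdown sources
  ghost: {/* see build/tasks/content/ghost.js above */}
}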

build/tasks/fonts.js Normal file
@@ -0,0 +1,44 @@
const fs = require('fs')
const path = require('path')
const mkdirp = require('mkdirp')
const rimraf = require('rimraf')
const filewalker = require('filewalker')
const chalk = require('chalk')
const log = console.log;
/* Path matching functions */
// find local files that should be processed
const find_paths = font_path => {
return new Promise((resolve, reject) => {
let paths = []
filewalker(font_path)
.on('file', file => paths.push(file))
.on('done', () => resolve(paths))
.walk()
})
}
// copy files, resolving only after every write stream has finished
const copy_files = (files, source, destination) => Promise.all(files.map(file => new Promise((resolve, reject) => fs.createReadStream(path.join(source, file)).pipe(fs.createWriteStream(path.join(destination, file))).on('finish', resolve).on('error', reject))))
module.exports = config => {
const destination = path.join(config.basepath, config.dist_path, config.font_path)
const source = path.join(config.basepath, config.font_path)
return new Promise((resolve, reject) => {
log(' ', chalk.green('>>'), 'Deleting old destination folder')
rimraf(destination, err => {
if (err) return reject(err)
log(' ', chalk.green('>>'), 'Recreating destination folder')
mkdirp(destination, async err => {
if (err) return reject(err)
try {
const files = await find_paths(source)
log(' ', chalk.green('>>'), `Copying ${files.length} files`)
await copy_files(files, source, destination)
resolve(files)
} catch(err) {
reject(err)
}
})
})
})
}
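
The fonts task is a plain copy step; it only needs three config keys (values are placeholders):

require('./build/tasks/fonts')({basepath: '/path/to/project', font_path: 'fonts/', dist_path: 'dist/'})
  .then(files => console.log(`copied ${files.length} font files`))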

build/tasks/htaccess.js Normal file
@@ -0,0 +1,118 @@
const fs = require('fs')
const path = require('path')
const filewalker = require('filewalker')
const Handlebars = require('handlebars')
const Manifest = require('http2-push-manifest/lib/manifest')
const chalk = require('chalk')
const log = console.log;
const PUSH_MANIFEST_FILE_TEMPLATE = ' <FilesMatch "{{file}}">\n{{{resources}}}\n </FilesMatch>'
const PUSH_MANIFEST_RESOURCE_TEMPLATE = ' Header add Link "<{{file}}>; rel=preload; as={{type}}"'
const NEWLINE = '\n'
/* PUSH Manifest functions */
// loads all generated .html files from the dist directory
const find_generated_html_files = (basepath, dist_path) => {
return new Promise(async (resolve, reject) => {
const fetch_all_files = () => {
return new Promise((resolve, reject) => {
let paths = []
filewalker(path.join(basepath, dist_path))
.on('file', file => paths.push(path.join(basepath, dist_path, file)))
.on('done', () => resolve(paths))
.walk()
})
}
const all_files = await fetch_all_files()
const html_files = all_files.filter(file => path.extname(file) === '.html')
resolve(html_files)
})
}
// analyze files & generate a json manifest
const generate_manifest = (manifestName, files, singleFile) => {
  return new Promise((resolve, reject) => {
    let jsonOutput = {}
    if (!files.length) return resolve({manifest: new Manifest({name: manifestName}), jsonOutput})
    // Make a path if one wasn't given. e.g. basic.html -> ./basic.html
    let f = files[0]
    if (f.indexOf(path.sep) === -1) f = `.${path.sep}${f}`
    // determine base & input paths
    let basePath = f.slice(0, f.lastIndexOf(path.sep))
    let inputPath = f.slice(f.lastIndexOf(path.sep) + 1)
    // generate the manifest
    let manifest = new Manifest({basePath, inputPath, name: manifestName})
    manifest.generate().then(output => {
      if (singleFile) return resolve({manifest})
      jsonOutput[inputPath] = output.file
      // Remove processed file from list and proceed with next, resolving once the recursion bottoms out.
      files.shift()
      generate_manifest(manifestName, files, singleFile).then(resolve).catch(reject)
    }).catch(err => reject(err))
  })
}
/* Templating functions */
// load .htaccess template
const load_htaccess_template = basepath => {
return new Promise((resolve, reject) => {
fs.readFile(path.join(basepath, '.htaccess'), (err, body) => {
if (err) return reject(err)
resolve(String(body))
})
})
}
// add public dir to .htaccess rewrite & push manifest
const compile_template = (source, root_dir, manifest) => {
const template = Handlebars.compile(source)
return template({PUBLIC_FOLDER: root_dir, PUSH_MANIFEST: manifest})
}
// generate push manifest templates
const generate_push_manifest = (config, html_files) => {
return new Promise(async (resolve, reject) => {
let manifest_promises = html_files.map(file => generate_manifest(config.root_dir, [file], true)) // one file per manifest, so singleFile is always true
const manifests = await Promise.all(manifest_promises)
const htaccess_parts = manifests.map(manifest => {
  const res_template = Handlebars.compile(PUSH_MANIFEST_RESOURCE_TEMPLATE)
  const file_template = Handlebars.compile(PUSH_MANIFEST_FILE_TEMPLATE)
  const resources = Object.keys(manifest.manifest.fileContent).map(file => res_template({file: file, type: manifest.manifest.fileContent[file].type})).join(NEWLINE)
  return file_template({resources: resources, file: manifest.manifest.inputPath})
})
resolve(htaccess_parts.join(NEWLINE))
})
}
// write .htaccess files
const write_htaccess = (config, content) => {
  return new Promise((resolve, reject) => {
    fs.writeFile(path.join(config.basepath, config.dist_path, '.htaccess'), content, err => {
      if (err) return reject(err)
      resolve()
    })
  })
}
module.exports = config => {
return new Promise(async (resolve, reject) => {
try {
// load source
log(' ', chalk.green('>>'), 'Loading .htaccess template')
const source = await load_htaccess_template(config.basepath)
// get generated html files
log(' ', chalk.green('>>'), 'Grepping generated HTML files')
const html_files = await find_generated_html_files(config.basepath, config.dist_path)
// get push manifest
log(' ', chalk.green('>>'), 'Generating push manifests')
const push_manifest = await generate_push_manifest(config, html_files)
// add push manifests & public dir
log(' ', chalk.green('>>'), 'Compiling .htaccess')
const result = compile_template(source, config.root_dir, push_manifest)
// write htaccess files
log(' ', chalk.green('>>'), 'Writing .htaccess')
await write_htaccess(config, result)
// resolve task
resolve({result})
} catch(err) {
reject(err)
}
})
}
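
compile_template expects the .htaccess source in the project root to carry two Handlebars placeholders, PUBLIC_FOLDER and PUSH_MANIFEST. A hypothetical minimal template to illustrate (only the placeholder names come from the code above; the rewrite rule itself is an assumption):

# .htaccess source template (sketch)
RewriteEngine On
RewriteRule ^(.*)$ {{PUBLIC_FOLDER}}$1 [L]
{{{PUSH_MANIFEST}}}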

build/tasks/html.js Normal file
@@ -0,0 +1,106 @@
/*const fs = require('fs')
const path = require('path')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const mkdirp = require('mkdirp')
const minify = require('html-minifier').minify
const options = {
collapseBooleanAttributes: true,
minifyURLs: true,
removeComments: true,
removeEmptyAttributes: true,
removeOptionalTags: true,
removeRedundantAttributes: true,
removeScriptTypeAttributes: true,
removeStyleLinkTypeAttributes: true,
sortAttributes: true,
sortClassName: true,
useShortDoctype: true,
collapseWhitespace: true,
collapseInlineTagWhitespace: true,
conservativeCollapse: true,
preserveLineBreaks: false,
removeTagWhitespace: true
}
const basepath = __dirname + '/../../'
const dist_path = 'dist/'
const file = 'index.html'
fs.readFile(path.normalize(basepath + file), (err, buf) => {
const destination = path.normalize(basepath + dist_path + file)
const result = minify(String(buf), options)
// create directories if not present
mkdirp(path.dirname(destination), err => {
// write uncompressed file
fs.writeFile(destination, result, err => {
if (err) return console.error(err)
})
// write zopfli compressed file
zopfli.gzip(new Buffer(result), (err, output) => {
fs.writeFile(destination + '.gz', output, err => {
if (err) return console.error(err)
})
})
// write brotli compressed file
brotli.compress(new Buffer(result), (err, output) => {
fs.writeFile(destination + '.br', output, err => {
if (err) return console.error(err)
})
})
})
})*/
const fs = require('fs')
const path = require('path')
const mkdirp = require('mkdirp')
const Handlebars = require('handlebars')
/* Templates */
const fetch_templates = template_files => Promise.all(template_files.map(template => new Promise((resolve, reject) => fs.readFile(template.path, (err, body) => err ? reject(err) : resolve(String(body))))))
const get_template_files = config => new Promise((resolve, reject) => {
  fs.readdir(path.join(config.basepath, config.template_path), (err, files) => {
    if (err) return reject(err)
    resolve(files
      .map(file => path.join(config.basepath, config.template_path, file))
      .filter(file => fs.statSync(file).isFile())
      .filter(file => path.extname(file) === '.hbs')
      .map(file => ({path: file, name: path.basename(file).replace(path.extname(file), '')})))
  })
})
const load_templates = async config => {
const template_files = await get_template_files(config)
const template_content = await fetch_templates(template_files)
const templates = template_files.map((template, idx) => Object.assign({}, template, {content: template_content[idx], compiled: Handlebars.compile(template_content[idx])}))
return templates
}
/* Partials */
const fetch_partials = partial_files => Promise.all(partial_files.map(partial => new Promise((resolve, reject) => fs.readFile(partial.path, (err, body) => err ? reject(err) : resolve(String(body))))))
const get_partial_files = config => new Promise((resolve, reject) => {
  fs.readdir(path.join(config.basepath, config.template_path, config.partials_path), (err, files) => {
    if (err) return reject(err)
    resolve(files
      .map(file => path.join(config.basepath, config.template_path, config.partials_path, file))
      .filter(file => fs.statSync(file).isFile())
      .filter(file => path.extname(file) === '.hbs')
      .map(file => ({path: file, name: path.basename(file).replace(path.extname(file), '')})))
  })
})
const load_partials = async config => {
const partial_files = await get_partial_files(config)
const partial_content = await fetch_partials(partial_files)
const partials = partial_files.map((partial, idx) => Object.assign({}, partial, {content: partial_content[idx]}))
partials.forEach(partial => Handlebars.registerPartial(partial.name, partial.content))
return partials
}
/* Compile */
const create_page = (config, result, templates, page) => templates.filter(template => template.name === page.template).map(template => template.compiled)[0]({content: result.content, meta: generate_meta(config, result, page)})
const generate_pages = (config, content, templates) => config.html.pages.map(page => Object.assign({}, page, {content: inject_scripts(create_page(config, content, templates, page), config, content, page)}))
const generate_meta = (config, result, page) => Object.assign({}, {icons: result.icons ? result.icons.html : ''}, {styles: generate_styles(config, result, page)})
const generate_styles = (config, result, page) => page.css.map(style => result.css.filter(css => css.name === style + '.css')[0].html).join('')
const generate_scripts = (config, result, page) => page.js.map(script => result.javascript.filter(js => js.name === script + '.js')[0].html).join('')
const inject_scripts = (content, config, result, page) => content.replace('</body>', generate_scripts(config, result, page) + '</body>')
/* Output */
const write_page = (config, page) => {
const destination = path.join(config.basepath, config.dist_path, page.name + '.html')
return new Promise((resolve, reject) => mkdirp(path.dirname(destination), err => err ? reject(err) : fs.writeFile(destination, page.content, err => err ? reject(err) : resolve())))
}
const write_pages = async (config, pages, templates, partials) => {
await Promise.all(pages.map(write_page.bind(null, config)))
return {config, pages, templates, partials}
}
module.exports = async (config, result) => {
const partials = await load_partials(config)
const templates = await load_templates(config)
const pages = generate_pages(config, result, templates)
return await write_pages(config, pages, templates, partials)
}
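
generate_pages is driven by page definitions in config.html.pages; their structure can be read off the code above (the names are placeholders):

config.html = {
  pages: [{
    name: 'index',        // written to <dist_path>/index.html
    template: 'default',  // matched against an .hbs file in template_path
    css: ['main'],        // looked up as main.css in the css task results
    js: ['scripts']       // looked up as scripts.js and injected before </body>
  }]
}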

build/tasks/icons.js Normal file
@@ -0,0 +1,66 @@
const fs = require('fs')
const path = require('path')
const imagemin = require('imagemin')
const favicons = require('favicons')
const chalk = require('chalk')
const log = console.log;
const get_options = config => {
return {
appName: null,
appDescription: null,
developerName: null,
developerURL: null,
background: "#212121",
theme_color: "#212121",
path: path.join(config.basepath, config.dist_path),
display: 'standalone',
orientation: 'portrait',
start_url: '/?homescreen=1',
version: config.root_url,
logging: false,
online: false,
preferOnline: false,
icons: {
android: true,
appleIcon: true,
appleStartup: true,
windows: true,
favicons: true,
firefox: false,
coast: false,
yandex: false
}
}
}
// generate raw favicon data
const generate_favicon_data = config => new Promise((resolve, reject) => favicons(path.join(config.basepath, 'icon.png'), get_options(config), (err, response) => err ? reject(err) : resolve(response)))
// write icon files to destination folder
const write_icon_files = async (config, favicon_data) => Promise.all(favicon_data.images.map(image => new Promise((res, rej) => fs.writeFile(path.join(config.basepath, config.dist_path, image.name), image.contents, err => err ? rej(err) : res()))))
// fix html image file references
const fix_html_references = (config, favicon_data) => Object.assign({}, favicon_data, {html: favicon_data.html.map(item => item.replace(path.join(config.basepath, config.dist_path), '/')).join('')})
// optimize image data
const optimize_icons = async (config, favicon_data) => {
const plugins = [
require('imagemin-pngcrush')(),
require('imagemin-advpng')(),
require('imagemin-optipng')(),
]
const images = favicon_data.images.map(image => imagemin.buffer(image.contents, {plugins}))
const optimized_images = await Promise.all(images)
return optimized_images.map((image, idx) => path.extname(favicon_data.images[idx].name) === '.png' ? Object.assign({}, favicon_data.images[idx], {contents: image}) : favicon_data.images[idx])
}
module.exports = async config => {
log(' ', chalk.green('>>'), 'Generating raw favicon data')
let favicon_data = await generate_favicon_data(config)
favicon_data = fix_html_references(config, favicon_data)
log(' ', chalk.green('>>'), 'Optimizing icons')
favicon_data.images = await optimize_icons(config, favicon_data)
log(' ', chalk.green('>>'), 'Writing icon files')
await write_icon_files(config, favicon_data)
return favicon_data
}
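
A minimal invocation sketch; the task expects an icon.png in the project root and resolves with the favicon data including ready-to-inline HTML tags (paths are placeholders):

require('./build/tasks/icons')({basepath: '/path/to/project', dist_path: 'dist/', root_url: 'https://example.com'})
  .then(data => console.log(data.html)) // concatenated <link>/<meta> markup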

build/tasks/images.js Normal file
@@ -0,0 +1,110 @@
const fs = require('fs')
const path = require('path')
const Jimp = require('jimp')
const imagemin = require('imagemin')
const rimraf = require('rimraf')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const filewalker = require('filewalker')
const chalk = require('chalk')
const log = console.log;
// clear dist path
const clear_dist = config => new Promise((resolve, reject) => rimraf(path.join(config.basepath, config.dist_path, config.image_path), err => err ? reject(err) : resolve()))
// webp conversion
const convertToWebp = config => {
return new Promise((resolve, reject) => {
let cbw_counter = 0
const webp_plug = [require('imagemin-webp')({method: 6})]
const wcb = () => cbw_counter++ === config.images.length - 1 ? resolve() : false
config.images.forEach(orig => {
let folder = path.basename(path.dirname(orig))
let destination = path.join(config.basepath, config.dist_path, config.image_path, folder)
let original_path = path.join(destination, path.basename(orig)).replace('.svg', '') // strip .svg so vector sources don't match and get skipped by the webp conversion
imagemin([original_path], destination, {plugins: webp_plug}).then(wcb)
})
})
}
// losslessly optimize images
const lossless_optimize = config => {
return new Promise((resolve, reject) => {
let cb_counter = 0
const plugins = [
require('imagemin-jpegtran')(),
require('imagemin-jpegoptim')(),
require('imagemin-mozjpeg')(),
require('imagemin-zopfli')({more: true}),
require('imagemin-pngcrush')(),
require('imagemin-advpng')(),
require('imagemin-optipng')(),
require('imagemin-svgo')(),
]
const cb = () => cb_counter++ === config.images.length - 1 ? resolve() : false
config.images.forEach(orig => {
const folder = path.basename(path.dirname(orig))
const destination = path.join(config.basepath, config.dist_path, config.image_path, folder)
const source = path.join(config.basepath, config.image_path, orig)
imagemin([source], destination, {plugins: plugins}).then(cb)
})
})
}
// compress svg with zopfli & brotli
const compress_svg = config => {
return new Promise((resolve, reject) => {
const find_images = () => {
let images = []
filewalker(path.join(config.basepath, config.dist_path, config.image_path))
.on('file', file => images.push(path.join(config.basepath, config.dist_path, config.image_path, file)))
.on('done', filter_svgs.bind(null, images))
.walk()
}
const filter_svgs = images => compress_items(images.filter(image => image.substr(-4) === '.svg'))
const compress_items = images => {
  const iterations = images.length * 2
  if (!iterations) return resolve()
  let scb_counter = 0
  const scb = () => scb_counter++ === iterations - 1 ? resolve() : false
images.forEach(image => {
// read original file contents
fs.readFile(image, 'utf-8', (err, contents) => {
  if (err) return reject(err)
// write zopfli compressed file
zopfli.gzip(Buffer.from(contents), (error, output) => fs.writeFile(image + '.gz', output, err => (err || error) ? reject(err || error) : scb()))
// write brotli compressed file
brotli.compress(Buffer.from(contents), (error, output) => fs.writeFile(image + '.br', output, err => (err || error) ? reject(err || error) : scb()))
})
})
}
find_images()
})
}
module.exports = (config, result) => {
return new Promise(async (resolve, reject) => {
try {
log(' ', chalk.green('>>'), 'Deleting old destination folder')
const cleared = await clear_dist(config)
log(' ', chalk.green('>>'), 'Optimize images')
const optimized = await lossless_optimize(config)
log(' ', chalk.green('>>'), 'Convert to WebP')
const webp = await convertToWebp(config)
log(' ', chalk.green('>>'), 'Static compress SVG')
const compressedSvg = await compress_svg(config)
resolve({cleared, optimized, webp, compressedSvg})
} catch (err) {
reject(err)
}
})
}
/*
Jimp.read('../img/content/article.jpg', (err, image) => {
if (err) throw err
image.resize(Jimp.AUTO, 400).quality(100).write('../dist/article-home.jpg')
})
*/
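
config.images is assumed to be a flat list of source files relative to config.image_path, for example (hypothetical):

config.images = ['content/article.jpg', 'logos/logo.svg']
// each entry is losslessly optimized into dist/<image_path>/<parent folder>/,
// bitmaps are additionally converted to WebP, and optimized SVGs get .gz/.br siblings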

build/tasks/javascript.js Normal file
@@ -0,0 +1,83 @@
const fs = require('fs')
const path = require('path')
const crypto = require('crypto')
const mkdirp = require('mkdirp')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const Concat = require('concat-with-sourcemaps')
const compile = require('google-closure-compiler-js').compile
const uglify = require('uglify-es').minify
const ssri = require('ssri')
const chalk = require('chalk')
const log = console.log
// compute the file paths (keep plain .js files, drop directories in the process)
// note: 'javascipt_path' (sic) is the config key this build expects throughout
const get_js_files = config => new Promise((resolve, reject) => {
  fs.readdir(path.join(config.basepath, config.javascipt_path), (err, files) => {
    if (err) return reject(err)
    resolve(files
      .map(file => path.join(config.basepath, config.javascipt_path, file))
      .filter(file => fs.statSync(file).isFile())
      .filter(file => path.extname(file) === '.js'))
  })
})
// read file contents
const read_js_files = (config, files) => Promise.all(files.map(file => new Promise((resolve, reject) => fs.readFile(file, 'utf-8', (err, js) => err ? reject(err) : resolve({file, js, destination: path.join(config.basepath, config.dist_path, file.replace(config.basepath, ''))})))))
// process files
const process_js_files = (config, files) => Promise.all(files.map(file => {
return new Promise((resolve, reject) => {
try {
const concat = new Concat(true, 'scripts.js', '\n')
concat.add('script.js', fs.readFileSync(file.file))
concat.add('fastdom.js', fs.readFileSync(path.normalize(path.join(__dirname, '../../node_modules/fastdom', 'fastdom.js'))))
const compileResult = compile({
createSourceMap: true,
rewritePolyfills: false,
applyInputSourceMaps: true,
assumeFunctionWrapper: true,
newTypeInf: true,
useTypesForOptimization: true,
compilationLevel: 'ADVANCED',
warningLevel: 'VERBOSE',
outputWrapper: '(function __main(window, document){\n%output%\n}).call(this, window, document)',
languageOut: 'ES5',
jsCode: [{src: String(concat.content), path: 'scripts.js', sourceMap: concat.sourceMap}]
})
const uglifyRes = uglify(compileResult.compiledCode, {
sourceMap: {filename: 'scripts.js', url: 'scripts.map.js', content: compileResult.sourceMap},
compress: {
global_defs: {
'JSCOMPILER_PRESERVE': ''
},
unsafe: true
}
})
if (uglifyRes.error) return reject(uglifyRes.error)
resolve(uglifyRes.code)
} catch (err) {
reject(err)
}
})
}))
// compute result metadata (paths, hashes, HTML include snippets) for downstream tasks
const computeResult = (config, content, processed) => content.map((file, idx) => {
  const hash = crypto.createHash('md5').update(processed[idx]).digest('hex')
  return {
    include: '/' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    name: path.basename(file.file).replace(path.join(config.basepath, config.javascipt_path), ''),
    file: file.destination,
    original: file.file,
    contents: processed[idx],
    html: '<script src="' + '/' + config.javascipt_path + file.destination.replace(path.join(config.basepath, config.dist_path, config.javascipt_path), '').replace('.js', '') + '.' + hash.substr(0, 6) + '.js"></script>',
    hashed_include: '/' + hash.substr(0, 6) + '.' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    hash
  }
})
// write files
const write_js_files = async (config, files, result) => {
// create directories if not present
await Promise.all(files.map(file => new Promise((res, rej) => mkdirp(path.dirname(file.destination), err => err ? rej(err) : res()))))
// write uncompressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => fs.writeFile(file.destination, result[idx], err => err ? rej(err) : res()))))
// write zopfli compressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => zopfli.gzip(Buffer.from(result[idx]), (error, output) => fs.writeFile(file.destination + '.gz', output, err => err || error ? rej(err || error) : res())))))
// write brotli compressed files
await Promise.all(files.map((file, idx) => new Promise((res, rej) => brotli.compress(Buffer.from(result[idx]), (error, output) => fs.writeFile(file.destination + '.br', output, err => err || error ? rej(err || error) : res())))))
// compute subresource integrity (ssri) values
files = files.map((file, idx) => Object.assign({}, file, {compressed: result[idx], ssri: ssri.fromData(result[idx])}))
return computeResult(config, files, result)
}
module.exports = async (config, results) => {
log(' ', chalk.green('>>'), 'Loading js files')
const files = await get_js_files(config)
log(' ', chalk.green('>>'), 'Reading js files')
const content = await read_js_files(config, files)
log(' ', chalk.green('>>'), 'Minifying js files')
const processed = await process_js_files(config, content)
log(' ', chalk.green('>>'), 'Writing js files')
return write_js_files(config, content, processed)
}
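
A minimal invocation sketch (note that javascipt_path, as spelled, is the config key this code reads; all values are placeholders):

require('./build/tasks/javascript')({basepath: '/path/to/project', javascipt_path: 'js/', dist_path: 'dist/'})
  .then(results => results.forEach(r => console.log(r.name, r.hashed_include)))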

build/tasks/manifest.js Normal file
@@ -0,0 +1,19 @@
const path = require('path')
const pwa_manifest = require('@pwa/manifest')
const chalk = require('chalk')
const log = console.log;
module.exports = async config => {
log(' ', chalk.green('>>'), 'Generating pwa manifest')
const manifest = await pwa_manifest({
name: 'asciidisco.com',
short_name: 'asciidisco.com Blog',
start_url: '/index.html',
display: 'standalone',
background_color: '#212121',
theme_color: '#212121'
})
log(' ', chalk.green('>>'), 'Writing pwa manifest')
await pwa_manifest.write(path.join(config.basepath, config.dist_path), manifest)
return {}
}
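
With the hard-coded fields above, the written manifest file should come out roughly as:

{
  "name": "asciidisco.com",
  "short_name": "asciidisco.com Blog",
  "start_url": "/index.html",
  "display": "standalone",
  "background_color": "#212121",
  "theme_color": "#212121"
}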


@@ -0,0 +1,60 @@
const fs = require('fs')
const path = require('path')
const generateServiceWorkers = require('generate-service-worker')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const compile = require('google-closure-compiler-js').compile
const uglify = require('uglify-js').minify
const sw = require('sw-precache').generate
const chalk = require('chalk')
const log = console.log;
/*
module.exports = (config, result) => {
const destination_path = path.join(config.basepath, config.dist_path, config.javascipt_path, config.serviceworker.file)
const options = {
'staticFileGlobs': config.serviceworker.statics.map(static => path.join(config.basepath, config.dist_path, static)),
'stripPrefix': path.join(config.basepath, config.dist_path),
}
return new Promise((resolve, reject) => {
log(' ', chalk.green('>>'), 'Creating serviceworker code')
sw(options, async (err, code) => {
let create_promises = []
try {
const flags = {jsCode: [{src: code}]}
log(' ', chalk.green('>>'), 'Running closure compiler')
const compile_result = compile(flags)
log(' ', chalk.green('>>'), 'Running UgifyJS')
const uglify_result = uglify(compile_result.compiledCode)
// write plain minified file
log(' ', chalk.green('>>'), 'Writing minified file')
create_promises.push(new Promise((res, rej) => fs.writeFile(destination_path, uglify_result.code, err => err ? rej(err) : res())))
// write zopfli compressed file
log(' ', chalk.green('>>'), 'Writing zopfli compressed file')
create_promises.push(new Promise((res, rej) => zopfli.gzip(new Buffer(uglify_result.code), (err, output) => fs.writeFile(destination_path + '.gz', output, err => err ? rej(err) : res()))))
// write brotli compressed file
log(' ', chalk.green('>>'), 'Writing brotli compressed file')
create_promises.push(new Promise((res, rej) => brotli.compress(new Buffer(uglify_result.code), (err, output) => fs.writeFile(destination_path + '.br', output, err => err ? rej(err) : res()))))
await Promise.all(create_promises)
resolve(uglify_result.code)
} catch(err) {
reject(err)
}
})
})
}*/
// create cache
const create_cache_map = (config, result) => {
return {
offline: true,
precache: result.css.map(item => path.join(path.dirname(item.include), item.hash.substr(0, 6) + '.' + path.basename(item.include))),
strategy: [{type: 'prefer-cache', matches: ['\\' + path.extname(result.css[0].include)]}],
}
}
module.exports = async (config, result) => {
const cache = create_cache_map(config, result)
const serviceWorkers = generateServiceWorkers({cache})
console.log(serviceWorkers.main)
return {}
}
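
Given a single css result entry like {include: '/css/main.css', hash: 'a1b2c3...'} (placeholder values), create_cache_map evaluates to roughly:

{
  offline: true,
  precache: ['/css/a1b2c3.main.css'],
  strategy: [{type: 'prefer-cache', matches: ['\\.css']}]
}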