// asciidisco.com: build/tasks/javascript.js
const fs = require('fs')
const path = require('path')
const crypto = require('crypto')
const mkdirp = require('mkdirp')
const zopfli = require('node-zopfli')
const brotli = require('iltorb')
const Concat = require('concat-with-sourcemaps')
const compile = require('google-closure-compiler-js').compile
const uglify = require('uglify-es').minify
const ssri = require('ssri')
const chalk = require('chalk')
const log = console.log
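// Build task overview: read every .js file from the configured source folder,
// bundle it with fastdom, minify it (Closure Compiler followed by uglify-es),
// write plain, zopfli-gzipped and brotli-compressed variants to the dist folder,
// and return per-file metadata (hashed include paths, script tags, content hashes, ssri).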
// compute the file paths (filtering out directories and non-.js files in the process)
const get_js_Files = config => new Promise((resolve, reject) =>
  fs.readdir(path.join(config.basepath, config.javascipt_path), (err, files) =>
    err ? reject(err) : resolve(files
      .map(file => path.join(config.basepath, config.javascipt_path, file))
      .filter(file => fs.statSync(file).isFile())
      .filter(file => path.extname(file) === '.js'))))
// read file contents and compute each file's destination inside the dist folder
const read_js_files = (config, files) => Promise.all(files.map(file =>
  new Promise((resolve, reject) => fs.readFile(file, 'utf-8', (err, js) =>
    err ? reject(err) : resolve({
      file,
      js,
      destination: path.join(config.basepath, config.dist_path, file.replace(config.basepath, ''))
    })))))
// process files: concatenate each entry with fastdom, then minify in two passes
const process_js_files = (config, files) => Promise.all(files.map(file =>
  new Promise((resolve, reject) => {
    try {
      // bundle the source file together with the fastdom helper
      const concat = new Concat(true, 'scripts.js', '\n')
      concat.add('script.js', fs.readFileSync(file.file))
      concat.add('fastdom.js', fs.readFileSync(path.normalize(path.join(__dirname, '../../node_modules/fastdom', 'fastdom.js'))))
      // first pass: Closure Compiler with advanced optimizations, emitting ES5
      const compileResult = compile({
        createSourceMap: true,
        rewritePolyfills: false,
        applyInputSourceMaps: true,
        assumeFunctionWrapper: true,
        newTypeInf: true,
        useTypesForOptimization: true,
        compilationLevel: 'ADVANCED',
        warningLevel: 'VERBOSE',
        outputWrapper: '(function __main(window, document){\n%output%\n}).call(this, window, document)',
        languageOut: 'ES5',
        jsCode: [{src: String(concat.content), path: 'scripts.js', sourceMap: concat.sourceMap}]
      })
      // second pass: uglify-es squeezes out remaining bytes and drops JSCOMPILER_PRESERVE markers
      const uglifyRes = uglify(compileResult.compiledCode, {
        sourceMap: {filename: 'scripts.js', url: 'scripts.map.js', content: compileResult.sourceMap},
        compress: {
          global_defs: {
            'JSCOMPILER_PRESERVE': ''
          },
          unsafe: true
        }
      })
      if (uglifyRes.error) return reject(uglifyRes.error)
      resolve(uglifyRes.code)
    } catch (err) {
      reject(err)
    }
  })))
// compute the result metadata for each file (include paths, script tag, content hash)
const computeResult = (config, content, processed) => content.map((file, idx) => {
  const hash = crypto.createHash('md5').update(processed[idx]).digest('hex')
  return {
    include: '/' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    name: path.basename(file.file).replace(path.join(config.basepath, config.javascipt_path), ''),
    file: file.destination,
    original: file.file,
    contents: processed[idx],
    html: '<script src="/' + config.javascipt_path + file.destination.replace(path.join(config.basepath, config.dist_path, config.javascipt_path), '').replace('.js', '') + '.' + hash.substr(0, 6) + '.js"></script>',
    hashed_include: '/' + hash.substr(0, 6) + '.' + file.destination.replace(path.join(config.basepath, config.dist_path), ''),
    hash
  }
})
// write files
const write_js_files = async (config, files, result) => {
  // create directories if not present
  await Promise.all(files.map(file => new Promise((res, rej) =>
    mkdirp(path.dirname(file.destination), err => err ? rej(err) : res()))))
  // write uncompressed files
  await Promise.all(files.map((file, idx) => new Promise((res, rej) =>
    fs.writeFile(file.destination, result[idx], err => err ? rej(err) : res()))))
  // write zopfli (gzip) compressed files
  await Promise.all(files.map((file, idx) => new Promise((res, rej) =>
    zopfli.gzip(Buffer.from(result[idx]), (error, output) =>
      error ? rej(error) : fs.writeFile(file.destination + '.gz', output, err => err ? rej(err) : res())))))
  // write brotli compressed files
  await Promise.all(files.map((file, idx) => new Promise((res, rej) =>
    brotli.compress(Buffer.from(result[idx]), (error, output) =>
      error ? rej(error) : fs.writeFile(file.destination + '.br', output, err => err ? rej(err) : res())))))
  // attach the ssri (subresource integrity) value to each file
  files = files.map((file, idx) => Object.assign({}, file, {compressed: result[idx], ssri: ssri.fromData(result[idx])}))
  return computeResult(config, files, result)
}
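// task entry point: assumed to be invoked by the build runner with the shared
// config object; the second `results` argument (output of previous tasks) is
// accepted but not used here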
module.exports = async (config, results) => {
  log(' ', chalk.green('>>'), 'Loading js files')
  const files = await get_js_Files(config)
  log(' ', chalk.green('>>'), 'Reading js files')
  const content = await read_js_files(config, files)
  log(' ', chalk.green('>>'), 'Minifying js files')
  const processed = await process_js_files(config, content)
  log(' ', chalk.green('>>'), 'Writing js files')
  return write_js_files(config, content, processed)
}
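// Usage sketch (hypothetical config values, shown for illustration only; the real
// build config lives elsewhere in this repo):
// require('./build/tasks/javascript')({
//   basepath: 'src',
//   javascipt_path: 'js',   // key spelling matches the rest of this file
//   dist_path: '../dist'
// }).then(entries => entries.forEach(entry => console.log(entry.html)))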