first commit
build/node_modules/extract-zip/.npmignore | 1 (generated, vendored, new file)
@@ -0,0 +1 @@
test/

build/node_modules/extract-zip/.travis.yml | 7 (generated, vendored, new file)
@@ -0,0 +1,7 @@
sudo: false
language: node_js
node_js:
- '0.12'
- 'iojs'
- '4.0'
- '6.0'

build/node_modules/extract-zip/CONTRIBUTING.md | 1 (generated, vendored, new file)
@@ -0,0 +1 @@
Before potentially wasting your time by making major, opinionated changes to this codebase, please feel free to open a discussion in the Issues section of the repository. Outline your proposed idea and seek feedback from the maintainer first before implementing major features.

build/node_modules/extract-zip/LICENSE | 23 (generated, vendored, new file)
@@ -0,0 +1,23 @@
Copyright (c) 2014 Max Ogden and other contributors
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

build/node_modules/extract-zip/cli.js | 20 (generated, vendored, new executable file)
@@ -0,0 +1,20 @@
#!/usr/bin/env node

var extract = require('./')

var args = process.argv.slice(2)
var source = args[0]
var dest = args[1] || process.cwd()
if (!source) {
  console.error('Usage: extract-zip foo.zip <targetDirectory>')
  process.exit(1)
}

extract(source, {dir: dest}, function (err, results) {
  if (err) {
    console.error('error!', err)
    process.exit(1)
  } else {
    process.exit(0)
  }
})

build/node_modules/extract-zip/index.js | 205 (generated, vendored, new file)
@@ -0,0 +1,205 @@
var fs = require('fs')
var path = require('path')
var yauzl = require('yauzl')
var mkdirp = require('mkdirp')
var concat = require('concat-stream')
var debug = require('debug')('extract-zip')

module.exports = function (zipPath, opts, cb) {
  debug('creating target directory', opts.dir)

  if (path.isAbsolute(opts.dir) === false) {
    return cb(new Error('Target directory is expected to be absolute'))
  }

  mkdirp(opts.dir, function (err) {
    if (err) return cb(err)

    fs.realpath(opts.dir, function (err, canonicalDir) {
      if (err) return cb(err)

      opts.dir = canonicalDir

      openZip(opts)
    })
  })

  function openZip () {
    debug('opening', zipPath, 'with opts', opts)

    yauzl.open(zipPath, {lazyEntries: true}, function (err, zipfile) {
      if (err) return cb(err)

      var cancelled = false

      zipfile.readEntry()

      zipfile.on('close', function () {
        if (!cancelled) {
          debug('zip extraction complete')
          cb()
        }
      })

      zipfile.on('entry', function (entry) {
        if (cancelled) {
          debug('skipping entry', entry.fileName, {cancelled: cancelled})
          return
        }

        debug('zipfile entry', entry.fileName)

        if (/^__MACOSX\//.test(entry.fileName)) {
          // dir name starts with __MACOSX/
          zipfile.readEntry()
          return
        }

        var destDir = path.dirname(path.join(opts.dir, entry.fileName))

        mkdirp(destDir, function (err) {
          if (err) {
            cancelled = true
            zipfile.close()
            return cb(err)
          }

          fs.realpath(destDir, function (err, canonicalDestDir) {
            if (err) {
              cancelled = true
              zipfile.close()
              return cb(err)
            }

            var relativeDestDir = path.relative(opts.dir, canonicalDestDir)

            if (relativeDestDir.split(path.sep).indexOf('..') !== -1) {
              cancelled = true
              zipfile.close()
              return cb(new Error('Out of bound path "' + canonicalDestDir + '" found while processing file ' + entry.fileName))
            }

            extractEntry(entry, function (err) {
              // if any extraction fails then abort everything
              if (err) {
                cancelled = true
                zipfile.close()
                return cb(err)
              }
              debug('finished processing', entry.fileName)
              zipfile.readEntry()
            })
          })
        })
      })

      function extractEntry (entry, done) {
        if (cancelled) {
          debug('skipping entry extraction', entry.fileName, {cancelled: cancelled})
          return setImmediate(done)
        }

        if (opts.onEntry) {
          opts.onEntry(entry, zipfile)
        }

        var dest = path.join(opts.dir, entry.fileName)

        // convert external file attr int into a fs stat mode int
        var mode = (entry.externalFileAttributes >> 16) & 0xFFFF
        // check if it's a symlink or dir (using stat mode constants)
        var IFMT = 61440
        var IFDIR = 16384
        var IFLNK = 40960
        var symlink = (mode & IFMT) === IFLNK
        var isDir = (mode & IFMT) === IFDIR

        // Failsafe, borrowed from jsZip
        if (!isDir && entry.fileName.slice(-1) === '/') {
          isDir = true
        }

        // check for windows weird way of specifying a directory
        // https://github.com/maxogden/extract-zip/issues/13#issuecomment-154494566
        var madeBy = entry.versionMadeBy >> 8
        if (!isDir) isDir = (madeBy === 0 && entry.externalFileAttributes === 16)

        // if no mode then default to default modes
        if (mode === 0) {
          if (isDir) {
            if (opts.defaultDirMode) mode = parseInt(opts.defaultDirMode, 10)
            if (!mode) mode = 493 // Default to 0755
          } else {
            if (opts.defaultFileMode) mode = parseInt(opts.defaultFileMode, 10)
            if (!mode) mode = 420 // Default to 0644
          }
        }

        debug('extracting entry', { filename: entry.fileName, isDir: isDir, isSymlink: symlink })

        // reverse umask first (~)
        var umask = ~process.umask()
        // & with processes umask to override invalid perms
        var procMode = mode & umask

        // always ensure folders are created
        var destDir = dest
        if (!isDir) destDir = path.dirname(dest)

        debug('mkdirp', {dir: destDir})
        mkdirp(destDir, function (err) {
          if (err) {
            debug('mkdirp error', destDir, {error: err})
            cancelled = true
            return done(err)
          }

          if (isDir) return done()

          debug('opening read stream', dest)
          zipfile.openReadStream(entry, function (err, readStream) {
            if (err) {
              debug('openReadStream error', err)
              cancelled = true
              return done(err)
            }

            readStream.on('error', function (err) {
              console.log('read err', err)
            })

            if (symlink) writeSymlink()
            else writeStream()

            function writeStream () {
              var writeStream = fs.createWriteStream(dest, {mode: procMode})
              readStream.pipe(writeStream)

              writeStream.on('finish', function () {
                done()
              })

              writeStream.on('error', function (err) {
                debug('write error', {error: err})
                cancelled = true
                return done(err)
              })
            }

            // AFAICT the content of the symlink file itself is the symlink target filename string
            function writeSymlink () {
              readStream.pipe(concat(function (data) {
                var link = data.toString()
                debug('creating symlink', link, dest)
                fs.symlink(link, dest, function (err) {
                  if (err) cancelled = true
                  done(err)
                })
              }))
            }
          })
        })
      }
    })
  }
}

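For orientation only (not part of the committed files): given the `module.exports = function (zipPath, opts, cb)` signature added above, and the way `cli.js` calls it, a minimal usage sketch looks like the following. The archive path and target directory are placeholder values.

```js
// Hypothetical example of calling the vendored extract-zip module.
// opts.dir must be an absolute path, as enforced in index.js above.
var extract = require('extract-zip')

extract('/tmp/example.zip', {dir: '/tmp/unpacked'}, function (err) {
  if (err) return console.error('extraction failed', err)
  console.log('extraction complete')
})
```
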
build/node_modules/extract-zip/node_modules/.bin/mkdirp | 1 (generated, vendored, new symbolic link)
@@ -0,0 +1 @@
../mkdirp/bin/cmd.js

build/node_modules/extract-zip/node_modules/minimist/.travis.yml | 4 (generated, vendored, new file)
@@ -0,0 +1,4 @@
language: node_js
node_js:
- "0.8"
- "0.10"

build/node_modules/extract-zip/node_modules/minimist/LICENSE | 18 (generated, vendored, new file)
@@ -0,0 +1,18 @@
This software is released under the MIT license:

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

build/node_modules/extract-zip/node_modules/minimist/example/parse.js | 2 (generated, vendored, new file)
@@ -0,0 +1,2 @@
var argv = require('../')(process.argv.slice(2));
console.dir(argv);

build/node_modules/extract-zip/node_modules/minimist/index.js | 187 (generated, vendored, new file)
@@ -0,0 +1,187 @@
module.exports = function (args, opts) {
    if (!opts) opts = {};

    var flags = { bools : {}, strings : {} };

    [].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
        flags.bools[key] = true;
    });

    [].concat(opts.string).filter(Boolean).forEach(function (key) {
        flags.strings[key] = true;
    });

    var aliases = {};
    Object.keys(opts.alias || {}).forEach(function (key) {
        aliases[key] = [].concat(opts.alias[key]);
        aliases[key].forEach(function (x) {
            aliases[x] = [key].concat(aliases[key].filter(function (y) {
                return x !== y;
            }));
        });
    });

    var defaults = opts['default'] || {};

    var argv = { _ : [] };
    Object.keys(flags.bools).forEach(function (key) {
        setArg(key, defaults[key] === undefined ? false : defaults[key]);
    });

    var notFlags = [];

    if (args.indexOf('--') !== -1) {
        notFlags = args.slice(args.indexOf('--')+1);
        args = args.slice(0, args.indexOf('--'));
    }

    function setArg (key, val) {
        var value = !flags.strings[key] && isNumber(val)
            ? Number(val) : val
        ;
        setKey(argv, key.split('.'), value);

        (aliases[key] || []).forEach(function (x) {
            setKey(argv, x.split('.'), value);
        });
    }

    for (var i = 0; i < args.length; i++) {
        var arg = args[i];

        if (/^--.+=/.test(arg)) {
            // Using [\s\S] instead of . because js doesn't support the
            // 'dotall' regex modifier. See:
            // http://stackoverflow.com/a/1068308/13216
            var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
            setArg(m[1], m[2]);
        }
        else if (/^--no-.+/.test(arg)) {
            var key = arg.match(/^--no-(.+)/)[1];
            setArg(key, false);
        }
        else if (/^--.+/.test(arg)) {
            var key = arg.match(/^--(.+)/)[1];
            var next = args[i + 1];
            if (next !== undefined && !/^-/.test(next)
            && !flags.bools[key]
            && (aliases[key] ? !flags.bools[aliases[key]] : true)) {
                setArg(key, next);
                i++;
            }
            else if (/^(true|false)$/.test(next)) {
                setArg(key, next === 'true');
                i++;
            }
            else {
                setArg(key, flags.strings[key] ? '' : true);
            }
        }
        else if (/^-[^-]+/.test(arg)) {
            var letters = arg.slice(1,-1).split('');

            var broken = false;
            for (var j = 0; j < letters.length; j++) {
                var next = arg.slice(j+2);

                if (next === '-') {
                    setArg(letters[j], next)
                    continue;
                }

                if (/[A-Za-z]/.test(letters[j])
                && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
                    setArg(letters[j], next);
                    broken = true;
                    break;
                }

                if (letters[j+1] && letters[j+1].match(/\W/)) {
                    setArg(letters[j], arg.slice(j+2));
                    broken = true;
                    break;
                }
                else {
                    setArg(letters[j], flags.strings[letters[j]] ? '' : true);
                }
            }

            var key = arg.slice(-1)[0];
            if (!broken && key !== '-') {
                if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
                && !flags.bools[key]
                && (aliases[key] ? !flags.bools[aliases[key]] : true)) {
                    setArg(key, args[i+1]);
                    i++;
                }
                else if (args[i+1] && /true|false/.test(args[i+1])) {
                    setArg(key, args[i+1] === 'true');
                    i++;
                }
                else {
                    setArg(key, flags.strings[key] ? '' : true);
                }
            }
        }
        else {
            argv._.push(
                flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
            );
        }
    }

    Object.keys(defaults).forEach(function (key) {
        if (!hasKey(argv, key.split('.'))) {
            setKey(argv, key.split('.'), defaults[key]);

            (aliases[key] || []).forEach(function (x) {
                setKey(argv, x.split('.'), defaults[key]);
            });
        }
    });

    notFlags.forEach(function(key) {
        argv._.push(key);
    });

    return argv;
};

function hasKey (obj, keys) {
    var o = obj;
    keys.slice(0,-1).forEach(function (key) {
        o = (o[key] || {});
    });

    var key = keys[keys.length - 1];
    return key in o;
}

function setKey (obj, keys, value) {
    var o = obj;
    keys.slice(0,-1).forEach(function (key) {
        if (o[key] === undefined) o[key] = {};
        o = o[key];
    });

    var key = keys[keys.length - 1];
    if (o[key] === undefined || typeof o[key] === 'boolean') {
        o[key] = value;
    }
    else if (Array.isArray(o[key])) {
        o[key].push(value);
    }
    else {
        o[key] = [ o[key], value ];
    }
}

function isNumber (x) {
    if (typeof x === 'number') return true;
    if (/^0x[0-9a-f]+$/i.test(x)) return true;
    return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}

function longest (xs) {
    return Math.max.apply(null, xs.map(function (x) { return x.length }));
}

build/node_modules/extract-zip/node_modules/minimist/package.json | 71 (generated, vendored, new file)
@@ -0,0 +1,71 @@
{
  "_from": "minimist@0.0.8",
  "_id": "minimist@0.0.8",
  "_inBundle": false,
  "_integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=",
  "_location": "/extract-zip/minimist",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
    "raw": "minimist@0.0.8",
    "name": "minimist",
    "escapedName": "minimist",
    "rawSpec": "0.0.8",
    "saveSpec": null,
    "fetchSpec": "0.0.8"
  },
  "_requiredBy": [
    "/extract-zip/mkdirp"
  ],
  "_resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
  "_shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d",
  "_spec": "minimist@0.0.8",
  "_where": "/Users/asciidisco/Desktop/asciidisco.com/build/node_modules/extract-zip/node_modules/mkdirp",
  "author": {
    "name": "James Halliday",
    "email": "mail@substack.net",
    "url": "http://substack.net"
  },
  "bugs": {
    "url": "https://github.com/substack/minimist/issues"
  },
  "bundleDependencies": false,
  "deprecated": false,
  "description": "parse argument options",
  "devDependencies": {
    "tap": "~0.4.0",
    "tape": "~1.0.4"
  },
  "homepage": "https://github.com/substack/minimist",
  "keywords": [
    "argv",
    "getopt",
    "parser",
    "optimist"
  ],
  "license": "MIT",
  "main": "index.js",
  "name": "minimist",
  "repository": {
    "type": "git",
    "url": "git://github.com/substack/minimist.git"
  },
  "scripts": {
    "test": "tap test/*.js"
  },
  "testling": {
    "files": "test/*.js",
    "browsers": [
      "ie/6..latest",
      "ff/5",
      "firefox/latest",
      "chrome/10",
      "chrome/latest",
      "safari/5.1",
      "safari/latest",
      "opera/12"
    ]
  },
  "version": "0.0.8"
}

build/node_modules/extract-zip/node_modules/minimist/readme.markdown | 73 (generated, vendored, new file)
@@ -0,0 +1,73 @@
# minimist

parse argument options

This module is the guts of optimist's argument parser without all the
fanciful decoration.

[](http://ci.testling.com/substack/minimist)

[](http://travis-ci.org/substack/minimist)

# example

``` js
var argv = require('minimist')(process.argv.slice(2));
console.dir(argv);
```

```
$ node example/parse.js -a beep -b boop
{ _: [], a: 'beep', b: 'boop' }
```

```
$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
{ _: [ 'foo', 'bar', 'baz' ],
  x: 3,
  y: 4,
  n: 5,
  a: true,
  b: true,
  c: true,
  beep: 'boop' }
```

# methods

``` js
var parseArgs = require('minimist')
```

## var argv = parseArgs(args, opts={})

Return an argument object `argv` populated with the array arguments from `args`.

`argv._` contains all the arguments that didn't have an option associated with
them.

Numeric-looking arguments will be returned as numbers unless `opts.string` or
`opts.boolean` is set for that argument name.

Any arguments after `'--'` will not be parsed and will end up in `argv._`.

options can be:

* `opts.string` - a string or array of strings argument names to always treat as
strings
* `opts.boolean` - a string or array of strings to always treat as booleans
* `opts.alias` - an object mapping string names to strings or arrays of string
argument names to use as aliases
* `opts.default` - an object mapping string argument names to default values

# install

With [npm](https://npmjs.org) do:

```
npm install minimist
```

# license

MIT

build/node_modules/extract-zip/node_modules/minimist/test/dash.js | 24 (generated, vendored, new file)
@@ -0,0 +1,24 @@
var parse = require('../');
var test = require('tape');

test('-', function (t) {
    t.plan(5);
    t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] });
    t.deepEqual(parse([ '-' ]), { _: [ '-' ] });
    t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] });
    t.deepEqual(
        parse([ '-b', '-' ], { boolean: 'b' }),
        { b: true, _: [ '-' ] }
    );
    t.deepEqual(
        parse([ '-s', '-' ], { string: 's' }),
        { s: '-', _: [] }
    );
});

test('-a -- b', function (t) {
    t.plan(3);
    t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] });
    t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
    t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
});

build/node_modules/extract-zip/node_modules/minimist/test/default_bool.js | 20 (generated, vendored, new file)
@@ -0,0 +1,20 @@
var test = require('tape');
var parse = require('../');

test('boolean default true', function (t) {
    var argv = parse([], {
        boolean: 'sometrue',
        default: { sometrue: true }
    });
    t.equal(argv.sometrue, true);
    t.end();
});

test('boolean default false', function (t) {
    var argv = parse([], {
        boolean: 'somefalse',
        default: { somefalse: false }
    });
    t.equal(argv.somefalse, false);
    t.end();
});

build/node_modules/extract-zip/node_modules/minimist/test/dotted.js | 16 (generated, vendored, new file)
@@ -0,0 +1,16 @@
var parse = require('../');
var test = require('tape');

test('dotted alias', function (t) {
    var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
    t.equal(argv.a.b, 22);
    t.equal(argv.aa.bb, 22);
    t.end();
});

test('dotted default', function (t) {
    var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
    t.equal(argv.a.b, 11);
    t.equal(argv.aa.bb, 11);
    t.end();
});

build/node_modules/extract-zip/node_modules/minimist/test/long.js | 31 (generated, vendored, new file)
@@ -0,0 +1,31 @@
var test = require('tape');
var parse = require('../');

test('long opts', function (t) {
    t.deepEqual(
        parse([ '--bool' ]),
        { bool : true, _ : [] },
        'long boolean'
    );
    t.deepEqual(
        parse([ '--pow', 'xixxle' ]),
        { pow : 'xixxle', _ : [] },
        'long capture sp'
    );
    t.deepEqual(
        parse([ '--pow=xixxle' ]),
        { pow : 'xixxle', _ : [] },
        'long capture eq'
    );
    t.deepEqual(
        parse([ '--host', 'localhost', '--port', '555' ]),
        { host : 'localhost', port : 555, _ : [] },
        'long captures sp'
    );
    t.deepEqual(
        parse([ '--host=localhost', '--port=555' ]),
        { host : 'localhost', port : 555, _ : [] },
        'long captures eq'
    );
    t.end();
});

build/node_modules/extract-zip/node_modules/minimist/test/parse.js | 318 (generated, vendored, new file)
@@ -0,0 +1,318 @@
var parse = require('../');
var test = require('tape');

test('parse args', function (t) {
    t.deepEqual(
        parse([ '--no-moo' ]),
        { moo : false, _ : [] },
        'no'
    );
    t.deepEqual(
        parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]),
        { v : ['a','b','c'], _ : [] },
        'multi'
    );
    t.end();
});

test('comprehensive', function (t) {
    t.deepEqual(
        parse([
            '--name=meowmers', 'bare', '-cats', 'woo',
            '-h', 'awesome', '--multi=quux',
            '--key', 'value',
            '-b', '--bool', '--no-meep', '--multi=baz',
            '--', '--not-a-flag', 'eek'
        ]),
        {
            c : true,
            a : true,
            t : true,
            s : 'woo',
            h : 'awesome',
            b : true,
            bool : true,
            key : 'value',
            multi : [ 'quux', 'baz' ],
            meep : false,
            name : 'meowmers',
            _ : [ 'bare', '--not-a-flag', 'eek' ]
        }
    );
    t.end();
});

test('nums', function (t) {
    var argv = parse([
        '-x', '1234',
        '-y', '5.67',
        '-z', '1e7',
        '-w', '10f',
        '--hex', '0xdeadbeef',
        '789'
    ]);
    t.deepEqual(argv, {
        x : 1234,
        y : 5.67,
        z : 1e7,
        w : '10f',
        hex : 0xdeadbeef,
        _ : [ 789 ]
    });
    t.deepEqual(typeof argv.x, 'number');
    t.deepEqual(typeof argv.y, 'number');
    t.deepEqual(typeof argv.z, 'number');
    t.deepEqual(typeof argv.w, 'string');
    t.deepEqual(typeof argv.hex, 'number');
    t.deepEqual(typeof argv._[0], 'number');
    t.end();
});

test('flag boolean', function (t) {
    var argv = parse([ '-t', 'moo' ], { boolean: 't' });
    t.deepEqual(argv, { t : true, _ : [ 'moo' ] });
    t.deepEqual(typeof argv.t, 'boolean');
    t.end();
});

test('flag boolean value', function (t) {
    var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], {
        boolean: [ 't', 'verbose' ],
        default: { verbose: true }
    });

    t.deepEqual(argv, {
        verbose: false,
        t: true,
        _: ['moo']
    });

    t.deepEqual(typeof argv.verbose, 'boolean');
    t.deepEqual(typeof argv.t, 'boolean');
    t.end();
});

test('flag boolean default false', function (t) {
    var argv = parse(['moo'], {
        boolean: ['t', 'verbose'],
        default: { verbose: false, t: false }
    });

    t.deepEqual(argv, {
        verbose: false,
        t: false,
        _: ['moo']
    });

    t.deepEqual(typeof argv.verbose, 'boolean');
    t.deepEqual(typeof argv.t, 'boolean');
    t.end();

});

test('boolean groups', function (t) {
    var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], {
        boolean: ['x','y','z']
    });

    t.deepEqual(argv, {
        x : true,
        y : false,
        z : true,
        _ : [ 'one', 'two', 'three' ]
    });

    t.deepEqual(typeof argv.x, 'boolean');
    t.deepEqual(typeof argv.y, 'boolean');
    t.deepEqual(typeof argv.z, 'boolean');
    t.end();
});

test('newlines in params' , function (t) {
    var args = parse([ '-s', "X\nX" ])
    t.deepEqual(args, { _ : [], s : "X\nX" });

    // reproduce in bash:
    // VALUE="new
    // line"
    // node program.js --s="$VALUE"
    args = parse([ "--s=X\nX" ])
    t.deepEqual(args, { _ : [], s : "X\nX" });
    t.end();
});

test('strings' , function (t) {
    var s = parse([ '-s', '0001234' ], { string: 's' }).s;
    t.equal(s, '0001234');
    t.equal(typeof s, 'string');

    var x = parse([ '-x', '56' ], { string: 'x' }).x;
    t.equal(x, '56');
    t.equal(typeof x, 'string');
    t.end();
});

test('stringArgs', function (t) {
    var s = parse([ ' ', ' ' ], { string: '_' })._;
    t.same(s.length, 2);
    t.same(typeof s[0], 'string');
    t.same(s[0], ' ');
    t.same(typeof s[1], 'string');
    t.same(s[1], ' ');
    t.end();
});

test('empty strings', function(t) {
    var s = parse([ '-s' ], { string: 's' }).s;
    t.equal(s, '');
    t.equal(typeof s, 'string');

    var str = parse([ '--str' ], { string: 'str' }).str;
    t.equal(str, '');
    t.equal(typeof str, 'string');

    var letters = parse([ '-art' ], {
        string: [ 'a', 't' ]
    });

    t.equal(letters.a, '');
    t.equal(letters.r, true);
    t.equal(letters.t, '');

    t.end();
});


test('slashBreak', function (t) {
    t.same(
        parse([ '-I/foo/bar/baz' ]),
        { I : '/foo/bar/baz', _ : [] }
    );
    t.same(
        parse([ '-xyz/foo/bar/baz' ]),
        { x : true, y : true, z : '/foo/bar/baz', _ : [] }
    );
    t.end();
});

test('alias', function (t) {
    var argv = parse([ '-f', '11', '--zoom', '55' ], {
        alias: { z: 'zoom' }
    });
    t.equal(argv.zoom, 55);
    t.equal(argv.z, argv.zoom);
    t.equal(argv.f, 11);
    t.end();
});

test('multiAlias', function (t) {
    var argv = parse([ '-f', '11', '--zoom', '55' ], {
        alias: { z: [ 'zm', 'zoom' ] }
    });
    t.equal(argv.zoom, 55);
    t.equal(argv.z, argv.zoom);
    t.equal(argv.z, argv.zm);
    t.equal(argv.f, 11);
    t.end();
});

test('nested dotted objects', function (t) {
    var argv = parse([
        '--foo.bar', '3', '--foo.baz', '4',
        '--foo.quux.quibble', '5', '--foo.quux.o_O',
        '--beep.boop'
    ]);

    t.same(argv.foo, {
        bar : 3,
        baz : 4,
        quux : {
            quibble : 5,
            o_O : true
        }
    });
    t.same(argv.beep, { boop : true });
    t.end();
});

test('boolean and alias with chainable api', function (t) {
    var aliased = [ '-h', 'derp' ];
    var regular = [ '--herp', 'derp' ];
    var opts = {
        herp: { alias: 'h', boolean: true }
    };
    var aliasedArgv = parse(aliased, {
        boolean: 'herp',
        alias: { h: 'herp' }
    });
    var propertyArgv = parse(regular, {
        boolean: 'herp',
        alias: { h: 'herp' }
    });
    var expected = {
        herp: true,
        h: true,
        '_': [ 'derp' ]
    };

    t.same(aliasedArgv, expected);
    t.same(propertyArgv, expected);
    t.end();
});

test('boolean and alias with options hash', function (t) {
    var aliased = [ '-h', 'derp' ];
    var regular = [ '--herp', 'derp' ];
    var opts = {
        alias: { 'h': 'herp' },
        boolean: 'herp'
    };
    var aliasedArgv = parse(aliased, opts);
    var propertyArgv = parse(regular, opts);
    var expected = {
        herp: true,
        h: true,
        '_': [ 'derp' ]
    };
    t.same(aliasedArgv, expected);
    t.same(propertyArgv, expected);
    t.end();
});

test('boolean and alias using explicit true', function (t) {
    var aliased = [ '-h', 'true' ];
    var regular = [ '--herp', 'true' ];
    var opts = {
        alias: { h: 'herp' },
        boolean: 'h'
    };
    var aliasedArgv = parse(aliased, opts);
    var propertyArgv = parse(regular, opts);
    var expected = {
        herp: true,
        h: true,
        '_': [ ]
    };

    t.same(aliasedArgv, expected);
    t.same(propertyArgv, expected);
    t.end();
});

// regression, see https://github.com/substack/node-optimist/issues/71
test('boolean and --x=true', function(t) {
    var parsed = parse(['--boool', '--other=true'], {
        boolean: 'boool'
    });

    t.same(parsed.boool, true);
    t.same(parsed.other, 'true');

    parsed = parse(['--boool', '--other=false'], {
        boolean: 'boool'
    });

    t.same(parsed.boool, true);
    t.same(parsed.other, 'false');
    t.end();
});

build/node_modules/extract-zip/node_modules/minimist/test/parse_modified.js | 9 (generated, vendored, new file)
@@ -0,0 +1,9 @@
var parse = require('../');
var test = require('tape');

test('parse with modifier functions' , function (t) {
    t.plan(1);

    var argv = parse([ '-b', '123' ], { boolean: 'b' });
    t.deepEqual(argv, { b: true, _: ['123'] });
});

build/node_modules/extract-zip/node_modules/minimist/test/short.js | 67 (generated, vendored, new file)
@@ -0,0 +1,67 @@
var parse = require('../');
var test = require('tape');

test('numeric short args', function (t) {
    t.plan(2);
    t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] });
    t.deepEqual(
        parse([ '-123', '456' ]),
        { 1: true, 2: true, 3: 456, _: [] }
    );
});

test('short', function (t) {
    t.deepEqual(
        parse([ '-b' ]),
        { b : true, _ : [] },
        'short boolean'
    );
    t.deepEqual(
        parse([ 'foo', 'bar', 'baz' ]),
        { _ : [ 'foo', 'bar', 'baz' ] },
        'bare'
    );
    t.deepEqual(
        parse([ '-cats' ]),
        { c : true, a : true, t : true, s : true, _ : [] },
        'group'
    );
    t.deepEqual(
        parse([ '-cats', 'meow' ]),
        { c : true, a : true, t : true, s : 'meow', _ : [] },
        'short group next'
    );
    t.deepEqual(
        parse([ '-h', 'localhost' ]),
        { h : 'localhost', _ : [] },
        'short capture'
    );
    t.deepEqual(
        parse([ '-h', 'localhost', '-p', '555' ]),
        { h : 'localhost', p : 555, _ : [] },
        'short captures'
    );
    t.end();
});

test('mixed short bool and capture', function (t) {
    t.same(
        parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
        {
            f : true, p : 555, h : 'localhost',
            _ : [ 'script.js' ]
        }
    );
    t.end();
});

test('short and long', function (t) {
    t.deepEqual(
        parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
        {
            f : true, p : 555, h : 'localhost',
            _ : [ 'script.js' ]
        }
    );
    t.end();
});

build/node_modules/extract-zip/node_modules/minimist/test/whitespace.js | 8 (generated, vendored, new file)
@@ -0,0 +1,8 @@
var parse = require('../');
var test = require('tape');

test('whitespace should be whitespace' , function (t) {
    t.plan(1);
    var x = parse([ '-x', '\t' ]).x;
    t.equal(x, '\t');
});

build/node_modules/extract-zip/node_modules/mkdirp/.npmignore | 2 (generated, vendored, new file)
@@ -0,0 +1,2 @@
node_modules/
npm-debug.log

build/node_modules/extract-zip/node_modules/mkdirp/.travis.yml | 5 (generated, vendored, new file)
@@ -0,0 +1,5 @@
language: node_js
node_js:
- 0.6
- 0.8
- "0.10"

build/node_modules/extract-zip/node_modules/mkdirp/LICENSE | 21 (generated, vendored, new file)
@@ -0,0 +1,21 @@
Copyright 2010 James Halliday (mail@substack.net)

This project is free software released under the MIT/X11 license:

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

build/node_modules/extract-zip/node_modules/mkdirp/bin/cmd.js | 33 (generated, vendored, new executable file)
@@ -0,0 +1,33 @@
#!/usr/bin/env node

var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');

var argv = minimist(process.argv.slice(2), {
    alias: { m: 'mode', h: 'help' },
    string: [ 'mode' ]
});
if (argv.help) {
    fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
    return;
}

var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;

(function next () {
    if (paths.length === 0) return;
    var p = paths.shift();

    if (mode === undefined) mkdirp(p, cb)
    else mkdirp(p, mode, cb)

    function cb (err) {
        if (err) {
            console.error(err.message);
            process.exit(1);
        }
        else next();
    }
})();

build/node_modules/extract-zip/node_modules/mkdirp/bin/usage.txt | 12 (generated, vendored, new file)
@@ -0,0 +1,12 @@
usage: mkdirp [DIR1,DIR2..] {OPTIONS}

Create each supplied directory including any necessary parent directories that
don't yet exist.

If the directory already exists, do nothing.

OPTIONS are:

-m, --mode If a directory needs to be created, set the mode as an octal
permission string.

build/node_modules/extract-zip/node_modules/mkdirp/examples/pow.js | 6 (generated, vendored, new file)
@@ -0,0 +1,6 @@
var mkdirp = require('mkdirp');

mkdirp('/tmp/foo/bar/baz', function (err) {
    if (err) console.error(err)
    else console.log('pow!')
});

build/node_modules/extract-zip/node_modules/mkdirp/index.js | 97 (generated, vendored, new file)
@@ -0,0 +1,97 @@
var path = require('path');
var fs = require('fs');

module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;

function mkdirP (p, opts, f, made) {
    if (typeof opts === 'function') {
        f = opts;
        opts = {};
    }
    else if (!opts || typeof opts !== 'object') {
        opts = { mode: opts };
    }

    var mode = opts.mode;
    var xfs = opts.fs || fs;

    if (mode === undefined) {
        mode = 0777 & (~process.umask());
    }
    if (!made) made = null;

    var cb = f || function () {};
    p = path.resolve(p);

    xfs.mkdir(p, mode, function (er) {
        if (!er) {
            made = made || p;
            return cb(null, made);
        }
        switch (er.code) {
            case 'ENOENT':
                mkdirP(path.dirname(p), opts, function (er, made) {
                    if (er) cb(er, made);
                    else mkdirP(p, opts, cb, made);
                });
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                xfs.stat(p, function (er2, stat) {
                    // if the stat fails, then that's super weird.
                    // let the original error be the failure reason.
                    if (er2 || !stat.isDirectory()) cb(er, made)
                    else cb(null, made);
                });
                break;
        }
    });
}

mkdirP.sync = function sync (p, opts, made) {
    if (!opts || typeof opts !== 'object') {
        opts = { mode: opts };
    }

    var mode = opts.mode;
    var xfs = opts.fs || fs;

    if (mode === undefined) {
        mode = 0777 & (~process.umask());
    }
    if (!made) made = null;

    p = path.resolve(p);

    try {
        xfs.mkdirSync(p, mode);
        made = made || p;
    }
    catch (err0) {
        switch (err0.code) {
            case 'ENOENT' :
                made = sync(path.dirname(p), opts, made);
                sync(p, opts, made);
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                var stat;
                try {
                    stat = xfs.statSync(p);
                }
                catch (err1) {
                    throw err0;
                }
                if (!stat.isDirectory()) throw err0;
                break;
        }
    }

    return made;
};

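As a side note (not part of the committed files), the `mkdirP` implementation above resolves its callback with `(err, made)`, where `made` is the first directory that actually had to be created, or `null` if nothing was missing. A minimal sketch with placeholder paths:

```js
// Hypothetical example of the (err, made) contract implemented above.
var mkdirp = require('mkdirp')

mkdirp('/tmp/a/b/c', function (err, made) {
  if (err) return console.error(err)
  // If /tmp existed but /tmp/a did not, made === '/tmp/a';
  // if the whole path already existed, made === null.
  console.log('first directory created:', made)
})
```
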
build/node_modules/extract-zip/node_modules/mkdirp/package.json | 62 (generated, vendored, new file)
@@ -0,0 +1,62 @@
{
  "_from": "mkdirp@0.5.0",
  "_id": "mkdirp@0.5.0",
  "_inBundle": false,
  "_integrity": "sha1-HXMHam35hs2TROFecfzAWkyavxI=",
  "_location": "/extract-zip/mkdirp",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
    "raw": "mkdirp@0.5.0",
    "name": "mkdirp",
    "escapedName": "mkdirp",
    "rawSpec": "0.5.0",
    "saveSpec": null,
    "fetchSpec": "0.5.0"
  },
  "_requiredBy": [
    "/extract-zip"
  ],
  "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.0.tgz",
  "_shasum": "1d73076a6df986cd9344e15e71fcc05a4c9abf12",
  "_spec": "mkdirp@0.5.0",
  "_where": "/Users/asciidisco/Desktop/asciidisco.com/build/node_modules/extract-zip",
  "author": {
    "name": "James Halliday",
    "email": "mail@substack.net",
    "url": "http://substack.net"
  },
  "bin": {
    "mkdirp": "bin/cmd.js"
  },
  "bugs": {
    "url": "https://github.com/substack/node-mkdirp/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "minimist": "0.0.8"
  },
  "deprecated": false,
  "description": "Recursively mkdir, like `mkdir -p`",
  "devDependencies": {
    "mock-fs": "~2.2.0",
    "tap": "~0.4.0"
  },
  "homepage": "https://github.com/substack/node-mkdirp#readme",
  "keywords": [
    "mkdir",
    "directory"
  ],
  "license": "MIT",
  "main": "./index",
  "name": "mkdirp",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/substack/node-mkdirp.git"
  },
  "scripts": {
    "test": "tap test/*.js"
  },
  "version": "0.5.0"
}

build/node_modules/extract-zip/node_modules/mkdirp/readme.markdown | 100 (generated, vendored, new file)
@@ -0,0 +1,100 @@
# mkdirp

Like `mkdir -p`, but in node.js!

[](http://travis-ci.org/substack/node-mkdirp)

# example

## pow.js

```js
var mkdirp = require('mkdirp');

mkdirp('/tmp/foo/bar/baz', function (err) {
    if (err) console.error(err)
    else console.log('pow!')
});
```

Output

```
pow!
```

And now /tmp/foo/bar/baz exists, huzzah!

# methods

```js
var mkdirp = require('mkdirp');
```

## mkdirp(dir, opts, cb)

Create a new directory and any necessary subdirectories at `dir` with octal
permission string `opts.mode`. If `opts` is a non-object, it will be treated as
the `opts.mode`.

If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`.

`cb(err, made)` fires with the error or the first directory `made`
that had to be created, if any.

You can optionally pass in an alternate `fs` implementation by passing in
`opts.fs`. Your implementation should have `opts.fs.mkdir(path, mode, cb)` and
`opts.fs.stat(path, cb)`.

## mkdirp.sync(dir, opts)

Synchronously create a new directory and any necessary subdirectories at `dir`
with octal permission string `opts.mode`. If `opts` is a non-object, it will be
treated as the `opts.mode`.

If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`.

Returns the first directory that had to be created, if any.

You can optionally pass in an alternate `fs` implementation by passing in
`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)` and
`opts.fs.statSync(path)`.

# usage

This package also ships with a `mkdirp` command.

```
usage: mkdirp [DIR1,DIR2..] {OPTIONS}

Create each supplied directory including any necessary parent directories that
don't yet exist.

If the directory already exists, do nothing.

OPTIONS are:

-m, --mode If a directory needs to be created, set the mode as an octal
permission string.

```

# install

With [npm](http://npmjs.org) do:

```
npm install mkdirp
```

to get the library, or

```
npm install -g mkdirp
```

to get the command.

# license

MIT

build/node_modules/extract-zip/node_modules/mkdirp/test/chmod.js | 38 (generated, vendored, new file)
@@ -0,0 +1,38 @@
var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;

var ps = [ '', 'tmp' ];

for (var i = 0; i < 25; i++) {
    var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    ps.push(dir);
}

var file = ps.join('/');

test('chmod-pre', function (t) {
    var mode = 0744
    mkdirp(file, mode, function (er) {
        t.ifError(er, 'should not error');
        fs.stat(file, function (er, stat) {
            t.ifError(er, 'should exist');
            t.ok(stat && stat.isDirectory(), 'should be directory');
            t.equal(stat && stat.mode & 0777, mode, 'should be 0744');
            t.end();
        });
    });
});

test('chmod', function (t) {
    var mode = 0755
    mkdirp(file, mode, function (er) {
        t.ifError(er, 'should not error');
        fs.stat(file, function (er, stat) {
            t.ifError(er, 'should exist');
            t.ok(stat && stat.isDirectory(), 'should be directory');
            t.end();
        });
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/clobber.js | 37 (generated, vendored, new file)
@@ -0,0 +1,37 @@
var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;

var ps = [ '', 'tmp' ];

for (var i = 0; i < 25; i++) {
    var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    ps.push(dir);
}

var file = ps.join('/');

// a file in the way
var itw = ps.slice(0, 3).join('/');


test('clobber-pre', function (t) {
    console.error("about to write to "+itw)
    fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.');

    fs.stat(itw, function (er, stat) {
        t.ifError(er)
        t.ok(stat && stat.isFile(), 'should be file')
        t.end()
    })
})

test('clobber', function (t) {
    t.plan(2);
    mkdirp(file, 0755, function (err) {
        t.ok(err);
        t.equal(err.code, 'ENOTDIR');
        t.end();
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/mkdirp.js | 26 (generated, vendored, new file)
@@ -0,0 +1,26 @@
var mkdirp = require('../');
var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;

test('woo', function (t) {
    t.plan(5);
    var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);

    var file = '/tmp/' + [x,y,z].join('/');

    mkdirp(file, 0755, function (err) {
        t.ifError(err);
        exists(file, function (ex) {
            t.ok(ex, 'file created');
            fs.stat(file, function (err, stat) {
                t.ifError(err);
                t.equal(stat.mode & 0777, 0755);
                t.ok(stat.isDirectory(), 'target not a directory');
            })
        })
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/opts_fs.js | 27 (generated, vendored, new file)
@@ -0,0 +1,27 @@
var mkdirp = require('../');
var path = require('path');
var test = require('tap').test;
var mockfs = require('mock-fs');

test('opts.fs', function (t) {
    t.plan(5);

    var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);

    var file = '/beep/boop/' + [x,y,z].join('/');
    var xfs = mockfs.fs();

    mkdirp(file, { fs: xfs, mode: 0755 }, function (err) {
        t.ifError(err);
        xfs.exists(file, function (ex) {
            t.ok(ex, 'created file');
            xfs.stat(file, function (err, stat) {
                t.ifError(err);
                t.equal(stat.mode & 0777, 0755);
                t.ok(stat.isDirectory(), 'target not a directory');
            });
        });
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/opts_fs_sync.js | 25 (generated, vendored, new file)
@@ -0,0 +1,25 @@
var mkdirp = require('../');
var path = require('path');
var test = require('tap').test;
var mockfs = require('mock-fs');

test('opts.fs sync', function (t) {
    t.plan(4);

    var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);

    var file = '/beep/boop/' + [x,y,z].join('/');
    var xfs = mockfs.fs();

    mkdirp.sync(file, { fs: xfs, mode: 0755 });
    xfs.exists(file, function (ex) {
        t.ok(ex, 'created file');
        xfs.stat(file, function (err, stat) {
            t.ifError(err);
            t.equal(stat.mode & 0777, 0755);
            t.ok(stat.isDirectory(), 'target not a directory');
        });
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/perm.js | 30 (generated, vendored, new file)
@@ -0,0 +1,30 @@
var mkdirp = require('../');
var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;

test('async perm', function (t) {
    t.plan(5);
    var file = '/tmp/' + (Math.random() * (1<<30)).toString(16);

    mkdirp(file, 0755, function (err) {
        t.ifError(err);
        exists(file, function (ex) {
            t.ok(ex, 'file created');
            fs.stat(file, function (err, stat) {
                t.ifError(err);
                t.equal(stat.mode & 0777, 0755);
                t.ok(stat.isDirectory(), 'target not a directory');
            })
        })
    });
});

test('async root perm', function (t) {
    mkdirp('/tmp', 0755, function (err) {
        if (err) t.fail(err);
        t.end();
    });
    t.end();
});

build/node_modules/extract-zip/node_modules/mkdirp/test/perm_sync.js | 34 (generated, vendored, new file)
@@ -0,0 +1,34 @@
var mkdirp = require('../');
var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;

test('sync perm', function (t) {
    t.plan(4);
    var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json';

    mkdirp.sync(file, 0755);
    exists(file, function (ex) {
        t.ok(ex, 'file created');
        fs.stat(file, function (err, stat) {
            t.ifError(err);
            t.equal(stat.mode & 0777, 0755);
            t.ok(stat.isDirectory(), 'target not a directory');
        });
    });
});

test('sync root perm', function (t) {
    t.plan(3);

    var file = '/tmp';
    mkdirp.sync(file, 0755);
    exists(file, function (ex) {
        t.ok(ex, 'file created');
        fs.stat(file, function (err, stat) {
            t.ifError(err);
            t.ok(stat.isDirectory(), 'target not a directory');
        })
    });
});

build/node_modules/extract-zip/node_modules/mkdirp/test/race.js | 40 (generated, vendored, new file)
@@ -0,0 +1,40 @@
var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var exists = fs.exists || path.exists;
var test = require('tap').test;

test('race', function (t) {
    t.plan(6);
    var ps = [ '', 'tmp' ];

    for (var i = 0; i < 25; i++) {
        var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
        ps.push(dir);
    }
    var file = ps.join('/');

    var res = 2;
    mk(file, function () {
        if (--res === 0) t.end();
    });

    mk(file, function () {
        if (--res === 0) t.end();
    });

    function mk (file, cb) {
        mkdirp(file, 0755, function (err) {
            t.ifError(err);
            exists(file, function (ex) {
                t.ok(ex, 'file created');
                fs.stat(file, function (err, stat) {
                    t.ifError(err);
                    t.equal(stat.mode & 0777, 0755);
                    t.ok(stat.isDirectory(), 'target not a directory');
                    if (cb) cb();
                });
            })
        });
    }
});

30
build/node_modules/extract-zip/node_modules/mkdirp/test/rel.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var exists = fs.exists || path.exists;
|
||||
var test = require('tap').test;
|
||||
|
||||
test('rel', function (t) {
|
||||
t.plan(5);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var cwd = process.cwd();
|
||||
process.chdir('/tmp');
|
||||
|
||||
var file = [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
t.ifError(err);
|
||||
exists(file, function (ex) {
|
||||
t.ok(ex, 'file created');
|
||||
fs.stat(file, function (err, stat) {
|
||||
t.ifError(err);
|
||||
process.chdir(cwd);
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
25
build/node_modules/extract-zip/node_modules/mkdirp/test/return.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('return value', function (t) {
|
||||
t.plan(4);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
// should return the first dir created.
|
||||
// By this point, it would be profoundly surprising if /tmp didn't
|
||||
// already exist, since every other test makes things in there.
|
||||
mkdirp(file, function (err, made) {
|
||||
t.ifError(err);
|
||||
t.equal(made, '/tmp/' + x);
|
||||
mkdirp(file, function (err, made) {
|
||||
t.ifError(err);
|
||||
t.equal(made, null);
|
||||
});
|
||||
});
|
||||
});
|
||||
24
build/node_modules/extract-zip/node_modules/mkdirp/test/return_sync.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('return value', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
// should return the first dir created.
|
||||
// By this point, it would be profoundly surprising if /tmp didn't
|
||||
// already exist, since every other test makes things in there.
|
||||
// Note that this will throw on failure, which will fail the test.
|
||||
var made = mkdirp.sync(file);
|
||||
t.equal(made, '/tmp/' + x);
|
||||
|
||||
// making the same file again should have no effect.
|
||||
made = mkdirp.sync(file);
|
||||
t.equal(made, null);
|
||||
});
|
||||
18
build/node_modules/extract-zip/node_modules/mkdirp/test/root.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('root', function (t) {
|
||||
// '/' on unix, 'c:/' on windows.
|
||||
var file = path.resolve('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) throw err
|
||||
fs.stat(file, function (er, stat) {
|
||||
if (er) throw er
|
||||
t.ok(stat.isDirectory(), 'target is a directory');
|
||||
t.end();
|
||||
})
|
||||
});
|
||||
});
|
||||
30
build/node_modules/extract-zip/node_modules/mkdirp/test/sync.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var exists = fs.exists || path.exists;
|
||||
var test = require('tap').test;
|
||||
|
||||
test('sync', function (t) {
|
||||
t.plan(4);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
try {
|
||||
mkdirp.sync(file, 0755);
|
||||
} catch (err) {
|
||||
t.fail(err);
|
||||
return t.end();
|
||||
}
|
||||
|
||||
exists(file, function (ex) {
|
||||
t.ok(ex, 'file created');
|
||||
fs.stat(file, function (err, stat) {
|
||||
t.ifError(err);
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
});
|
||||
});
|
||||
});
|
||||
26
build/node_modules/extract-zip/node_modules/mkdirp/test/umask.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var exists = fs.exists || path.exists;
|
||||
var test = require('tap').test;
|
||||
|
||||
test('implicit mode from umask', function (t) {
|
||||
t.plan(5);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, function (err) {
|
||||
t.ifError(err);
|
||||
exists(file, function (ex) {
|
||||
t.ok(ex, 'file created');
|
||||
fs.stat(file, function (err, stat) {
|
||||
t.ifError(err);
|
||||
t.equal(stat.mode & 0777, 0777 & (~process.umask()));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
});
|
||||
})
|
||||
});
|
||||
});
|
||||
30
build/node_modules/extract-zip/node_modules/mkdirp/test/umask_sync.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var exists = fs.exists || path.exists;
|
||||
var test = require('tap').test;
|
||||
|
||||
test('umask sync modes', function (t) {
|
||||
t.plan(4);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
try {
|
||||
mkdirp.sync(file);
|
||||
} catch (err) {
|
||||
t.fail(err);
|
||||
return t.end();
|
||||
}
|
||||
|
||||
exists(file, function (ex) {
|
||||
t.ok(ex, 'file created');
|
||||
fs.stat(file, function (err, stat) {
|
||||
t.ifError(err);
|
||||
t.equal(stat.mode & 0777, (0777 & (~process.umask())));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
});
|
||||
});
|
||||
});
|
||||
2
build/node_modules/extract-zip/node_modules/yauzl/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
test/
|
||||
examples/
|
||||
8
build/node_modules/extract-zip/node_modules/yauzl/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- "0.10"
|
||||
script:
|
||||
- "npm run test-travis"
|
||||
after_script:
|
||||
- "npm install coveralls@2 && cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
|
||||
|
||||
21
build/node_modules/extract-zip/node_modules/yauzl/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Josh Wolfe
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
467
build/node_modules/extract-zip/node_modules/yauzl/README.md
generated
vendored
Normal file
@@ -0,0 +1,467 @@
|
||||
# yauzl
|
||||
|
||||
[](https://travis-ci.org/thejoshwolfe/yauzl)
|
||||
[](https://coveralls.io/r/thejoshwolfe/yauzl)
|
||||
|
||||
yet another unzip library for node. For zipping, see
|
||||
[yazl](https://github.com/thejoshwolfe/yazl).
|
||||
|
||||
Design principles:
|
||||
|
||||
* Follow the spec.
|
||||
Don't scan for local file headers.
|
||||
Read the central directory for file metadata.
|
||||
(see [No Streaming Unzip API](#no-streaming-unzip-api)).
|
||||
* Don't block the JavaScript thread.
|
||||
Use and provide async APIs.
|
||||
* Keep memory usage under control.
|
||||
Don't attempt to buffer entire files in RAM at once.
|
||||
* Never crash (if used properly).
|
||||
Don't let malformed zip files bring down client applications that are trying to catch errors.
|
||||
* Catch unsafe file name entries.
|
||||
A zip file entry causes an error if its file name starts with `"/"` or matches `/[A-Za-z]:\//`,
|
||||
or if it contains `".."` path segments or `"\\"` (per the spec).
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var yauzl = require("yauzl");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var mkdirp = require("mkdirp"); // or similar
|
||||
|
||||
yauzl.open("path/to/file.zip", {lazyEntries: true}, function(err, zipfile) {
|
||||
if (err) throw err;
|
||||
zipfile.readEntry();
|
||||
zipfile.on("entry", function(entry) {
|
||||
if (/\/$/.test(entry.fileName)) {
|
||||
// directory file names end with '/'
|
||||
mkdirp(entry.fileName, function(err) {
|
||||
if (err) throw err;
|
||||
zipfile.readEntry();
|
||||
});
|
||||
} else {
|
||||
// file entry
|
||||
zipfile.openReadStream(entry, function(err, readStream) {
|
||||
if (err) throw err;
|
||||
// ensure parent directory exists
|
||||
mkdirp(path.dirname(entry.fileName), function(err) {
|
||||
if (err) throw err;
|
||||
readStream.pipe(fs.createWriteStream(entry.fileName));
|
||||
readStream.on("end", function() {
|
||||
zipfile.readEntry();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
The default for every optional `callback` parameter is:
|
||||
|
||||
```js
|
||||
function defaultCallback(err) {
|
||||
if (err) throw err;
|
||||
}
|
||||
```
|
||||
|
||||
### open(path, [options], [callback])
|
||||
|
||||
Calls `fs.open(path, "r")` and gives the `fd`, `options`, and `callback` to `fromFd()` below.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: true, lazyEntries: false}`.
|
||||
|
||||
`autoClose` is effectively equivalent to:
|
||||
|
||||
```js
|
||||
zipfile.once("end", function() {
|
||||
zipfile.close();
|
||||
});
|
||||
```
|
||||
|
||||
`lazyEntries` indicates that entries should be read only when `readEntry()` is called.
|
||||
If `lazyEntries` is `false`, `entry` events will be emitted as fast as possible to allow `pipe()`ing
|
||||
file data from all entries in parallel.
|
||||
This is not recommended, as it can lead to out of control memory usage for zip files with many entries.
|
||||
See [issue #22](https://github.com/thejoshwolfe/yauzl/issues/22).
|
||||
If `lazyEntries` is `true`, an `entry` or `end` event will be emitted in response to each call to `readEntry()`.
|
||||
This allows processing of one entry at a time, and will keep memory usage under control for zip files with many entries.
|
||||
|
||||
### fromFd(fd, [options], [callback])
|
||||
|
||||
Reads from the fd, which is presumed to be an open .zip file.
|
||||
Note that random access is required by the zip file specification,
|
||||
so the fd cannot be an open socket or any other fd that does not support random access.
|
||||
|
||||
The `callback` is given the arguments `(err, zipfile)`.
|
||||
An `err` is provided if the End of Central Directory Record Signature cannot be found in the file,
|
||||
which indicates that the fd is not a zip file.
|
||||
`zipfile` is an instance of `ZipFile`.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: false, lazyEntries: false}`.
|
||||
See `open()` for the meaning of the options.
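
As an illustrative sketch (the zip path is a placeholder and `lazyEntries: true` is just one reasonable choice), `fromFd()` can be used with an fd you opened yourself:

```js
var fs = require("fs");
var yauzl = require("yauzl");

fs.open("path/to/file.zip", "r", function(err, fd) {
  if (err) throw err;
  yauzl.fromFd(fd, {lazyEntries: true}, function(err, zipfile) {
    if (err) throw err;
    zipfile.readEntry();
    zipfile.on("entry", function(entry) {
      console.log(entry.fileName);
      zipfile.readEntry();
    });
    zipfile.on("end", function() {
      // autoClose defaults to false here, so release the fd explicitly when done.
      zipfile.close();
    });
  });
});
```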
|
||||
|
||||
### fromBuffer(buffer, [options], [callback])
|
||||
|
||||
Like `fromFd()`, but reads from a RAM buffer instead of an open file.
|
||||
`buffer` is a `Buffer`.
|
||||
`callback` is effectively passed directly to `fromFd()`.
|
||||
|
||||
If a `ZipFile` is acquired from this method,
|
||||
it will never emit the `close` event,
|
||||
and calling `close()` is not necessary.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{lazyEntries: false}`.
|
||||
See `open()` for the meaning of the options.
|
||||
The `autoClose` option is ignored for this method.
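
A minimal sketch of reading entry names from a zip file that is already in memory (the `fs.readFileSync()` call is only there to produce a `Buffer` for the example):

```js
var fs = require("fs");
var yauzl = require("yauzl");

var buffer = fs.readFileSync("path/to/file.zip"); // any Buffer containing a zip file
yauzl.fromBuffer(buffer, {lazyEntries: true}, function(err, zipfile) {
  if (err) throw err;
  zipfile.readEntry();
  zipfile.on("entry", function(entry) {
    console.log(entry.fileName, entry.uncompressedSize);
    zipfile.readEntry();
  });
  // no close() is needed for a ZipFile created from a Buffer (see above).
});
```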
|
||||
|
||||
### fromRandomAccessReader(reader, totalSize, [options], [callback])
|
||||
|
||||
This method of creating a zip file allows clients to implement their own back-end file system.
|
||||
For example, a client might translate read calls into network requests.
|
||||
|
||||
The `reader` parameter must be of a type that is a subclass of
|
||||
[RandomAccessReader](#class-randomaccessreader) that implements the required methods.
|
||||
The `totalSize` is a Number and indicates the total file size of the zip file.
|
||||
|
||||
`options` may be omitted or `null`. The defaults are `{autoClose: true, lazyEntries: false}`.
|
||||
See `open()` for the meaning of the options.
|
||||
|
||||
### dosDateTimeToDate(date, time)
|
||||
|
||||
Converts MS-DOS `date` and `time` data into a JavaScript `Date` object.
|
||||
Each parameter is a `Number` treated as an unsigned 16-bit integer.
|
||||
Note that this format does not support timezones,
|
||||
so the returned object will use the local timezone.
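
As a worked example (the numeric values are made up for illustration), the DOS date `0x4a6f` encodes 2017-03-15 and the DOS time `0x6bcb` encodes 13:30:22:

```js
var yauzl = require("yauzl");

// 0x4a6f = (2017 - 1980) << 9 | 3 << 5 | 15    (year, month, day)
// 0x6bcb = 13 << 11 | 30 << 5 | (22 / 2)       (hours, minutes, seconds / 2)
var date = yauzl.dosDateTimeToDate(0x4a6f, 0x6bcb);
// equivalent to new Date(2017, 2, 15, 13, 30, 22) in the local timezone
```

In practice you rarely call this directly; `entry.getLastModDate()` (see below) does it for you.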
|
||||
|
||||
### Class: ZipFile
|
||||
|
||||
The constructor for the class is not part of the public API.
|
||||
Use `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()` instead.
|
||||
|
||||
#### Event: "entry"
|
||||
|
||||
Callback gets `(entry)`, which is an `Entry`.
|
||||
See `open()` and `readEntry()` for when this event is emitted.
|
||||
|
||||
#### Event: "end"
|
||||
|
||||
Emitted after the last `entry` event has been emitted.
|
||||
See `open()` and `readEntry()` for more info on when this event is emitted.
|
||||
|
||||
#### Event: "close"
|
||||
|
||||
Emitted after the fd is actually closed.
|
||||
This is after calling `close()` (or after the `end` event when `autoClose` is `true`),
|
||||
and after all stream pipelines created from `openReadStream()` have finished reading data from the fd.
|
||||
|
||||
If this `ZipFile` was acquired from `fromRandomAccessReader()`,
|
||||
the "fd" in the previous paragraph refers to the `RandomAccessReader` implemented by the client.
|
||||
|
||||
If this `ZipFile` was acquired from `fromBuffer()`, this event is never emitted.
|
||||
|
||||
#### Event: "error"
|
||||
|
||||
Emitted in the case of errors with reading the zip file.
|
||||
(Note that other errors can be emitted from the streams created from `openReadStream()` as well.)
|
||||
After this event has been emitted, no further `entry`, `end`, or `error` events will be emitted,
|
||||
but the `close` event may still be emitted.
|
||||
|
||||
#### readEntry()
|
||||
|
||||
Causes this `ZipFile` to emit an `entry` or `end` event (or an `error` event).
|
||||
This method must only be called when this `ZipFile` was created with the `lazyEntries` option set to `true` (see `open()`).
|
||||
When this `ZipFile` was created with the `lazyEntries` option set to `true`,
|
||||
`entry` and `end` events are only ever emitted in response to this method call.
|
||||
|
||||
The event that is emitted in response to this method will not be emitted until after this method has returned,
|
||||
so it is safe to call this method before attaching event listeners.
|
||||
|
||||
Calling this method again before the response event has been emitted will cause undefined behavior.
|
||||
Calling this method after the `end` event has been emitted will cause undefined behavior.
|
||||
Calling this method after calling `close()` will cause undefined behavior.
|
||||
|
||||
#### openReadStream(entry, callback)
|
||||
|
||||
`entry` must be an `Entry` object from this `ZipFile`.
|
||||
`callback` gets `(err, readStream)`, where `readStream` is a `Readable Stream`.
|
||||
If the entry is compressed (with a supported compression method),
|
||||
the read stream provides the decompressed data.
|
||||
If this zipfile is already closed (see `close()`), the `callback` will receive an `err`.
|
||||
|
||||
It's possible for the `readStream` to emit errors for several reasons.
|
||||
For example, if zlib cannot decompress the data, the zlib error will be emitted from the `readStream`.
|
||||
Two more error cases are if the decompressed data has too many or too few actual bytes
|
||||
compared to the reported byte count from the entry's `uncompressedSize` field.
|
||||
yauzl notices this false information and emits an error from the `readStream`
|
||||
after some number of bytes have already been piped through the stream.
|
||||
|
||||
Because of this check, clients can always trust the `uncompressedSize` field in `Entry` objects.
|
||||
Guarding against [zip bomb](http://en.wikipedia.org/wiki/Zip_bomb) attacks can be accomplished by
|
||||
doing some heuristic checks on the size metadata and then watching out for the above errors.
|
||||
Such heuristics are outside the scope of this library,
|
||||
but enforcing the `uncompressedSize` is implemented here as a security feature.
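
For example, one possible heuristic (assuming a `zipfile` opened with `lazyEntries: true` as in the Usage example; the size cap and the `someWritableStream` destination are placeholders) is to refuse entries whose reported `uncompressedSize` is larger than you are willing to extract:

```js
var MAX_ENTRY_SIZE = 100 * 1024 * 1024; // arbitrary 100 MiB cap; tune for your application

zipfile.on("entry", function(entry) {
  if (entry.uncompressedSize > MAX_ENTRY_SIZE) {
    // refuse suspiciously large entries and move on to the next one
    zipfile.readEntry();
    return;
  }
  zipfile.openReadStream(entry, function(err, readStream) {
    if (err) throw err;
    // if the entry lied about its size, yauzl emits an error on readStream
    readStream.on("error", console.error);
    readStream.pipe(someWritableStream);
    readStream.on("end", function() {
      zipfile.readEntry();
    });
  });
});
```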
|
||||
|
||||
It is possible to destroy the `readStream` before it has piped all of its data.
|
||||
To do this, call `readStream.destroy()`.
|
||||
You must `unpipe()` the `readStream` from any destination before calling `readStream.destroy()`.
|
||||
If this zipfile was created using `fromRandomAccessReader()`, the `RandomAccessReader` implementation
|
||||
must provide readable streams that implement a `.destroy()` method (see `randomAccessReader._readStreamForRange()`)
|
||||
in order for calls to `readStream.destroy()` to work in this context.
|
||||
|
||||
#### close()
|
||||
|
||||
Causes all future calls to `openReadStream()` to fail,
|
||||
and closes the fd after all streams created by `openReadStream()` have emitted their `end` events.
|
||||
|
||||
If the `autoClose` option is set to `true` (see `open()`),
|
||||
this function is effectively called automatically in response to this object's `end` event.
|
||||
|
||||
If the `lazyEntries` option is set to `false` (see `open()`) and this object's `end` event has not been emitted yet,
|
||||
this function causes undefined behavior.
|
||||
If the `lazyEntries` option is set to `true`,
|
||||
you can call this function instead of calling `readEntry()` to abort reading the entries of a zipfile.
|
||||
|
||||
It is safe to call this function multiple times; after the first call, successive calls have no effect.
|
||||
This includes situations where the `autoClose` option effectively calls this function for you.
|
||||
|
||||
#### isOpen
|
||||
|
||||
`Boolean`. `true` until `close()` is called; then it's `false`.
|
||||
|
||||
#### entryCount
|
||||
|
||||
`Number`. Total number of central directory records.
|
||||
|
||||
#### comment
|
||||
|
||||
`String`. Always decoded with `CP437` per the spec.
|
||||
|
||||
### Class: Entry
|
||||
|
||||
Objects of this class represent Central Directory Records.
|
||||
Refer to the zipfile specification for more details about these fields.
|
||||
|
||||
These fields are of type `Number`:
|
||||
|
||||
* `versionMadeBy`
|
||||
* `versionNeededToExtract`
|
||||
* `generalPurposeBitFlag`
|
||||
* `compressionMethod`
|
||||
* `lastModFileTime` (MS-DOS format, see `getLastModDateTime`)
|
||||
* `lastModFileDate` (MS-DOS format, see `getLastModDateTime`)
|
||||
* `crc32`
|
||||
* `compressedSize`
|
||||
* `uncompressedSize`
|
||||
* `fileNameLength` (bytes)
|
||||
* `extraFieldLength` (bytes)
|
||||
* `fileCommentLength` (bytes)
|
||||
* `internalFileAttributes`
|
||||
* `externalFileAttributes`
|
||||
* `relativeOffsetOfLocalHeader`
|
||||
|
||||
#### fileName
|
||||
|
||||
`String`.
|
||||
Following the spec, the bytes for the file name are decoded with
|
||||
`UTF-8` if `generalPurposeBitFlag & 0x800`, otherwise with `CP437`.
|
||||
|
||||
If `fileName` would contain unsafe characters, such as an absolute path or
|
||||
a relative directory, yauzl emits an error instead of an entry.
|
||||
|
||||
#### extraFields
|
||||
|
||||
`Array` with each entry in the form `{id: id, data: data}`,
|
||||
where `id` is a `Number` and `data` is a `Buffer`.
|
||||
This library looks for and reads the ZIP64 Extended Information Extra Field (0x0001)
|
||||
in order to support ZIP64 format zip files.
|
||||
None of the other fields are considered significant by this library.
|
||||
|
||||
#### comment
|
||||
|
||||
`String` decoded with the same charset as used for `fileName`.
|
||||
|
||||
#### getLastModDate()
|
||||
|
||||
Effectively implemented as:
|
||||
|
||||
```js
|
||||
return dosDateTimeToDate(this.lastModFileDate, this.lastModFileTime);
|
||||
```
|
||||
|
||||
### Class: RandomAccessReader
|
||||
|
||||
This class is meant to be subclassed by clients and instantiated for the `fromRandomAccessReader()` function.
|
||||
|
||||
An example implementation can be found in `test/test.js`.
|
||||
|
||||
#### randomAccessReader._readStreamForRange(start, end)
|
||||
|
||||
Subclasses *must* implement this method.
|
||||
|
||||
`start` and `end` are Numbers and indicate byte offsets from the start of the file.
|
||||
`end` is exclusive, so `_readStreamForRange(0x1000, 0x2000)` would indicate to read `0x1000` bytes.
|
||||
`end - start` will always be at least `1`.
|
||||
|
||||
This method should return a readable stream which will be `pipe()`ed into another stream.
|
||||
It is expected that the readable stream will provide data in several chunks if necessary.
|
||||
If the readable stream provides too many or too few bytes, an error will be emitted.
|
||||
Any errors emitted on the readable stream will be handled and re-emitted on the client-visible stream
|
||||
(returned from `zipfile.openReadStream()`) or provided as the `err` argument to the appropriate callback
|
||||
(for example, for `fromRandomAccessReader()`).
|
||||
|
||||
The returned stream *must* implement a method `.destroy()`
|
||||
if you call `readStream.destroy()` on streams you get from `openReadStream()`.
|
||||
If you never call `readStream.destroy()`, then streams returned from this method do not need to implement a method `.destroy()`.
|
||||
`.destroy()` should abort any streaming that is in progress and clean up any associated resources.
|
||||
`.destroy()` will only be called after the stream has been `unpipe()`d from its destination.
|
||||
|
||||
Note that the stream returned from this method might not be the same object that is provided by `openReadStream()`.
|
||||
The stream returned from this method might be `pipe()`d through one or more filter streams (for example, a zlib inflate stream).
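
As a rough sketch (not the implementation from `test/test.js`; the `FileReader` name and error handling are simplified for illustration), a reader backed by a local file might look like this:

```js
var fs = require("fs");
var util = require("util");
var yauzl = require("yauzl");

function FileReader(path) {
  yauzl.RandomAccessReader.call(this);
  this.path = path;
}
util.inherits(FileReader, yauzl.RandomAccessReader);

FileReader.prototype._readStreamForRange = function(start, end) {
  // fs read streams use an inclusive end, while yauzl's end is exclusive
  return fs.createReadStream(this.path, {start: start, end: end - 1});
};

// usage: the caller must supply the total file size
var zipPath = "path/to/file.zip";
fs.stat(zipPath, function(err, stats) {
  if (err) throw err;
  yauzl.fromRandomAccessReader(new FileReader(zipPath), stats.size, function(err, zipfile) {
    if (err) throw err;
    console.log("entries:", zipfile.entryCount);
  });
});
```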
|
||||
|
||||
#### randomAccessReader.read(buffer, offset, length, position, callback)
|
||||
|
||||
Subclasses may implement this method.
|
||||
The default implementation uses `createReadStream()` to fill the `buffer`.
|
||||
|
||||
This method should behave like `fs.read()`.
|
||||
|
||||
#### randomAccessReader.close(callback)
|
||||
|
||||
Subclasses may implement this method.
|
||||
The default implementation is effectively `setImmediate(callback);`.
|
||||
|
||||
`callback` takes parameters `(err)`.
|
||||
|
||||
This method is called once all the streams returned from `_readStreamForRange()` have ended,
|
||||
and no more `_readStreamForRange()` or `read()` requests will be issued to this object.
|
||||
|
||||
## How to Avoid Crashing
|
||||
|
||||
When a malformed zipfile is encountered, the default behavior is to crash (throw an exception).
|
||||
If you want to handle errors more gracefully than this,
|
||||
be sure to do all of the following; a short sketch putting them together appears after the list:
|
||||
|
||||
* Provide `callback` parameters where they are allowed, and check the `err` parameter.
|
||||
* Attach a listener for the `error` event on any `ZipFile` object you get from `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()`.
|
||||
* Attach a listener for the `error` event on any stream you get from `openReadStream()`.
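
Putting those three points together, a defensive skeleton might look like this (`handleBadZipFile` and the zip path are placeholders for your own error handling and input):

```js
var yauzl = require("yauzl");

function handleBadZipFile(err) {
  console.error("bad zip file:", err.message);
}

yauzl.open("path/to/file.zip", {lazyEntries: true}, function(err, zipfile) {
  if (err) return handleBadZipFile(err);        // 1. check the callback's err
  zipfile.on("error", handleBadZipFile);        // 2. listen for errors on the ZipFile
  zipfile.readEntry();
  zipfile.on("entry", function(entry) {
    zipfile.openReadStream(entry, function(err, readStream) {
      if (err) return handleBadZipFile(err);
      readStream.on("error", handleBadZipFile); // 3. listen for errors on each read stream
      // ... consume readStream, then call zipfile.readEntry() ...
    });
  });
});
```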
|
||||
|
||||
## Limitations
|
||||
|
||||
### No Streaming Unzip API
|
||||
|
||||
Due to the design of the .zip file format, it's impossible to interpret a .zip file from start to finish
|
||||
(such as from a readable stream) without sacrificing correctness.
|
||||
The Central Directory, which is the authority on the contents of the .zip file, is at the end of a .zip file, not the beginning.
|
||||
A streaming API would need to either buffer the entire .zip file to get to the Central Directory before interpreting anything
|
||||
(defeating the purpose of a streaming interface), or rely on the Local File Headers which are interspersed through the .zip file.
|
||||
However, the Local File Headers are explicitly denounced in the spec as being unreliable copies of the Central Directory,
|
||||
so trusting them would be a violation of the spec.
|
||||
|
||||
Any library that offers a streaming unzip API must make one of the above two compromises,
|
||||
which makes the library either dishonest or nonconformant (usually the latter).
|
||||
This library insists on correctness and adherence to the spec, and so does not offer a streaming API.
|
||||
|
||||
### Limited ZIP64 Support
|
||||
|
||||
For ZIP64, only zip files smaller than `8PiB` are supported,
|
||||
not the full `16EiB` range that a 64-bit integer should be able to index.
|
||||
This is due to the JavaScript Number type being an IEEE 754 double precision float.
|
||||
|
||||
The Node.js `fs` module probably has this same limitation.
|
||||
|
||||
### ZIP64 Extensible Data Sector Is Ignored
|
||||
|
||||
The spec does not allow zip file creators to put arbitrary data here,
|
||||
but rather reserves its use for PKWARE and mentions something about Z390.
|
||||
This doesn't seem useful to expose in this library, so it is ignored.
|
||||
|
||||
### No Multi-Disk Archive Support
|
||||
|
||||
This library does not support multi-disk zip files.
|
||||
The multi-disk fields in the zipfile spec were intended for a zip file to span multiple floppy disks,
|
||||
which probably never happens now.
|
||||
If the "number of this disk" field in the End of Central Directory Record is not `0`,
|
||||
the `open()`, `fromFd()`, `fromBuffer()`, or `fromRandomAccessReader()` `callback` will receive an `err`.
|
||||
By extension the following zip file fields are ignored by this library and not provided to clients:
|
||||
|
||||
* Disk where central directory starts
|
||||
* Number of central directory records on this disk
|
||||
* Disk number where file starts
|
||||
|
||||
### No Encryption Support
|
||||
|
||||
Currently, the presence of encryption is not even checked,
|
||||
and encrypted zip files will cause undefined behavior.
|
||||
|
||||
### Local File Headers Are Ignored
|
||||
|
||||
Many unzip libraries mistakenly read the Local File Header data in zip files.
|
||||
This data is officially defined to be redundant with the Central Directory information,
|
||||
and is not to be trusted.
|
||||
Aside from checking the signature, yauzl ignores the content of the Local File Header.
|
||||
|
||||
### No CRC-32 Checking
|
||||
|
||||
This library provides the `crc32` field of `Entry` objects read from the Central Directory.
|
||||
However, this field is not used for anything in this library.
|
||||
|
||||
### versionNeededToExtract Is Ignored
|
||||
|
||||
The field `versionNeededToExtract` is ignored,
|
||||
because this library doesn't support the complete zip file spec at any version.
|
||||
|
||||
### No Support For Obscure Compression Methods
|
||||
|
||||
Regarding the `compressionMethod` field of `Entry` objects,
|
||||
only method `0` (stored with no compression)
|
||||
and method `8` (deflated) are supported.
|
||||
Any of the other 15 official methods will cause the `openReadStream()` `callback` to receive an `err`.
|
||||
|
||||
### Data Descriptors Are Ignored
|
||||
|
||||
There may or may not be Data Descriptor sections in a zip file.
|
||||
This library provides no support for finding or interpreting them.
|
||||
|
||||
### Archive Extra Data Record Is Ignored
|
||||
|
||||
There may or may not be an Archive Extra Data Record section in a zip file.
|
||||
This library provides no support for finding or interpreting it.
|
||||
|
||||
### No Language Encoding Flag Support
|
||||
|
||||
Zip files officially support charset encodings other than CP437 and UTF-8,
|
||||
but the zip file spec does not specify how it works.
|
||||
This library makes no attempt to interpret the Language Encoding Flag.
|
||||
|
||||
## Change History
|
||||
|
||||
* 2.4.1
|
||||
* Fix error handling.
|
||||
* 2.4.0
|
||||
 * Add ZIP64 support. [issue #6](https://github.com/thejoshwolfe/yauzl/issues/6)
|
||||
 * Add `lazyEntries` option. [issue #22](https://github.com/thejoshwolfe/yauzl/issues/22)
|
||||
 * Add `readStream.destroy()` method. [issue #26](https://github.com/thejoshwolfe/yauzl/issues/26)
|
||||
 * Add `fromRandomAccessReader()`. [issue #14](https://github.com/thejoshwolfe/yauzl/issues/14)
|
||||
* Add `examples/unzip.js`.
|
||||
* 2.3.1
|
||||
* Documentation updates.
|
||||
* 2.3.0
|
||||
 * Check that `uncompressedSize` is correct, or else emit an error. [issue #13](https://github.com/thejoshwolfe/yauzl/issues/13)
|
||||
* 2.2.1
|
||||
* Update dependencies.
|
||||
* 2.2.0
|
||||
* Update dependencies.
|
||||
* 2.1.0
|
||||
* Remove dependency on `iconv`.
|
||||
* 2.0.3
|
||||
* Fix crash when trying to read a 0-byte file.
|
||||
* 2.0.2
|
||||
* Fix event behavior after errors.
|
||||
* 2.0.1
|
||||
* Fix bug with using `iconv`.
|
||||
* 2.0.0
|
||||
* Initial release.
|
||||
626
build/node_modules/extract-zip/node_modules/yauzl/index.js
generated
vendored
Normal file
@@ -0,0 +1,626 @@
|
||||
var fs = require("fs");
|
||||
var zlib = require("zlib");
|
||||
var fd_slicer = require("fd-slicer");
|
||||
var util = require("util");
|
||||
var EventEmitter = require("events").EventEmitter;
|
||||
var Transform = require("stream").Transform;
|
||||
var PassThrough = require("stream").PassThrough;
|
||||
var Writable = require("stream").Writable;
|
||||
|
||||
exports.open = open;
|
||||
exports.fromFd = fromFd;
|
||||
exports.fromBuffer = fromBuffer;
|
||||
exports.fromRandomAccessReader = fromRandomAccessReader;
|
||||
exports.dosDateTimeToDate = dosDateTimeToDate;
|
||||
exports.ZipFile = ZipFile;
|
||||
exports.Entry = Entry;
|
||||
exports.RandomAccessReader = RandomAccessReader;
|
||||
|
||||
function open(path, options, callback) {
|
||||
if (typeof options === "function") {
|
||||
callback = options;
|
||||
options = null;
|
||||
}
|
||||
if (options == null) options = {};
|
||||
if (options.autoClose == null) options.autoClose = true;
|
||||
if (options.lazyEntries == null) options.lazyEntries = false;
|
||||
if (callback == null) callback = defaultCallback;
|
||||
fs.open(path, "r", function(err, fd) {
|
||||
if (err) return callback(err);
|
||||
fromFd(fd, options, function(err, zipfile) {
|
||||
if (err) fs.close(fd, defaultCallback);
|
||||
callback(err, zipfile);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function fromFd(fd, options, callback) {
|
||||
if (typeof options === "function") {
|
||||
callback = options;
|
||||
options = null;
|
||||
}
|
||||
if (options == null) options = {};
|
||||
if (options.autoClose == null) options.autoClose = false;
|
||||
if (options.lazyEntries == null) options.lazyEntries = false;
|
||||
if (callback == null) callback = defaultCallback;
|
||||
fs.fstat(fd, function(err, stats) {
|
||||
if (err) return callback(err);
|
||||
var reader = fd_slicer.createFromFd(fd, {autoClose: true});
|
||||
fromRandomAccessReader(reader, stats.size, options, callback);
|
||||
});
|
||||
}
|
||||
|
||||
function fromBuffer(buffer, options, callback) {
|
||||
if (typeof options === "function") {
|
||||
callback = options;
|
||||
options = null;
|
||||
}
|
||||
if (options == null) options = {};
|
||||
options.autoClose = false;
|
||||
if (options.lazyEntries == null) options.lazyEntries = false;
|
||||
// i got your open file right here.
|
||||
var reader = fd_slicer.createFromBuffer(buffer);
|
||||
fromRandomAccessReader(reader, buffer.length, options, callback);
|
||||
}
|
||||
|
||||
function fromRandomAccessReader(reader, totalSize, options, callback) {
|
||||
if (typeof options === "function") {
|
||||
callback = options;
|
||||
options = null;
|
||||
}
|
||||
if (options == null) options = {};
|
||||
if (options.autoClose == null) options.autoClose = true;
|
||||
if (options.lazyEntries == null) options.lazyEntries = false;
|
||||
if (callback == null) callback = defaultCallback;
|
||||
if (typeof totalSize !== "number") throw new Error("expected totalSize parameter to be a number");
|
||||
if (totalSize > Number.MAX_SAFE_INTEGER) {
|
||||
throw new Error("zip file too large. only file sizes up to 2^52 are supported due to JavaScript's Number type being an IEEE 754 double.");
|
||||
}
|
||||
|
||||
// the matching unref() call is in zipfile.close()
|
||||
reader.ref();
|
||||
|
||||
// eocdr means End of Central Directory Record.
|
||||
// search backwards for the eocdr signature.
|
||||
// the last field of the eocdr is a variable-length comment.
|
||||
// the comment size is encoded in a 2-byte field in the eocdr, which we can't find without trudging backwards through the comment to find it.
|
||||
// as a consequence of this design decision, it's possible to have ambiguous zip file metadata if a coherent eocdr was in the comment.
|
||||
// we search backwards for a eocdr signature, and hope that whoever made the zip file was smart enough to forbid the eocdr signature in the comment.
|
||||
var eocdrWithoutCommentSize = 22;
|
||||
var maxCommentSize = 0x10000; // 2-byte size
|
||||
var bufferSize = Math.min(eocdrWithoutCommentSize + maxCommentSize, totalSize);
|
||||
var buffer = new Buffer(bufferSize);
|
||||
var bufferReadStart = totalSize - buffer.length;
|
||||
readAndAssertNoEof(reader, buffer, 0, bufferSize, bufferReadStart, function(err) {
|
||||
if (err) return callback(err);
|
||||
for (var i = bufferSize - eocdrWithoutCommentSize; i >= 0; i -= 1) {
|
||||
if (buffer.readUInt32LE(i) !== 0x06054b50) continue;
|
||||
// found eocdr
|
||||
var eocdrBuffer = buffer.slice(i);
|
||||
|
||||
// 0 - End of central directory signature = 0x06054b50
|
||||
// 4 - Number of this disk
|
||||
var diskNumber = eocdrBuffer.readUInt16LE(4);
|
||||
if (diskNumber !== 0) return callback(new Error("multi-disk zip files are not supported: found disk number: " + diskNumber));
|
||||
// 6 - Disk where central directory starts
|
||||
// 8 - Number of central directory records on this disk
|
||||
// 10 - Total number of central directory records
|
||||
var entryCount = eocdrBuffer.readUInt16LE(10);
|
||||
// 12 - Size of central directory (bytes)
|
||||
// 16 - Offset of start of central directory, relative to start of archive
|
||||
var centralDirectoryOffset = eocdrBuffer.readUInt32LE(16);
|
||||
// 20 - Comment length
|
||||
var commentLength = eocdrBuffer.readUInt16LE(20);
|
||||
var expectedCommentLength = eocdrBuffer.length - eocdrWithoutCommentSize;
|
||||
if (commentLength !== expectedCommentLength) {
|
||||
return callback(new Error("invalid comment length. expected: " + expectedCommentLength + ". found: " + commentLength));
|
||||
}
|
||||
// 22 - Comment
|
||||
// the encoding is always cp437.
|
||||
var comment = bufferToString(eocdrBuffer, 22, eocdrBuffer.length, false);
|
||||
|
||||
if (!(entryCount === 0xffff || centralDirectoryOffset === 0xffffffff)) {
|
||||
return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries));
|
||||
}
|
||||
|
||||
// ZIP64 format
|
||||
|
||||
// ZIP64 Zip64 end of central directory locator
|
||||
var zip64EocdlBuffer = new Buffer(20);
|
||||
var zip64EocdlOffset = bufferReadStart + i - zip64EocdlBuffer.length;
|
||||
readAndAssertNoEof(reader, zip64EocdlBuffer, 0, zip64EocdlBuffer.length, zip64EocdlOffset, function(err) {
|
||||
if (err) return callback(err);
|
||||
|
||||
// 0 - zip64 end of central dir locator signature = 0x07064b50
|
||||
if (zip64EocdlBuffer.readUInt32LE(0) !== 0x07064b50) {
|
||||
return callback(new Error("invalid ZIP64 End of Central Directory Locator signature"));
|
||||
}
|
||||
// 4 - number of the disk with the start of the zip64 end of central directory
|
||||
// 8 - relative offset of the zip64 end of central directory record
|
||||
var zip64EocdrOffset = readUInt64LE(zip64EocdlBuffer, 8);
|
||||
// 16 - total number of disks
|
||||
|
||||
// ZIP64 end of central directory record
|
||||
var zip64EocdrBuffer = new Buffer(56);
|
||||
readAndAssertNoEof(reader, zip64EocdrBuffer, 0, zip64EocdrBuffer.length, zip64EocdrOffset, function(err) {
|
||||
if (err) return callback(err);
|
||||
|
||||
// 0 - zip64 end of central dir signature 4 bytes (0x06064b50)
|
||||
if (zip64EocdrBuffer.readUInt32LE(0) !== 0x06064b50) return callback(new Error("invalid ZIP64 end of central directory record signature"));
|
||||
// 4 - size of zip64 end of central directory record 8 bytes
|
||||
// 12 - version made by 2 bytes
|
||||
// 14 - version needed to extract 2 bytes
|
||||
// 16 - number of this disk 4 bytes
|
||||
// 20 - number of the disk with the start of the central directory 4 bytes
|
||||
// 24 - total number of entries in the central directory on this disk 8 bytes
|
||||
// 32 - total number of entries in the central directory 8 bytes
|
||||
entryCount = readUInt64LE(zip64EocdrBuffer, 32);
|
||||
// 40 - size of the central directory 8 bytes
|
||||
// 48 - offset of start of central directory with respect to the starting disk number 8 bytes
|
||||
centralDirectoryOffset = readUInt64LE(zip64EocdrBuffer, 48);
|
||||
// 56 - zip64 extensible data sector (variable size)
|
||||
return callback(null, new ZipFile(reader, centralDirectoryOffset, totalSize, entryCount, comment, options.autoClose, options.lazyEntries));
|
||||
});
|
||||
});
|
||||
return;
|
||||
}
|
||||
callback(new Error("end of central directory record signature not found"));
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(ZipFile, EventEmitter);
|
||||
function ZipFile(reader, centralDirectoryOffset, fileSize, entryCount, comment, autoClose, lazyEntries) {
|
||||
var self = this;
|
||||
EventEmitter.call(self);
|
||||
self.reader = reader;
|
||||
// forward close events
|
||||
self.reader.on("error", function(err) {
|
||||
// error closing the fd
|
||||
emitError(self, err);
|
||||
});
|
||||
self.reader.once("close", function() {
|
||||
self.emit("close");
|
||||
});
|
||||
self.readEntryCursor = centralDirectoryOffset;
|
||||
self.fileSize = fileSize;
|
||||
self.entryCount = entryCount;
|
||||
self.comment = comment;
|
||||
self.entriesRead = 0;
|
||||
self.autoClose = !!autoClose;
|
||||
self.lazyEntries = !!lazyEntries;
|
||||
self.isOpen = true;
|
||||
self.emittedError = false;
|
||||
|
||||
if (!self.lazyEntries) self.readEntry();
|
||||
}
|
||||
ZipFile.prototype.close = function() {
|
||||
if (!this.isOpen) return;
|
||||
this.isOpen = false;
|
||||
this.reader.unref();
|
||||
};
|
||||
|
||||
function emitErrorAndAutoClose(self, err) {
|
||||
if (self.autoClose) self.close();
|
||||
emitError(self, err);
|
||||
}
|
||||
function emitError(self, err) {
|
||||
if (self.emittedError) return;
|
||||
self.emittedError = true;
|
||||
self.emit("error", err);
|
||||
}
|
||||
|
||||
ZipFile.prototype.readEntry = function() {
|
||||
var self = this;
|
||||
if (self.entryCount === self.entriesRead) {
|
||||
// done with metadata
|
||||
setImmediate(function() {
|
||||
if (self.autoClose) self.close();
|
||||
if (self.emittedError) return;
|
||||
self.emit("end");
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (self.emittedError) return;
|
||||
var buffer = new Buffer(46);
|
||||
readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
|
||||
if (err) return emitErrorAndAutoClose(self, err);
|
||||
if (self.emittedError) return;
|
||||
var entry = new Entry();
|
||||
// 0 - Central directory file header signature
|
||||
var signature = buffer.readUInt32LE(0);
|
||||
if (signature !== 0x02014b50) return emitErrorAndAutoClose(self, new Error("invalid central directory file header signature: 0x" + signature.toString(16)));
|
||||
// 4 - Version made by
|
||||
entry.versionMadeBy = buffer.readUInt16LE(4);
|
||||
// 6 - Version needed to extract (minimum)
|
||||
entry.versionNeededToExtract = buffer.readUInt16LE(6);
|
||||
// 8 - General purpose bit flag
|
||||
entry.generalPurposeBitFlag = buffer.readUInt16LE(8);
|
||||
// 10 - Compression method
|
||||
entry.compressionMethod = buffer.readUInt16LE(10);
|
||||
// 12 - File last modification time
|
||||
entry.lastModFileTime = buffer.readUInt16LE(12);
|
||||
// 14 - File last modification date
|
||||
entry.lastModFileDate = buffer.readUInt16LE(14);
|
||||
// 16 - CRC-32
|
||||
entry.crc32 = buffer.readUInt32LE(16);
|
||||
// 20 - Compressed size
|
||||
entry.compressedSize = buffer.readUInt32LE(20);
|
||||
// 24 - Uncompressed size
|
||||
entry.uncompressedSize = buffer.readUInt32LE(24);
|
||||
// 28 - File name length (n)
|
||||
entry.fileNameLength = buffer.readUInt16LE(28);
|
||||
// 30 - Extra field length (m)
|
||||
entry.extraFieldLength = buffer.readUInt16LE(30);
|
||||
// 32 - File comment length (k)
|
||||
entry.fileCommentLength = buffer.readUInt16LE(32);
|
||||
// 34 - Disk number where file starts
|
||||
// 36 - Internal file attributes
|
||||
entry.internalFileAttributes = buffer.readUInt16LE(36);
|
||||
// 38 - External file attributes
|
||||
entry.externalFileAttributes = buffer.readUInt32LE(38);
|
||||
// 42 - Relative offset of local file header
|
||||
entry.relativeOffsetOfLocalHeader = buffer.readUInt32LE(42);
|
||||
|
||||
self.readEntryCursor += 46;
|
||||
|
||||
buffer = new Buffer(entry.fileNameLength + entry.extraFieldLength + entry.fileCommentLength);
|
||||
readAndAssertNoEof(self.reader, buffer, 0, buffer.length, self.readEntryCursor, function(err) {
|
||||
if (err) return emitErrorAndAutoClose(self, err);
|
||||
if (self.emittedError) return;
|
||||
// 46 - File name
|
||||
var isUtf8 = (entry.generalPurposeBitFlag & 0x800) !== 0;
|
||||
try {
|
||||
entry.fileName = bufferToString(buffer, 0, entry.fileNameLength, isUtf8);
|
||||
} catch (e) {
|
||||
return emitErrorAndAutoClose(self, e);
|
||||
}
|
||||
|
||||
// 46+n - Extra field
|
||||
var fileCommentStart = entry.fileNameLength + entry.extraFieldLength;
|
||||
var extraFieldBuffer = buffer.slice(entry.fileNameLength, fileCommentStart);
|
||||
entry.extraFields = [];
|
||||
var i = 0;
|
||||
while (i < extraFieldBuffer.length) {
|
||||
var headerId = extraFieldBuffer.readUInt16LE(i + 0);
|
||||
var dataSize = extraFieldBuffer.readUInt16LE(i + 2);
|
||||
var dataStart = i + 4;
|
||||
var dataEnd = dataStart + dataSize;
|
||||
var dataBuffer = new Buffer(dataSize);
|
||||
extraFieldBuffer.copy(dataBuffer, 0, dataStart, dataEnd);
|
||||
entry.extraFields.push({
|
||||
id: headerId,
|
||||
data: dataBuffer,
|
||||
});
|
||||
i = dataEnd;
|
||||
}
|
||||
|
||||
// 46+n+m - File comment
|
||||
try {
|
||||
entry.fileComment = bufferToString(buffer, fileCommentStart, fileCommentStart + entry.fileCommentLength, isUtf8);
|
||||
} catch (e) {
|
||||
return emitErrorAndAutoClose(self, e);
|
||||
}
|
||||
|
||||
self.readEntryCursor += buffer.length;
|
||||
self.entriesRead += 1;
|
||||
|
||||
if (entry.uncompressedSize === 0xffffffff ||
|
||||
entry.compressedSize === 0xffffffff ||
|
||||
entry.relativeOffsetOfLocalHeader === 0xffffffff) {
|
||||
// ZIP64 format
|
||||
// find the Zip64 Extended Information Extra Field
|
||||
var zip64EiefBuffer = null;
|
||||
for (var i = 0; i < entry.extraFields.length; i++) {
|
||||
var extraField = entry.extraFields[i];
|
||||
if (extraField.id === 0x0001) {
|
||||
zip64EiefBuffer = extraField.data;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (zip64EiefBuffer == null) return emitErrorAndAutoClose(self, new Error("expected Zip64 Extended Information Extra Field"));
|
||||
var index = 0;
|
||||
// 0 - Original Size 8 bytes
|
||||
if (entry.uncompressedSize === 0xffffffff) {
|
||||
if (index + 8 > zip64EiefBuffer.length) return emitErrorAndAutoClose(self, new Error("Zip64 Extended Information Extra Field does not include Original Size"));
|
||||
entry.uncompressedSize = readUInt64LE(zip64EiefBuffer, index);
|
||||
index += 8;
|
||||
}
|
||||
// 8 - Compressed Size 8 bytes
|
||||
if (entry.compressedSize === 0xffffffff) {
|
||||
if (index + 8 > zip64EiefBuffer.length) return emitErrorAndAutoClose(self, new Error("Zip64 Extended Information Extra Field does not include Compressed Size"));
|
||||
entry.compressedSize = readUInt64LE(zip64EiefBuffer, index);
|
||||
index += 8;
|
||||
}
|
||||
// 16 - Relative Header Offset 8 bytes
|
||||
if (entry.relativeOffsetOfLocalHeader === 0xffffffff) {
|
||||
if (index + 8 > zip64EiefBuffer.length) return emitErrorAndAutoClose(self, new Error("Zip64 Extended Information Extra Field does not include Relative Header Offset"));
|
||||
entry.relativeOffsetOfLocalHeader = readUInt64LE(zip64EiefBuffer, index);
|
||||
index += 8;
|
||||
}
|
||||
// 24 - Disk Start Number 4 bytes
|
||||
}
|
||||
|
||||
// validate file size
|
||||
if (entry.compressionMethod === 0) {
|
||||
if (entry.compressedSize !== entry.uncompressedSize) {
|
||||
var msg = "compressed/uncompressed size mismatch for stored file: " + entry.compressedSize + " != " + entry.uncompressedSize;
|
||||
return emitErrorAndAutoClose(self, new Error(msg));
|
||||
}
|
||||
}
|
||||
|
||||
// validate file name
|
||||
if (entry.fileName.indexOf("\\") !== -1) return emitErrorAndAutoClose(self, new Error("invalid characters in fileName: " + entry.fileName));
|
||||
if (/^[a-zA-Z]:/.test(entry.fileName) || /^\//.test(entry.fileName)) return emitErrorAndAutoClose(self, new Error("absolute path: " + entry.fileName));
|
||||
if (entry.fileName.split("/").indexOf("..") !== -1) return emitErrorAndAutoClose(self, new Error("invalid relative path: " + entry.fileName));
|
||||
self.emit("entry", entry);
|
||||
|
||||
if (!self.lazyEntries) self.readEntry();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
ZipFile.prototype.openReadStream = function(entry, callback) {
|
||||
var self = this;
|
||||
if (!self.isOpen) return callback(new Error("closed"));
|
||||
// make sure we don't lose the fd before we open the actual read stream
|
||||
self.reader.ref();
|
||||
var buffer = new Buffer(30);
|
||||
readAndAssertNoEof(self.reader, buffer, 0, buffer.length, entry.relativeOffsetOfLocalHeader, function(err) {
|
||||
try {
|
||||
if (err) return callback(err);
|
||||
// 0 - Local file header signature = 0x04034b50
|
||||
var signature = buffer.readUInt32LE(0);
|
||||
if (signature !== 0x04034b50) return callback(new Error("invalid local file header signature: 0x" + signature.toString(16)));
|
||||
// all this should be redundant
|
||||
// 4 - Version needed to extract (minimum)
|
||||
// 6 - General purpose bit flag
|
||||
// 8 - Compression method
|
||||
// 10 - File last modification time
|
||||
// 12 - File last modification date
|
||||
// 14 - CRC-32
|
||||
// 18 - Compressed size
|
||||
// 22 - Uncompressed size
|
||||
// 26 - File name length (n)
|
||||
var fileNameLength = buffer.readUInt16LE(26);
|
||||
// 28 - Extra field length (m)
|
||||
var extraFieldLength = buffer.readUInt16LE(28);
|
||||
// 30 - File name
|
||||
// 30+n - Extra field
|
||||
var localFileHeaderEnd = entry.relativeOffsetOfLocalHeader + buffer.length + fileNameLength + extraFieldLength;
|
||||
var compressed;
|
||||
if (entry.compressionMethod === 0) {
|
||||
// 0 - The file is stored (no compression)
|
||||
compressed = false;
|
||||
} else if (entry.compressionMethod === 8) {
|
||||
// 8 - The file is Deflated
|
||||
compressed = true;
|
||||
} else {
|
||||
return callback(new Error("unsupported compression method: " + entry.compressionMethod));
|
||||
}
|
||||
var fileDataStart = localFileHeaderEnd;
|
||||
var fileDataEnd = fileDataStart + entry.compressedSize;
|
||||
if (entry.compressedSize !== 0) {
|
||||
// bounds check now, because the read streams will probably not complain loud enough.
|
||||
// since we're dealing with an unsigned offset plus an unsigned size,
|
||||
// we only have 1 thing to check for.
|
||||
if (fileDataEnd > self.fileSize) {
|
||||
return callback(new Error("file data overflows file bounds: " +
|
||||
fileDataStart + " + " + entry.compressedSize + " > " + self.fileSize));
|
||||
}
|
||||
}
|
||||
var readStream = self.reader.createReadStream({start: fileDataStart, end: fileDataEnd});
|
||||
var endpointStream = readStream;
|
||||
if (compressed) {
|
||||
var destroyed = false;
|
||||
var inflateFilter = zlib.createInflateRaw();
|
||||
readStream.on("error", function(err) {
|
||||
// setImmediate here because errors can be emitted during the first call to pipe()
|
||||
setImmediate(function() {
|
||||
if (!destroyed) inflateFilter.emit("error", err);
|
||||
});
|
||||
});
|
||||
|
||||
var checkerStream = new AssertByteCountStream(entry.uncompressedSize);
|
||||
inflateFilter.on("error", function(err) {
|
||||
// forward zlib errors to the client-visible stream
|
||||
setImmediate(function() {
|
||||
if (!destroyed) checkerStream.emit("error", err);
|
||||
});
|
||||
});
|
||||
checkerStream.destroy = function() {
|
||||
destroyed = true;
|
||||
inflateFilter.unpipe(checkerStream);
|
||||
readStream.unpipe(inflateFilter);
|
||||
// TODO: the inflateFilter now causes a memory leak. see Issue #27.
|
||||
readStream.destroy();
|
||||
};
|
||||
endpointStream = readStream.pipe(inflateFilter).pipe(checkerStream);
|
||||
}
|
||||
callback(null, endpointStream);
|
||||
} finally {
|
||||
self.reader.unref();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
function Entry() {
|
||||
}
|
||||
Entry.prototype.getLastModDate = function() {
|
||||
return dosDateTimeToDate(this.lastModFileDate, this.lastModFileTime);
|
||||
};
|
||||
|
||||
function dosDateTimeToDate(date, time) {
|
||||
var day = date & 0x1f; // 1-31
|
||||
var month = (date >> 5 & 0xf) - 1; // 1-12, 0-11
|
||||
var year = (date >> 9 & 0x7f) + 1980; // 0-128, 1980-2108
|
||||
|
||||
var millisecond = 0;
|
||||
var second = (time & 0x1f) * 2; // 0-29, 0-58 (even numbers)
|
||||
var minute = time >> 5 & 0x3f; // 0-59
|
||||
var hour = time >> 11 & 0x1f; // 0-23
|
||||
|
||||
return new Date(year, month, day, hour, minute, second, millisecond);
|
||||
}
|
||||
|
||||
function readAndAssertNoEof(reader, buffer, offset, length, position, callback) {
|
||||
if (length === 0) {
|
||||
// fs.read will throw an out-of-bounds error if you try to read 0 bytes from a 0 byte file
|
||||
return setImmediate(function() { callback(null, new Buffer(0)); });
|
||||
}
|
||||
reader.read(buffer, offset, length, position, function(err, bytesRead) {
|
||||
if (err) return callback(err);
|
||||
if (bytesRead < length) return callback(new Error("unexpected EOF"));
|
||||
callback();
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(AssertByteCountStream, Transform);
|
||||
function AssertByteCountStream(byteCount) {
|
||||
Transform.call(this);
|
||||
this.actualByteCount = 0;
|
||||
this.expectedByteCount = byteCount;
|
||||
}
|
||||
AssertByteCountStream.prototype._transform = function(chunk, encoding, cb) {
|
||||
this.actualByteCount += chunk.length;
|
||||
if (this.actualByteCount > this.expectedByteCount) {
|
||||
var msg = "too many bytes in the stream. expected " + this.expectedByteCount + ". got at least " + this.actualByteCount;
|
||||
return cb(new Error(msg));
|
||||
}
|
||||
cb(null, chunk);
|
||||
};
|
||||
AssertByteCountStream.prototype._flush = function(cb) {
|
||||
if (this.actualByteCount < this.expectedByteCount) {
|
||||
var msg = "not enough bytes in the stream. expected " + this.expectedByteCount + ". got only " + this.actualByteCount;
|
||||
return cb(new Error(msg));
|
||||
}
|
||||
cb();
|
||||
};
|
||||
|
||||
util.inherits(RandomAccessReader, EventEmitter);
|
||||
function RandomAccessReader() {
|
||||
EventEmitter.call(this);
|
||||
this.refCount = 0;
|
||||
}
|
||||
RandomAccessReader.prototype.ref = function() {
|
||||
this.refCount += 1;
|
||||
};
|
||||
RandomAccessReader.prototype.unref = function() {
|
||||
var self = this;
|
||||
self.refCount -= 1;
|
||||
|
||||
if (self.refCount > 0) return;
|
||||
if (self.refCount < 0) throw new Error("invalid unref");
|
||||
|
||||
self.close(onCloseDone);
|
||||
|
||||
function onCloseDone(err) {
|
||||
if (err) return self.emit('error', err);
|
||||
self.emit('close');
|
||||
}
|
||||
};
|
||||
RandomAccessReader.prototype.createReadStream = function(options) {
  var start = options.start;
  var end = options.end;
  if (start === end) {
    var emptyStream = new PassThrough();
    setImmediate(function() {
      emptyStream.end();
    });
    return emptyStream;
  }
  var stream = this._readStreamForRange(start, end);

  var destroyed = false;
  var refUnrefFilter = new RefUnrefFilter(this);
  stream.on("error", function(err) {
    setImmediate(function() {
      if (!destroyed) refUnrefFilter.emit("error", err);
    });
  });
  refUnrefFilter.destroy = function() {
    stream.unpipe(refUnrefFilter);
    refUnrefFilter.unref();
    stream.destroy();
  };

  var byteCounter = new AssertByteCountStream(end - start);
  refUnrefFilter.on("error", function(err) {
    setImmediate(function() {
      if (!destroyed) byteCounter.emit("error", err);
    });
  });
  byteCounter.destroy = function() {
    destroyed = true;
    refUnrefFilter.unpipe(byteCounter);
    refUnrefFilter.destroy();
  };

  return stream.pipe(refUnrefFilter).pipe(byteCounter);
};
RandomAccessReader.prototype._readStreamForRange = function(start, end) {
  throw new Error("not implemented");
};
RandomAccessReader.prototype.read = function(buffer, offset, length, position, callback) {
  var readStream = this.createReadStream({start: position, end: position + length});
  var writeStream = new Writable();
  var written = 0;
  writeStream._write = function(chunk, encoding, cb) {
    chunk.copy(buffer, offset + written, 0, chunk.length);
    written += chunk.length;
    cb();
  };
  writeStream.on("finish", callback);
  readStream.on("error", function(error) {
    callback(error);
  });
  readStream.pipe(writeStream);
};
RandomAccessReader.prototype.close = function(callback) {
  setImmediate(callback);
};

util.inherits(RefUnrefFilter, PassThrough);
function RefUnrefFilter(context) {
  PassThrough.call(this);
  this.context = context;
  this.context.ref();
  this.unreffedYet = false;
}
RefUnrefFilter.prototype._flush = function(cb) {
  this.unref();
  cb();
};
RefUnrefFilter.prototype.unref = function(cb) {
  if (this.unreffedYet) return;
  this.unreffedYet = true;
  this.context.unref();
};

var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ ';
function bufferToString(buffer, start, end, isUtf8) {
  if (isUtf8) {
    return buffer.toString("utf8", start, end);
  } else {
    var result = "";
    for (var i = start; i < end; i++) {
      result += cp437[buffer[i]];
    }
    return result;
  }
}

function readUInt64LE(buffer, offset) {
  // there is no native function for this, because we can't actually store 64-bit integers precisely.
  // after 53 bits, JavaScript's Number type (IEEE 754 double) can't store individual integers anymore.
  // but since 53 bits is a whole lot more than 32 bits, we do our best anyway.
  var lower32 = buffer.readUInt32LE(offset);
  var upper32 = buffer.readUInt32LE(offset + 4);
  // we can't use bitshifting here, because JavaScript bitshifting only works on 32-bit integers.
  return upper32 * 0x100000000 + lower32;
  // as long as we're bounds checking the result of this function against the total file size,
  // we'll catch any overflow errors, because we already made sure the total file size was within reason.
}

function defaultCallback(err) {
  if (err) throw err;
}
64
build/node_modules/extract-zip/node_modules/yauzl/package.json
generated
vendored
Normal file
@@ -0,0 +1,64 @@
{
  "_from": "yauzl@2.4.1",
  "_id": "yauzl@2.4.1",
  "_inBundle": false,
  "_integrity": "sha1-lSj0QtqxsihOWLQ3m7GU4i4MQAU=",
  "_location": "/extract-zip/yauzl",
  "_phantomChildren": {},
  "_requested": {
    "type": "version",
    "registry": true,
    "raw": "yauzl@2.4.1",
    "name": "yauzl",
    "escapedName": "yauzl",
    "rawSpec": "2.4.1",
    "saveSpec": null,
    "fetchSpec": "2.4.1"
  },
  "_requiredBy": [
    "/extract-zip"
  ],
  "_resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.4.1.tgz",
  "_shasum": "9528f442dab1b2284e58b4379bb194e22e0c4005",
  "_spec": "yauzl@2.4.1",
  "_where": "/Users/asciidisco/Desktop/asciidisco.com/build/node_modules/extract-zip",
  "author": {
    "name": "Josh Wolfe",
    "email": "thejoshwolfe@gmail.com"
  },
  "bugs": {
    "url": "https://github.com/thejoshwolfe/yauzl/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "fd-slicer": "~1.0.1"
  },
  "deprecated": false,
  "description": "yet another unzip library for node",
  "devDependencies": {
    "bl": "~1.0.0",
    "istanbul": "~0.3.4",
    "pend": "~1.2.0"
  },
  "homepage": "https://github.com/thejoshwolfe/yauzl",
  "keywords": [
    "unzip",
    "zip",
    "stream",
    "archive",
    "file"
  ],
  "license": "MIT",
  "main": "index.js",
  "name": "yauzl",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/thejoshwolfe/yauzl.git"
  },
  "scripts": {
    "test": "node test/test.js",
    "test-cov": "istanbul cover test/test.js",
    "test-travis": "istanbul cover --report lcovonly test/test.js"
  },
  "version": "2.4.1"
}
71
build/node_modules/extract-zip/package.json
generated
vendored
Normal file
@@ -0,0 +1,71 @@
{
  "_from": "extract-zip@^1.6.5",
  "_id": "extract-zip@1.6.6",
  "_inBundle": false,
  "_integrity": "sha1-EpDt6NINCHK0Kf0/NRyhKOxe+Fw=",
  "_location": "/extract-zip",
  "_phantomChildren": {
    "fd-slicer": "1.0.1"
  },
  "_requested": {
    "type": "range",
    "registry": true,
    "raw": "extract-zip@^1.6.5",
    "name": "extract-zip",
    "escapedName": "extract-zip",
    "rawSpec": "^1.6.5",
    "saveSpec": null,
    "fetchSpec": "^1.6.5"
  },
  "_requiredBy": [
    "/phantomjs-prebuilt"
  ],
  "_resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-1.6.6.tgz",
  "_shasum": "1290ede8d20d0872b429fd3f351ca128ec5ef85c",
  "_spec": "extract-zip@^1.6.5",
  "_where": "/Users/asciidisco/Desktop/asciidisco.com/build/node_modules/phantomjs-prebuilt",
  "author": {
    "name": "max ogden"
  },
  "bin": {
    "extract-zip": "cli.js"
  },
  "bugs": {
    "url": "https://github.com/maxogden/extract-zip/issues"
  },
  "bundleDependencies": false,
  "dependencies": {
    "concat-stream": "1.6.0",
    "debug": "2.6.9",
    "mkdirp": "0.5.0",
    "yauzl": "2.4.1"
  },
  "deprecated": false,
  "description": "unzip a zip file into a directory using 100% javascript",
  "devDependencies": {
    "rimraf": "^2.2.8",
    "standard": "^5.2.2",
    "tape": "^4.2.0",
    "temp": "^0.8.3"
  },
  "directories": {
    "test": "test"
  },
  "homepage": "https://github.com/maxogden/extract-zip#readme",
  "keywords": [
    "unzip",
    "zip",
    "extract"
  ],
  "license": "BSD-2-Clause",
  "main": "index.js",
  "name": "extract-zip",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/maxogden/extract-zip.git"
  },
  "scripts": {
    "test": "standard && node test/test.js"
  },
  "version": "1.6.6"
}
49
build/node_modules/extract-zip/readme.md
generated
vendored
Normal file
@@ -0,0 +1,49 @@
# extract-zip

Unzip written in pure JavaScript. Extracts a zip into a directory. Available as a library or a command line program.

Uses the [`yauzl`](http://npmjs.org/yauzl) ZIP parser.

[NPM](https://nodei.co/npm/extract-zip/)
[JavaScript Standard Style](https://github.com/feross/standard)
[Build Status](https://travis-ci.org/maxogden/extract-zip)

## Installation

Get the library:

```
npm install extract-zip --save
```

Install the command line program:

```
npm install extract-zip -g
```

## JS API

```js
var extract = require('extract-zip')
extract(source, {dir: target}, function (err) {
  // extraction is complete. make sure to handle the err
})
```

### Options

- `dir` - defaults to `process.cwd()`
- `defaultDirMode` - integer - directory mode (permissions); defaults to `493` (octal `0755` as an integer)
- `defaultFileMode` - integer - file mode (permissions); defaults to `420` (octal `0644` as an integer)
- `onEntry` - function - if present, called with `(entry, zipfile)` for every entry in the zip, forwarded from yauzl's `entry` event; `zipfile` is the `yauzl` instance

Default modes are only used if no permissions are set in the zip file.
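For instance, the options above can be combined as follows. This is a minimal sketch rather than an example from the package docs: `archive.zip` and the `output` directory are hypothetical placeholders, and the mode values simply restate the defaults listed above.

```js
var path = require('path')
var extract = require('extract-zip')

var source = path.resolve('archive.zip') // hypothetical input archive
var target = path.resolve('output')      // hypothetical extraction directory

extract(source, {
  dir: target,
  defaultDirMode: parseInt('0755', 8),  // 493; used when the zip stores no directory permissions
  defaultFileMode: parseInt('0644', 8), // 420; used when the zip stores no file permissions
  onEntry: function (entry, zipfile) {
    // entry is forwarded from yauzl's `entry` event; entry.fileName is the path inside the archive
    console.log('extracting', entry.fileName)
  }
}, function (err) {
  if (err) console.error('extraction failed', err)
})
```
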
## CLI Usage

```
extract-zip foo.zip <targetDirectory>
```

If not specified, `targetDirectory` will default to `process.cwd()`.