refactor(Cypress): add node_modules

2021-09-02 17:18:41 +02:00
parent 1aa57bbd0a
commit bc6e1bc12e
4238 changed files with 340975 additions and 8 deletions

node_modules/module-deps/.travis.yml generated vendored Normal file

@@ -0,0 +1,19 @@
language: node_js
node_js:
- "12"
- "11"
- "10"
- "8"
- "6"
- "4"
- "iojs"
- "0.12"
- "0.10"
- "0.8"
sudo: false
before_install:
# Old npm certs are untrusted https://github.com/npm/npm/issues/20191
- 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.8" ]; then export NPM_CONFIG_STRICT_SSL=false; fi'
- 'nvm install-latest-npm'
matrix:
fast_finish: true

node_modules/module-deps/CHANGELOG.md generated vendored Normal file

@@ -0,0 +1,41 @@
# module-deps Change Log
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
## 6.2.3 - 2020-08-03
* Improve error message when dependencies cannot be found [#123](https://github.com/browserify/module-deps/pull/123)
* Upgrade `browser-resolve` to 2.0 [#164](https://github.com/browserify/module-deps/pull/164)
## 6.2.2 - 2019-12-13
* Update minimum required version of `detective` [#161](https://github.com/browserify/module-deps/pull/161)
## 6.2.1 - 2019-05-24
* Update minimum required version of `cached-path-relative` [#155](https://github.com/browserify/module-deps/pull/155)
* Add CI testing on Windows [#152](https://github.com/browserify/module-deps/pull/152)
* Add CI testing on recent Node.js versions (10, 11, 12) [#157](https://github.com/browserify/module-deps/pull/157)
## 6.2.0 - 2018-11-13
* Add `.dirname` property to the object given to `opts.resolve` [#154](https://github.com/browserify/module-deps/pull/154)
## 6.1.0 - 2018-05-16
* Add a `detect` option for custom dependency detection [#63](https://github.com/browserify/module-deps/pull/63), [2dcc339](https://github.com/browserify/module-deps/commit/2dcc3399ee67ba51ed26d9a0605a8ccdc70c9db7)
## 6.0.2 - 2018-03-28
* Fix missing 'file' event when file has a syntax error [#146](https://github.com/browserify/module-deps/pull/146)
## 6.0.1 - 2018-03-27
* Fix crash when file has a transform and a syntax error [#145](https://github.com/browserify/module-deps/pull/145)
## 6.0.0 - 2018-02-07
* Ignore package.json files that do not contain JSON objects [#142](https://github.com/browserify/module-deps/pull/142)
* Don't preserve symlinks when resolving transforms, matching Node resolution behaviour [#133](https://github.com/browserify/module-deps/pull/133)
* Fix 'file' events with `persistentCache` [#127](https://github.com/browserify/module-deps/pull/127)
* Add dependencies to a file when transforms emit 'dep' event [#141](https://github.com/browserify/module-deps/pull/141)
## 5.0.1 - 2018-01-06
* Restore support for node < 4.0.0.
## 5.0.0 - 2018-01-02
* Update deps
* Drop support for node < 0.12 due to detective dropping support
* Add engines field set to `>=4.0.0`

node_modules/module-deps/LICENSE generated vendored Normal file

@@ -0,0 +1,18 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

node_modules/module-deps/appveyor.yml generated vendored Normal file

@@ -0,0 +1,29 @@
environment:
matrix:
# not yet available on appveyor
# - nodejs_version: "12"
- nodejs_version: "11"
- nodejs_version: "10"
- nodejs_version: "8"
- nodejs_version: "6"
- nodejs_version: "4"
- nodejs_version: "0.12"
- nodejs_version: "0.10"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
# Post-install test scripts.
test_script:
# Output useful info for debugging.
- node --version
- npm --version
# run tests
- npm test
# Don't actually build.
build: off

node_modules/module-deps/bin/cmd.js generated vendored Executable file

@@ -0,0 +1,28 @@
#!/usr/bin/env node
var mdeps = require('../');
var subarg = require('subarg');
var fs = require('fs');
var path = require('path');
var argv = subarg(process.argv.slice(2), {
alias: { h: 'help', t: 'transform', g: 'globalTransform' }
});
if (argv.help) return usage(0);
var JSONStream = require('JSONStream');
var files = argv._.map(function (file) {
if (file === '-') return process.stdin;
return path.resolve(file);
});
var md = mdeps(argv);
md.pipe(JSONStream.stringify()).pipe(process.stdout);
files.forEach(function (file) { md.write(file) });
md.end();
function usage (code) {
var r = fs.createReadStream(__dirname + '/usage.txt');
r.pipe(process.stdout);
if (code) r.on('end', function () { process.exit(code) });
}

node_modules/module-deps/bin/usage.txt generated vendored Normal file

@@ -0,0 +1,9 @@
module-deps [FILES] OPTIONS
Generate json output for the entry point FILES.
OPTIONS are:
-t TRANSFORM Apply a TRANSFORM.
-g TRANSFORM Apply a global TRANSFORM.

node_modules/module-deps/example/deps.js generated vendored Normal file

@@ -0,0 +1,6 @@
var mdeps = require('../');
var JSONStream = require('JSONStream');
var md = mdeps();
md.pipe(JSONStream.stringify()).pipe(process.stdout);
md.end({ file: __dirname + '/files/main.js' });

node_modules/module-deps/example/files/bar.js generated vendored Normal file

@@ -0,0 +1,3 @@
module.exports = function (n) {
return n * 100;
};

node_modules/module-deps/example/files/foo.js generated vendored Normal file

@@ -0,0 +1,5 @@
var bar = require('./bar');
module.exports = function (n) {
return n * 111 + bar(n);
};

node_modules/module-deps/example/files/main.js generated vendored Normal file

@@ -0,0 +1,2 @@
var foo = require('./foo');
console.log('main: ' + foo(5));

node_modules/module-deps/example/files/xyz.js generated vendored Normal file

@@ -0,0 +1,2 @@
var foo = require('./foo');
console.log('xyz: ' + foo(6));

node_modules/module-deps/index.js generated vendored Normal file

@@ -0,0 +1,635 @@
var fs = require('fs');
var path = require('path');
var relativePath = require('cached-path-relative');
var browserResolve = require('browser-resolve');
var nodeResolve = require('resolve');
var detective = require('detective');
var through = require('through2');
var concat = require('concat-stream');
var parents = require('parents');
var combine = require('stream-combiner2');
var duplexer = require('duplexer2');
var xtend = require('xtend');
var defined = require('defined');
var inherits = require('inherits');
var Transform = require('readable-stream').Transform;
module.exports = Deps;
inherits(Deps, Transform);
function Deps (opts) {
var self = this;
if (!(this instanceof Deps)) return new Deps(opts);
Transform.call(this, { objectMode: true });
if (!opts) opts = {};
this.basedir = opts.basedir || process.cwd();
this.persistentCache = opts.persistentCache || function (file, id, pkg, fallback, cb) {
process.nextTick(function () {
fallback(null, cb);
});
};
this.cache = opts.cache;
this.fileCache = opts.fileCache;
this.pkgCache = opts.packageCache || {};
this.pkgFileCache = {};
this.pkgFileCachePending = {};
this._emittedPkg = {};
this._transformDeps = {};
this.visited = {};
this.walking = {};
this.entries = [];
this._input = [];
this.paths = opts.paths || process.env.NODE_PATH || '';
if (typeof this.paths === 'string') {
var delimiter = path.delimiter || (process.platform === 'win32' ? ';' : ':');
this.paths = this.paths.split(delimiter);
}
this.paths = this.paths
.filter(Boolean)
.map(function (p) {
return path.resolve(self.basedir, p);
});
this.transforms = [].concat(opts.transform).filter(Boolean);
this.globalTransforms = [].concat(opts.globalTransform).filter(Boolean);
this.resolver = opts.resolve || browserResolve;
this.detective = opts.detect || detective;
this.options = xtend(opts);
if (!this.options.modules) this.options.modules = {};
// If the caller passes options.expose, store resolved pathnames for exposed
// modules in it. If not, set it anyway so it's defined later.
if (!this.options.expose) this.options.expose = {};
this.pending = 0;
this.inputPending = 0;
var topfile = path.join(this.basedir, '__fake.js');
this.top = {
id: topfile,
filename: topfile,
paths: this.paths,
basedir: this.basedir
};
}
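// A file counts as top level when the relative path to it from one of the
// entry files (or from the basedir) never passes through node_modules;
// only top-level files get the non-global transforms applied.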
Deps.prototype._isTopLevel = function (file) {
var isTopLevel = this.entries.some(function (main) {
var m = relativePath(path.dirname(main), file);
return m.split(/[\\\/]/).indexOf('node_modules') < 0;
});
if (!isTopLevel) {
var m = relativePath(this.basedir, file);
isTopLevel = m.split(/[\\\/]/).indexOf('node_modules') < 0;
}
return isTopLevel;
};
Deps.prototype._transform = function (row, enc, next) {
var self = this;
if (typeof row === 'string') {
row = { file: row };
}
if (row.transform && row.global) {
this.globalTransforms.push([ row.transform, row.options ]);
return next();
}
else if (row.transform) {
this.transforms.push([ row.transform, row.options ]);
return next();
}
self.pending ++;
var basedir = defined(row.basedir, self.basedir);
if (row.entry !== false) {
self.entries.push(path.resolve(basedir, row.file || row.id));
}
self.lookupPackage(row.file, function (err, pkg) {
if (err && self.options.ignoreMissing) {
self.emit('missing', row.file, self.top);
self.pending --;
return next();
}
if (err) return self.emit('error', err)
self.pending --;
self._input.push({ row: row, pkg: pkg });
next();
});
};
Deps.prototype._flush = function () {
var self = this;
var files = {};
self._input.forEach(function (r) {
var w = r.row, f = files[w.file || w.id];
if (f) {
f.row.entry = f.row.entry || w.entry;
var ex = f.row.expose || w.expose;
f.row.expose = ex;
if (ex && f.row.file === f.row.id && w.file !== w.id) {
f.row.id = w.id;
}
}
else files[w.file || w.id] = r;
});
Object.keys(files).forEach(function (key) {
var r = files[key];
var pkg = r.pkg || {};
var dir = r.row.file ? path.dirname(r.row.file) : self.basedir;
if (!pkg.__dirname) pkg.__dirname = dir;
self.walk(r.row, xtend(self.top, {
filename: path.join(dir, '_fake.js')
}));
});
if (this.pending === 0) this.push(null);
this._ended = true;
};
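// Resolve `id` relative to `parent`: consult the dependency cache first,
// otherwise delegate to the configured resolver (browser-resolve by default)
// and make sure the resolved package has a __dirname attached.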
Deps.prototype.resolve = function (id, parent, cb) {
var self = this;
var opts = self.options;
if (xhas(self.cache, parent.id, 'deps', id)
&& self.cache[parent.id].deps[id]) {
var file = self.cache[parent.id].deps[id];
var pkg = self.pkgCache[file];
if (pkg) return cb(null, file, pkg);
return self.lookupPackage(file, function (err, pkg) {
cb(null, file, pkg);
});
}
parent.packageFilter = function (p, x) {
var pkgdir = path.dirname(x);
if (opts.packageFilter) p = opts.packageFilter(p, x);
p.__dirname = pkgdir;
return p;
};
// have `resolve` do all the package.json lookups,
// see discussion in https://github.com/browserify/browser-resolve/issues/93#issuecomment-667837808
parent.package = undefined;
if (opts.extensions) parent.extensions = opts.extensions;
if (opts.modules) parent.modules = opts.modules;
self.resolver(id, parent, function onresolve (err, file, pkg, fakePath) {
if (err) return cb(err);
if (!file) return cb(new Error(
'module not found: "' + id + '" from file '
+ parent.filename
));
if (!pkg || !pkg.__dirname) {
self.lookupPackage(file, function (err, p) {
if (err) return cb(err);
if (!p) p = {};
if (!p.__dirname) p.__dirname = path.dirname(file);
self.pkgCache[file] = p;
onresolve(err, file, opts.packageFilter
? opts.packageFilter(p, p.__dirname) : p,
fakePath
);
});
}
else cb(err, file, pkg, fakePath);
});
};
Deps.prototype.readFile = function (file, id, pkg) {
var self = this;
if (xhas(this.fileCache, file)) {
return toStream(this.fileCache[file]);
}
var rs = fs.createReadStream(file, {
encoding: 'utf8'
});
return rs;
};
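// Build the transform pipeline for `file`: per-instance transforms (for
// top-level files only), transforms declared under the package.json
// transformKey, and global transforms, combined into one duplex stream.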
Deps.prototype.getTransforms = function (file, pkg, opts) {
if (!opts) opts = {};
var self = this;
var isTopLevel;
if (opts.builtin || opts.inNodeModules) isTopLevel = false;
else isTopLevel = this._isTopLevel(file);
var transforms = [].concat(isTopLevel ? this.transforms : [])
.concat(getTransforms(pkg, {
globalTransform: this.globalTransforms,
transformKey: this.options.transformKey
}))
;
if (transforms.length === 0) return through();
var pending = transforms.length;
var streams = [];
var input = through();
var output = through();
var dup = duplexer(input, output);
for (var i = 0; i < transforms.length; i++) (function (i) {
makeTransform(transforms[i], function (err, trs) {
if (err) {
return dup.emit('error', err);
}
streams[i] = trs;
if (-- pending === 0) done();
});
})(i);
return dup;
function done () {
var middle = combine.apply(null, streams);
middle.on('error', function (err) {
err.message += ' while parsing file: ' + file;
if (!err.filename) err.filename = file;
dup.emit('error', err);
});
input.pipe(middle).pipe(output);
}
function makeTransform (tr, cb) {
var trOpts = {};
if (Array.isArray(tr)) {
trOpts = tr[1] || {};
tr = tr[0];
}
trOpts._flags = trOpts.hasOwnProperty('_flags') ? trOpts._flags : self.options;
if (typeof tr === 'function') {
var t = tr(file, trOpts);
// allow transforms to `stream.emit('dep', path)` to add dependencies for this file
t.on('dep', function (dep) {
if (!self._transformDeps[file]) self._transformDeps[file] = [];
self._transformDeps[file].push(dep);
});
self.emit('transform', t, file);
nextTick(cb, null, wrapTransform(t));
}
else {
loadTransform(tr, trOpts, function (err, trs) {
if (err) return cb(err);
cb(null, wrapTransform(trs));
});
}
}
function loadTransform (id, trOpts, cb) {
var params = {
basedir: path.dirname(file),
preserveSymlinks: false
};
nodeResolve(id, params, function nr (err, res, again) {
if (err && again) return cb && cb(err);
if (err) {
params.basedir = pkg.__dirname;
return nodeResolve(id, params, function (e, r) {
nr(e, r, true);
});
}
if (!res) return cb(new Error(
'cannot find transform module ' + tr
+ ' while transforming ' + file
));
var r = require(res);
if (typeof r !== 'function') {
return cb(new Error(
'Unexpected ' + typeof r + ' exported by the '
+ JSON.stringify(res) + ' package. '
+ 'Expected a transform function.'
));
}
var trs = r(file, trOpts);
// allow transforms to `stream.emit('dep', path)` to add dependencies for this file
trs.on('dep', function (dep) {
if (!self._transformDeps[file]) self._transformDeps[file] = [];
self._transformDeps[file].push(dep);
});
self.emit('transform', trs, file);
cb(null, trs);
});
}
};
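// Recursively walk the dependency graph starting at `id`, resolving it
// against `parent`, applying transforms, parsing the result for require()
// calls and pushing one output row per visited file.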
Deps.prototype.walk = function (id, parent, cb) {
var self = this;
var opts = self.options;
this.pending ++;
var rec = {};
var input;
if (typeof id === 'object') {
rec = xtend(id);
if (rec.entry === false) delete rec.entry;
id = rec.file || rec.id;
input = true;
this.inputPending ++;
}
self.resolve(id, parent, function (err, file, pkg, fakePath) {
// this is checked early because parent.modules is also modified
// by this function.
var builtin = has(parent.modules, id);
if (rec.expose) {
// Set options.expose to make the resolved pathname available to the
// caller. They may or may not have requested it, but it's harmless
// to set this if they didn't.
self.options.expose[rec.expose] =
self.options.modules[rec.expose] = file;
}
if (pkg && !self._emittedPkg[pkg.__dirname]) {
self._emittedPkg[pkg.__dirname] = true;
self.emit('package', pkg);
}
if (opts.postFilter && !opts.postFilter(id, file, pkg)) {
if (--self.pending === 0) self.push(null);
if (input) --self.inputPending;
return cb && cb(null, undefined);
}
if (err && rec.source) {
file = rec.file;
var ts = self.getTransforms(file, pkg);
ts.on('error', function (err) {
self.emit('error', err);
});
ts.pipe(concat(function (body) {
rec.source = body.toString('utf8');
fromSource(file, rec.source, pkg);
}));
return ts.end(rec.source);
}
if (err && self.options.ignoreMissing) {
if (--self.pending === 0) self.push(null);
if (input) --self.inputPending;
self.emit('missing', id, parent);
return cb && cb(null, undefined);
}
if (err) {
var message = 'Can\'t walk dependency graph: ' + err.message;
message += '\n required by ' + parent.filename;
err.message = message;
return self.emit('error', err);
}
if (self.visited[file]) {
if (-- self.pending === 0) self.push(null);
if (input) --self.inputPending;
return cb && cb(null, file);
}
self.visited[file] = true;
if (rec.source) {
var ts = self.getTransforms(file, pkg);
ts.on('error', function (err) {
self.emit('error', err);
});
ts.pipe(concat(function (body) {
rec.source = body.toString('utf8');
fromSource(file, rec.source, pkg);
}));
return ts.end(rec.source);
}
var c = self.cache && self.cache[file];
if (c) return fromDeps(file, c.source, c.package, fakePath, Object.keys(c.deps));
self.persistentCache(file, id, pkg, persistentCacheFallback, function (err, c) {
self.emit('file', file, id);
if (err) {
self.emit('error', err);
return;
}
fromDeps(file, c.source, c.package, fakePath, Object.keys(c.deps));
});
function persistentCacheFallback (dataAsString, cb) {
var stream = dataAsString ? toStream(dataAsString) : self.readFile(file, id, pkg).on('error', cb);
stream
.pipe(self.getTransforms(fakePath || file, pkg, {
builtin: builtin,
inNodeModules: parent.inNodeModules
}))
.on('error', cb)
.pipe(concat(function (body) {
var src = body.toString('utf8');
try { var deps = getDeps(file, src); }
catch (err) { cb(err); }
if (deps) {
cb(null, {
source: src,
package: pkg,
deps: deps.reduce(function (deps, dep) {
deps[dep] = true;
return deps;
}, {})
});
}
}));
}
});
function getDeps (file, src) {
var deps = rec.noparse ? [] : self.parseDeps(file, src);
// dependencies emitted by transforms
if (self._transformDeps[file]) deps = deps.concat(self._transformDeps[file]);
return deps;
}
function fromSource (file, src, pkg, fakePath) {
var deps = getDeps(file, src);
if (deps) fromDeps(file, src, pkg, fakePath, deps);
}
function fromDeps (file, src, pkg, fakePath, deps) {
var p = deps.length;
var resolved = {};
if (input) --self.inputPending;
(function resolve () {
if (self.inputPending > 0) return setTimeout(resolve);
deps.forEach(function (id) {
if (opts.filter && !opts.filter(id)) {
resolved[id] = false;
if (--p === 0) done();
return;
}
var isTopLevel = self._isTopLevel(fakePath || file);
var current = {
id: file,
filename: file,
basedir: path.dirname(file),
paths: self.paths,
package: pkg,
inNodeModules: parent.inNodeModules || !isTopLevel
};
self.walk(id, current, function (err, r) {
resolved[id] = r;
if (--p === 0) done();
});
});
if (deps.length === 0) done();
})();
function done () {
if (!rec.id) rec.id = file;
if (!rec.source) rec.source = src;
if (!rec.deps) rec.deps = resolved;
if (!rec.file) rec.file = file;
if (self.entries.indexOf(file) >= 0) {
rec.entry = true;
}
self.push(rec);
if (cb) cb(null, file);
if (-- self.pending === 0) self.push(null);
}
}
};
Deps.prototype.parseDeps = function (file, src, cb) {
var self = this;
if (this.options.noParse === true) return [];
if (/\.json$/.test(file)) return [];
if (Array.isArray(this.options.noParse)
&& this.options.noParse.indexOf(file) >= 0) {
return [];
}
try { var deps = self.detective(src) }
catch (ex) {
var message = ex && ex.message ? ex.message : ex;
throw new Error(
'Parsing file ' + file + ': ' + message
);
}
return deps;
};
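// Find the nearest package.json above `file` by walking up parent
// directories, caching both hits and misses and stopping at a
// node_modules directory.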
Deps.prototype.lookupPackage = function (file, cb) {
var self = this;
var cached = this.pkgCache[file];
if (cached) return nextTick(cb, null, cached);
if (cached === false) return nextTick(cb, null, undefined);
var dirs = parents(file ? path.dirname(file) : self.basedir);
(function next () {
if (dirs.length === 0) {
self.pkgCache[file] = false;
return cb(null, undefined);
}
var dir = dirs.shift();
if (dir.split(/[\\\/]/).slice(-1)[0] === 'node_modules') {
return cb(null, undefined);
}
var pkgfile = path.join(dir, 'package.json');
var cached = self.pkgCache[pkgfile];
if (cached) return nextTick(cb, null, cached);
else if (cached === false) return next();
var pcached = self.pkgFileCachePending[pkgfile];
if (pcached) return pcached.push(onpkg);
pcached = self.pkgFileCachePending[pkgfile] = [];
fs.readFile(pkgfile, function (err, src) {
if (err) return onpkg();
try { var pkg = JSON.parse(src) }
catch (err) {
return onpkg(new Error([
err + ' while parsing json file ' + pkgfile
].join('')));
}
pkg.__dirname = dir;
self.pkgCache[pkgfile] = pkg;
self.pkgCache[file] = pkg;
onpkg(null, pkg);
});
function onpkg (err, pkg) {
if (self.pkgFileCachePending[pkgfile]) {
var fns = self.pkgFileCachePending[pkgfile];
delete self.pkgFileCachePending[pkgfile];
fns.forEach(function (f) { f(err, pkg) });
}
if (err) cb(err);
else if (pkg && typeof pkg === 'object') cb(null, pkg);
else {
self.pkgCache[pkgfile] = false;
next();
}
}
})();
};
function getTransforms (pkg, opts) {
var trx = [];
if (opts.transformKey) {
var n = pkg;
var keys = opts.transformKey;
for (var i = 0; i < keys.length; i++) {
if (n && typeof n === 'object') n = n[keys[i]];
else break;
}
if (i === keys.length) {
trx = [].concat(n).filter(Boolean);
}
}
return trx.concat(opts.globalTransform || []);
}
function nextTick (cb) {
var args = [].slice.call(arguments, 1);
process.nextTick(function () { cb.apply(null, args) });
}
function xhas (obj) {
if (!obj) return false;
for (var i = 1; i < arguments.length; i++) {
var key = arguments[i];
if (!has(obj, key)) return false;
obj = obj[key];
}
return true;
}
function toStream (dataAsString) {
var tr = through();
tr.push(dataAsString);
tr.push(null);
return tr;
}
function has (obj, key) {
return obj && Object.prototype.hasOwnProperty.call(obj, key);
}
function wrapTransform (tr) {
if (typeof tr.read === 'function') return tr;
var input = through(), output = through();
input.pipe(tr).pipe(output);
var wrapper = duplexer(input, output);
tr.on('error', function (err) { wrapper.emit('error', err) });
return wrapper;
}


@@ -0,0 +1,8 @@
# browser-resolve Change Log
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
## 2.0.0 - 2020-08-03
* Update `resolve` to 1.17.0+.
Technically, this is a bugfix and feature update. However, older browserify versions rely on a `resolve` bug, and would break if this was published as a minor version update.


@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2013-2015 Roman Shtylman <shtylman@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,165 @@
# browser-resolve [![Build Status](https://travis-ci.org/browserify/browser-resolve.png?branch=master)](https://travis-ci.org/browserify/browser-resolve)
node.js resolve algorithm with [browser field](https://github.com/defunctzombie/package-browser-field-spec) support.
## api
### bresolve(id, opts={}, cb)
Resolve a module path and call `cb(err, path [, pkg])`
Options:
* `basedir` - directory to begin resolving from
* `browser` - the 'browser' property to use from package.json (defaults to 'browser')
* `filename` - the calling filename where the `require()` call originated (in the source)
* `modules` - object with module id/name -> path mappings to consult before doing manual resolution (use to provide core modules)
* `packageFilter` - transform the parsed `package.json` contents before looking at the `main` field
* `paths` - `require.paths` array to use if nothing is found on the normal `node_modules` recursive walk
Additionally, options supported by [node-resolve](https://github.com/browserify/resolve#resolveid-opts-cb) can be used.
### bresolve.sync(id, opts={})
Same as the async resolve, just uses sync methods.
Additionally, options supported by [node-resolve](https://github.com/browserify/resolve#resolvesyncid-opts-cb) can be used.
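A small sketch of the sync variant using the `modules` option (the shim path here is an assumption for illustration):
``` js
var bresolve = require('browser-resolve');

var resolved = bresolve.sync('events', {
    filename: __filename,
    // map the core 'events' module to a browser shim
    modules: { events: '/your/path/to/events.js' }
});
console.log(resolved);
```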
## basic usage
you can resolve files like `require.resolve()`:
``` js
var bresolve = require('browser-resolve');
bresolve('../', { filename: __filename }, function(err, path) {
console.log(path);
});
```
```
$ node example/resolve.js
/home/substack/projects/browser-resolve/index.js
```
## core modules
By default, core modules (http, dgram, etc.) resolve to their own name as the path. If you want specific paths returned instead, specify a `modules` property in the options object.
``` js
var shims = {
http: '/your/path/to/http.js'
};
var bresolve = require('browser-resolve');
bresolve('http', { modules: shims }, function(err, path) {
console.log(path);
});
```
```
$ node example/builtin.js
/home/substack/projects/browser-resolve/builtin/http.js
```
## browser field
browser-specific versions of modules
``` json
{
"name": "custom",
"version": "0.0.0",
"browser": {
"./main.js": "custom.js"
}
}
```
``` js
var bresolve = require('browser-resolve');
var parent = { filename: __dirname + '/custom/file.js' };
bresolve('./main.js', parent, function(err, path) {
console.log(path);
});
```
```
$ node example/custom.js
/home/substack/projects/browser-resolve/example/custom/custom.js
```
You can use different package.json properties for the resolution, if you want to allow packages to target different environments for example:
``` json
{
"browser": { "./main.js": "custom.js" },
"chromeapp": { "./main.js": "custom-chromeapp.js" }
}
```
``` js
var bresolve = require('browser-resolve');
var parent = { filename: __dirname + '/custom/file.js', browser: 'chromeapp' };
bresolve('./main.js', parent, function(err, path) {
console.log(path);
});
```
```
$ node example/custom.js
/home/substack/projects/browser-resolve/example/custom/custom-chromeapp.js
```
## skip
You can skip over dependencies by setting a
[browser field](https://gist.github.com/defunctzombie/4339901)
value to `false`:
``` json
{
"name": "skip",
"version": "0.0.0",
"browser": {
"tar": false
}
}
```
This is handy if you have code like:
``` js
var tar = require('tar');
exports.add = function (a, b) {
return a + b;
};
exports.parse = function () {
return tar.Parse();
};
```
so that `require('tar')` will just return `{}` in the browser because you don't
intend to support the `.parse()` export in a browser environment.
``` js
var bresolve = require('browser-resolve');
var parent = { filename: __dirname + '/skip/main.js' };
bresolve('tar', parent, function(err, path) {
console.log(path);
});
```
```
$ node example/skip.js
/home/substack/projects/browser-resolve/empty.js
```
# license
MIT
# upgrade notes
Prior to v1.x this library provided shims for node core modules. These have since been removed. If you want to have alternative core modules provided, use the `modules` option when calling `bresolve()`.
This was done to allow package managers to choose which shims they want to use without browser-resolve being the central point of update.


@@ -0,0 +1,345 @@
// builtin
var fs = require('fs');
var path = require('path');
// vendor
var resv = require('resolve');
// given a path, create an array of node_module paths for it
// borrowed from substack/resolve
function nodeModulesPaths (start, cb) {
var splitRe = process.platform === 'win32' ? /[\/\\]/ : /\/+/;
var parts = start.split(splitRe);
var dirs = [];
for (var i = parts.length - 1; i >= 0; i--) {
if (parts[i] === 'node_modules') continue;
var dir = path.join.apply(
path, parts.slice(0, i + 1).concat(['node_modules'])
);
if (!parts[0].match(/([A-Za-z]:)/)) {
dir = '/' + dir;
}
dirs.push(dir);
}
return dirs;
}
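// parse a package.json string and record browser-field replacements for
// this package directory into the `shims` map (original path/id -> shim)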
function find_shims_in_package(pkgJson, cur_path, shims, browser) {
try {
var info = JSON.parse(pkgJson);
}
catch (err) {
err.message = pkgJson + ' : ' + err.message
throw err;
}
var replacements = getReplacements(info, browser);
// no replacements, skip shims
if (!replacements) {
return;
}
// if browser mapping is a string
// then it just replaces the main entry point
if (typeof replacements === 'string') {
var key = path.resolve(cur_path, info.main || 'index.js');
shims[key] = path.resolve(cur_path, replacements);
return;
}
// http://nodejs.org/api/modules.html#modules_loading_from_node_modules_folders
Object.keys(replacements).forEach(function(key) {
var val;
if (replacements[key] === false) {
val = path.normalize(__dirname + '/empty.js');
}
else {
val = replacements[key];
// if target is a relative path, then resolve
// otherwise we assume target is a module
if (val[0] === '.') {
val = path.resolve(cur_path, val);
}
}
if (key[0] === '/' || key[0] === '.') {
// if begins with / ../ or ./ then we must resolve to a full path
key = path.resolve(cur_path, key);
}
shims[key] = val;
});
[ '.js', '.json' ].forEach(function (ext) {
Object.keys(shims).forEach(function (key) {
if (!shims[key + ext]) {
shims[key + ext] = shims[key];
}
});
});
}
// paths is mutated
// load shims from first package.json file found
function load_shims(paths, browser, cb) {
// identify if our file should be replaced per the browser field
// original filename|id -> replacement
var shims = Object.create(null);
(function next() {
var cur_path = paths.shift();
if (!cur_path) {
return cb(null, shims);
}
var pkg_path = path.join(cur_path, 'package.json');
fs.readFile(pkg_path, 'utf8', function(err, data) {
if (err) {
// ignore paths we can't open
// avoids an exists check
if (err.code === 'ENOENT') {
return next();
}
return cb(err);
}
try {
find_shims_in_package(data, cur_path, shims, browser);
return cb(null, shims);
}
catch (err) {
return cb(err);
}
});
})();
};
// paths is mutated
// synchronously load shims from first package.json file found
function load_shims_sync(paths, browser) {
// identify if our file should be replaced per the browser field
// original filename|id -> replacement
var shims = Object.create(null);
var cur_path;
while (cur_path = paths.shift()) {
var pkg_path = path.join(cur_path, 'package.json');
try {
var data = fs.readFileSync(pkg_path, 'utf8');
find_shims_in_package(data, cur_path, shims, browser);
return shims;
}
catch (err) {
// ignore paths we can't open
// avoids an exists check
if (err.code === 'ENOENT') {
continue;
}
throw err;
}
}
return shims;
}
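// wrap the caller's resolve options so that `resolve` honours the browser
// field: packageFilter rewrites `main`, pathFilter remaps relative paths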
function build_resolve_opts(opts, base) {
var packageFilter = opts.packageFilter;
var browser = normalizeBrowserFieldName(opts.browser)
opts.basedir = base;
opts.packageFilter = function (info, pkgdir) {
if (packageFilter) info = packageFilter(info, pkgdir);
var replacements = getReplacements(info, browser);
// no browser field, keep info unchanged
if (!replacements) {
return info;
}
info[browser] = replacements;
// replace main
if (typeof replacements === 'string') {
info.main = replacements;
return info;
}
var replace_main = replacements[info.main || './index.js'] ||
replacements['./' + info.main || './index.js'];
info.main = replace_main || info.main;
return info;
};
var pathFilter = opts.pathFilter;
opts.pathFilter = function(info, resvPath, relativePath) {
if (relativePath[0] != '.') {
relativePath = './' + relativePath;
}
var mappedPath;
if (pathFilter) {
mappedPath = pathFilter.apply(this, arguments);
}
if (mappedPath) {
return mappedPath;
}
var replacements = info[browser];
if (!replacements) {
return;
}
mappedPath = replacements[relativePath];
if (!mappedPath && path.extname(relativePath) === '') {
mappedPath = replacements[relativePath + '.js'];
if (!mappedPath) {
mappedPath = replacements[relativePath + '.json'];
}
}
return mappedPath;
};
return opts;
}
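// asynchronous browser-field aware resolution: load shims from the nearest
// package.json, then defer to `resolve` with the wrapped options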
function resolve(id, opts, cb) {
// opts.filename
// opts.paths
// opts.modules
// opts.packageFilter
opts = opts || {};
opts.filename = opts.filename || '';
var base = path.dirname(opts.filename);
if (opts.basedir) {
base = opts.basedir;
}
var paths = nodeModulesPaths(base);
if (opts.paths) {
paths.push.apply(paths, opts.paths);
}
paths = paths.map(function(p) {
return path.dirname(p);
});
// we must always load shims because the browser field could shim out a module
load_shims(paths, opts.browser, function(err, shims) {
if (err) {
return cb(err);
}
var resid = path.resolve(opts.basedir || path.dirname(opts.filename), id);
if (shims[id] || shims[resid]) {
var xid = shims[id] ? id : resid;
// if the shim is an absolute path, it was fully resolved
if (shims[xid][0] === '/') {
return resv(shims[xid], build_resolve_opts(opts, base), function(err, full, pkg) {
cb(null, full, pkg);
});
}
// module -> alt-module shims
id = shims[xid];
}
var modules = opts.modules || Object.create(null);
var shim_path = modules[id];
if (shim_path) {
return cb(null, shim_path);
}
// our browser field resolver
// if browser field is an object tho?
var full = resv(id, build_resolve_opts(opts, base), function(err, full, pkg) {
if (err) {
return cb(err);
}
var resolved = (shims) ? shims[full] || full : full;
cb(null, resolved, pkg);
});
});
};
resolve.sync = function (id, opts) {
// opts.filename
// opts.paths
// opts.modules
// opts.packageFilter
opts = opts || {};
opts.filename = opts.filename || '';
var base = path.dirname(opts.filename);
if (opts.basedir) {
base = opts.basedir;
}
var paths = nodeModulesPaths(base);
if (opts.paths) {
paths.push.apply(paths, opts.paths);
}
paths = paths.map(function(p) {
return path.dirname(p);
});
// we must always load shims because the browser field could shim out a module
var shims = load_shims_sync(paths, opts.browser);
var resid = path.resolve(opts.basedir || path.dirname(opts.filename), id);
if (shims[id] || shims[resid]) {
var xid = shims[id] ? id : resid;
// if the shim is an absolute path, it was fully resolved
if (shims[xid][0] === '/') {
return resv.sync(shims[xid], build_resolve_opts(opts, base));
}
// module -> alt-module shims
id = shims[xid];
}
var modules = opts.modules || Object.create(null);
var shim_path = modules[id];
if (shim_path) {
return shim_path;
}
// our browser field resolver
// if browser field is an object tho?
var full = resv.sync(id, build_resolve_opts(opts, base));
return (shims) ? shims[full] || full : full;
};
function normalizeBrowserFieldName(browser) {
return browser || 'browser';
}
function getReplacements(info, browser) {
browser = normalizeBrowserFieldName(browser);
var replacements = info[browser] || info.browser;
// support legacy browserify field for easier migration from legacy
// many packages used this field historically
if (typeof info.browserify === 'string' && !replacements) {
replacements = info.browserify;
}
return replacements;
}
module.exports = resolve;


@@ -0,0 +1,61 @@
{
"_from": "browser-resolve@^2.0.0",
"_id": "browser-resolve@2.0.0",
"_inBundle": false,
"_integrity": "sha512-7sWsQlYL2rGLy2IWm8WL8DCTJvYLc/qlOnsakDac87SOoCd16WLsaAMdCiAqsTNHIe+SXfaqyxyo6THoWqs8WQ==",
"_location": "/module-deps/browser-resolve",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "browser-resolve@^2.0.0",
"name": "browser-resolve",
"escapedName": "browser-resolve",
"rawSpec": "^2.0.0",
"saveSpec": null,
"fetchSpec": "^2.0.0"
},
"_requiredBy": [
"/module-deps"
],
"_resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-2.0.0.tgz",
"_shasum": "99b7304cb392f8d73dba741bb2d7da28c6d7842b",
"_spec": "browser-resolve@^2.0.0",
"_where": "/home/simon/Documents/lifen-autotest/node_modules/module-deps",
"author": {
"name": "Roman Shtylman",
"email": "shtylman@gmail.com"
},
"bugs": {
"url": "https://github.com/browserify/browser-resolve/issues"
},
"bundleDependencies": false,
"dependencies": {
"resolve": "^1.17.0"
},
"deprecated": false,
"description": "resolve which handles browser field support in package.json",
"devDependencies": {
"mocha": "^2.5.3"
},
"files": [
"index.js",
"empty.js"
],
"homepage": "https://github.com/browserify/browser-resolve#readme",
"keywords": [
"resolve",
"browser"
],
"license": "MIT",
"main": "index.js",
"name": "browser-resolve",
"repository": {
"type": "git",
"url": "git://github.com/browserify/browser-resolve.git"
},
"scripts": {
"test": "node scripts/setup-symlinks.js && mocha --reporter list test/*.js"
},
"version": "2.0.0"
}

node_modules/module-deps/package.json generated vendored Normal file

@@ -0,0 +1,86 @@
{
"_from": "module-deps@^6.0.0",
"_id": "module-deps@6.2.3",
"_inBundle": false,
"_integrity": "sha512-fg7OZaQBcL4/L+AK5f4iVqf9OMbCclXfy/znXRxTVhJSeW5AIlS9AwheYwDaXM3lVW7OBeaeUEY3gbaC6cLlSA==",
"_location": "/module-deps",
"_phantomChildren": {
"resolve": "1.20.0"
},
"_requested": {
"type": "range",
"registry": true,
"raw": "module-deps@^6.0.0",
"name": "module-deps",
"escapedName": "module-deps",
"rawSpec": "^6.0.0",
"saveSpec": null,
"fetchSpec": "^6.0.0"
},
"_requiredBy": [
"/browserify"
],
"_resolved": "https://registry.npmjs.org/module-deps/-/module-deps-6.2.3.tgz",
"_shasum": "15490bc02af4b56cf62299c7c17cba32d71a96ee",
"_spec": "module-deps@^6.0.0",
"_where": "/home/simon/Documents/lifen-autotest/node_modules/browserify",
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"bin": {
"module-deps": "bin/cmd.js"
},
"bugs": {
"url": "https://github.com/browserify/module-deps/issues"
},
"bundleDependencies": false,
"dependencies": {
"JSONStream": "^1.0.3",
"browser-resolve": "^2.0.0",
"cached-path-relative": "^1.0.2",
"concat-stream": "~1.6.0",
"defined": "^1.0.0",
"detective": "^5.2.0",
"duplexer2": "^0.1.2",
"inherits": "^2.0.1",
"parents": "^1.0.0",
"readable-stream": "^2.0.2",
"resolve": "^1.4.0",
"stream-combiner2": "^1.1.1",
"subarg": "^1.0.0",
"through2": "^2.0.0",
"xtend": "^4.0.0"
},
"deprecated": false,
"description": "walk the dependency graph to generate json output that can be fed into browser-pack",
"devDependencies": {
"browser-pack": "^6.0.2",
"tap": "^10.7.3"
},
"engines": {
"node": ">= 0.8.0"
},
"homepage": "https://github.com/browserify/module-deps",
"keywords": [
"dependency",
"graph",
"browser",
"require",
"module",
"exports",
"json"
],
"license": "MIT",
"main": "index.js",
"name": "module-deps",
"repository": {
"type": "git",
"url": "git://github.com/browserify/module-deps.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "6.2.3"
}

node_modules/module-deps/readme.markdown generated vendored Normal file

@@ -0,0 +1,312 @@
# module-deps
walk the dependency graph to generate json output that can be fed into
[browser-pack](https://github.com/browserify/browser-pack)
[![build status](https://secure.travis-ci.org/browserify/module-deps.png)](http://travis-ci.org/browserify/module-deps)
# example
``` js
var mdeps = require('module-deps');
var JSONStream = require('JSONStream');
var md = mdeps();
md.pipe(JSONStream.stringify()).pipe(process.stdout);
md.end({ file: __dirname + '/files/main.js' });
```
output:
```json
$ node example/deps.js
[
{"id":"/home/substack/projects/module-deps/example/files/main.js","source":"var foo = require('./foo');\nconsole.log('main: ' + foo(5));\n","entry":true,"deps":{"./foo":"/home/substack/projects/module-deps/example/files/foo.js"}}
,
{"id":"/home/substack/projects/module-deps/example/files/foo.js","source":"var bar = require('./bar');\n\nmodule.exports = function (n) {\n return n * 111 + bar(n);\n};\n","deps":{"./bar":"/home/substack/projects/module-deps/example/files/bar.js"}}
,
{"id":"/home/substack/projects/module-deps/example/files/bar.js","source":"module.exports = function (n) {\n return n * 100;\n};\n","deps":{}}
]
```
and you can feed this json data into
[browser-pack](https://github.com/browserify/browser-pack):
```bash
$ node example/deps.js | browser-pack | node
main: 1055
```
# usage
```
usage: module-deps [files]
generate json output from each entry file
```
# methods
``` js
var mdeps = require('module-deps')
```
## var d = mdeps(opts={})
Return an object transform stream `d` that expects entry filenames or
`{ id: ..., file: ... }` objects as input and produces objects for every
dependency from a recursive module traversal as output.
Each file in `files` can be a string filename or a stream.
Optionally pass in some `opts`:
* `opts.transform` - a string or array of string transforms (see below)
* `opts.transformKey` - an array path of strings showing where to look in the
package.json for source transformations. If falsy, don't look at the
package.json at all.
* `opts.resolve` - custom resolve function using the
`opts.resolve(id, parent, cb)` signature that
[browser-resolve](https://github.com/shtylman/node-browser-resolve) has
* `opts.detect` - a custom dependency detection function. `opts.detect(source)`
should return an array of dependency module names. By default
[detective](https://github.com/browserify/detective) is used.
* `opts.filter` - a function (id) to skip resolution of some module `id` strings.
If defined, `opts.filter(id)` should return truthy for all the ids to include
and falsey for all the ids to skip.
* `opts.postFilter` - a function (id, file, pkg) that gets called after `id` has
been resolved. Return false to skip this file.
* `opts.packageFilter` - transform the parsed package.json contents before using
the values. `opts.packageFilter(pkg, dir)` should return the new `pkg` object to
use.
* `opts.noParse` - an array of absolute paths to not parse for dependencies. Use
this for large dependencies like jquery or threejs which take forever to parse.
* `opts.cache` - an object mapping filenames to file objects to skip costly io
* `opts.packageCache` - an object mapping filenames to their parent package.json
contents for browser fields, main entries, and transforms
* `opts.fileCache` - an object mapping filenames to raw source to avoid reading
from disk.
* `opts.persistentCache` - a complex cache handler that allows async and persistent
caching of data. A `persistentCache` needs to follow this interface:
```js
function persistentCache (
file, // the path to the file that is loaded
id, // the id that is used to reference this file
pkg, // the package that this file belongs to fallback
fallback, // async fallback handler to be called if the cache doesn't hold the given file
cb // callback handler that receives the cache data
) {
if (hasError()) {
return cb(error) // Pass any error to the callback
}
var fileData = fs.readFileSync(file)
var key = keyFromFile(file, fileData)
if (db.has(key)) {
return cb(null, {
source: db.get(key).toString(),
package: pkg, // The package for housekeeping
deps: {
'id': // id that is used to reference a required file
'file' // file path to the required file
}
})
}
//
// The fallback will process the file in case the file is not
// in cache.
//
// Note that if your implementation doesn't need the file data
// then you can pass `null` instead of the source and the fallback will
// fetch the data by itself.
//
fallback(fileData, function (error, cacheableEntry) {
if (error) {
return cb(error)
}
db.addToCache(key, cacheableEntry)
cb(null, cacheableEntry)
})
}
```
* `opts.paths` - array of global paths to search. Defaults to splitting on `':'`
in `process.env.NODE_PATH`
* `opts.ignoreMissing` - ignore files that failed to resolve
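As a quick illustration, here is a minimal sketch combining a few of the options above (`filter` plus `ignoreMissing`); the entry path is assumed:
``` js
var mdeps = require('module-deps');
var JSONStream = require('JSONStream');

var md = mdeps({
    // skip resolving any require('tar') calls
    filter: function (id) { return id !== 'tar' },
    // emit 'missing' instead of erroring on unresolvable requires
    ignoreMissing: true
});
md.on('missing', function (id, parent) {
    console.error('could not resolve ' + id + ' from ' + parent.filename);
});
md.pipe(JSONStream.stringify()).pipe(process.stdout);
md.end({ file: __dirname + '/files/main.js' });
```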
# input objects
Input objects should be string filenames or objects with these parameters:
* `row.file` - filename
* `row.entry` - whether to treat this file as an entry point, defaults to
`true`. Set to `false` to include this file, but not run it automatically.
* `row.expose` - name to be exposed as
* `row.noparse` - when true, don't parse the file contents for dependencies
or objects can specify transforms:
* `row.transform` - string name, path, or function
* `row.options` - transform options as an object
* `row.global` - boolean, whether the transform is global
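A short sketch of writing such row objects into the stream (the file paths and the inline identity transform are assumptions for illustration):
``` js
var mdeps = require('module-deps');
var JSONStream = require('JSONStream');
var through = require('through2');

var md = mdeps();
md.pipe(JSONStream.stringify()).pipe(process.stdout);

// register an identity transform for top-level files
md.write({ transform: function (file) { return through() }, global: false });

// include a file without running it automatically, exposed under a name
md.write({ file: __dirname + '/files/bar.js', entry: false, expose: 'bar' });

// the entry point
md.end({ file: __dirname + '/files/main.js' });
```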
# output objects
Output objects describe files with dependencies. They have these properties:
* `row.id` - an identifier for the file, used in the `row.deps` property
* `row.file` - path to the source file
* `row.entry` - true if the file is an entry point
* `row.expose` - name to be exposed as
* `row.source` - source file content as a string
* `row.deps` - object describing dependencies. The keys are strings as used
in `require()` calls in the file, and values are the row IDs (file paths)
of dependencies.
# events
## d.on('transform', function (tr, file) {})
Every time a transform is applied to a `file`, a `'transform'` event fires with
the instantiated transform stream `tr`.
## d.on('file', function (file) {})
Every time a file is read, this event fires with the file path.
## d.on('missing', function (id, parent) {})
When `opts.ignoreMissing` is enabled, this event fires for each missing package.
## d.on('package', function (pkg) {})
Every time a package is read, this event fires. The directory name of the
package is available in `pkg.__dirname`.
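A sketch wiring up these events (entry path assumed):
``` js
var mdeps = require('module-deps');

var md = mdeps({ ignoreMissing: true });
md.on('file', function (file) { console.error('read    ' + file) });
md.on('package', function (pkg) { console.error('package ' + pkg.__dirname) });
md.on('transform', function (tr, file) { console.error('transform applied to ' + file) });
md.on('missing', function (id, parent) {
    console.error('missing ' + id + ' required by ' + parent.filename);
});
md.resume(); // drop the output rows; only the events are of interest here
md.end({ file: __dirname + '/files/main.js' });
```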
# transforms
module-deps can be configured to run source transformations on files before
parsing them for `require()` calls. These transforms are useful if you want to
compile a language like [coffeescript](http://coffeescript.org/) on the fly or
if you want to load static assets into your bundle by parsing the AST for
`fs.readFileSync()` calls.
If the transform is a function, it should take the `file` name as an argument
and return a through stream that will be written file contents and should output
the new transformed file contents.
If the transform is a string, it is treated as a module name that will resolve
to a module that is expected to follow this format:
``` js
var through = require('through2');
module.exports = function (file, opts) { return through() };
```
You don't necessarily need to use the
[through2](https://github.com/rvagg/through2) module to create a
readable/writable filter stream for transforming file contents, but this is an
easy way to do it.
module-deps looks for `require()` calls and adds their arguments as dependencies
of a file. Transform streams can emit `'dep'` events to include additional
dependencies that are not consumed with `require()`.
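For example, a transform along these lines (the extra file name is hypothetical) could pass the source through unchanged while reporting an additional dependency via a `'dep'` event:
``` js
var through = require('through2');

module.exports = function (file, opts) {
    return through(function (chunk, enc, next) {
        next(null, chunk); // pass the source through untouched
    }, function (done) {
        // report a dependency discovered by other means
        this.emit('dep', './extra-dependency.js');
        done();
    });
};
```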
When you call `mdeps()` with an `opts.transform`, the transformations you
specify will not be run for any files in node_modules/. This is because modules
you include should be self-contained and not need to worry about guarding
themselves against transformations that may happen upstream.
Modules can apply their own transformations by setting a transformation pipeline
in their package.json at the `opts.transformKey` path. These transformations
only apply to the files directly in the module itself, not to the module's
dependants nor to its dependencies.
## package.json transformKey
Transform keys live at a configurable location in the package.json denoted by
the `opts.transformKey` array.
For a transformKey of `['foo','bar']`, the transformKey can be a single string
(`"fff"`):
``` json
{
"foo": {
"bar": "fff"
}
}
```
or an array of strings (`["fff","ggg"]`):
``` json
{
"foo": {
"bar": ["fff","ggg"]
}
}
```
If you want to pass options to the transforms, you can use a 2-element array
inside of the primary array. Here `fff` gets an options object with `{"x":3}`
and `ggg` gets `{"y":4}`:
``` json
{
"foo": {
"bar": [["fff",{"x":3}],["ggg",{"y":4}]]
}
}
```
Options sent to the module-deps constructor are also provided under
`opts._flags`. These options are sometimes required if your transform
needs to do something different when browserify is run in debug mode, for
example.
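For instance, a transform might look at the debug flag forwarded from the constructor options, roughly like this sketch:
``` js
var through = require('through2');

module.exports = function (file, opts) {
    var debug = opts && opts._flags && opts._flags.debug;
    return through(function (chunk, enc, next) {
        // branch on `debug` here, e.g. skip expensive rewriting when
        // source maps are wanted; this sketch just passes data through
        next(null, chunk);
    });
};
```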
# usage
```
module-deps [FILES] OPTIONS
Generate json output for the entry point FILES.
OPTIONS are:
-t TRANSFORM Apply a TRANSFORM.
-g TRANSFORM Apply a global TRANSFORM.
```
# install
With [npm](http://npmjs.org), to get the module do:
```
npm install module-deps
```
and to get the `module-deps` command do:
```
npm install -g module-deps
```
# license
MIT

node_modules/module-deps/test/bundle.js generated vendored Normal file

@@ -0,0 +1,23 @@
var parser = require('../');
var test = require('tap').test;
var JSONStream = require('JSONStream');
var packer = require('browser-pack');
var path = require('path');
test('bundle', function (t) {
t.plan(1);
var p = parser();
p.end(path.join(__dirname, '/files/main.js'));
p.on('error', t.fail.bind(t));
var pack = packer();
p.pipe(JSONStream.stringify()).pipe(pack);
var src = '';
pack.on('data', function (buf) { src += buf });
pack.on('end', function () {
Function('console', src)({
log: function (s) { t.equal(s, 'main: 1055') }
});
});
});

node_modules/module-deps/test/cache.js generated vendored Normal file

@@ -0,0 +1,50 @@
var parser = require('../');
var test = require('tap').test;
var path = require('path');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = {
foo: 'notreal foo',
bar: 'notreal bar'
};
var cache = {};
cache[files.foo] = {
source: sources.foo,
deps: { './bar': files.bar }
};
cache[files.bar] = {
source: sources.bar,
deps: {}
};
test('uses cache', function (t) {
t.plan(1);
var p = parser({ cache: cache });
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
file: files.foo,
source: sources.foo,
deps: { './bar': files.bar }
},
{
id: files.bar,
file: files.bar,
source: sources.bar,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

node_modules/module-deps/test/cache_expose.js generated vendored Normal file

@@ -0,0 +1,54 @@
var parser = require('../');
var test = require('tap').test;
var path = require('path');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = {
foo: 'notreal foo',
bar: 'notreal bar'
};
var cache = {};
cache[files.foo] = {
source: sources.foo,
deps: { './bar': files.bar }
};
cache[files.bar] = {
source: sources.bar,
deps: {}
};
test('cache preserves expose and entry', function (t) {
t.plan(1);
var p = parser({ cache: cache });
p.write({ id: files.bar, expose: 'bar2', entry: false });
p.end({ id: 'foo', file: files.foo, entry: true, expose: 'foo2' });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
expose: 'foo2',
entry: true,
file: files.foo,
source: sources.foo,
deps: { './bar': files.bar }
},
{
id: files.bar,
expose: 'bar2',
file: files.bar,
source: sources.bar,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

node_modules/module-deps/test/cache_partial.js generated vendored Normal file

@@ -0,0 +1,47 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = {
foo: 'notreal foo',
bar: fs.readFileSync(files.bar, 'utf8')
};
var cache = {};
cache[files.foo] = {
source: sources.foo,
deps: { './bar': files.bar }
};
test('uses cache and reads from disk', function (t) {
t.plan(1);
var p = parser({ cache: cache });
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
file: files.foo,
source: sources.foo,
deps: { './bar': files.bar }
},
{
id: files.bar,
file: files.bar,
source: sources.bar,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

node_modules/module-deps/test/cache_partial_expose.js generated vendored Normal file

@@ -0,0 +1,104 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var xtend = require('xtend');
var files = {
abc: path.join(__dirname, '/expose/lib/abc.js'),
xyz: path.join(__dirname, '/expose/lib/xyz.js'),
foo: path.join(__dirname, '/expose/foo.js'),
bar: path.join(__dirname, '/expose/bar.js'),
main: path.join(__dirname, '/expose/main.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
var cache = {};
cache[files.abc] = {
source: sources.abc,
deps: {}
};
cache[files.xyz] = {
source: sources.xyz,
deps: {'../foo': files.foo}
};
cache[files.foo] = {
source: sources.foo,
deps: {'./lib/abc': files.abc}
};
cache[files.bar] = {
source: sources.bar,
deps: {xyz: files.xyz}
};
cache[files.main] = {
source: sources.main,
deps: {
abc: files.abc,
xyz: files.xyz,
'./bar': files.bar
}
};
test('preserves expose and entry with partial cache', function(t) {
t.plan(1);
var partialCache = xtend(cache);
delete partialCache[files.bar];
var p = parser({ cache: partialCache });
p.write({ id: 'abc', file: files.abc, expose: 'abc' });
p.write({ id: 'xyz', file: files.xyz, expose: 'xyz' });
p.end({ id: 'main', file: files.main, entry: true });
var rows = [];
p.on('data', function (row) { rows.push(row); });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: files.bar,
file: files.bar,
source: sources.bar,
deps: {xyz: files.xyz}
},
{
file: files.foo,
id: files.foo,
source: sources.foo,
deps: {'./lib/abc': files.abc}
},
{
id: 'abc',
file: files.abc,
source: sources.abc,
deps: {},
entry: true,
expose: 'abc'
},
{
id: 'main',
file: files.main,
source: sources.main,
deps: {
'./bar': files.bar,
abc: files.abc,
xyz: files.xyz
},
entry: true
},
{
id: 'xyz',
file: files.xyz,
source: sources.xyz,
deps: {'../foo': files.foo},
entry: true,
expose: 'xyz'
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

node_modules/module-deps/test/cache_persistent.js generated vendored Normal file

@@ -0,0 +1,122 @@
var parser = require('../');
var test = require('tap').test;
var path = require('path');
var fs = require('fs');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
test('uses persistent cache', function (t) {
t.plan(1);
var p = parser({
persistentCache: function (file, id, pkg, fallback, cb) {
if (file === files.bar) {
return fallback(null, cb);
}
cb(null, {
source: 'file at ' + file + '@' + id,
package: pkg,
deps: { './bar': files.bar }
});
}
});
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: files.bar,
file: files.bar,
source: fs.readFileSync(files.bar, 'utf8'),
deps: {}
},
{
id: 'foo',
file: files.foo,
source: 'file at ' + files.foo + '@' + files.foo,
deps: { './bar': files.bar }
}
].sort(cmp));
});
});
test('passes persistent cache error through', function (t) {
t.plan(1);
var p = parser({
persistentCache: function (file, id, pkg, fallback, cb) {
cb(new Error('foo'));
}
});
p.end({ id: 'foo', file: files.foo, entry: false });
p.on('error', function (err) { t.equals(err.message, 'foo') });
});
test('allow passing of the raw source as string', function (t) {
t.plan(1);
var p = parser({
persistentCache: function (file, id, pkg, fallback, cb) {
fallback(fs.readFileSync(files.bar, 'utf8'), cb);
}
});
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
file: files.foo,
source: fs.readFileSync(files.bar, 'utf8'),
deps: {}
}
].sort(cmp));
});
});
test('send file event with persistent cache', function (t) {
t.plan(2);
var p = parser({
persistentCache: function (file, id, pkg, fallback, cb) {
cb(null, {
source: 'file at ' + file + '@' + id,
package: pkg,
deps: {}
});
}
});
p.end({ id: 'foo', file: files.foo, entry: false });
p.on('file', function (file, id) {
t.same(file, path.resolve(files.foo));
t.same(id, path.resolve(files.foo));
});
});
test('errors of transforms occur in the correct order with a persistent cache', function (t) {
t.plan(3);
var p = parser({
transform: [
path.join(__dirname, 'cache_persistent', 'error_transform')
],
persistentCache: function (file, id, pkg, fallback, cb) {
fallback(fs.readFileSync(files.foo, 'utf8'), cb);
}
});
p.end({ id: 'foo', file: files.foo, entry: false });
var order = 0;
p.on('file', function (file, id) {
t.same(order, 0);
order += 1;
});
p.on('error', function (err) {
t.same(order, 1);
t.same(err.message, 'rawr while parsing file: ' + path.resolve(files.foo));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

View File

@@ -0,0 +1,6 @@
var through = require('through2');
module.exports = function (file) {
return through(function (chunk, enc, callback) {
callback(new Error('rawr'));
});
};

19
node_modules/module-deps/test/cycle.js generated vendored Normal file
View File

@@ -0,0 +1,19 @@
var mdeps = require('../');
var test = require('tap').test;
var JSONStream = require('JSONStream');
var packer = require('browser-pack');
var concat = require('concat-stream');
var path = require('path');
test('cycle', function (t) {
t.plan(1);
var p = mdeps();
p.end(path.join(__dirname, '/cycle/main.js'));
var pack = packer();
p.pipe(JSONStream.stringify()).pipe(pack).pipe(concat(function (src) {
Function('console', src.toString('utf8'))({
log: function (msg) { t.equal(msg, 333) }
});
}));
});

3
node_modules/module-deps/test/cycle/bar.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
var foo = require('./foo.js');
module.exports = function (n) { return foo.p(n, 1) };

4
node_modules/module-deps/test/cycle/foo.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
var bar = require('./bar.js');
exports.ooo = function (n) { return n * bar(110) };
exports.p = function (a, b) { return a + b }

4
node_modules/module-deps/test/cycle/main.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
var foo = require('./foo.js');
var bar = require('./bar.js');
console.log(foo.ooo(bar(2)));

49
node_modules/module-deps/test/deps.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
main: path.join(__dirname, '/files/main.js'),
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
test('deps', function (t) {
t.plan(1);
var p = parser();
p.end({ file: files.main, entry: true });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: files.main,
file: files.main,
source: sources.main,
entry: true,
deps: { './foo': files.foo }
},
{
id: files.foo,
file: files.foo,
source: sources.foo,
deps: { './bar': files.bar }
},
{
id: files.bar,
file: files.bar,
source: sources.bar,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

32
node_modules/module-deps/test/detect.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
var parser = require('../');
var test = require('tap').test;
var JSONStream = require('JSONStream');
var packer = require('browser-pack');
var path = require('path');
test('detect', function (t) {
t.plan(1);
var p = parser({
detect: function (source) {
var rx = /require\(["'](.*?)["']\)/g;
var m, deps = [];
while (m = rx.exec(source)) {
deps.push(m[1]);
}
return deps;
}
});
p.end(path.join(__dirname, '/files/main.js'));
p.on('error', t.fail.bind(t));
var pack = packer();
p.pipe(JSONStream.stringify()).pipe(pack);
var src = '';
pack.on('data', function (buf) { src += buf });
pack.on('end', function () {
Function('console', src)({
log: function (s) { t.equal(s, 'main: 1055') }
});
});
});

20
node_modules/module-deps/test/dotdot.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
var mdeps = require('../');
var test = require('tap').test;
var through = require('through2');
var path = require('path');
test('dotdot', function (t) {
var expected = [
path.join(__dirname, '/dotdot/index.js'),
path.join(__dirname, '/dotdot/abc/index.js')
];
t.plan(expected.length);
var d = mdeps();
d.end(path.join(__dirname, '/dotdot/abc/index.js'));
d.pipe(through.obj(function (row, enc, next) {
t.deepEqual(row.file, expected.shift());
next();
}));
});

2
node_modules/module-deps/test/dotdot/abc/index.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
var x = require('..');
console.log(x);

1
node_modules/module-deps/test/dotdot/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = 'whatever'

41
node_modules/module-deps/test/expose.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
test('single id export', function (t) {
t.plan(1);
var p = parser();
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
file: files.foo,
source: sources.foo,
deps: { './bar': files.bar }
},
{
id: files.bar,
file: files.bar,
source: sources.bar,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

1
node_modules/module-deps/test/expose/bar.js generated vendored Normal file
View File

@@ -0,0 +1 @@
require('xyz');

1
node_modules/module-deps/test/expose/foo.js generated vendored Normal file
View File

@@ -0,0 +1 @@
require('./lib/abc');

1
node_modules/module-deps/test/expose/lib/abc.js generated vendored Normal file
View File

@@ -0,0 +1 @@
console.log('abc');

2
node_modules/module-deps/test/expose/lib/xyz.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
require('../foo');
console.log('xyz');

3
node_modules/module-deps/test/expose/main.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
require('abc');
require('xyz');
require('./bar');

58
node_modules/module-deps/test/file_cache.js generated vendored Normal file
View File

@@ -0,0 +1,58 @@
var mdeps = require('../');
var test = require('tap').test;
var path = require('path');
var through = require('through2');
var files = {
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = {
foo: 'require("./bar"); var tongs;',
bar: 'notreal tongs'
};
var fileCache = {};
fileCache[files.foo] = sources.foo;
fileCache[files.bar] = sources.bar;
var specialReplace = function(input) {
return input.replace(/tongs/g, 'tangs');
};
test('uses file cache', function (t) {
t.plan(1);
var p = mdeps({
fileCache: fileCache,
transform: function (file) {
return through(function (buf, enc, next) {
this.push(specialReplace(String(buf)));
next();
});
},
transformKey: [ 'browserify', 'transform' ]
});
p.end({ id: 'foo', file: files.foo, entry: false });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: 'foo',
file: files.foo,
source: specialReplace(sources.foo),
deps: { './bar': files.bar }
},
{
id: files.bar,
file: files.bar,
source: specialReplace(sources.bar),
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

3
node_modules/module-deps/test/files/bar.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
module.exports = function (n) {
return n * 100;
};

1
node_modules/module-deps/test/files/extra.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = 555

10
node_modules/module-deps/test/files/filterable.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
module.exports = {
events: require('events'),
fs : require('fs'),
net : require('net'),
http : require('http'),
https : require('https'),
dgram : require('dgram'),
dns : require('dns')
}

5
node_modules/module-deps/test/files/foo.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
var bar = require('./bar');
module.exports = function (n) {
return n * 111 + bar(n);
};

2
node_modules/module-deps/test/files/main.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
var foo = require('./foo');
console.log('main: ' + foo(5));

View File

@@ -0,0 +1 @@
module.exports = 1

View File

@@ -0,0 +1,3 @@
{
"main": "one.js"
}

View File

@@ -0,0 +1 @@
t.equal(require('./'), 2);

View File

@@ -0,0 +1 @@
module.exports = 2

1
node_modules/module-deps/test/files/quotes/bar.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = 'success';

1
node_modules/module-deps/test/files/quotes/baz.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = 'success';

1
node_modules/module-deps/test/files/quotes/foo.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = 'success';

3
node_modules/module-deps/test/files/quotes/main.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
var foo = require('./foo');
var bar = require("./bar");
var baz = require(`./baz`);

View File

@@ -0,0 +1,5 @@
var bar2 = require('./bar2');
module.exports = function () {
return 'bar';
};

View File

@@ -0,0 +1,3 @@
module.exports = function () {
return 'bar2';
};

View File

@@ -0,0 +1,3 @@
module.exports = function () {
return 'baz';
};

View File

@@ -0,0 +1,6 @@
var bar = require('../bar/bar.js');
var baz = require('./baz/baz.js');
module.exports = function () {
return 'foo';
};

2
node_modules/module-deps/test/files/syntax_error.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
const a = require('a');
const a = 0;

View File

@@ -0,0 +1 @@
module.exports = function (x) { return x + BBB }

View File

@@ -0,0 +1,7 @@
var f = require('./f.js');
var m = require('m');
var g = require('g');
t.equal(m(f(AAA)), 777, 'transformation scope');
t.equal(g(3), 333, 'sub-transformation applied');
t.equal(typeof GGG, 'undefined', 'GGG leak');

View File

@@ -0,0 +1 @@
console.log(AAA + BBB + CCC + DDD + EEE + FFF);

View File

@@ -0,0 +1,5 @@
{
"browserify": {
"transform": [ "tr-a", "tr-b" ]
}
}

1
node_modules/module-deps/test/files/tr_module/f.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = function (x) { return x + BBB }

View File

@@ -0,0 +1,8 @@
var through = require('through2');
module.exports = function (file) {
return through(function (buf, enc, next) {
this.push(String(buf).replace(/XXX/g, '123'));
next();
});
};

View File

@@ -0,0 +1,8 @@
var f = require('./f.js');
var m = require('m');
var g = require('g');
t.equal(m(f(AAA)), 555, 'transformation scope');
t.equal(g(3), 333, 'sub-transformation applied');
t.equal(typeof GGG, 'undefined', 'GGG leak');
t.equal(XXX, 123, 'XXX');

View File

@@ -0,0 +1,5 @@
{
"browserify": {
"transform": [ "./xxx.js" ]
}
}

8
node_modules/module-deps/test/files/tr_module/xxx.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
var through = require('through2');
module.exports = function (file) {
return through(function (buf, enc, next) {
this.push(String(buf).replace(/XXX/g, '123'));
next();
});
};

View File

@@ -0,0 +1 @@
console.log(AAA)

View File

@@ -0,0 +1,5 @@
{
"browserify": {
"transform": [ "./xxx.js" ]
}
}

View File

@@ -0,0 +1 @@
console.log(XXX * 3)

8
node_modules/module-deps/test/files/tr_rel/xxx.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
var through = require('through2');
module.exports = function (file) {
return through(function (buf, enc, next) {
this.push(String(buf).replace(/XXX/g, '111'));
next();
});
};

1
node_modules/module-deps/test/files/tr_sh/f.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = function (x) { return x + BBB }

7
node_modules/module-deps/test/files/tr_sh/main.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
var f = require('./f.js');
var m = require('m');
var g = require('g');
t.equal(m(f(AAA)), 555, 'transformation scope');
t.equal(g(3), 333, 'sub-transformation applied');
t.equal(typeof GGG, 'undefined', 'GGG leak');

7
node_modules/module-deps/test/files/tr_sh/tr_a.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
var through = require('through2');
module.exports = function (file) {
return through(function (buf, enc, next) {
this.push(String(buf).replace(/AAA/g, '5'));
next();
});
};

7
node_modules/module-deps/test/files/tr_sh/tr_b.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
var through = require('through2');
module.exports = function (file) {
return through(function (buf, enc, next) {
this.push(String(buf).replace(/BBB/g, '50'));
next();
});
};

View File

@@ -0,0 +1,3 @@
var calc = require('algo').calc;
module.exports = function (x) { return calc(x); }

View File

@@ -0,0 +1,3 @@
var f = require('./f.js');
t.equal(f(14), 11, 'transformation scope');

1
node_modules/module-deps/test/files/transformdeps.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// dependencies added by transform

3
node_modules/module-deps/test/files/unicode/bar.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
module.exports = function (ñ) {
return ñ * 100;
};

5
node_modules/module-deps/test/files/unicode/foo.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
var é = require('./bar');
module.exports = function (ñ) {
return ñ * 111 + é(ñ);
};

2
node_modules/module-deps/test/files/unicode/main.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
var π = require('./foo');
console.log('main: ' + π(5));

2
node_modules/module-deps/test/files/xyz.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
var foo = require('./foo');
console.log('xyz: ' + foo(6));

36
node_modules/module-deps/test/filter.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
var test = require('tap').test;
var path = require('path')
var mdeps = require('../')
var core = ['events', 'util', 'dns', 'dgram', 'http', 'https', 'net', 'fs']
var collect = []
var entry = path.join(__dirname, 'files', 'filterable.js')
test('can filter core deps', function (t) {
var p = mdeps({
filter: function (e) {
return !~core.indexOf(e)
}
})
p.on('data', function (d) {
collect.push(d)
t.equal(d.id, entry)
t.deepEqual(d.deps, {
events: false,
fs: false,
net: false,
http: false,
https: false,
dgram: false,
dns: false
})
t.equal(d.entry, true)
});
p.on('end', function () {
t.equal(collect.length, 1)
t.end()
});
p.end(entry);
})

60
node_modules/module-deps/test/ignore_missing.js generated vendored Normal file
View File

@@ -0,0 +1,60 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
main: path.join(__dirname, '/ignore_missing/main.js'),
other: path.join(__dirname, '/ignore_missing/other.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
test('ignoreMissing', function (t) {
t.plan(1);
var p = parser({ignoreMissing: true});
p.end({file: files.main, entry: true});
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: files.main,
file: files.main,
source: sources.main,
entry: true,
deps: { './other': files.other }
},
{
id: files.other,
file: files.other,
source: sources.other,
deps: { 'missingModule': undefined }
}
].sort(cmp));
});
});
test('ignoreMissing off', function (t) {
t.plan(1);
var p = parser();
p.end({file: files.main, entry: true});
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('error', function (err) {
t.match(
String(err),
/Cannot find module 'missingModule'/
);
});
p.on('end', function () {
t.fail('should have errored');
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

1
node_modules/module-deps/test/ignore_missing/main.js generated vendored Normal file
View File

@@ -0,0 +1 @@
require('./other');

View File

@@ -0,0 +1 @@
require('missingModule');

52
node_modules/module-deps/test/ignore_missing_cache.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
main: path.join(__dirname, '/ignore_missing/main.js'),
other: path.join(__dirname, '/ignore_missing/other.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
var cache = {};
cache[files.main] = {
source: sources.main,
deps: { './other': files.other }
};
cache[files.other] = {
source: sources.other,
deps: { 'missingModule': undefined }
};
test('ignoreMissing with cache', function (t) {
t.plan(1);
var p = parser({ cache: cache, ignoreMissing: true });
p.end({file: files.main, entry: true});
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.same(rows.sort(cmp), [
{
id: files.main,
file: files.main,
source: sources.main,
entry: true,
deps: { './other': files.other }
},
{
id: files.other,
file: files.other,
source: sources.other,
deps: { 'missingModule': undefined }
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

16
node_modules/module-deps/test/invalid_pkg.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
var mdeps = require('../');
var test = require('tap').test;
var path = require('path');
var fs = require('fs');
test('invalid pkg', function (t) {
var d = mdeps();
d.on('package', function (pkg_) {
// console.error({pkg_});
});
d.end(path.join(__dirname, '/invalid_pkg/file.js'));
d.on('data', function () {});
d.on('end', function () {
t.end();
});
});

1
node_modules/module-deps/test/invalid_pkg/file.js generated vendored Normal file
View File

@@ -0,0 +1 @@
require('./')

1
node_modules/module-deps/test/invalid_pkg/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
T.pass()

View File

@@ -0,0 +1 @@
"just a string"

43
node_modules/module-deps/test/noparse.js generated vendored Normal file
View File

@@ -0,0 +1,43 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var files = {
main: path.join(__dirname, '/files/main.js'),
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
test('noParse', function (t) {
t.plan(1);
var p = parser({ noParse: [ files.foo ] });
p.end(files.main);
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.deepEqual(rows.sort(cmp), [
{
id: files.main,
file: files.main,
source: sources.main,
entry: true,
deps: { './foo': files.foo }
},
{
id: files.foo,
file: files.foo,
source: sources.foo,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

39
node_modules/module-deps/test/noparse_row.js generated vendored Normal file
View File

@@ -0,0 +1,39 @@
var parser = require('../');
var test = require('tap').test;
var fs = require('fs');
var concat = require('concat-stream');
var path = require('path');
var files = {
main: path.join(__dirname, '/files/main.js'),
foo: path.join(__dirname, '/files/foo.js'),
bar: path.join(__dirname, '/files/bar.js')
};
var sources = Object.keys(files).reduce(function (acc, file) {
acc[file] = fs.readFileSync(files[file], 'utf8');
return acc;
}, {});
test('noParse row', function (t) {
t.plan(1);
var p = parser();
p.end({ file: files.main, noparse: true });
var rows = [];
p.on('data', function (row) { rows.push(row) });
p.on('end', function () {
t.deepEqual(rows.sort(cmp), [
{
id: files.main,
file: files.main,
source: sources.main,
entry: true,
noparse: true,
deps: {}
}
].sort(cmp));
});
});
function cmp (a, b) { return a.id < b.id ? -1 : 1 }

20
node_modules/module-deps/test/pkg.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
var mdeps = require('../');
var test = require('tap').test;
var path = require('path');
var fs = require('fs');
var dirname = path.join(__dirname, '/pkg');
test('pkg', function (t) {
t.plan(4);
var d = mdeps();
d.on('package', function (pkg_) {
var pkg = JSON.parse(fs.readFileSync(dirname + pkg_.dir + '/package.json'));
pkg.__dirname = path.join(dirname, pkg_.dir);
t.deepEqual(pkg_, pkg);
});
d.end(path.join(__dirname, '/pkg/main.js'));
d.resume();
});

2
node_modules/module-deps/test/pkg/main.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
require('pkga');
require('pkgb');

4
node_modules/module-deps/test/pkg/package.json generated vendored Normal file
View File

@@ -0,0 +1,4 @@
{
"dir": "",
"main": "index.js"
}

28
node_modules/module-deps/test/pkg_filter.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
var mdeps = require('../');
var test = require('tap').test;
var JSONStream = require('JSONStream');
var packer = require('browser-pack');
var concat = require('concat-stream');
var path = require('path');
test('pkg filter', function (t) {
t.plan(3);
var p = mdeps({
packageFilter: function (pkg) {
if (pkg.name === undefined) {
t.equal(pkg.main, 'one.js');
pkg.main = 'two.js'
}
return pkg;
}
});
p.end(path.join(__dirname, '/files/pkg_filter/test.js'));
var pack = packer();
p.pipe(JSONStream.stringify()).pipe(pack);
pack.pipe(concat(function (src) {
Function('t', src)(t);
}));
});

Some files were not shown because too many files have changed in this diff