feat: ✨ Created a mini Node.js server with Newman for testing without the Postman GUI.
This will mimic a run in a CI/CD environment or Docker container.
This commit is contained in:
136
node_modules/newman/lib/run/export-file.js
generated
vendored
Normal file
136
node_modules/newman/lib/run/export-file.js
generated
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
// File system, path and utility dependencies.
var fs = require('fs');
var nodePath = require('path');

var _ = require('lodash');
var async = require('async');
var mkdirp = require('mkdirp');

// @todo: ES6: Change the sequence below to use object destructuring when Node v4 support is dropped
var joinPath = nodePath.join;
var parsePath = nodePath.parse;
var resolvePath = nodePath.resolve;

/**
 * The root path specifier.
 *
 * @const
 * @private
 * @type {string}
 */
var E = '';

/**
 * Default separator placed between the fragments of a generated timestamp.
 *
 * @const
 * @private
 * @type {string}
 */
var TS_SEP = '-';

/**
 * Writes the specified content to a file at the provided path.
 *
 * @private
 * @param {Object} path - A set of parsed path details; `path.unparsed` holds the target file location.
 * @param {String|Buffer} content - The content to be written to the file.
 * @param {Object} options - A set of options for the current file write; `options.name` labels the exporting source.
 * @param {Function} cb - Invoked as `cb(err, path)` when the file writing operation has completed.
 */
var writeFile = function (path, content, options, cb) {
    fs.writeFile(path.unparsed, content, function (err) {
        // _.set leaves a nullish `err` untouched, so a successful write still forwards null to the callback
        cb(_.set(err, 'help',
            `error writing file "${path.unparsed}" for ${options.name || 'unknown-source'}`), path);
    });
};

/**
 * Generate a timestamp from date.
 *
 * @private
 * @param {Date=} date - The timestamp used to mark the exported file; defaults to the current time.
 * @param {String=} separator - The optional string with which to separate different sections of the timestamp,
 * defaults to TS_SEP.
 * @returns {String} - yyyy-mm-dd-HH-MM-SS-MS-0
 */
var timestamp = function (date, separator) {
    // the ISO string guarantees zero padding; every non-digit run is then replaced with the separator
    var sep = _.isString(separator) ? separator : TS_SEP;

    return (date || new Date()).toISOString().replace(/[^\d]+/g, sep);
};
|
||||
|
||||
/**
|
||||
* Module whose job is to export a file which is in an export format.
|
||||
*
|
||||
* @param {Object} options - The set of file export options.
|
||||
* @param {String} options.path - The path to the exported file.
|
||||
* @param {String|Object} options.content - The JSON / stringified content that is to be written to the file.
|
||||
* @param {Function} done - The callback whose invocation marks the end of the file export routine.
|
||||
* @returns {*}
|
||||
*/
|
||||
module.exports = function (options, done) {
|
||||
// parse the path if one is available as string
|
||||
var path = _.isString(options.path) && parsePath(resolvePath(options.path)),
|
||||
content = _.isPlainObject(options.content) ? JSON.stringify(options.content, 0, 2) : (options.content || E);
|
||||
|
||||
// if a path was not provided by user, we need to prepare the default path. but create the default path only if one
|
||||
// is provided.
|
||||
if (!path && _.isString(options.default)) {
|
||||
path = parsePath(options.default);
|
||||
// delete the path and directory if one is detected when parsing defaults
|
||||
path.root = E;
|
||||
path.dir = 'newman';
|
||||
|
||||
// append timestamp
|
||||
path.name = `${path.name}-${timestamp()}0`; // @todo make -0 become incremental if file name exists
|
||||
path.base = path.name + path.ext;
|
||||
}
|
||||
// final check that path is valid
|
||||
if (!(path && path.base)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// now sore the unparsed result back for quick re-use during writing and a single place for unparsing
|
||||
path.unparsed = joinPath(path.dir, path.base);
|
||||
|
||||
// in case the path has a directory, ensure that the directory is available
|
||||
if (path.dir) {
|
||||
async.waterfall([
|
||||
function (next) {
|
||||
mkdirp(path.dir)
|
||||
.then(() => {
|
||||
return next();
|
||||
})
|
||||
.catch((err) => {
|
||||
return next(_.set(err, 'help',
|
||||
`error creating path for file "${path.unparsed}" for ${options.name || 'unknown-source'}`));
|
||||
});
|
||||
},
|
||||
function (next) {
|
||||
fs.stat(path.unparsed, function (err, stat) { // eslint-disable-line handle-callback-err
|
||||
next(null, stat);
|
||||
});
|
||||
},
|
||||
function (stat, next) {
|
||||
var target;
|
||||
|
||||
// handle cases where the specified export path is a pre-existing directory
|
||||
if (stat && stat.isDirectory()) {
|
||||
target = parsePath(options.default);
|
||||
|
||||
// append timestamp
|
||||
// @todo make -0 become incremental if file name exists
|
||||
target.name += '-' + timestamp() + '0';
|
||||
target.base = target.name + target.ext;
|
||||
|
||||
path.unparsed = joinPath(path.unparsed, target.base);
|
||||
}
|
||||
|
||||
next(null, path);
|
||||
},
|
||||
function (path, next) {
|
||||
writeFile(path, content, options, next);
|
||||
}
|
||||
], done);
|
||||
}
|
||||
else {
|
||||
writeFile(path, content, options, done);
|
||||
}
|
||||
};
|
441
node_modules/newman/lib/run/index.js
generated
vendored
Normal file
441
node_modules/newman/lib/run/index.js
generated
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
// External dependencies.
var _ = require('lodash');
var asyncEach = require('async/each');
var sdk = require('postman-collection');
var runtime = require('postman-runtime');
var request = require('postman-request');
var EventEmitter = require('eventemitter3');

// Sibling run modules.
var SecureFS = require('./secure-fs');
var RunSummary = require('./summary');
var getOptions = require('./options');
var exportFile = require('./export-file');
var util = require('../util');

/**
 * Maps each runtime callback name to the ordered list of argument names that follow the
 * leading (err, cursor) pair. Error and cursor are present in all events.
 *
 * @type {Object}
 */
var runtimeEvents = {
    beforeIteration: [],
    beforeItem: ['item'],
    beforePrerequest: ['events', 'item'],
    prerequest: ['executions', 'item'],
    beforeRequest: ['request', 'item'],
    request: ['response', 'request', 'item', 'cookies', 'history'],
    beforeTest: ['events', 'item'],
    test: ['executions', 'item'],
    item: ['item'],
    iteration: [],
    beforeScript: ['script', 'event', 'item'],
    script: ['execution', 'script', 'event', 'item']
};

/**
 * All bundled reporters, loaded eagerly (no dynamic requires). To add a new built-in
 * reporter, register it in this map.
 *
 * @type {Object}
 */
var defaultReporters = {
    cli: require('../reporters/cli'),
    json: require('../reporters/json'),
    junit: require('../reporters/junit'),
    progress: require('../reporters/progress'),
    emojitrain: require('../reporters/emojitrain')
};

/**
 * Install instructions for known external reporters, printed when such a reporter fails to load.
 * Messages are padded since they follow the reporter warning line.
 *
 * @private
 * @type {Object}
 */
var knownReporterErrorMessages = {
    html: ' run `npm install newman-reporter-html`\n',
    teamcity: ' run `npm install newman-reporter-teamcity`\n'
};

/**
 * Entrypoint lookup strategy used when multiple folder ids or names are specified.
 *
 * @private
 * @type {String}
 */
var MULTIENTRY_LOOKUP_STRATEGY = 'multipleIdOrName';
|
||||
|
||||
/**
 * Runs the collection, with all the provided options, returning an EventEmitter.
 *
 * @param {Object} options - The set of wrapped options, passed by the CLI parser.
 * @param {Collection|Object|String} options.collection - A JSON / Collection / String representing the collection.
 * @param {Object|String} options.environment - An environment JSON / file path for the current collection run.
 * @param {Object|String} options.globals - A globals JSON / file path for the current collection run.
 * @param {String} options.workingDir - Path of working directory that contains files needed for the collection run.
 * @param {String} options.insecureFileRead - If true, allow reading files outside of working directory.
 * @param {Object|String} options.iterationData - An iterationData JSON / file path for the current collection run.
 * @param {Object|String} options.reporters - A set of reporter names and their associated options for the current run.
 * @param {Object|String} options.cookieJar - A tough-cookie cookieJar / file path for the current collection run.
 * @param {String} options.exportGlobals - The relative path to export the globals file from the current run to.
 * @param {String} options.exportEnvironment - The relative path to export the environment file from the current run to.
 * @param {String} options.exportCollection - The relative path to export the collection from the current run to.
 * @param {String} options.exportCookieJar - The relative path to export the cookie jar from the current run to.
 * @param {Function} callback - The callback function invoked to mark the end of the collection run.
 * @returns {EventEmitter} - An EventEmitter instance with done and error event attachments.
 */
module.exports = function (options, callback) {
    // validate all options. it is to be noted that `options` parameter is optional and is polymorphic:
    // run(callback) is equivalent to run({}, callback)
    (!callback && _.isFunction(options)) && (
        (callback = options),
        (options = {})
    );
    // ensure a callable callback so the rest of the flow never has to guard invocations
    !_.isFunction(callback) && (callback = _.noop);

    var emitter = new EventEmitter(), // @todo: create a new inherited constructor
        runner = new runtime.Runner(),
        stopOnFailure,
        entrypoint;

    // get the configuration from various sources (CLI wrappers, files, rc config)
    getOptions(options, function (err, options) {
        if (err) {
            return callback(err);
        }

        // ensure that the collection option is present before starting a run
        if (!_.isObject(options.collection)) {
            return callback(new Error('expecting a collection to run'));
        }

        // use client certificate list to allow different ssl certificates for
        // different URLs
        var sslClientCertList = options.sslClientCertList || [],
            // allow providing custom cookieJar
            cookieJar = options.cookieJar || request.jar();

        // if sslClientCert option is set, put it at the end of the list to
        // match all URLs that didn't match in the list
        if (options.sslClientCert) {
            sslClientCertList.push({
                name: 'client-cert',
                matches: [sdk.UrlMatchPattern.MATCH_ALL_URLS],
                key: { src: options.sslClientKey },
                cert: { src: options.sslClientCert },
                passphrase: options.sslClientPassphrase
            });
        }

        // iterates over the bail array and sets each item as an obj key with a value of boolean true
        // [item1, item2] => {item1: true, item2: true}
        if (_.isArray(options.bail)) {
            options.bail = _.transform(options.bail, function (result, value) {
                result[value] = true;
            }, {});
        }

        // sets entrypoint to execute if options.folder is specified.
        if (options.folder) {
            entrypoint = { execute: options.folder };

            // uses `multipleIdOrName` lookupStrategy in case of multiple folders.
            _.isArray(entrypoint.execute) && (entrypoint.lookupStrategy = MULTIENTRY_LOOKUP_STRATEGY);
        }

        // sets stopOnFailure to true in case bail is used without any modifiers or with failure
        // --bail => stopOnFailure = true
        // --bail failure => stopOnFailure = true
        (typeof options.bail !== 'undefined' &&
            (options.bail === true || (_.isObject(options.bail) && options.bail.failure))) ?
            stopOnFailure = true : stopOnFailure = false;

        // store summary object and other relevant information inside the emitter
        emitter.summary = new RunSummary(emitter, options);

        // to store the exported content from reporters
        emitter.exports = [];

        // expose the runner object for reporter and programmatic use
        emitter.runner = runner;

        // now start the run!
        runner.run(options.collection, {
            stopOnFailure: stopOnFailure, // whether the run aborts on the first failed assertion (derived above)
            abortOnFailure: options.abortOnFailure, // used in integration tests, to be considered for a future release
            abortOnError: _.get(options, 'bail.folder'),
            iterationCount: options.iterationCount,
            environment: options.environment,
            globals: options.globals,
            entrypoint: entrypoint,
            data: options.iterationData,
            delay: {
                item: options.delayRequest
            },
            timeout: {
                global: options.timeout || 0,
                request: options.timeoutRequest || 0,
                script: options.timeoutScript || 0
            },
            fileResolver: new SecureFS(options.workingDir, options.insecureFileRead),
            requester: {
                useWhatWGUrlParser: true,
                cookieJar: cookieJar,
                followRedirects: _.has(options, 'ignoreRedirects') ? !options.ignoreRedirects : undefined,
                strictSSL: _.has(options, 'insecure') ? !options.insecure : undefined,
                timings: Boolean(options.verbose),
                extendedRootCA: options.sslExtraCaCerts,
                agents: _.isObject(options.requestAgents) ? options.requestAgents : undefined
            },
            // falsy (0) when the list is empty, otherwise an SDK CertificateList
            certificates: sslClientCertList.length && new sdk.CertificateList({}, sslClientCertList)
        }, function (err, run) {
            if (err) { return callback(err); }

            var callbacks = {},
                // ensure that the reporter option type polymorphism is handled
                reporters = _.isString(options.reporters) ? [options.reporters] : options.reporters,
                // keep a track of start assertion indices of legacy assertions
                legacyAssertionIndices = {};

            // emit events for all the callbacks triggered by the runtime
            _.forEach(runtimeEvents, function (definition, eventName) {
                // intercept each runtime.* callback and expose a global object based event
                callbacks[eventName] = function (err, cursor) {
                    var args = arguments,
                        obj = { cursor };

                    // convert the arguments into an object by taking the key name reference from the definition
                    // object
                    _.forEach(definition, function (key, index) {
                        obj[key] = args[index + 2]; // first two are err, cursor
                    });

                    args = [eventName, err, obj];
                    emitter.emit.apply(emitter, args); // eslint-disable-line prefer-spread
                };
            });

            // add non generic callback handling
            _.assignIn(callbacks, {

                /**
                 * Emits event for start of the run. It injects/exposes additional objects useful for
                 * programmatic usage and reporters
                 *
                 * @param {?Error} err - An Error instance / null object.
                 * @param {Object} cursor - The run cursor instance.
                 * @returns {*}
                 */
                start (err, cursor) {
                    emitter.emit('start', err, {
                        cursor,
                        run
                    });
                },

                /**
                 * Bubbles up console messages.
                 *
                 * @param {Object} cursor - The run cursor instance.
                 * @param {String} level - The level of console logging [error, silent, etc].
                 * @returns {*}
                 */
                console (cursor, level) {
                    emitter.emit('console', null, {
                        cursor: cursor,
                        level: level,
                        messages: _.slice(arguments, 2)
                    });
                },

                /**
                 * The exception handler for the current run instance.
                 *
                 * @todo Fix bug of arg order in runtime.
                 * @param {Object} cursor - The run cursor.
                 * @param {?Error} err - An Error instance / null object.
                 * @returns {*}
                 */
                exception (cursor, err) {
                    emitter.emit('exception', null, {
                        cursor: cursor,
                        error: err
                    });
                },

                // re-emits each runtime assertion as an `assertion` event, synthesising an
                // AssertionError-shaped object for failures
                assertion (cursor, assertions) {
                    _.forEach(assertions, function (assertion) {
                        var errorName = _.get(assertion, 'error.name', 'AssertionError');

                        // NOTE(review): `errorName` is read before this null-guard; `_.get` tolerates a nullish
                        // `assertion`, so there is no crash, but the ordering looks accidental — confirm upstream
                        !assertion && (assertion = {});

                        // store the legacy assertion index
                        assertion.index && (legacyAssertionIndices[cursor.ref] = assertion.index);

                        emitter.emit('assertion', (assertion.passed ? null : {
                            name: errorName,
                            index: assertion.index,
                            test: assertion.name,
                            message: _.get(assertion, 'error.message', assertion.name || ''),

                            stack: errorName + ': ' + _.get(assertion, 'error.message', '') + '\n' +
                                ' at Object.eval sandbox-script.js:' + (assertion.index + 1) + ':' +
                                ((cursor && cursor.position || 0) + 1) + ')'
                        }), {
                            cursor: cursor,
                            assertion: assertion.name,
                            skipped: assertion.skipped,
                            error: assertion.error,
                            item: run.resolveCursor(cursor)
                        });
                    });
                },

                /**
                 * Custom callback to override the `done` event to fire the end callback.
                 *
                 * @todo Do some memory cleanup here?
                 * @param {?Error} err - An error instance / null passed from the done event handler.
                 * @param {Object} cursor - The run instance cursor.
                 * @returns {*}
                 */
                done (err, cursor) {
                    // in case runtime faced an error during run, we do not process any other event and emit `done`.
                    // we do it this way since, an error in `done` callback would have anyway skipped any intermediate
                    // events or callbacks
                    if (err) {
                        emitter.emit('done', err, emitter.summary);
                        callback(err, emitter.summary);

                        return;
                    }

                    // we emit a `beforeDone` event so that reporters and other such addons can do computation before
                    // the run is marked as done
                    emitter.emit('beforeDone', null, {
                        cursor: cursor,
                        summary: emitter.summary
                    });

                    // queue any requested exports (environment/globals/collection/cookie jar)
                    _.forEach(['environment', 'globals', 'collection', 'cookie-jar'], function (item) {
                        // fetch the path name from options if one is provided
                        var path = _.get(options, _.camelCase(`export-${item}`));

                        // if the options have an export path, then add the item to export queue
                        path && emitter.exports.push({
                            name: item,
                            default: `newman-${item}.json`,
                            path: path,
                            content: item === 'cookie-jar' ?
                                cookieJar.toJSON() :
                                _(emitter.summary[item].toJSON())
                                    .defaults({
                                        name: item
                                    })
                                    .merge({
                                        _postman_variable_scope: item,
                                        _postman_exported_at: (new Date()).toISOString(),
                                        _postman_exported_using: util.userAgent
                                    })
                                    .value()
                        });
                    });

                    // write every queued export to disk, then fire the real `done`
                    asyncEach(emitter.exports, exportFile, function (err) {
                        // we now trigger actual done event which we had overridden
                        emitter.emit('done', err, emitter.summary);
                        callback(err, emitter.summary);
                    });
                }
            });

            emitter.on('script', function (err, o) {
                // bubble special script name based events (e.g. `testScript`)
                o && o.event && emitter.emit(o.event.listen + 'Script', err, o);
            });

            emitter.on('beforeScript', function (err, o) {
                // bubble special script name based events (e.g. `beforeTestScript`)
                o && o.event && emitter.emit(_.camelCase('before-' + o.event.listen + 'Script'), err, o);
            });

            // initialise all the reporters
            !emitter.reporters && (emitter.reporters = {});
            _.isArray(reporters) && _.forEach(reporters, function (reporterName) {
                // disallow duplicate reporter initialisation
                if (_.has(emitter.reporters, reporterName)) { return; }

                var Reporter;

                try {
                    // check if the reporter is an external reporter
                    Reporter = require((function (name) { // ensure scoped packages are loaded
                        var prefix = '',
                            scope = (name.charAt(0) === '@') && name.substr(0, name.indexOf('/') + 1);

                        if (scope) {
                            prefix = scope;
                            name = name.substr(scope.length);
                        }

                        return prefix + 'newman-reporter-' + name;
                    }(reporterName)));
                }
                // @todo - maybe have a debug mode and log error there
                catch (error) {
                    if (!defaultReporters[reporterName]) {
                        // @todo: route this via print module to respect silent flags
                        console.warn(`newman: could not find "${reporterName}" reporter`);
                        console.warn(' ensure that the reporter is installed in the same directory as newman');

                        // print install instruction in case a known reporter is missing
                        if (knownReporterErrorMessages[reporterName]) {
                            console.warn(knownReporterErrorMessages[reporterName]);
                        }
                        else {
                            console.warn(' please install reporter using npm\n');
                        }
                    }
                }

                // load local reporter if its not an external reporter
                !Reporter && (Reporter = defaultReporters[reporterName]);

                try {
                    // we could have checked _.isFunction(Reporter), here, but we do not do that so that the nature of
                    // reporter error can be bubbled up
                    Reporter && (emitter.reporters[reporterName] = new Reporter(emitter,
                        _.get(options, ['reporter', reporterName], {}), options));
                }
                catch (error) {
                    // if the reporter errored out during initialisation, we should not stop the run simply log
                    // the error stack trace for debugging
                    console.warn(`newman: could not load "${reporterName}" reporter`);

                    if (!defaultReporters[reporterName]) {
                        // @todo: route this via print module to respect silent flags
                        console.warn(` this seems to be a problem in the "${reporterName}" reporter.\n`);
                    }
                    console.warn(error);
                }
            });

            // raise warning when more than one dominant reporters are used
            (function (reporters) {
                // find all reporters whose `dominant` key is set to true
                var conflicts = _.keys(_.transform(reporters, function (conflicts, reporter, name) {
                    reporter.dominant && (conflicts[name] = true);
                }));

                (conflicts.length > 1) && // if more than one dominant, raise a warning
                    console.warn(`newman: ${conflicts.join(', ')} reporters might not work well together.`);
            }(emitter.reporters));

            // we ensure that everything is async to comply with event paradigm and start the run
            setImmediate(function () {
                run.start(callbacks);
            });
        });
    });

    return emitter;
};
|
396
node_modules/newman/lib/run/options.js
generated
vendored
Normal file
396
node_modules/newman/lib/run/options.js
generated
vendored
Normal file
@@ -0,0 +1,396 @@
|
||||
// module dependencies: lodash utilities, postman-collection constructs, the v1->v2 transformer,
// parsers for JSON/CSV inputs, and sibling util/config helpers
var _ = require('lodash'),
    fs = require('fs'),
    async = require('async'),
    Collection = require('postman-collection').Collection,
    VariableScope = require('postman-collection').VariableScope,
    CookieJar = require('tough-cookie').CookieJar,
    transformer = require('postman-collection-transformer'),
    liquidJSON = require('liquid-json'),
    parseCsv = require('csv-parse'),
    util = require('../util'),
    config = require('../config'),

    /**
     * The message displayed when the specified collection file can't be loaded.
     *
     * @const
     * @type {String}
     */
    COLLECTION_LOAD_ERROR_MESSAGE = 'collection could not be loaded',

    /**
     * The message displayed when the specified iteration data file can't be loaded.
     *
     * @const
     * @type {String}
     */
    ITERATION_DATA_LOAD_ERROR_MESSAGE = 'iteration data could not be loaded',

    /**
     * The message displayed when the specified environment or globals file can't be loaded.
     * The concrete resource type is appended to this prefix at the point of use.
     *
     * @const
     * @type {String}
     */
    LOAD_ERROR_MESSAGE = 'could not load ',

    /**
     * The set of postman collection transformer options, to convert collection v1 to collection v2.
     *
     * @const
     * @type {Object}
     */
    COLLECTION_TRANSFORMER_OPTION = { inputVersion: '1.0.0', outputVersion: '2.1.0' },
|
||||
|
||||
/**
|
||||
* Accepts an object, and extracts the property inside an object which is supposed to contain the required data.
|
||||
* In case of variables, it also extracts them into plain JS objects.
|
||||
*
|
||||
* @param {Object} source - The source wrapper object that may or may not contain inner wrapped properties.
|
||||
* @param {String} type - "environment" or "globals", etc.
|
||||
* @returns {Object} - The object representation of the current extracted property.
|
||||
*/
|
||||
extractModel = function (source, type) {
|
||||
source = source[type] || source; // extract object that holds variable. these usually come from cloud API
|
||||
if (!_.isObject(source)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// ensure we un-box the JSON if it comes from cloud-api or similar sources
|
||||
!source.values && _.isObject(source[type]) && (source = source[type]);
|
||||
|
||||
// we ensure that environment passed as array is converted to plain object. runtime does this too, but we do it
|
||||
// here for consistency of options passed to reporters
|
||||
return source;
|
||||
},
|
||||
|
||||
/**
|
||||
* Loads the given data of type from a specified external location
|
||||
*
|
||||
* @param {String} type - The type of data to load.
|
||||
* @param {String} location - The location to load from (file path or URL).
|
||||
* @param {Object} options - The set of wrapped options.
|
||||
* @param {function} cb - The callback function whose invocation marks the end of the external load routine.
|
||||
* @returns {*}
|
||||
*/
|
||||
externalLoader = function (type, location, options, cb) {
|
||||
return _.isString(location) ? util.fetchJson(type, location, options, function (err, data) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
return cb(null, extractModel(data, type));
|
||||
}) : cb(null, extractModel(location, type));
|
||||
},
|
||||
|
||||
/**
|
||||
* A helper method to process a collection and convert it to a V2 equivalent if necessary, and return it.
|
||||
*
|
||||
* @todo Drop support for the v1 collection format in Newman v5.
|
||||
* Reference: https://github.com/postmanlabs/newman/pull/1660
|
||||
*
|
||||
* @param {Object} collection The input collection, specified as a JSON object.
|
||||
* @param {Function} callback A handler function that consumes an error object and the processed collection.
|
||||
* @returns {*}
|
||||
*/
|
||||
processCollection = function (collection, callback) {
|
||||
if (util.isV1Collection(collection)) {
|
||||
// @todo: route this via print module to respect silent flags
|
||||
console.warn('newman: Newman v4 deprecates support for the v1 collection format');
|
||||
console.warn(' Use the Postman Native app to export collections in the v2 format\n');
|
||||
|
||||
return transformer.convert(collection, COLLECTION_TRANSFORMER_OPTION, callback);
|
||||
}
|
||||
|
||||
callback(null, collection);
|
||||
},
|
||||
|
||||
    /**
     * Helper function that manages the load of environments and globals
     *
     * @private
     * @param {String} type - The type of resource to load: collection, environment, etc.
     * @param {String|Object} value - The value derived from the CLI or run command.
     * @param {Object} options - The set of wrapped options.
     * @param {Function} callback - The function invoked when the scope has been loaded.
     */
    loadScopes = function (type, value, options, callback) {
        // shared completion handler for both the in-memory and the externally-loaded paths
        var done = function (err, scope) {
            if (err) { return callback(new Error(LOAD_ERROR_MESSAGE + `${type}\n ${err.message || err}`)); }

            // re-enters `done` with an error so the error-message formatting above is reused
            if (!_.isObject(scope)) {
                return done(new Error(LOAD_ERROR_MESSAGE + type));
            }

            // normalise to a fresh VariableScope (existing scopes are cloned via toJSON)
            callback(null, new VariableScope(VariableScope.isVariableScope(scope) ? scope.toJSON() : scope));
        };

        // objects passed programmatically need no external loading
        if (_.isObject(value)) {
            return done(null, value);
        }

        externalLoader(type, value, options, done);
    },
|
||||
|
||||
/**
|
||||
* Custom method to auto parse CSV values
|
||||
*
|
||||
* @private
|
||||
* @param {String} value - CSV field value
|
||||
* @param {Object} context - Context of field value
|
||||
* @param {Boolean} context.quoting - A boolean indicating if the field was surrounded by quotes.
|
||||
* @returns {String|Number|Date}
|
||||
*/
|
||||
csvAutoParse = function (value, context) {
|
||||
if (context.quoting) {
|
||||
// avoid parsing quoted values
|
||||
return value;
|
||||
}
|
||||
|
||||
if (util.isInt(value)) {
|
||||
return parseInt(value, 10);
|
||||
}
|
||||
|
||||
if (util.isFloat(value)) {
|
||||
return parseFloat(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
/**
|
||||
* Custom configuration loaders for the required configuration keys.
|
||||
*
|
||||
* @type {Object}
|
||||
*/
|
||||
configLoaders = {
|
||||
|
||||
        /**
         * The collection file load helper for the current run.
         *
         * @param {Object|String} value - The collection, specified as a JSON object, or the path to it's file.
         * @param {Object} options - The set of wrapped options.
         * @param {Function} callback - The callback function invoked to mark the end of the collection load routine.
         * @returns {*}
         */
        collection: function (value, options, callback) {
            /**
             * The post collection load handler.
             *
             * @param {?Error} err - An Error instance / null, passed from the collection loader.
             * @param {Object} collection - The collection / raw JSON object, passed from the collection loader.
             * @returns {*}
             */
            var done = function (err, collection) {
                if (err) {
                    return callback(err);
                }

                // ensure that the collection option is present before starting a run
                if (!_.isObject(collection)) {
                    return callback(new Error(COLLECTION_LOAD_ERROR_MESSAGE));
                }

                // ensure that the collection reference is an SDK instance
                // @todo - should this be handled by config loaders?
                collection = new Collection(Collection.isCollection(collection) ?
                    // if the option contain an instance of collection, we simply clone it for future use
                    // create a collection in case it is not one. user can send v2 JSON as a source and that will be
                    // converted to a collection
                    collection.toJSON() : collection);

                callback(null, collection);
            };

            // if the collection has been specified as an object, convert to V2 if necessary and return the result
            if (_.isObject(value)) {
                return processCollection(value, done);
            }

            // otherwise treat the value as a file path / URL and load it externally first
            externalLoader('collection', value, options, function (err, data) {
                if (err) {
                    // surface the loader's `help` text (when present) ahead of the raw error message
                    return done(new Error(COLLECTION_LOAD_ERROR_MESSAGE +
                        (err.help ? `\n ${err.help}` : '') +
                        `\n ${err.message || err}`));
                }
                if (!_.isObject(data)) {
                    return done(new Error(COLLECTION_LOAD_ERROR_MESSAGE));
                }

                return processCollection(data, done);
            });
        },
|
||||
|
||||
/**
 * The environment load helper for the current collection run: `loadScopes`
 * pre-bound to the 'environment' scope type, invoked by the config loader.
 *
 * @type {Function}
 */
environment: loadScopes.bind(this, 'environment'),
|
||||
|
||||
/**
 * The globals load helper for the collection run: `loadScopes` pre-bound to
 * the 'globals' scope type, invoked by the config loader.
 *
 * @type {Function}
 */
globals: loadScopes.bind(this, 'globals'),
|
||||
|
||||
/**
|
||||
* Helper function to sanitize folder option.
|
||||
*
|
||||
* @param {String[]|String} value - The list of folders to execute
|
||||
* @param {Object} options - The set of wrapped options.
|
||||
* @param {Function} callback - The callback function invoked to mark the end of the folder load routine.
|
||||
* @returns {*}
|
||||
*/
|
||||
folder: function (value, options, callback) {
|
||||
if (!value.length) {
|
||||
return callback(); // avoids empty string or array
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && value.length === 1) {
|
||||
return callback(null, value[0]); // avoids using multipleIdOrName strategy for a single item array
|
||||
}
|
||||
|
||||
callback(null, value);
|
||||
},
|
||||
|
||||
/**
|
||||
* The iterationData loader module, with support for JSON or CSV data files.
|
||||
*
|
||||
* @param {String|Object[]} location - The path to the iteration data file for the current collection run, or
|
||||
* the array of iteration data objects.
|
||||
* @param {Object} options - The set of wrapped options.
|
||||
* @param {Function} callback - The function invoked to indicate the end of the iteration data loading routine.
|
||||
* @returns {*}
|
||||
*/
|
||||
iterationData: function (location, options, callback) {
|
||||
if (_.isArray(location)) { return callback(null, location); }
|
||||
|
||||
util.fetch(location, function (err, data) {
|
||||
if (err) {
|
||||
return callback(new Error(ITERATION_DATA_LOAD_ERROR_MESSAGE + `\n ${err.message || err}`));
|
||||
}
|
||||
|
||||
// Try loading as a JSON, fall-back to CSV.
|
||||
async.waterfall([
|
||||
(cb) => {
|
||||
try {
|
||||
return cb(null, liquidJSON.parse(data.trim()));
|
||||
}
|
||||
catch (e) {
|
||||
return cb(null, undefined); // e masked to avoid displaying JSON parse errors for CSV files
|
||||
}
|
||||
},
|
||||
(json, cb) => {
|
||||
if (json) {
|
||||
return cb(null, json);
|
||||
}
|
||||
// Wasn't JSON
|
||||
parseCsv(data, {
|
||||
columns: true, // infer the columns names from the first row
|
||||
escape: '"', // escape character
|
||||
cast: csvAutoParse, // function to cast values of individual fields
|
||||
trim: true, // ignore whitespace immediately around the delimiter
|
||||
relax: true, // allow using quotes without escaping inside unquoted string
|
||||
relax_column_count: true, // ignore inconsistent columns count
|
||||
bom: true // strip the byte order mark (BOM) from the input string
|
||||
}, cb);
|
||||
}
|
||||
], (err, parsed) => {
|
||||
if (err) {
|
||||
return callback(new Error(ITERATION_DATA_LOAD_ERROR_MESSAGE + `\n ${err.message || err}`));
|
||||
}
|
||||
|
||||
callback(null, parsed);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
sslClientCertList: function (location, options, callback) {
|
||||
if (Array.isArray(location)) {
|
||||
return callback(null, location);
|
||||
}
|
||||
|
||||
if (typeof location !== 'string') {
|
||||
return callback(new Error('path for ssl client certificates list file must be a string'));
|
||||
}
|
||||
|
||||
fs.readFile(location, function (err, value) {
|
||||
if (err) {
|
||||
return callback(new Error(`unable to read the ssl client certificates file "${location}"`));
|
||||
}
|
||||
|
||||
try {
|
||||
value = liquidJSON.parse(value.toString(util.detectEncoding(value)).trim());
|
||||
}
|
||||
catch (e) {
|
||||
return callback(new Error(`the file at ${location} does not contain valid JSON data.`));
|
||||
}
|
||||
|
||||
// ensure that `sslClientCertList` is an array
|
||||
if (!Array.isArray(value)) {
|
||||
return callback(new Error('expected ssl client certificates list to be an array.'));
|
||||
}
|
||||
|
||||
return callback(null, value);
|
||||
});
|
||||
},
|
||||
|
||||
cookieJar: function (location, options, callback) {
|
||||
if (_.isObject(location) && location.constructor.name === 'CookieJar') {
|
||||
return callback(null, location);
|
||||
}
|
||||
|
||||
if (typeof location !== 'string') {
|
||||
return callback(new Error('cookieJar must be a path to a JSON file or a CookieJar instance'));
|
||||
}
|
||||
|
||||
fs.readFile(location, function (err, value) {
|
||||
if (err) {
|
||||
return callback(new Error(`unable to read the cookie jar file "${location}"`));
|
||||
}
|
||||
|
||||
try {
|
||||
value = CookieJar.fromJSON(value.toString());
|
||||
}
|
||||
catch (e) {
|
||||
return callback(new Error(`the file at ${location} does not contain valid JSON data.`));
|
||||
}
|
||||
|
||||
return callback(null, value);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* The helper function to load all file based information for the current collection run.
|
||||
*
|
||||
* @param {Object} options - The set of generic collection run options.
|
||||
* @param {Function} callback - The function called to mark the completion of the configuration load routine.
|
||||
* @returns {*}
|
||||
*/
|
||||
module.exports = function (options, callback) {
|
||||
// set newman version used for collection run
|
||||
options.newmanVersion = util.version;
|
||||
|
||||
// set working directory if not provided
|
||||
options.workingDir = options.workingDir || process.cwd();
|
||||
|
||||
// allow insecure file read by default
|
||||
options.insecureFileRead = Boolean(_.get(options, 'insecureFileRead', true));
|
||||
|
||||
config.get(options, { loaders: configLoaders, command: 'run' }, function (err, result) {
|
||||
if (err) { return callback(err); }
|
||||
|
||||
!_.isEmpty(options.globalVar) && _.forEach(options.globalVar, function (variable) {
|
||||
variable && (result.globals.set(variable.key, variable.value));
|
||||
});
|
||||
|
||||
!_.isEmpty(options.envVar) && _.forEach(options.envVar, function (variable) {
|
||||
variable && (result.environment.set(variable.key, variable.value));
|
||||
});
|
||||
|
||||
callback(null, result);
|
||||
});
|
||||
};
|
187
node_modules/newman/lib/run/secure-fs.js
generated
vendored
Normal file
187
node_modules/newman/lib/run/secure-fs.js
generated
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
const fs = require('fs'),
    _ = require('lodash'),
    path = require('path'),
    util = require('util'),
    Readable = require('stream').Readable,

    PPERM_ERR = 'PPERM: insecure file access outside working directory',
    FUNCTION = 'function',
    DEPRECATED_SYNC_WRITE_STREAM = 'SyncWriteStream',
    EXPERIMENTAL_PROMISE = 'promises',

    /*
     * Detects a (modified) Windows root using plain character checks — no regex,
     * which also avoids any chance of a regex-based DoS.
     *
     * | File System | Actual           | Modified          |
     * |-------------|------------------|-------------------|
     * | LFS (Local) | C:\Program       | /C:/Program       |
     * | UNC         | \\Server\Program | ///Server/Program |
     */
    isWindowsRoot = function (path) {
        // modified UNC path, e.g. ///Server/Program
        if (path.slice(0, 3) === '///') { return true; }

        const drive = path.charAt(1),
            isDriveLetter = (drive >= 'A' && drive <= 'Z') || (drive >= 'a' && drive <= 'z');

        // modified local path, e.g. /C:/Program
        return (path.charAt(0) === '/') && isDriveLetter && (path.charAt(2) === ':');
    },

    // drops a single trailing path separator, if present
    stripTrailingSep = function (thePath) {
        return thePath.endsWith(path.sep) ? thePath.slice(0, -1) : thePath;
    },

    // true when `thePath` is `potentialParent` itself or lies anywhere underneath it
    pathIsInside = function (thePath, potentialParent) {
        // allow trailing slashes on either argument by normalizing them away
        thePath = stripTrailingSep(thePath);
        potentialParent = stripTrailingSep(potentialParent);

        // Node treats only Windows as case-insensitive in its path module; follow that convention
        if (global.process.platform === 'win32') {
            thePath = thePath.toLowerCase();
            potentialParent = potentialParent.toLowerCase();
        }

        if (!thePath.startsWith(potentialParent)) { return false; }

        // the character right after the parent must be a separator (child path)
        // or nothing at all (the parent itself)
        const next = thePath[potentialParent.length];

        return next === path.sep || next === undefined;
    };
|
||||
|
||||
/**
 * Secure file resolver wrapper over fs. It only allow access to files inside working directory unless specified.
 *
 * @param {*} workingDir - Path of working directory
 * @param {*} [insecureFileRead=false] - If true, allow reading files outside working directory
 * @param {*} [fileWhitelist=[]] - List of allowed files outside of working directory
 */
function SecureFS (workingDir, insecureFileRead = false, fileWhitelist = []) {
    // underlying node modules, kept on the instance so the overrides below can delegate
    this._fs = fs;
    this._path = path;
    this.constants = fs.constants;

    // access-control configuration for this resolver
    this.workingDir = workingDir;
    this.insecureFileRead = insecureFileRead;
    this.fileWhitelist = fileWhitelist;

    // cached platform flag, used for the windows-specific root handling in _resolve
    this.isWindows = (global.process.platform === 'win32');
}
|
||||
|
||||
/**
 * Private method to resolve a path against the working directory and enforce the
 * access policy (inside working directory, whitelisted, or insecure reads enabled).
 *
 * @param {String} relOrAbsPath - Relative or absolute path to resolve
 * @param {Array} whiteList - A list of absolute path to whitelist
 *
 * @returns {String} The resolved path, or undefined when access is denied
 */
SecureFS.prototype._resolve = function (relOrAbsPath, whiteList) {
    // special handling for windows absolute paths to work cross platform
    if (this.isWindows && isWindowsRoot(relOrAbsPath)) {
        relOrAbsPath = relOrAbsPath.substring(1);
    }

    // always resolve from the working directory so cross-os variations are mitigated
    const resolvedPath = this._path.resolve(this.workingDir, relOrAbsPath);

    // the path is acceptable when insecure reads are allowed, or it stays inside
    // the working directory, or it is explicitly whitelisted
    if (this.insecureFileRead ||
        pathIsInside(resolvedPath, this.workingDir) ||
        _.includes(whiteList, resolvedPath)) {
        return resolvedPath;
    }

    return undefined;
};
|
||||
|
||||
/**
 * Asynchronous path resolver function
 *
 * @param {String} relOrAbsPath - Relative or absolute path to resolve
 * @param {Array} [whiteList] - A optional list of additional absolute path to whitelist
 * @param {Function} callback - Invoked with (err) on denial, or (null, resolvedPath) on success
 */
SecureFS.prototype.resolvePath = function (relOrAbsPath, whiteList, callback) {
    // support the (path, callback) signature
    if (typeof whiteList === FUNCTION && !callback) {
        callback = whiteList;
        whiteList = [];
    }

    const resolved = this._resolve(relOrAbsPath, _.concat(this.fileWhitelist, whiteList));

    return resolved ? callback(null, resolved) : callback(new Error(PPERM_ERR));
};
|
||||
|
||||
/**
 * Synchronous path resolver function
 *
 * @param {String} relOrAbsPath - Relative or absolute path to resolve
 * @param {Array} [whiteList] - A optional list of additional absolute path to whitelist
 *
 * @returns {String} The resolved path
 * @throws {Error} PPERM error when the path is outside the working directory and not whitelisted
 */
SecureFS.prototype.resolvePathSync = function (relOrAbsPath, whiteList) {
    // resolve the path from the working directory, honoring both whitelists
    const allowed = _.concat(this.fileWhitelist, whiteList),
        resolved = this._resolve(relOrAbsPath, allowed);

    if (resolved) { return resolved; }

    throw new Error(PPERM_ERR);
};
|
||||
|
||||
// Attach all functions in fs to postman-fs.
// NOTE: forEach (not map) — this loop is executed purely for its side effects;
// the original used .map and discarded the resulting array.
Object.getOwnPropertyNames(fs).forEach((prop) => {
    // Bail-out early to prevent fs module from logging warning for deprecated and experimental methods
    if (prop === DEPRECATED_SYNC_WRITE_STREAM || prop === EXPERIMENTAL_PROMISE || typeof fs[prop] !== FUNCTION) {
        return;
    }

    SecureFS.prototype[prop] = fs[prop];
});
|
||||
|
||||
// Override the required functions

/**
 * Secure replacement for fs.stat: resolves the path within working-directory
 * bounds before delegating to the real fs.stat.
 *
 * @param {String} path - Path to stat
 * @param {Function} callback - Standard fs.stat callback
 */
SecureFS.prototype.stat = function (path, callback) {
    this.resolvePath(path, (err, securePath) => {
        err ? callback(err) : this._fs.stat(securePath, callback);
    });
};
|
||||
|
||||
/**
 * Secure replacement for fs.createReadStream. Resolves the path synchronously first;
 * when resolution is denied it returns a stand-in stream that emits the PPERM error,
 * mirroring how fs surfaces errors asynchronously on streams instead of throwing.
 *
 * @param {String} path - Path of the file to stream.
 * @param {Object} [options] - Standard fs.createReadStream options.
 * @returns {ReadStream|Readable} A real fs read stream, or an error-emitting stand-in.
 */
SecureFS.prototype.createReadStream = function (path, options) {
    try {
        return this._fs.createReadStream(this.resolvePathSync(path), options);
    }
    catch (err) {
        // Create a fake read stream that only emits an error.
        // NOTE(review): the caught resolution error is discarded; PPERM_ERR is reported instead.
        const ErrorReadStream = function () {
            // Replicating behavior of fs module of disabling emitClose on destroy
            Readable.call(this, { emitClose: false });

            // Emit the error event with the insecure file access error
            this.emit('error', new Error(PPERM_ERR));

            // If options exist and disable autoClose then don't destroy
            (options && !options.autoClose) || this.destroy();
        };

        util.inherits(ErrorReadStream, Readable);

        return new ErrorReadStream();
    }
};
|
||||
|
||||
// expose the secure file-system wrapper
module.exports = SecureFS;
|
408
node_modules/newman/lib/run/summary.js
generated
vendored
Normal file
408
node_modules/newman/lib/run/summary.js
generated
vendored
Normal file
@@ -0,0 +1,408 @@
|
||||
var _ = require('lodash'),
|
||||
sdk = require('postman-collection'),
|
||||
SerialiseError = require('serialised-error'),
|
||||
RunSummary;
|
||||
|
||||
/**
 * Creates and returns a RunSummary instance for the current collection run.
 *
 * @constructor
 * @param {EventEmitter} emitter - An EventEmitter instance with event handler attachments to add run information to.
 * @param {Object} options - A set of run summary creation options.
 */
RunSummary = function RunSummary (emitter, options) {
    // retain a reference to this instance for use inside the event handlers attached below
    var summary = this;

    // seed the summary with its trackers and failure stores
    _.assign(summary, /** @lends RunSummary.prototype */ {
        // the collection being executed (Collection)
        collection: _.get(options, 'collection'),

        // the environment in use during the run (VariableScope)
        environment: _.get(options, 'environment'),

        // global variables in use during the run (VariableScope)
        globals: _.get(options, 'globals'),

        // information related to the run itself
        run: {
            // per-event counters; each tracker carries total / pending / failed counts
            stats: {
                iterations: {},
                items: {},
                scripts: {},
                prerequests: {},
                requests: {},
                tests: {},
                assertions: {},
                testScripts: {},
                prerequestScripts: {}
            },

            // aggregate timing figures (average / min / max / standard deviation)
            // for the response, dns-lookup and first-byte phases of requests
            timings: {
                responseAverage: 0,
                responseMin: 0,
                responseMax: 0,
                responseSd: 0,
                dnsAverage: 0,
                dnsMin: 0,
                dnsMax: 0,
                dnsSd: 0,
                firstByteAverage: 0,
                firstByteMin: 0,
                firstByteMax: 0,
                firstByteSd: 0
            },

            // detailed record of execution order, requests, responses and assertions
            executions: [],

            // data-transfer totals for the run
            transfers: {
                responseTotal: 0
            },

            // all (non-fatal) errors encountered during the run
            failures: [],

            // fatal error that aborted the run prematurely, if any
            error: null
        }
    });

    // track run timings (start and end)
    RunSummary.attachTimingTrackers(summary, emitter);

    // accumulate statistics for every event and its "before" counterpart
    RunSummary.attachStatisticTrackers(summary, emitter);

    // accumulate statistics on requests - such as size and time
    RunSummary.attachRequestTracker(summary, emitter);

    // accumulate errors (failures) from all events
    RunSummary.attachFailureTrackers(summary, emitter);

    // accumulate all execution specific data in collection
    RunSummary.attachReportingTrackers(summary, emitter);
};
|
||||
|
||||
_.assign(RunSummary, {
|
||||
attachReportingTrackers (summary, emitter) {
|
||||
var cache = {},
|
||||
executions = summary.run.executions;
|
||||
|
||||
emitter.on('beforeItem', function (err, o) {
|
||||
if (err || !_.get(o, 'cursor.ref')) { return; }
|
||||
|
||||
cache[o.cursor.ref] = _.assignIn(cache[o.cursor.ref] || {}, {
|
||||
cursor: o.cursor,
|
||||
item: o.item
|
||||
});
|
||||
});
|
||||
|
||||
// save all responses in executions array
|
||||
emitter.on('request', function (err, o) {
|
||||
if (!_.get(o, 'cursor.ref')) { return; }
|
||||
|
||||
var execution = cache[o.cursor.ref] = (cache[o.cursor.ref] || {});
|
||||
|
||||
executions.push(_.assignIn(execution, {
|
||||
cursor: o.cursor,
|
||||
request: o.request,
|
||||
response: o.response,
|
||||
id: _.get(o, 'item.id')
|
||||
}, err && {
|
||||
requestError: err || undefined
|
||||
}));
|
||||
});
|
||||
|
||||
// save all script execution errors in each execution
|
||||
emitter.on('script', function (err, o) {
|
||||
if (!_.get(o, 'cursor.ref')) { return; }
|
||||
|
||||
var execution = cache[o.cursor.ref] = (cache[o.cursor.ref] || {}),
|
||||
eventName = o && o.event && (o.event.listen + 'Script');
|
||||
|
||||
// store the script error corresponding to the script event name
|
||||
err && (execution && eventName) && (execution[eventName] || (execution[eventName] = [])).push({
|
||||
error: err
|
||||
});
|
||||
});
|
||||
|
||||
// save all assertions in each execution
|
||||
emitter.on('assertion', function (err, o) {
|
||||
if (!_.get(o, 'cursor.ref')) { return; }
|
||||
|
||||
var execution = cache[o.cursor.ref] = (cache[o.cursor.ref] || {});
|
||||
|
||||
if (!execution) { return; }
|
||||
|
||||
(execution.assertions || (execution.assertions = [])).push({
|
||||
assertion: o.assertion,
|
||||
skipped: o.skipped,
|
||||
error: err || undefined
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
attachTimingTrackers (summary, emitter) {
|
||||
// mark the point when the run started
|
||||
// also mark the point when run completed and also store error if needed
|
||||
emitter.on('start', function () { summary.run.timings.started = Date.now(); });
|
||||
emitter.on('beforeDone', function () {
|
||||
summary.run.timings.completed = Date.now();
|
||||
});
|
||||
emitter.on('done', function (err) {
|
||||
err && (summary.error = err);
|
||||
});
|
||||
},
|
||||
attachStatisticTrackers (summary, emitter) {
|
||||
// accumulate statistics on all event
|
||||
// for all types of events track the counters for the event and its corresponding "before" counterpart
|
||||
_.forEach(summary.run.stats, function (tracker, name) {
|
||||
// the actual event names are singular than their plural trackers, so we make the name singular
|
||||
name = name.slice(0, -1); // remove last character
|
||||
|
||||
// populate initial values of trackers
|
||||
_.assign(tracker, { total: 0, pending: 0, failed: 0 });
|
||||
|
||||
// Set up common listeners for a set of events, which tracks how many times they were executed and records
|
||||
// the ones which had an error passed as first argument
|
||||
emitter.on(_.camelCase('before-' + name), function () {
|
||||
tracker.pending += 1;
|
||||
});
|
||||
|
||||
emitter.on(name, function (err) {
|
||||
// check pending so that, it does not negate for items that do not have a `before` counterpart
|
||||
tracker.pending && (tracker.pending -= 1);
|
||||
err && (tracker.failed += 1);
|
||||
tracker.total += 1;
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
attachRequestTracker (summary, emitter) {
|
||||
// accumulate statistics on requests
|
||||
emitter.on('request', function (err, o) {
|
||||
if (err || !(o && o.response)) { return; }
|
||||
|
||||
var size = _.isFunction(o.response.size) && o.response.size(),
|
||||
time = o.response.responseTime,
|
||||
requestCount = summary.run.stats.requests.total,
|
||||
timings,
|
||||
timingPhases;
|
||||
|
||||
// compute the response size total
|
||||
size && (summary.run.transfers.responseTotal += (size.body || 0 + size.headers || 0));
|
||||
|
||||
// if there are redirects, get timings for the last request sent
|
||||
timings = _.last(_.get(o, 'history.execution.data'));
|
||||
timings = timings && timings.timings;
|
||||
timingPhases = timings && sdk.Response.timingPhases(timings);
|
||||
|
||||
(timingPhases || time) && _.forEach([
|
||||
'dns',
|
||||
'firstByte',
|
||||
'response'
|
||||
], (value) => {
|
||||
var currentValue = (value === 'response') ? time : (timingPhases && timingPhases[value]),
|
||||
previousAverage = summary.run.timings[`${value}Average`],
|
||||
previousVariance = Math.pow(summary.run.timings[`${value}Sd`], 2),
|
||||
delta1 = currentValue - previousAverage,
|
||||
delta2,
|
||||
currentVariance;
|
||||
|
||||
if (!currentValue) { return; }
|
||||
|
||||
// compute average time for the given phase of request
|
||||
summary.run.timings[`${value}Average`] =
|
||||
(previousAverage * (requestCount - 1) + currentValue) / requestCount;
|
||||
|
||||
// compute minimum time for the given phase of request
|
||||
if (!summary.run.timings[`${value}Min`]) {
|
||||
summary.run.timings[`${value}Min`] = currentValue;
|
||||
}
|
||||
else {
|
||||
summary.run.timings[`${value}Min`] =
|
||||
Math.min(summary.run.timings[`${value}Min`], currentValue);
|
||||
}
|
||||
|
||||
// compute maximum time the given phase of request
|
||||
summary.run.timings[`${value}Max`] = Math.max(summary.run.timings[`${value}Max`], currentValue);
|
||||
|
||||
// compute standard deviation for the given phase of request
|
||||
// refer Welford's online algorithm from
|
||||
// https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
|
||||
delta2 = currentValue - summary.run.timings[`${value}Average`];
|
||||
currentVariance = (previousVariance * (requestCount - 1) + (delta1 * delta2)) / requestCount;
|
||||
summary.run.timings[`${value}Sd`] = Math.sqrt(currentVariance);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
attachFailureTrackers (summary, emitter) {
    // every lifecycle event whose error argument should be recorded as a run failure
    var eventsToTrack = ['beforeIteration', 'iteration', 'beforeItem', 'item', 'beforeScript', 'script',
        'beforePrerequest', 'prerequest', 'beforeRequest', 'request', 'beforeTest', 'test', 'beforeAssertion',
        'assertion'];

    // accumulate failures of all events
    // NOTE that surrogate events (which throw duplicate arguments) are not recorded
    _.forEach(eventsToTrack, function (event) {
        // push failures sent from "before" events
        emitter.on(event, function (err, o) {
            if (!err) { return; }

            // the failure's source label starts as the event name and is refined below
            var item = o && o.item,
                source = event;

            // in case of user script error, point to the line and column of the script and its type
            if (event === 'script') {
                o.event && (source = o.event.listen + '-script');
                if (err.stacktrace && err.stacktrace[0] && err.stacktrace[0].lineNumber) {
                    // NOTE(review): the -2 offset presumably compensates for wrapper lines the
                    // sandbox adds around the user script — confirm against the script runtime
                    source += (':' + (err.stacktrace[0].lineNumber - 2));
                    err.stacktrace[0].columnNumber && (source += (':' + err.stacktrace[0].columnNumber));
                }
            }
            // assertion errors need to know which assertion in the test was this
            else if (event === 'assertion') {
                _.has(err, 'index') && (source += (':' + err.index));
                source += ' in test-script';
            }

            // if this is a plain error, convert it to serialised error
            if (err.stack && !err.stacktrace) {
                err = new SerialiseError(err, true);
            }

            summary.run.failures.push({
                error: err,
                at: source,
                source: item || undefined,
                parent: item && item.__parent && item.__parent.__parent || undefined,
                cursor: o.cursor || {}
            });
        });
    });
}
|
||||
});
|
||||
|
||||
// expose the run summary constructor
module.exports = RunSummary;
|
Reference in New Issue
Block a user