feat: Create a mini Node.js server with Newman for testing without the Postman GUI.

This mimics a run in a CI/CD environment or Docker container.
Simon Priet
2021-09-08 14:01:19 +02:00
parent 5fbd7c88fa
commit e69a613a37
5610 changed files with 740417 additions and 3 deletions
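
For context, here is a minimal sketch of the kind of headless Newman run this commit enables, using Newman's Node.js API; the file paths and reporter choice are illustrative placeholders, not files from this commit.

var newman = require('newman');

newman.run({
    collection: require('./postman/collection.json'), // placeholder path
    environment: './postman/environment.json',        // placeholder path
    reporters: 'cli'
}, function (err, summary) {
    if (err) { throw err; }
    // exit non-zero so a CI/CD job fails when any assertion failed
    process.exit(summary.run.failures.length ? 1 : 0);
});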


@@ -0,0 +1,65 @@
var _ = require('lodash'),
sdk = require('postman-collection'),
/**
* @const
* @type {string}
*/
FUNCTION = 'function',
SAFE_CONTEXT_PROPERTIES = ['replayState', 'coords'];
/**
* Creates a context object to be used with `http-request.command` extension.
*
* @function createItemContext
*
* @param {Object} payload
* @param {Item} payload.item
* @param {Object} [payload.coords]
* @param {Object} [defaults]
* @param {Object} [defaults.replayState]
* @param {Object} [defaults.coords]
*
* @returns {ItemContext}
*/
module.exports = function (payload, defaults) {
// extract properties from defaults that can/should be reused in new context
var context = defaults ? _.pick(defaults, SAFE_CONTEXT_PROPERTIES) : {};
// set cursor to context
!context.coords && (context.coords = payload.coords);
// save original item for reference
context.originalItem = payload.item;
// we clone item from the payload, so that we can make any changes we need there, without mutating the
// collection
context.item = new sdk.Item(payload.item.toJSON());
// get a reference to the Auth instance from the item, so changes are synced back
context.auth = context.originalItem.getAuth();
// Make sure run is not errored out if older version of collection SDK is used.
// @todo remove this safety check in the next release
if (typeof context.originalItem.getProtocolProfileBehaviorResolved === FUNCTION) {
// get protocolProfileBehavior for the item, also inherited from parent
context.protocolProfileBehavior = context.originalItem.getProtocolProfileBehaviorResolved();
}
else {
// get protocolProfileBehavior for the item
context.protocolProfileBehavior = context.originalItem.protocolProfileBehavior;
}
/**
* @typedef {Object} ItemContext
* @property {Object} coords - current cursor
* @property {Item} originalItem - reference to the item in the collection
* @property {Item} item - Holds a copy of the item given in the payload, so that it can be manipulated
* as necessary
* @property {RequestAuthBase|undefined} auth - If present, is the instance of Auth in the collection, which
* is changed as necessary using intermediate requests, etc.
* @property {ReplayState} replayState - has context on number of replays(if any) for this request
*/
return context;
};
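
A rough usage sketch for the factory above; this module is internal to postman-runtime, and the require path and request definition here are assumptions for illustration.

var sdk = require('postman-collection'),
    createItemContext = require('postman-runtime/lib/runner/create-item-context'); // assumed path

var context = createItemContext({
    item: new sdk.Item({ request: 'https://example.com/get' }),
    coords: { position: 0, iteration: 0 }
});
// context.item is a disposable clone; context.originalItem still references the source item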

node_modules/postman-runtime/lib/runner/cursor.js

@@ -0,0 +1,376 @@
var _ = require('lodash'),
uuid = require('uuid'),
Cursor;
/**
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
* @constructor
*/
Cursor = function RunCursor (length, cycles, position, iteration, ref) { // eslint-disable-line func-name-matching
this.length = Cursor.validate(length, 0);
this.position = Cursor.validate(position, 0, this.length);
this.cycles = Cursor.validate(cycles, 1, 1);
this.iteration = Cursor.validate(iteration, 0, this.cycles);
this.ref = ref || uuid.v4();
};
_.assign(Cursor.prototype, {
/**
* Load the cursor state from a plain object or another Cursor instance
*
* @param {Object} state
* @param {Number} [state.length=0]
* @param {Number} [state.cycles=1]
* @param {Number} [state.position=0]
* @param {Number} [state.iteration=0]
* @param {String} [state.ref]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
load: function (state, callback, scope) {
!state && (state = {});
(state instanceof Cursor) && (state = state.current());
this.reset(state.length, state.cycles, state.position, state.iteration, state.ref, callback, scope);
},
/**
* Reset all cursor dimensions (length, cycles, position, iteration and ref)
*
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
reset: function (length, cycles, position, iteration, ref, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// validate parameter defaults
_.isNil(length) && (length = this.length);
_.isNil(cycles) && (cycles = this.cycles);
_.isNil(position) && (position = this.position);
_.isNil(iteration) && (iteration = this.iteration);
_.isNil(ref) && (ref = this.ref);
// use the constructor to set the values
Cursor.call(this, length, cycles, position, iteration, ref);
// send before and after values to the callback
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Update length and cycle bounds
*
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
bounds: function (length, cycles, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// validate parameter defaults
_.isNil(length) && (length = this.length);
_.isNil(cycles) && (cycles = this.cycles);
// use the constructor to set the values
Cursor.call(this, length, cycles, this.position, this.iteration);
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Set everything to minimum dimension
*
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
zero: function (callback, scope) {
var coords = _.isFunction(callback) && this.current();
this.position = 0;
this.iteration = 0;
// send before and after values to the callback
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Set everything, including length and cycle bounds, to the minimum dimension
*
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
clear: function (callback, scope) {
var coords = _.isFunction(callback) && this.current();
this.position = 0;
this.iteration = 0;
this.cycles = 1;
this.length = 0;
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Seek to a specified Cursor
*
* @param {Number} [position]
* @param {Number} [iteration]
* @param {Function} [callback] - receives `(err:Error, changed:Boolean, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
seek: function (position, iteration, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// if null or undefined implies use existing seek position
_.isNil(position) && (position = this.position);
_.isNil(iteration) && (iteration = this.iteration);
// make the pointers stay within boundary
if ((position >= this.length) || (iteration >= this.cycles) || (position < 0) || (iteration < 0) ||
isNaN(position) || isNaN(iteration)) {
return coords &&
callback.call(scope || this, new Error('runcursor: seeking out of bounds: ' + [position, iteration]));
}
// floor the numbers
position = ~~position;
iteration = ~~iteration;
// set the new positions
this.position = Cursor.validate(position, 0, this.length);
this.iteration = Cursor.validate(iteration, 0, this.cycles);
// finally execute the callback with the seek position
return coords && callback.call(scope || this, null, this.hasChanged(coords), this.current(), coords);
},
/**
* Seek one forward
*
* @param {Function} [callback] - receives `(err:Error, changed:Boolean, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
next: function (callback, scope) {
var position = this.position,
iteration = this.iteration,
coords;
// increment position
position += 1;
// check if we need to increment cycle
if (position >= this.length) {
// set position to 0 and increment iteration
position = 0;
iteration += 1;
if (iteration >= this.cycles) {
coords = _.isFunction(callback) && this.current();
coords.eof = true;
return coords && callback.call(scope || this, null, false, coords, coords);
}
coords && (coords.cr = true);
}
// finally handover the new coordinates to seek function
return this.seek(position, iteration, callback, scope);
},
/**
* Tentative Cursor status, if we do `.next()`
*
* @param {Object} coords
*
* @returns {Object}
*/
whatnext: function (coords) {
var base = {
ref: this.ref,
length: this.length,
cycles: this.cycles
},
position,
iteration;
if (!_.isObject(coords)) {
return _.assign(base, {eof: true, bof: true, empty: this.empty()});
}
if (!this.length) {
return _.assign(base, {eof: true, bof: true, empty: true});
}
position = coords.position;
iteration = coords.iteration;
// increment position
position += 1;
// check if we need to increment cycle
if (position >= this.length) {
// set position to 0 and increment iteration
position = 0;
iteration += 1;
if (iteration >= this.cycles) {
return _.assign(base, {
position: this.length - 1,
iteration: iteration - 1,
eof: true
});
}
return _.assign(base, {
position: position,
iteration: iteration,
cr: true
});
}
return _.assign(base, {position: position, iteration: iteration});
},
/**
* Check whether the current position and iteration differ from the specified coordinates
*
* @param {Object} coords
* @returns {Boolean}
*/
hasChanged: function (coords) {
return _.isObject(coords) && !((this.position === coords.position) && (this.iteration === coords.iteration));
},
/**
* Current Cursor state
*
* @returns {Object}
*/
current: function () {
return {
position: this.position,
iteration: this.iteration,
length: this.length,
cycles: this.cycles,
empty: this.empty(),
eof: this.eof(),
bof: this.bof(),
cr: this.cr(),
ref: this.ref
};
},
/**
* Is the current position going to trigger a new iteration on `.next`?
*
* @returns {Boolean}
*/
cr: function () {
return !this.length || (this.position >= this.length);
},
/**
* @returns {Boolean}
*/
eof: function () {
return !this.length || (this.position >= this.length) && (this.iteration >= this.cycles);
},
/**
* @returns {Boolean}
*/
bof: function () {
return !this.length || ((this.position === 0) && (this.iteration === 0));
},
/**
* @returns {Boolean}
*/
empty: function () {
return !this.length;
},
/**
* @returns {Object}
*/
valueOf: function () {
return this.current();
},
clone: function () {
return new Cursor(this.length, this.cycles, this.position, this.iteration);
}
});
_.assign(Cursor, {
/**
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
*
* @returns {Cursor}
*/
create: function (length, cycles, position, iteration, ref) {
return new Cursor(length, cycles, position, iteration, ref);
},
/**
* @param {Object|Cursor} obj
* @param {Object} [bounds]
* @param {Number} [bounds.length]
* @param {Number} [bounds.cycles]
*
* @returns {Cursor}
*/
box: function (obj, bounds) {
// already a Cursor, do nothing
if (obj instanceof Cursor) {
bounds && obj.bounds(bounds.length, bounds.cycles);
return obj;
}
// nothing to box, create a blank Cursor
if (!_.isObject(obj)) { return new Cursor(bounds && bounds.length, bounds && bounds.cycles); }
// load Cursor values from object
return new Cursor((bounds || obj).length, (bounds || obj).cycles, obj.position, obj.iteration, obj.ref);
},
/**
* @private
*
* @param {Number} num
* @param {Number} min - minimum value, also used as the default
* @param {Number} [max]
*
* @returns {Number}
*/
validate: function (num, min, max) {
if (typeof num !== 'number' || num < min) {
return min;
}
if (num === Infinity) {
return _.isNil(max) ? min : max;
}
return num;
}
});
module.exports = Cursor;
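
A small sketch exercising the Cursor API defined above; the numbers are arbitrary and the require path is assumed from the file header.

var Cursor = require('postman-runtime/lib/runner/cursor');

var cursor = Cursor.create(2, 3); // 2 items per iteration, 3 iterations
cursor.next(function (err, changed, coords) {
    // coords.position === 1, coords.iteration === 0, changed === true
});
cursor.seek(0, 2, function (err, changed, coords) {
    // jumps straight to the first item of the last iteration
});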


@@ -0,0 +1,105 @@
var _ = require('lodash'),
util = require('../util'),
backpack = require('../../backpack');
module.exports = {
/**
* All the events that this extension triggers
* @type {Array}
*/
triggers: ['pause', 'resume', 'abort'],
prototype: /** @lends Run.prototype */ {
/**
* Pause a run
*
* @param {Function} callback
*/
pause: function (callback) {
callback = backpack.ensure(callback, this);
if (this.paused) { return callback && callback(new Error('run: already paused')); }
// schedule the pause command as an interrupt and flag that the run is pausing
this.paused = true;
this.interrupt('pause', null, callback);
},
/**
* Resume a paused run
*
* @param {Function} callback
*/
resume: function (callback) {
callback = backpack.ensure(callback, this);
if (!this.paused) { return callback && callback(new Error('run: not paused')); }
// set flag that it is no longer paused and fire the stored callback for the command when it was paused
this.paused = false;
setTimeout(function () {
this.__resume();
delete this.__resume;
this.triggers.resume(null, this.state.cursor.current());
}.bind(this), 0);
callback && callback();
},
/**
* Aborts a run
*
* @param {boolean} [summarise=true]
* @param {function} callback
*/
abort: function (summarise, callback) {
if (_.isFunction(summarise) && !callback) {
callback = summarise;
summarise = true;
}
this.interrupt('abort', {
summarise: summarise
}, callback);
_.isFunction(this.__resume) && this.resume();
}
},
process: /** @lends Run.commands */ {
pause: function (userback, payload, next) {
// trigger the secondary callbacks
this.triggers.pause(null, this.state.cursor.current());
// tuck away the command completion callback in the run object so that it can be used during resume
this.__resume = next;
// execute the userback sent as part of the command and do so in a try block to ensure it does not hamper
// the process tick
var error = util.safeCall(userback, this);
// if there is an error executing the userback, then and only then raise the error (which stops the run)
if (error) {
return next(error);
}
},
/**
* @param {Function} userback
* @param {Object} payload
* @param {Boolean} payload.summarise
* @param {Function} next
*/
abort: function (userback, payload, next) {
// clear instruction pool and as such there will be nothing next to execute
this.pool.clear();
this.triggers.abort(null, this.state.cursor.current());
// execute the userback sent as part of the command and do so in a try block to ensure it does not hamper
// the process tick
backpack.ensure(userback, this) && userback();
next(null);
}
}
};
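
An illustrative sketch of driving these controls from the outside, assuming the Runner entry point exported by postman-runtime and a throwaway single-request collection; this is a sketch of the control flow, not part of this commit.

var runtime = require('postman-runtime'),
    sdk = require('postman-collection'),
    collection = new sdk.Collection({ item: [{ request: 'https://postman-echo.com/get' }] });

new runtime.Runner().run(collection, { iterationCount: 1 }, function (err, run) {
    if (err) { throw err; }
    run.start({
        beforeItem: function () {
            // exercise the controls added above: pause, then resume shortly after
            run.pause(function () {
                setTimeout(function () { run.resume(); }, 500);
            });
        }
    });
});
// run.abort() would clear the instruction pool and end the run early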


@@ -0,0 +1,62 @@
var _ = require('lodash');
module.exports = {
init: function (done) {
done();
},
triggers: ['waitStateChange'],
prototype: {
/**
* @param {Function} fn - function to execute
* @param {Object} options
* @param {String} options.source
* @param {Number} options.time
* @param {Object} options.cursor
* @param {Function} next
* @private
*/
queueDelay: function (fn, options, next) {
var time = _.isFinite(options.time) ? parseInt(options.time, 10) : 0;
// if the time is a valid and finite time, we queue the delay command
if (time > 0) {
this.queue('delay', {
cursor: options.cursor,
source: options.source,
time: time
}).done(fn);
}
// otherwise, we do not delay and simply execute the function that was supposed to be called post delay
else {
fn();
}
next();
}
},
process: {
/**
* @param {Object} payload
* @param {Number} payload.time
* @param {Object} payload.cursor
* @param {String} payload.source
* @param {Function} next
*/
delay: function (payload, next) {
var cursor = payload.cursor || this.state.cursor.current();
this.waiting = true; // set flag
// trigger the waiting state change event
this.triggers.waitStateChange(null, cursor, true, payload.time, payload.source);
setTimeout((function () {
this.waiting = false; // unset flag
this.triggers.waitStateChange(null, cursor, false, payload.time, payload.source);
next();
}).bind(this), payload.time || 0);
}
}
};
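
The `time` values fed into queueDelay above come from the run options; a sketch of the relevant option shape, with the keys taken from the `delay.item` and `delay.iteration` lookups later in this diff and the millisecond values chosen arbitrarily.

// run options (passed to Runner#run as in the earlier sketch)
var options = {
    delay: {
        item: 200,      // ms pause before each item, consumed by queueDelay above
        iteration: 1000 // ms pause between iterations
    }
};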


@@ -0,0 +1,530 @@
var _ = require('lodash'),
uuid = require('uuid'),
async = require('async'),
util = require('../util'),
sdk = require('postman-collection'),
sandbox = require('postman-sandbox'),
serialisedError = require('serialised-error'),
ToughCookie = require('tough-cookie').Cookie,
createItemContext = require('../create-item-context'),
ASSERTION_FAILURE = 'AssertionFailure',
SAFE_CONTEXT_VARIABLES = ['_variables', 'environment', 'globals', 'collectionVariables', 'cookies', 'data',
'request', 'response'],
EXECUTION_REQUEST_EVENT_BASE = 'execution.request.',
EXECUTION_RESPONSE_EVENT_BASE = 'execution.response.',
EXECUTION_ASSERTION_EVENT_BASE = 'execution.assertion.',
EXECUTION_ERROR_EVENT_BASE = 'execution.error.',
EXECUTION_COOKIES_EVENT_BASE = 'execution.cookies.',
COOKIES_EVENT_STORE_ACTION = 'store',
COOKIE_STORE_PUT_METHOD = 'putCookie',
COOKIE_STORE_UPDATE_METHOD = 'updateCookie',
FILE = 'file',
REQUEST_BODY_MODE_FILE = 'file',
REQUEST_BODY_MODE_FORMDATA = 'formdata',
getCookieDomain, // fn
postProcessContext, // fn
sanitizeFiles; // fn
postProcessContext = function (execution, failures) { // function determines whether the event needs to abort
var error;
if (failures && failures.length) {
error = new Error(failures.join(', '));
error.name = ASSERTION_FAILURE;
}
return error ? serialisedError(error, true) : undefined;
};
/**
* Removes files in Request body if any.
*
* @private
*
* @param {Request~definition} request Request JSON representation to be sanitized
* @param {Function} callback function invoked with error, request and sanitisedFiles.
* sanitisedFiles is the list of files removed from request.
*
* @note this function mutates the request
* @todo remove files path from request.certificate
*/
sanitizeFiles = function (request, callback) {
if (!request) {
return callback(new Error('Could not complete pm.sendRequest. Request is empty.'));
}
var sanitisedFiles = [];
// do nothing if request body is empty
if (!request.body) {
// send request as such
return callback(null, request, sanitisedFiles);
}
// in case of request body mode is file, we strip it out
if (request.body.mode === REQUEST_BODY_MODE_FILE) {
sanitisedFiles.push(_.get(request, 'body.file.src'));
request.body = null; // mutate the request for body
}
// if body is form-data then we deep dive into the data items and remove the entries that have file data
else if (request.body.mode === REQUEST_BODY_MODE_FORMDATA) {
// eslint-disable-next-line lodash/prefer-immutable-method
_.remove(request.body.formdata, function (param) {
// blank param and non-file param is removed
if (!param || param.type !== FILE) { return false; }
// at this point the param needs to be removed
sanitisedFiles.push(param.src);
return true;
});
}
return callback(null, request, sanitisedFiles);
};
/**
* Fetch domain name from CookieStore event arguments.
*
* @private
* @param {String} fnName - CookieStore method name
* @param {Array} args - CookieStore method arguments
* @returns {String|Undefined} - Domain name
*/
getCookieDomain = function (fnName, args) {
if (!(fnName && args)) {
return;
}
var domain;
switch (fnName) {
case 'findCookie':
case 'findCookies':
case 'removeCookie':
case 'removeCookies':
domain = args[0];
break;
case 'putCookie':
case 'updateCookie':
domain = args[0] && args[0].domain;
break;
default:
return;
}
return domain;
};
/**
* Script execution extension of the runner.
* This module exposes processors for executing scripts before and after requests. Essentially, the processors
* themselves are not aware of other processors; they simply allow running a script and then queue a processor as
* defined in the payload.
*
* Adds options
* - stopOnScriptError:Boolean [false]
* - host:Object [undefined]
*/
module.exports = {
init: function (done) {
var run = this;
// if this run object already has a host, we do not need to create one.
if (run.host) {
return done();
}
// @todo - remove this when chrome app and electron host creation is offloaded to runner
// @todo - can this be removed now in runtime v4?
if (run.options.host && run.options.host.external === true) {
run.host = run.options.host.instance;
return done();
}
sandbox.createContext(_.merge({
timeout: _(run.options.timeout).pick(['script', 'global']).values().min()
// debug: true
}, run.options.host), function (err, context) {
if (err) { return done(err); }
// store the host in run object for future use and move on
run.host = context;
context.on('console', function () {
run.triggers.console.apply(run.triggers, arguments);
});
context.on('error', function () {
run.triggers.error.apply(run.triggers, arguments);
});
context.on('execution.error', function () {
run.triggers.exception.apply(run.triggers, arguments);
});
context.on('execution.assertion', function () {
run.triggers.assertion.apply(run.triggers, arguments);
});
done();
});
},
/**
* This lists the name of the events that the script processors are likely to trigger
*
* @type {Array}
*/
triggers: ['beforeScript', 'script', 'assertion', 'exception', 'console'],
process: {
/**
* This processor's job is to do the following:
* - trigger event by its name
* - execute all scripts that the event listens to and return execution results
*
* @param {Object} payload
* @param {String} payload.name
* @param {Item} payload.item
* @param {Object} [payload.context]
* @param {Cursor} [payload.coords]
* @param {Number} [payload.scriptTimeout] - The millisecond timeout for the current running script.
* @param {Array.<String>} [payload.trackContext]
* @param {Boolean} [payload.stopOnScriptError] - if set to true, then a synchronous error encountered during
* execution of a script will stop executing any further scripts
* @param {Boolean} [payload.abortOnFailure]
* @param {Boolean} [payload.stopOnFailure]
* @param {Function} next
*
* @note - in order to raise trigger for the entire event, ensure your extension has registered the triggers
*/
event: function (payload, next) {
var item = payload.item,
eventName = payload.name,
cursor = payload.coords,
// the payload can have a list of variables to track from the context post execution, ensure that
// those are accurately set
track = _.isArray(payload.trackContext) && _.isObject(payload.context) &&
// ensure that only those variables that are defined in the context are synced
payload.trackContext.filter(function (variable) {
return _.isObject(payload.context[variable]);
}),
stopOnScriptError = (_.has(payload, 'stopOnScriptError') ? payload.stopOnScriptError :
this.options.stopOnScriptError),
abortOnError = (_.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError),
// @todo: find a better home for this option processing
abortOnFailure = payload.abortOnFailure,
stopOnFailure = payload.stopOnFailure,
events;
// @todo: find a better place to code this so that event is not aware of such options
if (abortOnFailure) {
abortOnError = true;
}
// validate the payload
if (!eventName) {
return next(new Error('runner.extension~events: event payload is missing the event name.'));
}
if (!item) {
return next(new Error('runner.extension~events: event payload is missing the triggered item.'));
}
// get the list of events to be executed
// includes events in parent as well
events = item.events.listeners(eventName, {excludeDisabled: true});
// call the "before" event trigger by its event name.
// at this point, the one who queued this event, must ensure that the trigger for it is defined in its
// 'trigger' interface
this.triggers[_.camelCase('before-' + eventName)](null, cursor, events, item);
// with all the event listeners in place, we now iterate on them and execute its scripts. post execution,
// we accumulate the results in order to be passed on to the event callback trigger.
async.mapSeries(events, function (event, next) {
// in case the event has no script we bail out early
if (!event.script) {
return next(null, {event: event});
}
// get access to the script from the event.
var script = event.script,
executionId = uuid(),
assertionFailed = [],
asyncScriptError,
// create copy of cursor so we don't leak script ids outside `event.command`
// and across scripts
scriptCursor = _.clone(cursor);
// store the execution id in script
script._lastExecutionId = executionId; // please don't use it anywhere else!
// if we can find an id on script or event we add them to the cursor
// so logs and errors can be traced back to the script they came from
event.id && (scriptCursor.eventId = event.id);
event.script.id && (scriptCursor.scriptId = event.script.id);
// trigger the "beforeScript" callback
this.triggers.beforeScript(null, scriptCursor, script, event, item);
// add an event listener to trap all assertion events, but only if needed, to avoid needlessly
// accumulating stuff in memory.
(abortOnFailure || stopOnFailure) &&
this.host.on(EXECUTION_ASSERTION_EVENT_BASE + executionId, function (scriptCursor, assertions) {
_.forEach(assertions, function (assertion) {
assertion && !assertion.passed && assertionFailed.push(assertion.name);
});
});
// Store the error event, but only if needed, because the callback of host.execute()
// does not report execution errors for async scripts.
(abortOnError || stopOnScriptError) &&
// only store first async error in case of multiple errors
this.host.once(EXECUTION_ERROR_EVENT_BASE + executionId, function (scriptCursor, error) {
if (error && !(error instanceof Error)) {
error = new Error(error.message || error);
}
asyncScriptError = error;
// @todo: Figure out a way to abort the script execution here as soon as we get an error.
// We can send `execution.abort.` event to sandbox for this, but currently it silently
// terminates the script execution without triggering the callback.
});
this.host.on(EXECUTION_COOKIES_EVENT_BASE + executionId,
function (eventId, action, fnName, args) {
// only store action is supported, might need to support
// more cookie actions in next 2 years ¯\_(ツ)_/¯
if (action !== COOKIES_EVENT_STORE_ACTION) { return; }
var self = this,
dispatchEvent = EXECUTION_COOKIES_EVENT_BASE + executionId,
cookieJar = _.get(self, 'requester.options.cookieJar'),
cookieStore = cookieJar && cookieJar.store,
cookieDomain;
if (!cookieStore) {
return self.host.dispatch(dispatchEvent, eventId, 'CookieStore: no store found');
}
if (typeof cookieStore[fnName] !== 'function') {
return self.host.dispatch(dispatchEvent, eventId,
`CookieStore: invalid method name '${fnName}'`);
}
!Array.isArray(args) && (args = []);
// set expected args length to make sure callback is always called
args.length = cookieStore[fnName].length - 1;
// there's no way cookie store can identify the difference
// between regular and programmatic access. So, for now
// we check for programmatic access using the cookieJar
// helper method and emit the default empty value for that
// method.
// @note we don't emit an access-denied error here because
// that might block a user's use-case when accessing
// cookies for a sub-domain.
cookieDomain = getCookieDomain(fnName, args);
if (cookieJar && typeof cookieJar.allowProgrammaticAccess === 'function' &&
!cookieJar.allowProgrammaticAccess(cookieDomain)) {
return self.host.dispatch(dispatchEvent, eventId,
`CookieStore: programmatic access to "${cookieDomain}" is denied`);
}
// serialize cookie object
if (fnName === COOKIE_STORE_PUT_METHOD && args[0]) {
args[0] = ToughCookie.fromJSON(args[0]);
}
if (fnName === COOKIE_STORE_UPDATE_METHOD && args[0] && args[1]) {
args[0] = ToughCookie.fromJSON(args[0]);
args[1] = ToughCookie.fromJSON(args[1]);
}
// add store method's callback argument
args.push(function (err, res) {
// serialize error message
if (err && err instanceof Error) {
err = err.message || String(err);
}
self.host.dispatch(dispatchEvent, eventId, err, res);
});
try {
cookieStore[fnName].apply(cookieStore, args);
}
catch (error) {
self.host.dispatch(dispatchEvent, eventId,
`runtime~CookieStore: error executing "${fnName}"`);
}
}.bind(this));
this.host.on(EXECUTION_REQUEST_EVENT_BASE + executionId,
function (scriptCursor, id, requestId, request) {
// remove files in request body if any
sanitizeFiles(request, function (err, request, sanitisedFiles) {
if (err) {
return this.host.dispatch(EXECUTION_RESPONSE_EVENT_BASE + id, requestId, err);
}
var nextPayload;
// if the request was sanitized, send a warning
if (!_.isEmpty(sanitisedFiles)) {
this.triggers.console(scriptCursor, 'warn',
'uploading files from scripts is not allowed');
}
nextPayload = {
item: new sdk.Item({request: request}),
coords: scriptCursor,
// @todo - get script type from the sandbox
source: 'script',
// abortOnError makes sure request command bubbles errors
// so we can pass it on to the callback
abortOnError: true
};
// create context for executing this request
nextPayload.context = createItemContext(nextPayload);
this.immediate('httprequest', nextPayload).done(function (result) {
this.host.dispatch(
EXECUTION_RESPONSE_EVENT_BASE + id,
requestId,
null,
result && result.response,
// @todo get cookies from result.history or pass PostmanHistory
// instance once it is fully supported
result && {cookies: result.cookies}
);
}).catch(function (err) {
this.host.dispatch(EXECUTION_RESPONSE_EVENT_BASE + id, requestId, err);
});
}.bind(this));
}.bind(this));
// finally execute the script
this.host.execute(event, {
id: executionId,
// debug: true,
timeout: payload.scriptTimeout, // @todo: Expose this as a property in Collection SDK's Script
cursor: scriptCursor,
context: _.pick(payload.context, SAFE_CONTEXT_VARIABLES),
serializeLogs: _.get(this, 'options.script.serializeLogs'),
// legacy options
legacy: {
_itemId: item.id,
_itemName: item.name
}
}, function (err, result) {
this.host.removeAllListeners(EXECUTION_REQUEST_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_ASSERTION_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_RESPONSE_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_COOKIES_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_ERROR_EVENT_BASE + executionId);
// Handle async errors as well.
// If there was an error running the script itself, that takes precedence
if (!err && asyncScriptError) {
err = asyncScriptError;
}
// electron IPC does not bubble errors to the browser process, so we serialize it here.
err && (err = serialisedError(err, true));
// if it is defined that certain variables are to be synced back to result, we do the same
track && result && track.forEach(function (variable) {
if (!(_.isObject(result[variable]) && payload.context[variable])) { return; }
var contextVariable = payload.context[variable],
mutations = result[variable].mutations;
// bail out if there are no mutations
if (!mutations) {
return;
}
// ensure that variable scope is treated accordingly
if (_.isFunction(contextVariable.applyMutation)) {
mutations = new sdk.MutationTracker(result[variable].mutations);
mutations.applyOn(contextVariable);
}
// @todo: unify the non variable scope flows and consume diff always
// and drop sending the full variable scope from sandbox
else {
util.syncObject(contextVariable, result[variable]);
}
});
// Get the failures. If there was an error running the script itself, that takes precedence
if (!err && (abortOnFailure || stopOnFailure)) {
err = postProcessContext(result, assertionFailed); // also use async assertions
}
// Ensure that we have SDK instances, not serialized plain objects.
// @todo - should this be handled by the sandbox?
result && result._variables && (result._variables = new sdk.VariableScope(result._variables));
result && result.environment && (result.environment = new sdk.VariableScope(result.environment));
result && result.globals && (result.globals = new sdk.VariableScope(result.globals));
result && result.collectionVariables &&
(result.collectionVariables = new sdk.VariableScope(result.collectionVariables));
result && result.request && (result.request = new sdk.Request(result.request));
// @note Since postman-sandbox@3.5.2, response object is not included in the execution result.
// Refer: https://github.com/postmanlabs/postman-sandbox/pull/512
// Adding back here to avoid breaking change in `script` callback.
// @todo revisit script callback args in runtime v8.
result && payload.context && payload.context.response &&
(result.response = new sdk.Response(payload.context.response));
// persist the pm.variables for the next script
result && result._variables &&
(payload.context._variables = new sdk.VariableScope(result._variables));
// persist the pm.variables for the next request
result && result._variables && (this.state._variables = new sdk.VariableScope(result._variables));
// persist the mutated request in payload context,
// @note this will be used for the next prerequest script or
// upcoming commands(request, httprequest).
result && result.request && (payload.context.request = result.request);
// now that this script is done executing, we trigger the event and move to the next script
this.triggers.script(err || null, scriptCursor, result, script, event, item);
// move to next script and pass on the results for accumulation
next(((stopOnScriptError || abortOnError || stopOnFailure) && err) ? err : null, _.assign({
event: event,
script: script,
result: result
}, err && {error: err})); // we use assign here to avoid needless error property
}.bind(this));
}.bind(this), function (err, results) {
// trigger the event completion callback
this.triggers[eventName](null, cursor, results, item);
next((abortOnError && err) ? err : null, results, err);
}.bind(this));
}
}
};
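
For orientation, a typical test script whose failed assertion would flow through the `execution.assertion.<executionId>` listener above and, with abortOnFailure set, stop the run; it executes inside the postman-sandbox host rather than directly in Node.

pm.test('status code is 200', function () {
    pm.response.to.have.status(200); // a failure here is reported as an assertion event
});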


@@ -0,0 +1,211 @@
var _ = require('lodash'),
async = require('async'),
uuid = require('uuid'),
// These are functions which a request passes through _before_ being sent. They take care of stuff such as
// variable resolution, loading of files, etc.
prehelpers = require('../request-helpers-presend'),
// Similarly, these run after the request, and have the power to dictate whether a request should be re-queued
posthelpers = require('../request-helpers-postsend'),
ReplayController = require('../replay-controller'),
RequesterPool = require('../../requester').RequesterPool,
RESPONSE_START_EVENT_BASE = 'response.start.',
RESPONSE_END_EVENT_BASE = 'response.end.';
module.exports = {
init: function (done) {
// Request timeouts are applied by the requester, so add them to requester options (if any).
// create a requester pool
this.requester = new RequesterPool(this.options, done);
},
// the http trigger is actually directly triggered by the requester
// todo - figure out whether we should trigger it from here rather than the requester.
triggers: ['beforeRequest', 'request', 'responseStart', 'io'],
process: {
/**
* @param {Object} payload
* @param {Item} payload.item
* @param {Object} payload.data
* @param {Object} payload.context
* @param {VariableScope} payload.globals
* @param {VariableScope} payload.environment
* @param {Cursor} payload.coords
* @param {Boolean} payload.abortOnError
* @param {String} payload.source
* @param {Function} next
*
* @todo validate payload
*/
httprequest: function (payload, next) {
var abortOnError = _.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError,
self = this,
context;
context = payload.context;
// generates a unique id for each http request
// a collection request can have multiple http requests
_.set(context, 'coords.httpRequestId', payload.httpRequestId || uuid());
// Run the helper functions
async.applyEachSeries(prehelpers, context, self, function (err) {
var xhr,
aborted,
item = context.item,
beforeRequest,
afterRequest,
safeNext;
// finish up current command
safeNext = function (error, finalPayload) {
// the error is passed twice to allow control between aborting the error vs just
// bubbling it up
return next((error && abortOnError) ? error : null, finalPayload, error);
};
// Helper function which calls the beforeRequest trigger.
beforeRequest = function (err) {
self.triggers.beforeRequest(err, context.coords, item.request, payload.item, {
httpRequestId: context.coords && context.coords.httpRequestId,
abort: function () {
!aborted && xhr && xhr.abort();
aborted = true;
}
});
};
// Helper function to call the afterRequest trigger.
afterRequest = function (err, response, request, cookies, history) {
self.triggers.request(err, context.coords, response, request, payload.item, cookies, history);
};
// Ensure that this is called.
beforeRequest(null);
if (err) {
// Since we encountered an error before even attempting to send the request, we bubble it up
// here.
afterRequest(err, undefined, item.request);
return safeNext(
err,
{request: item.request, coords: context.coords, item: context.originalItem}
);
}
if (aborted) {
return next(new Error('runtime: request aborted'));
}
self.requester.create({
type: 'http',
source: payload.source,
cursor: context.coords
}, function (err, requester) {
if (err) { return next(err); } // this should never happen
var requestId = uuid(),
replayOptions;
// eslint-disable-next-line max-len
requester.on(RESPONSE_START_EVENT_BASE + requestId, function (err, response, request, cookies, history) {
// we could have also added the response to the set of responses in the cloned item,
// but then, we would have to iterate over all of them, which seems unnecessary
context.response = response;
// run the post request helpers, which need to use the response, assigned above
async.applyEachSeries(posthelpers, context, self, function (error, options) {
if (error) {
return;
}
// find the first helper that requested a replay
replayOptions = _.find(options, {replay: true});
// bail out if we know that request will be replayed.
if (replayOptions) {
return;
}
// bail out if it's a pm.sendRequest
// @todo find a better way of identifying scripts
// @note don't use source='script'. Script requests
// can trigger `*.auth` source requests as well.
if (context.coords && context.coords.scriptId) {
return;
}
// trigger responseStart only for collection request.
// if there are replays, this will be triggered for the last request in the replay chain.
self.triggers.responseStart(err, context.coords, response, request, payload.item, cookies,
history);
});
});
requester.on(RESPONSE_END_EVENT_BASE + requestId, self.triggers.io.bind(self.triggers));
// eslint-disable-next-line max-len
xhr = requester.request(requestId, item.request, context.protocolProfileBehavior, function (err, res, req, cookies, history) {
err = err || null;
var nextPayload = {
response: res,
request: req,
item: context.originalItem,
cookies: cookies,
coords: context.coords,
history: history
},
replayController;
// trigger the request event.
// @note - we give the _original_ item in this trigger, so someone can do reference
// checking. Not sure if we should do that or not, but that's how it is.
// Don't break it.
afterRequest(err, res, req, cookies, history);
// Dispose off the requester, we don't need it anymore.
requester.dispose();
// do not process replays if there was an error
if (err) {
return safeNext(err, nextPayload);
}
// request replay logic
if (replayOptions) {
// prepare for replay
replayController = new ReplayController(context.replayState, self);
// replay controller invokes callback no. 1 when replaying the request
// invokes callback no. 2 when replay count has exceeded maximum limit
// @note: errors in replayed requests are passed to callback no. 1
return replayController.requestReplay(context,
context.item,
{source: replayOptions.helper},
// new payload with response from replay is sent to `next`
function (err, payloadFromReplay) { safeNext(err, payloadFromReplay); },
// replay was stopped, move on with older payload
function (err) {
// warn users that maximum retries have exceeded
// but don't bubble up the error with the request
self.triggers.console(context.coords, 'warn', (err.message || err));
safeNext(null, nextPayload);
}
);
}
// finish up for any other request
return safeNext(err, nextPayload);
});
});
});
}
}
};
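
Script-initiated requests also end up in the httprequest processor above, via the `execution.request.*` bridge in the event extension earlier in this diff; a typical sandbox-side call looks like the following, with the echo URL used purely for illustration.

pm.sendRequest('https://postman-echo.com/get', function (err, response) {
    console.log(err || response.code); // response is a postman-collection Response instance
});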


@@ -0,0 +1,277 @@
var _ = require('lodash'),
uuid = require('uuid'),
Response = require('postman-collection').Response,
visualizer = require('../../visualizer'),
/**
* List of request properties which can be mutated via pre-request
*
* @private
* @const
* @type {String[]}
*/
ALLOWED_REQUEST_MUTATIONS = ['url', 'method', 'headers', 'body'],
extractVisualizerData,
getResponseJSON;
/**
* Returns visualizer data from the latest execution result.
*
* @param {Array} prereqExecutions - pre-script executions results
* @param {Array} testExecutions - test-script executions results
* @returns {Object|undefined} - visualizer data
*/
extractVisualizerData = function (prereqExecutions, testExecutions) {
var visualizerData,
i;
if (_.isArray(testExecutions)) {
// loop through the test executions in reverse order to return data from latest execution
for (i = testExecutions.length - 1; i >= 0; i--) {
visualizerData = _.get(testExecutions[i], 'result.return.visualizer');
if (visualizerData) {
return visualizerData;
}
}
}
if (_.isArray(prereqExecutions)) {
// extract visualizer data from pre-request script results if it is not found earlier
for (i = prereqExecutions.length - 1; i >= 0; i--) {
visualizerData = _.get(prereqExecutions[i], 'result.return.visualizer');
if (visualizerData) {
return visualizerData;
}
}
}
};
/**
* Convert response into a JSON serializable object.
* The stream property is converted to base64 string for performance reasons.
*
* @param {Object} response - SDK Response instance
* @returns {Object}
*/
getResponseJSON = function (response) {
if (!Response.isResponse(response)) {
return;
}
return {
id: response.id,
code: response.code,
status: response.status,
header: response.headers && response.headers.toJSON(),
stream: response.stream && {
type: 'Base64',
data: response.stream.toString('base64')
},
responseTime: response.responseTime
};
};
/**
* Add options
* stopOnError:Boolean
* @type {Object}
*/
module.exports = {
init: function (done) {
// @todo - code item global timeout and delay here
done();
},
triggers: ['beforeItem', 'item', 'beforePrerequest', 'prerequest', 'beforeTest', 'test'],
process: {
/**
* @param {Function=} callback
* @param {Object} payload
* @param {Function} next
* @todo validate payload
*/
item: function (callback, payload, next) {
// adjust for polymorphic instructions
if (!next && _.isFunction(payload) && !_.isFunction(callback)) {
next = payload;
payload = callback;
callback = null;
}
var item = payload.item,
originalRequest = item.request.clone(),
coords = payload.coords,
data = _.isObject(payload.data) ? payload.data : {},
environment = _.isObject(payload.environment) ? payload.environment : {},
globals = _.isObject(payload.globals) ? payload.globals : {},
collectionVariables = _.isObject(payload.collectionVariables) ? payload.collectionVariables : {},
_variables = _.isObject(payload._variables) ? payload._variables : {},
stopOnError = _.has(payload, 'stopOnError') ? payload.stopOnError : this.options.stopOnError,
// @todo: this is mostly coded in event extension and we are
// still not sure whether that is the right place for it to be.
abortOnFailure = this.options.abortOnFailure,
stopOnFailure = this.options.stopOnFailure,
delay = _.get(this.options, 'delay.item'),
ctxTemplate;
// validate minimum parameters required for the command to work
if (!(item && coords)) {
return next(new Error('runtime: item execution is missing required parameters'));
}
// store a common uuid in the coords
coords.ref = uuid.v4();
// here we queue the prerequest script, then make the request, and then execute the test script
this.triggers.beforeItem(null, coords, item);
this.queueDelay(function () {
// create the context object for scripts to run
ctxTemplate = {
collectionVariables: collectionVariables,
_variables: _variables,
globals: globals,
environment: environment,
data: data,
request: item.request
};
// @todo make it less nested by coding Instruction.thenQueue
this.queue('event', {
name: 'prerequest',
item: item,
coords: coords,
context: ctxTemplate,
trackContext: ['globals', 'environment', 'collectionVariables'],
stopOnScriptError: stopOnError,
stopOnFailure: stopOnFailure
}).done(function (prereqExecutions, prereqExecutionError) {
// if stop on error is marked and script executions had an error,
// do not proceed with more commands, instead we bail out
if ((stopOnError || stopOnFailure) && prereqExecutionError) {
this.triggers.item(null, coords, item); // @todo - should this trigger receive error?
return callback && callback.call(this, prereqExecutionError, {
prerequest: prereqExecutions
});
}
// update allowed request mutation properties with the mutated context
// @note from this point forward, make sure this mutated
// request instance is used for upcoming commands.
ALLOWED_REQUEST_MUTATIONS.forEach(function (property) {
if (_.has(ctxTemplate, ['request', property])) {
item.request[property] = ctxTemplate.request[property];
}
// update property's parent reference
if (item.request[property] && typeof item.request[property].setParent === 'function') {
item.request[property].setParent(item.request);
}
});
this.queue('request', {
item: item,
globals: ctxTemplate.globals,
environment: ctxTemplate.environment,
collectionVariables: ctxTemplate.collectionVariables,
_variables: ctxTemplate._variables,
data: ctxTemplate.data,
coords: coords,
source: 'collection'
}).done(function (result, requestError) {
!result && (result = {});
var request = result.request,
response = result.response,
cookies = result.cookies;
if ((stopOnError || stopOnFailure) && requestError) {
this.triggers.item(null, coords, item); // @todo - should this trigger receive error?
return callback && callback.call(this, requestError, {
request: request
});
}
// also the test object requires the updated request object (since auth helpers may modify it)
request && (ctxTemplate.request = request);
// @note convert response instance to plain object.
// we want to avoid calling Response.toJSON() which triggers toJSON on Response.stream buffer.
// Because that increases the size of stringified object by 3 times.
// Also, that increases the total number of tokens (buffer.data) whereas Buffer.toString
// generates a single string that is easier to stringify and sent over the UVM bridge.
response && (ctxTemplate.response = getResponseJSON(response));
// set cookies for this transaction
cookies && (ctxTemplate.cookies = cookies);
// the context template also has a test object to store assertions
ctxTemplate.tests = {}; // @todo remove
this.queue('event', {
name: 'test',
item: item,
coords: coords,
context: ctxTemplate,
trackContext: ['tests', 'globals', 'environment', 'collectionVariables'],
stopOnScriptError: stopOnError,
abortOnFailure: abortOnFailure,
stopOnFailure: stopOnFailure
}).done(function (testExecutions, testExecutionError) {
var visualizerData = extractVisualizerData(prereqExecutions, testExecutions),
visualizerResult;
if (visualizerData) {
visualizer.processTemplate(visualizerData.template,
visualizerData.data,
visualizerData.options,
function (err, processedTemplate) {
visualizerResult = {
// bubble up the errors while processing template through visualizer result
error: err,
// add processed template and data to visualizer result
processedTemplate: processedTemplate,
data: visualizerData.data
};
// trigger an event saying that item has been processed
this.triggers.item(null, coords, item, visualizerResult);
}.bind(this));
}
else {
// trigger an event saying that item has been processed
// @todo - should this trigger receive error?
this.triggers.item(null, coords, item, null);
}
// reset mutated request with original request instance
// @note request mutations are not persisted across iterations
item.request = originalRequest;
callback && callback.call(this, ((stopOnError || stopOnFailure) && testExecutionError) ?
testExecutionError : null, {
prerequest: prereqExecutions,
request: request,
response: response,
test: testExecutions
});
});
});
});
}.bind(this), {
time: delay,
source: 'item',
cursor: coords
}, next);
}
}
};
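
Because only the properties listed in ALLOWED_REQUEST_MUTATIONS are copied back from the script context, a pre-request script can adjust the outgoing request as sketched below (the header name is illustrative); the change is reverted after the item, since the processor above restores the original request at the end.

pm.request.headers.add({
    key: 'X-Trace-Id',                          // illustrative header
    value: pm.variables.replaceIn('{{$guid}}')
});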


@@ -0,0 +1,100 @@
var _ = require('lodash'),
sdk = require('postman-collection'),
createItemContext = require('../create-item-context'),
/**
* Resolve variables in item and auth in context.
*
* @param {ItemContext} context
* @param {Item} [context.item]
* @param {RequestAuth} [context.auth]
* @param {Object} payload
* @param {VariableScope} payload._variables
* @param {Object} payload.data
* @param {VariableScope} payload.environment
* @param {VariableScope} payload.collectionVariables
* @param {VariableScope} payload.globals
*/
resolveVariables = function (context, payload) {
if (!(context.item && context.item.request)) { return; }
// @todo - resolve variables in a more graceful way
var variableDefinitions = [
// extract the variable list from variable scopes
// @note: this is the order of precedence for variable resolution - don't change it
payload._variables.values,
payload.data,
payload.environment.values,
payload.collectionVariables.values,
payload.globals.values
],
urlString = context.item.request.url.toString(),
item,
auth;
// @todo - no need to sync variables when SDK starts supporting resolution from scope directly
// @todo - avoid resolving the entire item as this unnecessarily resolves URL
item = context.item = new sdk.Item(context.item.toObjectResolved(null,
variableDefinitions, {ignoreOwnVariables: true}));
auth = context.auth;
// resolve variables in URL string
if (urlString) {
// @note this adds support for resolving nested variables, as the URL parser doesn't handle them well.
urlString = sdk.Property.replaceSubstitutions(urlString, variableDefinitions);
// Re-parse the URL from the resolved string
item.request.url = new sdk.Url(urlString);
}
// resolve variables in auth
auth && (context.auth = new sdk.RequestAuth(auth.toObjectResolved(null,
variableDefinitions, {ignoreOwnVariables: true})));
};
module.exports = {
init: function (done) {
done();
},
triggers: ['response'],
process: {
request: function (payload, next) {
var abortOnError = _.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError,
// helper function to trigger the `response` callback and complete the command
complete = function (err, nextPayload) {
// nextPayload will be empty for unhandled errors
// trigger `response` callback
// nextPayload.response will be empty for error flows
// the `item` argument is resolved and mutated here
nextPayload && this.triggers.response(err, nextPayload.coords, nextPayload.response,
nextPayload.request, nextPayload.item, nextPayload.cookies, nextPayload.history);
// the error is passed twice to allow control between aborting the error vs just
// bubbling it up
return next(err && abortOnError ? err : null, nextPayload, err);
}.bind(this),
context = createItemContext(payload);
// resolve variables in item and auth
resolveVariables(context, payload);
// add context for use, after resolution
payload.context = context;
// we do not queue `httprequest` instruction here,
// queueing will unblock the item command to prepare for the next `event` instruction
// at this moment request is not fulfilled, and we want to block it
this.immediate('httprequest', payload)
.done(function (nextPayload, err) {
// change signature to error first
complete(err, nextPayload);
})
.catch(complete);
}
}
};
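
A small sketch of the precedence encoded in variableDefinitions above, using the same Property.replaceSubstitutions helper; plain objects are assumed to be accepted the same way the data object is, and the first scope containing a key wins (local variables over environment, and so on).

var sdk = require('postman-collection');

sdk.Property.replaceSubstitutions('{{host}}/health', [
    { host: 'localhost:3000' },       // _variables (highest precedence)
    { host: 'staging.example.com' }   // environment
]); // -> 'localhost:3000/health'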


@@ -0,0 +1,239 @@
var _ = require('lodash'),
Cursor = require('../cursor'),
VariableScope = require('postman-collection').VariableScope,
prepareLookupHash,
extractSNR,
getIterationData;
/**
* Returns a hash of IDs and Names of items in an array
*
* @param {Array} items
* @returns {Object}
*/
prepareLookupHash = function (items) {
var hash = {
ids: {},
names: {},
obj: {}
};
_.forEach(items, function (item, index) {
if (item) {
item.id && (hash.ids[item.id] = index);
item.name && (hash.names[item.name] = index);
}
});
return hash;
};
extractSNR = function (executions, previous) {
var snr = previous || {};
_.isArray(executions) && executions.forEach(function (execution) {
_.has(_.get(execution, 'result.return'), 'nextRequest') && (
(snr.defined = true),
(snr.value = execution.result.return.nextRequest)
);
});
return snr;
};
/**
* Returns the data for the given iteration
*
* @function getIterationData
* @param {Array} data - The data array containing all iterations' data
* @param {Number} iteration - The iteration to get data for
* @return {Any} - The data for the iteration
*/
getIterationData = function (data, iteration) {
// if iteration has a corresponding data element use that
if (iteration < data.length) {
return data[iteration];
}
// otherwise use the last data element
return data[data.length - 1];
};
/**
* Adds options
* disableSNR:Boolean
*
* @type {Object}
*/
module.exports = {
init: function (done) {
var state = this.state;
// ensure that the environment, globals and collectionVariables are in VariableScope instance format
state.environment = VariableScope.isVariableScope(state.environment) ? state.environment :
new VariableScope(state.environment);
state.globals = VariableScope.isVariableScope(state.globals) ? state.globals :
new VariableScope(state.globals);
state.collectionVariables = VariableScope.isVariableScope(state.collectionVariables) ?
state.collectionVariables : new VariableScope(state.collectionVariables);
state._variables = new VariableScope();
// ensure that the items and iteration data set is in place
!_.isArray(state.items) && (state.items = []);
!_.isArray(state.data) && (state.data = []);
!_.isObject(state.data[0]) && (state.data[0] = {});
// if the location in state is already normalised then go ahead and queue iteration, else normalise the
// location
state.cursor = Cursor.box(state.cursor, { // we pass bounds to ensure there is no stale state
cycles: this.options.iterationCount,
length: state.items.length
});
this.waterfall = state.cursor; // copy the location object to instance for quick access
// queue the iteration command on start
this.queue('waterfall', {
coords: this.waterfall.current(),
static: true,
start: true
});
// clear the variable that is supposed to store item name and id lookup hash for easy setNextRequest
this.snrHash = null; // we populate it in the first SNR call
done();
},
triggers: ['beforeIteration', 'iteration'],
process: {
/**
* This processor simply queues scripts and requests in a linear chain.
*
* @param {Object} payload
* @param {Object} payload.coords
* @param {Boolean} [payload.static=false]
* @param {Function} next
*/
waterfall: function (payload, next) {
// we procure the coordinates from which to pick the item and the iteration data
var coords = payload.static ? payload.coords : this.waterfall.whatnext(payload.coords),
item = this.state.items[coords.position],
delay;
// if there is nothing to process, we bail out from here, even before we enter the iteration cycle
if (coords.empty) {
return next();
}
if (payload.stopRunNow) {
this.triggers.iteration(null, payload.coords);
return next();
}
// if it is a beginning of a run, we need to raise events for iteration start
if (payload.start) {
this.triggers.beforeIteration(null, coords);
}
// if this is a new iteration, we close the previous one and start new
if (coords.cr) {
// getting the iteration delay here ensures that delay is only called between two iterations
delay = _.get(this.options, 'delay.iteration', 0);
this.triggers.iteration(null, payload.coords);
this.triggers.beforeIteration(null, coords);
}
// if this is end of waterfall, it is an end of iteration and also end of run
if (coords.eof) {
this.triggers.iteration(null, coords);
return next();
}
this.queueDelay(function () {
this.queue('item', {
item: item,
coords: coords,
data: getIterationData(this.state.data, coords.iteration),
environment: this.state.environment,
globals: this.state.globals,
collectionVariables: this.state.collectionVariables,
_variables: this.state._variables
}, function (executionError, executions) {
var snr = {},
nextCoords,
seekingToStart,
stopRunNow,
stopOnFailure = this.options.stopOnFailure;
if (!executionError) {
// extract set next request
snr = extractSNR(executions.prerequest);
snr = extractSNR(executions.test, snr);
}
if (!this.options.disableSNR && snr.defined) {
// prepare the snr lookup hash if it is not already provided
// @todo - figure out a way to reset this post run complete
!this.snrHash && (this.snrHash = prepareLookupHash(this.state.items));
// if it is null, we do not proceed further and move on
// see if a request is found in the hash and then reset the coords position to the lookup
// value.
(snr.value !== null) && (snr.position = // eslint-disable-next-line no-nested-ternary
this.snrHash[_.has(this.snrHash.ids, snr.value) ? 'ids' :
(_.has(this.snrHash.names, snr.value) ? 'names' : 'obj')][snr.value]);
snr.valid = _.isNumber(snr.position);
}
nextCoords = _.clone(coords);
if (snr.valid) {
// if the position was detected, we set the position to the one previous to the desired location
// this ensures that the next call to .whatnext() will return the desired position.
nextCoords.position = snr.position - 1;
}
else {
// if snr was requested, but not valid, we stop this iteration.
// stopping an iteration is equivalent to seeking the last position of the current
// iteration, so that the next call to .whatnext() will automatically move to the next
// iteration.
(snr.defined || executionError) && (nextCoords.position = nextCoords.length - 1);
// If we need to stop on a run, we set the stop flag to true.
(stopOnFailure && executionError) && (stopRunNow = true);
}
// @todo - do this in unhacky way
if (nextCoords.position === -1) {
nextCoords.position = 0;
seekingToStart = true;
}
this.waterfall.seek(nextCoords.position, nextCoords.iteration, function (err, chngd, coords) {
// this condition should never arise, so better throw error when this happens
if (err) {
throw err;
}
this.queue('waterfall', {
coords: coords,
static: seekingToStart,
stopRunNow: stopRunNow
});
}, this);
});
}.bind(this), {
time: delay,
source: 'iteration',
cursor: coords
}, next);
}
}
};
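The setNextRequest (SNR) resolution above turns a request name or id into a position through a lookup hash built from the linear item list (prepareLookupHash is defined earlier in this extension). Below is a minimal sketch of the shape such a hash could take, inferred from how snrHash is indexed above; buildSnrLookup is an illustrative name, not part of the runner.

// illustrative only: map item ids and names to their position in the linearised run order
function buildSnrLookup (items) {
    var hash = {ids: {}, names: {}, obj: {}};
    items.forEach(function (item, index) {
        hash.ids[item.id] = index;
        hash.names[item.name] = index;
    });
    return hash;
}
// resolving postman.setNextRequest('Get Token') then becomes a constant-time lookup; the waterfall
// seeks to the resolved position minus one so that the next whatnext() lands on the desired item
var lookup = buildSnrLookup([{id: 'a1', name: 'Login'}, {id: 'b2', name: 'Get Token'}]);
console.log(lookup.names['Get Token']); // 1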


@@ -0,0 +1,296 @@
var sdk = require('postman-collection'),
ItemGroup = sdk.ItemGroup,
Item = sdk.Item,
DEFAULT_LOOKUP_STRATEGY = 'idOrName',
INVALID_LOOKUP_STRATEGY_ERROR = 'runtime~extractRunnableItems: Invalid entrypoint lookupStrategy',
/**
* Accumulate all items in order if entry point is a collection/folder.
* If an item is passed, returns an array containing only that item.
*
* @param {ItemGroup|Item} node
*
* @returns {Array<Item>}
*
* @todo: Possibly add mapItem to sdk.ItemGroup?
*/
flattenNode = function (node) {
var items = [];
// bail out
if (!node) { return items; }
if (ItemGroup.isItemGroup(node)) {
node.forEachItem(function (item) { items.push(item); });
}
else if (Item.isItem(node)) {
items.push(node);
}
return items;
},
/**
* Finds an item or item group based on id or name.
*
* @param {ItemGroup} itemGroup
* @param {?String} match
*
* @returns {Item|ItemGroup|undefined}
*/
findItemOrGroup = function (itemGroup, match) {
if (!itemGroup || !itemGroup.items) { return; }
var matched;
// lookup match on own children
itemGroup.items.each(function (itemOrGroup) {
if (itemOrGroup.id === match || itemOrGroup.name === match) {
matched = itemOrGroup;
return false; // exit the loop
}
});
// if there is no match on own children, start lookup on grand children
!matched && itemGroup.items.each(function (itemOrGroup) {
matched = findItemOrGroup(itemOrGroup, match);
if (matched) { return false; } // exit the loop
});
return matched;
},
/**
* Finds items based on multiple ids or names provided.
*
* @param {ItemGroup} itemGroup - Composite list of Item or ItemGroup.
* @param {Object} entrypointSubset - Entry-points reference passed across multiple recursive calls.
* @param {Boolean} _continueAccumulation - Flag used to decide whether to accumulate items or not.
* @param {Object} _accumulatedItems - Found Items or ItemGroups.
* @returns {Object} Found Items or ItemGroups.
*/
findItemsOrGroups = function (itemGroup, entrypointSubset, _continueAccumulation, _accumulatedItems) {
!_accumulatedItems && (_accumulatedItems = {members: [], reference: {}});
if (!itemGroup || !itemGroup.items) { return _accumulatedItems; }
var match;
itemGroup.items.each(function (item) {
// bail out if all entry-points are found.
if (!Object.keys(entrypointSubset).length) { return false; }
// lookup for item.id in entrypointSubset and if not found, lookup by item.name.
if (!(match = entrypointSubset[item.id] && item.id)) {
match = entrypointSubset[item.name] && item.name;
}
if (match) {
// only accumulate items which have not already been tracked via their parent entrypoint.
if (_continueAccumulation) {
_accumulatedItems.members.push(item);
_accumulatedItems.reference[match] = item;
}
// delete looked-up entrypoint.
delete entrypointSubset[match];
}
// recursive call to find nested entry-points, so that every provided entry-point gets tracked.
// the _continueAccumulation flag is `false` for children whose parent entrypoint was already found.
return findItemsOrGroups(item, entrypointSubset, !match, _accumulatedItems);
});
return _accumulatedItems;
},
/**
* Finds an item or group from a path. The path should be an array of ids from the parent chain.
*
* @param {Collection} collection
* @param {Object} options
* @param {String} options.execute
* @param {?Array<String>} [options.path]
* @param {Function} callback
*/
lookupByPath = function (collection, options, callback) {
var lookupPath,
lastMatch = collection,
lookupOptions = options || {},
i,
ii;
// path can be empty, if item/group is at the top level
lookupPath = lookupOptions.path || [];
// push execute id to the path
options.execute && (lookupPath.push(options.execute));
// go down the lookup path
for (i = 0, ii = lookupPath.length; (i < ii) && lastMatch; i++) {
lastMatch = lastMatch.items && lastMatch.items.one(lookupPath[i]);
}
callback && callback(null, flattenNode(lastMatch), lastMatch);
},
/**
* Finds an item or group on a collection with a matching id or name.
*
* @param {Collection} collection
* @param {Object} options
* @param {String} [options.execute]
* @param {Function} callback
*/
lookupByIdOrName = function (collection, options, callback) {
var match = options.execute,
matched;
if (!match) { return callback(null, []); }
// do a recursive lookup
matched = findItemOrGroup(collection, match);
callback(null, flattenNode(matched), matched);
},
/**
* Finds items or item groups in a collection with matching list of ids or names.
*
* @note runnable items follow the order in which the items are defined in the collection
*
* @param {Collection} collection
* @param {Object} options
* @param {Array<String>} [options.execute]
* @param {Function} callback
*/
lookupByMultipleIdOrName = function (collection, options, callback) {
var entrypoints = options.execute,
preserveOrder = options.preserveOrder,
entrypointLookup = {},
runnableItems = [],
items,
i,
ii;
if (!(Array.isArray(entrypoints) && entrypoints.length)) {
return callback(null, []);
}
// add temp reference for faster lookup of entry-point name/id.
// entry-points with same name/id will be ignored.
for (i = 0, ii = entrypoints.length; i < ii; i++) {
entrypointLookup[entrypoints[i]] = true;
}
items = findItemsOrGroups(collection, entrypointLookup, true);
// extract the items and folders in the order in which they appear as folder/request arguments,
// but only when entrypoint.preserveOrder is specified
if (preserveOrder) {
items.members = entrypoints.map(function (ref) {
return items.reference[ref];
});
}
// at this point we should have traversed all items mentioned in the entrypoint and created a linear
// subset of items. if the lookup object still has entries left, the user input referenced items that
// do not exist in the collection, so we bail out.
if (Object.keys(entrypointLookup).length) {
return callback(null, []);
}
// extract runnable items from the searched items.
for (i = 0, ii = items.members.length; i < ii; i++) {
runnableItems = runnableItems.concat(flattenNode(items.members[i]));
}
callback(null, runnableItems, collection);
},
/**
* Finds items or item groups in a collection with matching list of ids or names.
*
* @note runnable items follow the order of entrypoints
*
* @param {Collection} collection
* @param {Object} options
* @param {Array<String>} [options.execute]
* @param {Function} callback
*/
lookupByOrder = function (collection, options, callback) {
var entrypoints = options.execute,
entrypointLookup = {},
runnableItems = [],
items,
i,
ii;
if (!(Array.isArray(entrypoints) && entrypoints.length)) {
return callback(null, []);
}
// add temp reference for faster lookup of entry-point name/id.
// entry-points with same name/id will be ignored.
for (i = 0, ii = entrypoints.length; i < ii; i++) {
entrypointLookup[entrypoints[i]] = true;
}
items = findItemsOrGroups(collection, entrypointLookup, true);
// at this point we should have traversed all items mentioned in the entrypoint and created a linear
// subset of items. if the lookup object still has entries left, the user input referenced items that
// do not exist in the collection, so we bail out.
if (Object.keys(entrypointLookup).length) {
return callback(null, []);
}
// extract runnable items from the searched items.
entrypoints.forEach(function (entrypoint) {
runnableItems = runnableItems.concat(flattenNode(items.reference[entrypoint]));
});
callback(null, runnableItems, collection);
},
lookupStrategyMap = {
path: lookupByPath,
idOrName: lookupByIdOrName,
followOrder: lookupByOrder,
multipleIdOrName: lookupByMultipleIdOrName
},
/**
* Extracts all the items on a collection starting from the entrypoint.
*
* @param {Collection} collection
* @param {?Object} [entrypoint]
* @param {String} [entrypoint.execute] id of item or group to execute (can be name when used with `idOrName`)
* @param {Array<String>} [entrypoint.path] path leading to the item or group selected (only for `path` strategy)
* @param {String} [entrypoint.lookupStrategy=idOrName] strategy to use for entrypoint lookup [idOrName, path]
* @param {Boolean} [entrypoint.preserveOrder] option to preserve the order of folder/items when specified.
* @param {Function} callback
*/
extractRunnableItems = function (collection, entrypoint, callback) {
var lookupFunction,
lookupStrategy;
// if no entrypoint is specified, flatten the entire collection
if (!entrypoint) { return callback(null, flattenNode(collection), collection); }
lookupStrategy = entrypoint.lookupStrategy || DEFAULT_LOOKUP_STRATEGY;
// lookup entry using given strategy
// eslint-disable-next-line no-cond-assign
(lookupFunction = lookupStrategyMap[lookupStrategy]) ?
lookupFunction(collection, entrypoint, callback) :
callback(new Error(INVALID_LOOKUP_STRATEGY_ERROR)); // eslint-disable-line callback-return
};
module.exports = {
extractRunnableItems: extractRunnableItems
};
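A hedged usage sketch of the exported lookup follows, with a postman-collection Collection built inline; the entrypoint options mirror the strategies registered in lookupStrategyMap above, and the folder/request names are made up.

var sdk = require('postman-collection'),
    extractRunnableItems = require('./extract-runnable-items').extractRunnableItems,
    collection = new sdk.Collection({
        item: [{
            name: 'Smoke',
            item: [{name: 'Ping', request: 'https://postman-echo.com/get'}]
        }]
    });
// run only the "Smoke" folder, addressed by name via the default idOrName strategy
extractRunnableItems(collection, {execute: 'Smoke', lookupStrategy: 'idOrName'},
    function (err, runnableItems, entrypoint) {
        // runnableItems is a flat Array<Item>; entrypoint is the matched Item/ItemGroup
        console.log(err, runnableItems.length, entrypoint && entrypoint.name); // null 1 'Smoke'
    });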

node_modules/postman-runtime/lib/runner/index.js generated vendored Normal file

@@ -0,0 +1,136 @@
var _ = require('lodash'),
backpack = require('../backpack'),
Run = require('./run'),
extractRunnableItems = require('./extract-runnable-items').extractRunnableItems,
Runner,
defaultTimeouts = {
global: 3 * 60 * 1000, // 3 minutes
request: Infinity,
script: Infinity
};
/**
* @typedef {runCallback}
* @property {Function} [done]
* @property {Function} [error]
* @property {Function} [success]
*/
/**
* @constructor
*
* @param {Object} [options]
*/
Runner = function PostmanCollectionRunner (options) { // eslint-disable-line func-name-matching
this.options = _.assign({}, options);
};
_.assign(Runner.prototype, {
/**
* Prepares `run` config by combining `runner` config with given run options.
*
* @param {Object} [options]
* @param {Object} [options.timeout]
* @param {Object} [options.timeout.global]
* @param {Object} [options.timeout.request]
* @param {Object} [options.timeout.script]
*/
prepareRunConfig: function (options) {
// combine runner config and make a copy
var runOptions = _.merge(_.omit(options, ['environment', 'globals', 'data']), this.options.run) || {};
// start timeout sanitization
!runOptions.timeout && (runOptions.timeout = {});
_.mergeWith(runOptions.timeout, defaultTimeouts, function (userTimeout, defaultTimeout) {
// non numbers, Infinity and missing values are set to default
if (!_.isFinite(userTimeout)) { return defaultTimeout; }
// 0 and negative numbers are set to Infinity, which only leaves positive numbers
return userTimeout > 0 ? userTimeout : Infinity;
});
return runOptions;
},
/**
* Runs a collection or a folder.
*
* @param {Collection} collection
* @param {Object} [options]
* @param {Array.<Item>} options.items
* @param {Array.<Object>} [options.data]
* @param {Object} [options.globals]
* @param {Object} [options.environment]
* @param {Number} [options.iterationCount]
* @param {CertificateList} [options.certificates]
* @param {ProxyConfigList} [options.proxies]
* @param {Array} [options.data]
* @param {Object} [options.entrypoint]
* @param {String} [options.entrypoint.execute] ID of the item-group to be run.
* Can be Name if `entrypoint.lookupStrategy` is `idOrName`
* @param {String} [options.entrypoint.lookupStrategy=idOrName] strategy to lookup the entrypoint [idOrName, path]
* @param {Array<String>} [options.entrypoint.path] path to lookup
* @param {Object} [options.run] Run-specific options, such as options related to the host
*
* @param {Function} callback
*/
run: function (collection, options, callback) {
var self = this,
runOptions = this.prepareRunConfig(options);
callback = backpack.normalise(callback);
!_.isObject(options) && (options = {});
// @todo make the extract runnables interface better defined and documented
// - give the ownership of error to each strategy lookup functions
// - think about moving these codes into an extension command prior to waterfall
// - the third argument in the callback that returns control is ambiguous and can be removed if the error
// is controlled by each lookup function.
// - the interface can be further broken down to have the "flattenNode" action be made common and not be
// required to be coded in each lookup strategy
//
// serialise the items into a linear array based on the lookup strategy provided as input
extractRunnableItems(collection, options.entrypoint, function (err, runnableItems, entrypoint) {
if (err || !runnableItems) { return callback(new Error('Error fetching run items')); }
// Bail out only if: abortOnError is set and the returned entrypoint is invalid
if (options.abortOnError && !entrypoint) {
// eslint-disable-next-line max-len
return callback(new Error(`Unable to find a folder or request: ${_.get(options, 'entrypoint.execute')}`));
}
// ensure data is an array
!_.isArray(options.data) && (options.data = [{}]);
// get iterationCount from data if not set
if (!runOptions.iterationCount) {
runOptions.iterationCount = options.data.length;
}
return callback(null, (new Run({
items: runnableItems,
data: options.data,
environment: options.environment,
globals: _.has(options, 'globals') ? options.globals : self.options.globals,
// @todo Move to item level to support Item and ItemGroup variables
collectionVariables: collection.variables,
certificates: options.certificates,
proxies: options.proxies
}, runOptions)));
});
}
});
_.assign(Runner, {
/**
* Expose Run instance for testability
*
* @type {Run}
*/
Run: Run
});
module.exports = Runner;
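A minimal end-to-end sketch of driving the runner, required relative to this directory as the rest of the lib does; the collection and the subset of start() callbacks shown are illustrative, and network access is assumed for the actual request.

var sdk = require('postman-collection'),
    Runner = require('./index'),
    collection = new sdk.Collection({
        item: [{name: 'Ping', request: 'https://postman-echo.com/get'}]
    });
new Runner().run(collection, {iterationCount: 1, timeout: {global: 60 * 1000}}, function (err, run) {
    if (err) { return console.error(err); }
    run.start({
        start: function (runError, cursor) { console.log('run started at iteration', cursor.iteration); },
        console: function (cursor, level) { console.log('[script console:' + level + ']'); },
        done: function (runError) { console.log('run finished', runError ? 'with error' : 'cleanly'); }
    });
});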

node_modules/postman-runtime/lib/runner/instruction.js generated vendored Normal file

@@ -0,0 +1,205 @@
/**
* An instruction is a self contained piece of information that can be created and then later be executed. {@link Run}
* instance uses this as the values of the `Run.next` queue.
*
* @module Run~Instructions
*/
var _ = require('lodash'),
Timings = require('./timings'),
arrayProtoSlice = Array.prototype.slice,
arrayProtoUnshift = Array.prototype.unshift,
pool; // function
/**
* Create a new instruction pool
*
* @param {Object.<Function>} processors - hash of all command processor functions
* @returns {InstructionPool}
*/
pool = function (processors) {
!_.isObject(processors) && (processors = {});
/**
* Create a new instruction to be executed later
*
* @constructor
*
* @param {String} name - name of the instruction. this is useful for later lookup of the `processor` function when
* deserialising this object
* @param {Object} [payload] - a **JSON compatible** object that will be forwarded as the 2nd last parameter to the
* processor.
* @param {Array} [args] - all the arguments that needs to be passed to the processor is in this array
* @private
* @example
* var inst = Instruction.create(function (arg1, payload, next) {
* console.log(payload);
* next(null, 'hello-on-execute with ' + arg1);
* }, 'sample-instruction', {
* payloadData1: 'value'
* }, ['one-arg']);
*
* // now, when we do execute, the result will be a console.log of payload and message will be as expected
* inst.execute(function (err, message) {
* console.log(message);
* });
*
*/
var Instruction = function (name, payload, args) {
var processor = processors[name];
if (!_.isString(name) || !_.isFunction(processor)) {
throw new Error('run-instruction: invalid construction');
}
// ensure that payload is an object so that data storage can be done. also ensure arguments is an array
!_.isObject(payload) && (payload = {});
!_.isArray(args) && (args = []);
_.assign(this, /** @lends Instruction.prototype */ {
/**
* @type {String}
*/
action: name,
/**
* @type {Object}
*/
payload: payload,
/**
* @type {Array}
*/
in: args,
/**
* @type {Timings}
*/
timings: Timings.create(),
/**
* @private
* @type {Function}
*/
_processor: processor
});
// record the timing when this instruction was created
this.timings.record('created');
};
/**
* Shortcut to `new Instruction(...);`
*
* @param {Function} processor
* @param {String} name
* @param {Object} [payload]
* @param {Array} [args]
*
* @returns {Instruction}
*/
Instruction.create = function (processor, name, payload, args) {
return new Instruction(processor, name, payload, args);
};
/**
* Store all thenable items
*
* @type {Array}
*/
Instruction._queue = [];
/**
* Executes an instruction with previously saved payload and arguments
*
* @param {Function} callback
* @param {*} [scope]
*
* @todo: use timeback and control it via options sent during pool creation as an option
*/
Instruction.prototype.execute = function (callback, scope) {
!scope && (scope = this);
var params = _.clone(this.in),
sealed = false,
doneAndSpread = function (err) {
if (sealed) {
console.error('__postmanruntime_fatal_debug: instruction.execute callback called twice');
if (err) {
console.error(err);
}
return;
}
sealed = true;
this.timings.record('end');
var args = arrayProtoSlice.call(arguments);
arrayProtoUnshift.call(args, scope);
if (err) { // in case it errored, we do not process any thenables
_.isArray(this._catch) && _.invokeMap(this._catch, _.apply, scope, arguments);
}
else {
// call all the `then` stuff and then the main callback
_.isArray(this._done) && _.invokeMap(this._done, _.apply, scope, _.tail(arguments));
}
setTimeout(callback.bind.apply(callback, args), 0);
}.bind(this);
// add two additional arguments at the end of the arguments saved - i.e. the payload and a function to call the
// callback asynchronously
params.push(this.payload, doneAndSpread);
this.timings.record('start');
// run the processor in a try block to avoid causing stalled runs
try {
this._processor.apply(scope, params);
}
catch (e) {
doneAndSpread(e);
}
};
Instruction.prototype.done = function (callback) {
(this._done || (this._done = [])).push(callback);
return this;
};
Instruction.prototype.catch = function (callback) {
(this._catch || (this._catch = [])).push(callback);
return this;
};
Instruction.clear = function () {
_.forEach(Instruction._queue, function (instruction) {
delete instruction._done;
});
Instruction._queue.length = 0;
};
Instruction.shift = function () {
return Instruction._queue.shift.apply(Instruction._queue, arguments);
};
Instruction.unshift = function () {
return Instruction._queue.unshift.apply(Instruction._queue, arguments);
};
Instruction.push = function () {
return Instruction._queue.push.apply(Instruction._queue, arguments);
};
return Instruction;
};
module.exports = {
pool: pool
};
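A small sketch of the pool in isolation; the 'log' processor and its payload are made up, but the (args..., payload, next) calling convention matches execute above.

var pool = require('./instruction').pool,
    Instruction = pool({
        log: function (prefix, payload, next) {
            // receives the forwarded args first, then the payload, then the done callback
            console.log(prefix, payload.message);
            next(null, payload.message.length);
        }
    }),
    instruction = Instruction.create('log', {message: 'hello'}, ['queued:']);
// timings are recorded around the call and synchronous throws are routed to the callback as errors
instruction.execute(function (err, length) { console.log('executed:', err, length); });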


@@ -0,0 +1,88 @@
var _ = require('lodash'),
createItemContext = require('./create-item-context'),
// total number of replays allowed
MAX_REPLAY_COUNT = 3,
ReplayController;
/**
* Handles replay logic with replayState from context.
* Makes sure request replays do not go into an infinite loop.
*
* @param {ReplayState} replayState
* @param {Run} run
*
* @constructor
*/
ReplayController = function ReplayController (replayState, run) {
// store state
this.count = replayState ? replayState.count : 0;
this.run = run;
};
_.assign(ReplayController.prototype, /** @lends ReplayController.prototype */{
/**
* Sends a request in the item. This takes care of limiting the total number of replays for a request.
*
* @param {Object} context
* @param {Request} item
* @param {Object} desiredPayload a partial payload to use for the replay request
* @param {Function} success this callback is invoked when replay controller sent the request
* @param {Function} failure this callback is invoked when replay controller decided not to send the request
*/
requestReplay: function (context, item, desiredPayload, success, failure) {
// max retries exceeded
if (this.count >= MAX_REPLAY_COUNT) {
return failure(new Error('runtime: maximum intermediate request limit exceeded'));
}
// update replay count state
this.count++;
// update replay state to context
context.replayState = this.getReplayState();
// construct payload for request
var payload = _.defaults({
item: item,
// abortOnError makes sure request command bubbles errors
// so we can pass it on to the callback
abortOnError: true
}, desiredPayload);
// create item context from the new item
payload.context = createItemContext(payload, context);
this.run.immediate('httprequest', payload)
.done(function (response) {
success(null, response);
})
.catch(success);
},
/**
* Returns a serialized version of current ReplayController
*
* @returns {ReplayState}
*/
getReplayState: function () {
/**
* Defines the current replay state of a request.
*
* By replay state, we mean the number of requests sent
* as part of one Collection requests. It can be intermediate requests,
* or replays of the same collection requests.
*
* @typedef {Object} ReplayState
*
* @property {Number} count total number of requests, including Collection requests and replays
*/
return {
count: this.count
};
}
});
module.exports = ReplayController;
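A sketch of the replay guard in isolation; stubRun stands in for a real Run and only implements the immediate('httprequest', ...) thenable used above, and the item and source values are illustrative.

var sdk = require('postman-collection'),
    ReplayController = require('./replay-controller'),
    stubRun = {
        // pretend the intermediate http request resolves straight away with a 200 response
        immediate: function (action, payload) {
            return {
                done: function (onDone) { onDone({code: 200}); return this; },
                catch: function () { return this; }
            };
        }
    },
    controller = new ReplayController(null, stubRun);
controller.requestReplay({}, new sdk.Item({request: 'https://postman-echo.com/get'}), {source: 'basic.auth'},
    function (err, response) { console.log('replay sent, status', response && response.code); },
    function (err) { console.log('replay refused:', err.message); });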


@@ -0,0 +1,61 @@
var _ = require('lodash'),
AuthLoader = require('../authorizer/index').AuthLoader,
createAuthInterface = require('../authorizer/auth-interface'),
DOT_AUTH = '.auth';
module.exports = [
// Post authorization.
function (context, run, done) {
// if no response is provided, there's nothing to do, and probably means that the request errored out
// let the actual request command handle whatever needs to be done.
if (!context.response) { return done(); }
// bail out if there is no auth
if (!(context.auth && context.auth.type)) { return done(); }
var auth = context.auth,
originalAuth = context.originalItem.getAuth(),
originalAuthParams = originalAuth && originalAuth.parameters(),
authHandler = AuthLoader.getHandler(auth.type),
authInterface = createAuthInterface(auth);
// bail out if there is no matching auth handler for the type
if (!authHandler) {
run.triggers.console(context.coords, 'warn', 'runtime: could not find a handler for auth: ' + auth.type);
return done();
}
// invoke `post` on the Auth
authHandler.post(authInterface, context.response, function (err, success) {
// sync all auth system parameters to the original auth
originalAuthParams && auth.parameters().each(function (param) {
param && param.system && originalAuthParams.upsert({key: param.key, value: param.value, system: true});
});
// sync auth state back to item request
_.set(context, 'item.request.auth', auth);
// there was an error in auth post hook
// warn the user but don't bubble it up
if (err) {
run.triggers.console(
context.coords,
'warn',
'runtime~' + auth.type + '.auth: there was an error validating auth: ' + (err.message || err),
err
);
return done();
}
// auth was verified
if (success) { return done(); }
// request a replay of request
done(null, {replay: true, helper: auth.type + DOT_AUTH});
});
}
];


@@ -0,0 +1,383 @@
var _ = require('lodash'),
async = require('async'),
util = require('./util'),
sdk = require('postman-collection'),
createAuthInterface = require('../authorizer/auth-interface'),
AuthLoader = require('../authorizer/index').AuthLoader,
ReplayController = require('./replay-controller'),
DOT_AUTH = '.auth';
module.exports = [
// File loading
function (context, run, done) {
if (!context.item) { return done(new Error('Nothing to resolve files for.')); }
var triggers = run.triggers,
cursor = context.coords,
resolver = run.options.fileResolver,
request = context.item && context.item.request,
mode,
data;
if (!request) { return done(new Error('No request to send.')); }
// if body is disabled then skip loading files.
// @todo this may cause problem if body is enabled/disabled programmatically from pre-request script.
if (request.body && request.body.disabled) { return done(); }
// todo: add helper functions in the sdk to do this cleanly for us
mode = _.get(request, 'body.mode');
data = _.get(request, ['body', mode]);
// if there is no mode specified, or no data for the specified mode we cannot resolve anything!
// @note that if source is not readable, there is no point reading anything, yet we need to warn that file
// upload was not done. hence we have to proceed even when the source is unreadable
if (!data) { // we do not need to check `mode` here since false mode returns no `data`
return done();
}
// in this block, we simply use async.waterfall to ensure that all form of file reading is async. essentially,
// we first determine the data mode and based on it pass the waterfall functions.
async.waterfall([async.constant(data), {
// form data parsing simply "enriches" all form parameters having file data type by replacing / setting the
// value as a read stream
formdata: function (formdata, next) {
// ensure that we only process the file type
async.eachSeries(_.filter(formdata.all(), {type: 'file'}), function (formparam, callback) {
if (!formparam || formparam.disabled) {
return callback(); // disabled params will be filtered in body-builder.
}
var paramIsComposite = Array.isArray(formparam.src),
onLoadError = function (err, disableParam) {
// triggering a warning message for the user
triggers.console(cursor, 'warn',
`Form param \`${formparam.key}\`, file load error: ${err.message || err}`);
// set disabled, it will be filtered in body-builder
disableParam && (formparam.disabled = true);
};
// handle missing file src
if (!formparam.src || (paramIsComposite && !formparam.src.length)) {
onLoadError(new Error('missing file source'), false);
return callback();
}
// handle form param with a single file
// @note we are handling the single file case first so that we do not hit the additional complexity of
// handling multiple files, since the majority use-case is a single file.
if (!paramIsComposite) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, formparam.src, function (err, stream) {
if (err) {
onLoadError(err, true);
}
else {
formparam.value = stream;
}
callback();
});
return;
}
// handle form param with multiple files
// @note we use map-limit here instead of free-form map in order to avoid choking the file system
// with many parallel descriptor access.
async.mapLimit(formparam.src, 10, function (src, next) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, src, function (err, stream) {
if (err) {
// @note don't throw error or disable param if one of the src fails to load
onLoadError(err);
return next(); // swallow the error
}
next(null, {src: src, value: stream});
});
}, function (err, results) {
if (err) {
onLoadError(err, true);
return done();
}
_.forEach(results, function (result) {
// Insert individual param above the current formparam
result && formdata.insert(new sdk.FormParam(_.assign(formparam.toJSON(), result)),
formparam);
});
// remove the current formparam after exploding src
formdata.remove(formparam);
done();
});
}, next);
},
// file data
file: function (filedata, next) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, filedata.src, function (err, stream) {
if (err) {
triggers.console(cursor, 'warn', 'Binary file load error: ' + (err.message || err));
filedata.value = null; // ensure this does not mess with requester
delete filedata.content; // @todo - why content?
}
else {
filedata.content = stream;
}
next();
});
}
}[mode] || async.constant()], function (err) {
// just as a precaution, show the error in the console. each resolver should anyway handle its own
// console warnings.
// @todo - get cursor here.
err && triggers.console(cursor, 'warn', 'file data resolution error: ' + (err.message || err));
done(null); // absorb the error since a console warning has been triggered
});
},
// Authorization
function (context, run, done) {
// validate all stuff. don't ask.
if (!context.item) { return done(new Error('runtime: nothing to authorize.')); }
// bail out if there is no auth
if (!(context.auth && context.auth.type)) { return done(null); }
// get auth handler
var auth = context.auth,
authType = auth.type,
originalAuth = context.originalItem.getAuth(),
originalAuthParams = originalAuth && originalAuth.parameters(),
authHandler = AuthLoader.getHandler(authType),
authPreHook,
authInterface,
authSignHook = function () {
try {
authHandler.sign(authInterface, context.item.request, function (err) {
// handle all types of errors in one place, see catch block
if (err) { throw err; }
done();
});
}
catch (err) {
// handles synchronous and asynchronous errors in auth.sign
run.triggers.console(context.coords,
'warn',
'runtime~' + authType + '.auth: could not sign the request: ' + (err.message || err),
err
);
// swallow the error, we've warned the user
done();
}
};
// bail out if there is no matching auth handler for the type
if (!authHandler) {
run.triggers.console(context.coords, 'warn', 'runtime: could not find a handler for auth: ' + auth.type);
return done();
}
authInterface = createAuthInterface(auth, context.protocolProfileBehavior);
/**
* We go through the `pre` request send validation for the auth. In this step one of three things can happen
*
* If the Auth `pre` hook
* 1. gives a go, we sign the request and proceed to send the request.
* 2. gives a no go, we don't sign the request, but proceed to send the request.
* 3. gives a no go, with an intermediate request,
* a. we suspend current request, send the intermediate request
* b. invoke Auth `init` hook with the response of the intermediate request
* c. invoke Auth `pre` hook, and repeat from 1
*/
authPreHook = function () {
authHandler.pre(authInterface, function (err, success, request) {
// there was an error in pre hook of auth
if (err) {
// warn the user
run.triggers.console(context.coords,
'warn',
'runtime~' + authType + '.auth: could not validate the request: ' + (err.message || err),
err
);
// swallow the error, we've warned the user
return done();
}
// sync all auth system parameters to the original auth
originalAuthParams && auth.parameters().each(function (param) {
param && param.system &&
originalAuthParams.upsert({key: param.key, value: param.value, system: true});
});
// authHandler gave a go, sign the request
if (success) { return authSignHook(); }
// auth gave a no go, but no intermediate request
if (!request) { return done(); }
// prepare for sending intermediate request
var replayController = new ReplayController(context.replayState, run),
item = new sdk.Item({request: request});
// auth handler gave a no go, and an intermediate request.
// make the intermediate request; its response is passed to the `init` hook
replayController.requestReplay(context,
item,
// marks the auth as source for intermediate request
{source: auth.type + DOT_AUTH},
function (err, response) {
// errors for intermediate requests are passed to request callback
// passing it here will add it to original request as well, so don't do it
if (err) { return done(); }
// pass the response to Auth `init` hook
authHandler.init(authInterface, response, function (error) {
if (error) {
// warn about the err
run.triggers.console(context.coords, 'warn', 'runtime~' + authType + '.auth: ' +
'could not initialize auth: ' + (error.message || error), error);
// swallow the error, we've warned the user
return done();
}
// schedule back to pre hook
authPreHook();
});
},
function (err) {
// warn users that maximum retries have exceeded
if (err) {
run.triggers.console(
context.coords, 'warn', 'runtime~' + authType + '.auth: ' + (err.message || err)
);
}
// but don't bubble up the error with the request
done();
}
);
});
};
// start by calling the pre hook of the auth
authPreHook();
},
// Proxy lookup
function (context, run, done) {
var proxies = run.options.proxies,
request = context.item.request,
url;
if (!request) { return done(new Error('No request to resolve proxy for.')); }
url = request.url && request.url.toString();
async.waterfall([
// try resolving custom proxies before falling-back to system proxy
function (cb) {
if (_.isFunction(_.get(proxies, 'resolve'))) {
return cb(null, proxies.resolve(url));
}
return cb(null, undefined);
},
// fallback to system proxy
function (config, cb) {
if (config) {
return cb(null, config);
}
return _.isFunction(run.options.systemProxy) ? run.options.systemProxy(url, cb) : cb(null, undefined);
}
], function (err, config) {
if (err) {
run.triggers.console(context.coords, 'warn', 'proxy lookup error: ' + (err.message || err));
}
config && (request.proxy = sdk.ProxyConfig.isProxyConfig(config) ? config : new sdk.ProxyConfig(config));
return done();
});
},
// Certificate lookup + reading from whichever file resolver is provided
function (context, run, done) {
var request,
pfxPath,
keyPath,
certPath,
fileResolver,
certificate;
// A. Check if we have the file resolver
fileResolver = run.options.fileResolver;
if (!fileResolver) { return done(); } // No point going ahead
// B. Ensure we have the request
request = _.get(context.item, 'request');
if (!request) { return done(new Error('No request to resolve certificates for.')); }
// C. See if any cert should be sent, by performing a URL matching
certificate = run.options.certificates && run.options.certificates.resolveOne(request.url);
if (!certificate) { return done(); }
// D. Fetch the paths
// @todo: check why aren't we reading ca file (why are we not supporting ca file)
pfxPath = _.get(certificate, 'pfx.src');
keyPath = _.get(certificate, 'key.src');
certPath = _.get(certificate, 'cert.src');
// E. Read from the path, and add the values to the certificate, also associate
// the certificate with the current request.
async.mapValues({
pfx: pfxPath,
key: keyPath,
cert: certPath
}, function (value, key, next) {
// bail out if value is not defined
// @todo add test with server which only accepts cert file
if (!value) { return next(); }
// eslint-disable-next-line security/detect-non-literal-fs-filename
fileResolver.readFile(value, function (err, data) {
// Swallow the error after triggering a warning message for the user.
err && run.triggers.console(context.coords, 'warn',
`certificate "${key}" load error: ${(err.message || err)}`);
next(null, data);
});
}, function (err, fileContents) {
if (err) {
// Swallow the error after triggering a warning message for the user.
run.triggers.console(context.coords, 'warn', 'certificate load error: ' + (err.message || err));
return done();
}
if (fileContents) {
!_.isNil(fileContents.pfx) && _.set(certificate, 'pfx.value', fileContents.pfx);
!_.isNil(fileContents.key) && _.set(certificate, 'key.value', fileContents.key);
!_.isNil(fileContents.cert) && _.set(certificate, 'cert.value', fileContents.cert);
(fileContents.cert || fileContents.key || fileContents.pfx) && (request.certificate = certificate);
}
done();
});
}
];
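Both the pre-send helpers above and the post-send helper earlier drive an auth handler through an init/pre/sign/post contract. Below is a minimal sketch of a handler satisfying that contract; the 'noop' name is made up and the AuthLoader.addHandler registration call is an assumption about how handlers are wired into the loader.

var AuthLoader = require('../authorizer/index').AuthLoader,
    NoopAuth = {
        // invoked with the response of an intermediate request (step 3b of the flow documented above)
        init: function (auth, response, done) { done(null); },
        // (err, success, request): success means sign as-is, a request asks for an intermediate request
        pre: function (auth, done) { done(null, true); },
        // mutate the outgoing request, e.g. add an Authorization header
        sign: function (auth, request, done) { done(); },
        // inspect the response; success=false asks the runner to replay the request
        post: function (auth, response, done) { done(null, true); }
    };
// assumed registration hook; after this, getHandler('noop') above would resolve to NoopAuth
AuthLoader.addHandler(NoopAuth, 'noop');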

node_modules/postman-runtime/lib/runner/run.js generated vendored Normal file

@@ -0,0 +1,236 @@
var _ = require('lodash'),
async = require('async'),
backpack = require('../backpack'),
Instruction = require('./instruction'),
Run; // constructor
/**
* The run object is the primary way to interact with a run in progress. It allows controlling the run (pausing,
* starting, etc) and holds references to the helpers, such as requesters and authorizer.
*
* @param state
* @param options
*
* @property {Requester} requester
* @constructor
*/
Run = function PostmanCollectionRun (state, options) { // eslint-disable-line func-name-matching
_.assign(this, /** @lends Run.prototype */ {
/**
* @private
* @type {Object}
* @todo: state also holds the host for now (if any).
*/
state: _.assign({}, state),
/**
* @private
* @type {InstructionPool}
*/
pool: Instruction.pool(Run.commands),
/**
* @private
* @type {Object}
*/
stack: {},
/**
* @private
* @type {Object}
*/
options: options || {}
});
};
_.assign(Run.prototype, {
// eslint-disable-next-line jsdoc/check-param-names
/**
* @param {String} action
* @param {Object} [payload]
* @param {*} [args...]
*/
queue: function (action, payload) {
// extract the arguments that are to be forwarded to the processor
return this._schedule(action, payload, _.slice(arguments, 2), false);
},
// eslint-disable-next-line jsdoc/check-param-names
/**
* @param {String} action
* @param {Object} [payload]
* @param {*} [args...]
*/
interrupt: function (action, payload) {
// extract the arguments that are to be forwarded to the processor
return this._schedule(action, payload, _.slice(arguments, 2), true);
},
// eslint-disable-next-line jsdoc/check-param-names
/**
* Suspends current instruction and executes the given instruction.
*
* This method explicitly chooses not to handle errors, to allow the caller to catch errors and continue execution
* without terminating the instruction queue. However, it is up to the caller to make sure errors are handled,
* or they will go unhandled.
*
* @param {String} action
* @param {Object} payload
* @param {*} [args...]
*/
immediate: function (action, payload) {
var scope = this,
instruction = this.pool.create(action, payload, _.slice(arguments, 2));
// we directly execute this instruction instead of queueing it.
setTimeout(function () {
// we do not have a callback here, hence we send _.noop. we could have made the callback in .execute
// optional, but that would suppress design-time bugs in the majority use-case, so we avoided it.
instruction.execute(_.noop, scope);
}, 0);
return instruction;
},
/**
* @param {Function|Object} callback
*/
start: function (callback) {
// @todo add `when` parameter to backpack.normalise
callback = backpack.normalise(callback, Object.keys(Run.triggers));
// cannot start run if it is already running
if (this.triggers) {
return callback(new Error('run: already running'));
}
var timeback = callback;
if (_.isFinite(_.get(this.options, 'timeout.global'))) {
timeback = backpack.timeback(callback, this.options.timeout.global, this, function () {
this.pool.clear();
});
}
// invoke all the initialiser functions one after another and if it has any error then abort with callback.
async.series(_.map(Run.initialisers, function (initializer) {
return initializer.bind(this);
}.bind(this)), function (err) {
if (err) { return callback(err); }
// save the normalised callbacks as triggers
this.triggers = callback;
this.triggers.start(null, this.state.cursor.current()); // @todo may throw error if cursor absent
this._process(timeback);
}.bind(this));
},
/**
* @private
* @param {Object|Cursor} cursor
* @return {Item}
*/
resolveCursor: function (cursor) {
if (!cursor || !Array.isArray(this.state.items)) { return; }
return this.state.items[cursor.position];
},
/**
* @private
*
* @param {String} action
* @param {Object} [payload]
* @param {Array} [args]
* @param {Boolean} [immediate]
*/
_schedule: function (action, payload, args, immediate) {
var instruction = this.pool.create(action, payload, args);
// based on whether the immediate flag is set, add to the top or bottom of the instruction queue.
(immediate ? this.pool.unshift : this.pool.push)(instruction);
return instruction;
},
_process: function (callback) {
// extract the command from the queue
var instruction = this.pool.shift();
// if there is nothing to process, exit
if (!instruction) {
callback(null, this.state.cursor.current());
return;
}
instruction.execute(function (err) {
return err ? callback(err, this.state.cursor.current()) : this._process(callback); // process recursively
}, this);
}
});
_.assign(Run, {
/**
* Stores all events that runner triggers
*
* @type {Object}
*/
triggers: {
start: true
},
/**
* stores all execution commands
* @enum {Function}
*
* @note commands are loaded by flattening the modules in the `./extensions` directory
*/
commands: {},
/**
* Functions executed with commands on start
* @type {Array}
*/
initialisers: []
});
// commands are loaded by flattening the modules in the `./extensions` directory
Run.commands = _.transform({
'control.command': require('./extensions/control.command'),
'event.command': require('./extensions/event.command'),
'httprequest.command': require('./extensions/http-request.command'),
'request.command': require('./extensions/request.command'),
'waterfall.command': require('./extensions/waterfall.command'),
'item.command': require('./extensions/item.command'),
'delay.command': require('./extensions/delay.command')
}, function (all, extension) {
// extract the prototype from the command interface
_.has(extension, 'prototype') && _.forOwn(extension.prototype, function (value, prop) {
if (Run.prototype.hasOwnProperty(prop)) {
throw new Error('run: duplicate command prototype extension ' + prop);
}
Run.prototype[prop] = value;
});
// put the triggers in a box
_.has(extension, 'triggers') && _.isArray(extension.triggers) && _.forEach(extension.triggers, function (name) {
name && (Run.triggers[name] = true);
});
// we add the processors to the processor list
_.has(extension, 'process') && _.forOwn(extension.process, function (command, name) {
if (!_.isFunction(command)) { return; }
if (all.hasOwnProperty(name)) {
throw new Error('run: duplicate command processor ' + name);
}
// finally add the command function to the accumulator
all[name] = command;
});
// add the initialisation functions
_.has(extension, 'init') && _.isFunction(extension.init) && Run.initialisers.push(extension.init);
});
module.exports = Run;
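The transform above expects each extension module to expose some combination of prototype, triggers, init and process. Below is a minimal sketch of what such a module under ./extensions/ could look like; the 'noop' command and trigger names are illustrative.

module.exports = {
    // merged onto Run.prototype; must not clash with existing members
    prototype: {
        queueNoop: function () { return this.queue('noop', {}); }
    },
    // trigger names collected into Run.triggers and exposed via the normalised start() callbacks
    triggers: ['noop'],
    // invoked once from Run.start, before instruction processing begins
    init: function (done) { done(); },
    // processors keyed by the action name used with queue/interrupt/immediate; `this` is the run
    process: {
        noop: function (payload, next) {
            this.triggers.noop(null, this.state.cursor && this.state.cursor.current());
            next();
        }
    }
};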

node_modules/postman-runtime/lib/runner/timings.js generated vendored Normal file

@@ -0,0 +1,69 @@
/**
* All timing related functions within the runner are maintained in this module. Things like recording time with a
* label, computing elapsed time between two labels, etc. all go in here.
* @module Run~Timer
*/
var /**
* @const
* @type {string}
*/
NUMBER = 'number',
Timings; // constructor
/**
* An instance of a timer can record times with a label associated with it.
*
* @constructor
* @private
* @param {Object.<Number>} records create the timer instance with one or more labels and their timestamp.
*/
Timings = function Timings (records) {
for (var prop in records) {
this[prop] = parseInt(records[prop], 10);
}
};
/**
* Create a new instance of timer. Equivalent to doing new {@link Timer}(records:Object.<Number>);
*
* @param {Object.<Number>} records
* @returns {Timings}
*/
Timings.create = function (records) {
return new Timings(records);
};
/**
* Record the current time with the label specified.
*
* @param {String} label
* @returns {Number}
*
* @example
* var t = new Timings();
* t.record('start');
*
* console.log(t.toObject()); // logs {start: 1246333 }
*/
Timings.prototype.record = function (label) {
return (this[label] = Date.now());
};
/**
* Serialise a timing instance to an Object that can later be used as a source to recreate another timing instance.
*
* @returns {Object.<Number>}
*/
Timings.prototype.toObject = function () {
var obj = {},
prop;
for (prop in this) {
(typeof this[prop] === NUMBER) && (obj[prop] = this[prop]);
}
return obj;
};
module.exports = Timings;
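A small usage sketch; each recorded label simply stores a Date.now() timestamp, so elapsed time is the difference between two labels.

var Timings = require('./timings'),
    t = Timings.create({queued: Date.now()});
t.record('start');
// ... some synchronous work ...
t.record('end');
console.log('waited for', t.start - t.queued, 'ms, ran for', t.end - t.start, 'ms');
console.log(t.toObject()); // {queued: ..., start: ..., end: ...}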

node_modules/postman-runtime/lib/runner/util.js generated vendored Normal file

@@ -0,0 +1,165 @@
var /**
* @const
* @type {string}
*/
FUNCTION = 'function',
/**
* @const
* @type {string}
*/
STRING = 'string',
createReadStream; // function
/**
* Create readable stream for given file as well as detect possible file
* read issues.
*
* @param {Object} resolver - External file resolver module
* @param {String} fileSrc - File path
* @param {Function} callback - Final callback
*
* @note This function is defined at the file's root because it needs to be
* trapped within a closure in order to attach the stream clone functionality.
* This keeps the footprint smaller in case of a memory leak.
*/
createReadStream = function (resolver, fileSrc, callback) {
var readStream;
// check for the existence of the file before creating read stream.
// eslint-disable-next-line security/detect-non-literal-fs-filename
resolver.stat(fileSrc, function (err, stats) {
if (err) {
// overwrite `ENOENT: no such file or directory` error message. Most likely the case.
err.code === 'ENOENT' && (err.message = `"${fileSrc}", no such file`);
return callback(err);
}
// check for a valid file.
if (stats && typeof stats.isFile === FUNCTION && !stats.isFile()) {
return callback(new Error(`"${fileSrc}", is not a file`));
}
// check read permissions for user.
// octal `400` signifies 'user permissions'. [4 0 0] -> [u g o]
// `4` signifies 'read permission'. [4] -> [1 0 0] -> [r w x]
if (stats && !(stats.mode & 0o400)) {
return callback(new Error(`"${fileSrc}", read permission denied`));
}
// @note Handle all the errors before `createReadStream` to avoid listening on stream error event.
// listening on error requires listening on end event as well. which will make this sync.
// @note In form-data mode stream error will be handled in postman-request but bails out ongoing request.
// eslint-disable-next-line security/detect-non-literal-fs-filename
readStream = resolver.createReadStream(fileSrc);
// We might have to read the file before making the actual request
// e.g, while calculating body hash during AWS auth or redirecting form-data params
// So, this method wraps the `createReadStream` function with fixed arguments.
// This makes sure that we don't have to pass `fileResolver` to
// internal modules (like auth plugins) for security reasons.
readStream.cloneReadStream = function (callback) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
return createReadStream(resolver, fileSrc, callback);
};
callback(null, readStream);
});
};
/**
* Utility functions that are required to be re-used throughout the runner
* @module Runner~util
* @private
*
* @note Do not put module logic or business logic related functions here.
* The functions here are purely decoupled and low-level functions.
*/
module.exports = {
/**
* This function allows one to call another function by wrapping it within a try-catch block.
* The first parameter is the function itself, followed by the scope in which this function is to be executed.
* The third parameter onwards are blindly forwarded to the function being called
*
* @param {Function} fn
* @param {*} ctx
*
* @returns {Error} If there was an error executing the function, the error is returned.
* Note that if the function called here is asynchronous, its errors will not be returned (for obvious reasons!)
*/
safeCall: function (fn, ctx) {
// extract the arguments that are to be forwarded to the function to be called
var args = Array.prototype.slice.call(arguments, 2);
try {
(typeof fn === FUNCTION) && fn.apply(ctx || global, args);
}
catch (err) {
return err;
}
},
/**
* Copies attributes from source object to destination object.
*
* @param dest
* @param src
*
* @return {Object}
*/
syncObject: function (dest, src) {
var prop;
// update or add values from src
for (prop in src) {
if (src.hasOwnProperty(prop)) {
dest[prop] = src[prop];
}
}
// remove values that no longer exist
for (prop in dest) {
if (dest.hasOwnProperty(prop) && !src.hasOwnProperty(prop)) {
delete dest[prop];
}
}
return dest;
},
/**
* Create readable stream for given file as well as detect possible file
* read issues. A clone function is also attached to the stream
* so that the stream can be restarted at any time.
*
* @param {Object} resolver - External file resolver module
* @param {Function} resolver.stat - Resolver method to check for existence and permissions of file
* @param {Function} resolver.createReadStream - Resolver method for creating read stream
* @param {String} fileSrc - File path
* @param {Function} callback - Final callback
*
*/
createReadStream: function (resolver, fileSrc, callback) {
// bail out if resolver not found.
if (!resolver) {
return callback(new Error('file resolver not supported'));
}
// bail out if resolver is not supported.
if (typeof resolver.stat !== FUNCTION || typeof resolver.createReadStream !== FUNCTION) {
return callback(new Error('file resolver interface mismatch'));
}
// bail out if file source is invalid or empty string.
if (!fileSrc || typeof fileSrc !== STRING) {
return callback(new Error('invalid or missing file source'));
}
// now that things are sanitized and validated, we transfer it to the
// stream reading utility function that does the heavy lifting of
// calling the resolver to return the stream
return createReadStream(resolver, fileSrc, callback);
}
};
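A usage sketch of the exported createReadStream wrapper, using Node's fs module as the resolver since it provides the stat/createReadStream interface validated above; the file path is illustrative.

var fs = require('fs'),
    util = require('./util');
util.createReadStream(fs, './fixtures/upload.bin', function (err, stream) {
    if (err) { return console.error(err.message); } // e.g. '"./fixtures/upload.bin", no such file'
    // the wrapper attaches cloneReadStream so the body can be re-read, e.g. on a redirect
    stream.cloneReadStream(function (cloneErr, clone) {
        console.log('clone ready:', !cloneErr && typeof clone.pipe === 'function');
    });
});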