feat: Created a mini Node.js server with Newman for testing without the Postman GUI.

This mimics a run in a CI/CD environment or Docker container.
Simon Priet
2021-09-08 14:01:19 +02:00
parent 5fbd7c88fa
commit e69a613a37
5610 changed files with 740417 additions and 3 deletions

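The commit message describes a small Node.js runner that executes collections with Newman instead of the Postman GUI. A minimal sketch of such a runner using Newman's documented programmatic API (the collection and environment file names are assumptions for illustration, not taken from this commit):

var newman = require('newman');

// Run a collection exported from Postman; the 'cli' reporter prints results to stdout,
// which is what a CI/CD job or Docker container would capture in its logs.
newman.run({
    collection: require('./collection.json'), // hypothetical exported collection
    environment: './environment.json',        // hypothetical environment file
    reporters: 'cli'
}, function (err, summary) {
    if (err) { throw err; }
    // A non-zero exit code lets the CI pipeline fail the build on test failures.
    process.exit(summary.run.failures.length ? 1 : 0);
});
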
node_modules/postman-runtime/lib/authorizer/apikey.js generated vendored Normal file

@@ -0,0 +1,119 @@
var _ = require('lodash'),
TARGETS = {
header: 'header',
query: 'query'
};
/**
* This module negotiates the following
*
* auth: {
* key: 'string',
* value: 'string',
* in: 'string~enum header, query',
*
* // @todo implement:
* privateKey: 'string',
* privateValue: 'string'
* }
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'apikey',
version: '0.0.1'
},
updates: [
{
property: '*',
type: 'header'
},
{
property: '*',
type: 'url.param'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done();
},
/**
* Verifies whether the request has required parameters
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
return done(null, Boolean(auth.get('key') || auth.get('value')));
},
/**
* Verifies whether the auth succeeded
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Signs the request
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var target = TARGETS[auth.get('in')] || TARGETS.header,
key = auth.get('key'),
value = auth.get('value'),
lkey = _.lowerCase(key); // needed for header case insensitive matches
// either key or value should be present
if (!(key || value)) {
return done();
}
if (target === TARGETS.header) {
request.headers.remove(function (header) {
return header && (_.lowerCase(header.key) === lkey);
});
request.headers.add({
key: key,
value: value,
system: true
});
}
else if (target === TARGETS.query) {
request.url.query.remove(function (query) {
return query && (query.key === key);
});
request.url.query.add({
key: key,
value: value,
system: true
});
}
return done();
}
};
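
A quick illustration of what the apikey handler's sign() hook above does to a request. This is a rough sketch, assuming the postman-collection SDK and deep requires into postman-runtime internals; the URL, key name and value are placeholders:

var sdk = require('postman-collection'),
    apikeyHandler = require('postman-runtime/lib/authorizer/apikey'),
    createAuthInterface = require('postman-runtime/lib/authorizer/auth-interface');

// Request with apikey auth configured to place the key in a header
var request = new sdk.Request({
    url: 'https://example.com/resource',
    method: 'GET',
    auth: {
        type: 'apikey',
        apikey: [
            { key: 'key', value: 'X-Api-Key' },
            { key: 'value', value: 's3cr3t' },
            { key: 'in', value: 'header' }
        ]
    }
});

// sign() removes any existing header with the same (case-insensitive) key and adds a system header
apikeyHandler.sign(createAuthInterface(request.auth), request, function () {
    console.log(request.getHeaders()); // { 'X-Api-Key': 's3cr3t' }
});

With in: 'query', the same key/value pair is upserted as a URL query parameter instead of a header.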

node_modules/postman-runtime/lib/authorizer/auth-interface.js generated vendored Normal file

@@ -0,0 +1,95 @@
var _ = require('lodash'),
EMPTY = '',
createAuthInterface;
/**
* Creates a wrapper around RequestAuth and provides getters and setters helper functions
*
* @constructs AuthInterface
* @param {RequestAuth} auth
* @param {Object} protocolProfileBehavior - Protocol profile behaviors
* @return {AuthInterface}
* @throws {Error}
*/
createAuthInterface = function (auth, protocolProfileBehavior) {
if (!(auth && auth.parameters && auth.parameters())) {
throw new Error('runtime~createAuthInterface: invalid auth');
}
return /** @lends AuthInterface.prototype **/{
/**
* @private
* @property {protocolProfileBehavior} - Protocol profile behaviors
*/
_protocolProfileBehavior: protocolProfileBehavior || {},
/**
* @param {String|Array<String>} keys
* @return {*} Returns a value for a key or an object having all keys & values depending on the input
* @example
* get('foo') // bar
* get(['foo', 'alpha']) // {foo: 'bar', 'alpha': 'beta'}
*/
get: function (keys) {
var paramVariable;
if (_.isString(keys)) {
paramVariable = auth.parameters().one(keys);
return paramVariable && paramVariable.get();
}
if (_.isArray(keys)) {
return _.transform(keys, function (paramObject, key) {
paramVariable = auth.parameters().one(key);
paramVariable && (paramObject[key] = paramVariable.get());
return paramObject;
}, {});
}
return undefined;
},
/**
* @param {String|Object} key
* @param {*} [value]
* @return {AuthInterface}
* @example
* set('foo', 'bar')
* set({foo: 'bar', 'alpha': 'beta'})
* @throws {Error}
*/
set: function (key, value) {
var modifiedParams = {},
parameters;
if (_.isObject(key)) {
modifiedParams = key;
}
else if (_.isString(key)) {
modifiedParams[key] = value;
}
else {
throw new Error('runtime~AuthInterface: set should be called with `key` as a string or object');
}
parameters = auth.parameters();
_.forEach(modifiedParams, function (value, key) {
var param = parameters.one(key);
if (!param) {
return parameters.add({key: key, value: value, system: true});
}
// Update if the param is a system property or an empty user property (null, undefined or empty string)
if (param.system || param.value === EMPTY || _.isNil(param.value) || _.isNaN(param.value)) {
return param.update({key: key, value: value, system: true});
}
});
return this;
}
};
};
module.exports = createAuthInterface;
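
A short usage sketch of the get/set semantics above (assuming the postman-collection SDK and a deep require into postman-runtime internals; credential values are placeholders):

var sdk = require('postman-collection'),
    createAuthInterface = require('postman-runtime/lib/authorizer/auth-interface');

var auth = new sdk.RequestAuth({
        type: 'basic',
        basic: [{ key: 'username', value: 'postman' }]
    }),
    authInterface = createAuthInterface(auth);

authInterface.get('username');               // 'postman'
authInterface.get(['username', 'password']); // { username: 'postman' } -- missing keys are omitted
authInterface.set('password', 'secret');     // parameter does not exist yet, added as a system parameter
authInterface.set({ username: 'other' });    // ignored: a non-empty user-provided value is never overwritten
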

node_modules/postman-runtime/lib/authorizer/aws4.js generated vendored Normal file

@@ -0,0 +1,303 @@
var _ = require('lodash'),
aws4 = require('aws4'),
crypto = require('crypto'),
sdk = require('postman-collection'),
urlEncoder = require('postman-url-encoder'),
bodyBuilder = require('../requester/core-body-builder'),
RequestBody = sdk.RequestBody,
X_AMZ_PREFIX = 'X-Amz-',
BODY_HASH_HEADER = 'X-Amz-Content-Sha256',
/**
* Calculates body hash with given algorithm and digestEncoding.
*
* @todo This function can also be used in Digest auth so that it works correctly for urlencoded and file body types
*
* @param {RequestBody} body
* @param {String} algorithm
* @param {String} digestEncoding
* @param {Function} callback
*/
computeBodyHash = function (body, algorithm, digestEncoding, callback) {
if (!(body && algorithm && digestEncoding) || body.isEmpty()) { return callback(); }
var hash = crypto.createHash(algorithm),
originalReadStream,
rawBody,
urlencodedBody,
graphqlBody;
if (body.mode === RequestBody.MODES.raw) {
rawBody = bodyBuilder.raw(body.raw).body;
hash.update(rawBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.urlencoded) {
urlencodedBody = bodyBuilder.urlencoded(body.urlencoded).form;
urlencodedBody = urlEncoder.encodeQueryString(urlencodedBody);
hash.update(urlencodedBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.file) {
originalReadStream = _.get(body, 'file.content');
if (!originalReadStream) {
return callback();
}
return originalReadStream.cloneReadStream(function (err, clonedStream) {
if (err) { return callback(); }
clonedStream.on('data', function (chunk) {
hash.update(chunk);
});
clonedStream.on('end', function () {
callback(hash.digest(digestEncoding));
});
});
}
if (body.mode === RequestBody.MODES.graphql) {
graphqlBody = bodyBuilder.graphql(body.graphql).body;
hash.update(graphqlBody);
return callback(hash.digest(digestEncoding));
}
// @todo: formdata body type requires adding new data to form instead of setting headers for AWS auth.
// Figure out how to do that. See below link:
// AWS auth with formdata: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-post-example.html
// ensure that callback is called if body.mode doesn't match with any of the above modes
return callback();
};
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'awsv4',
version: '1.0.0'
},
updates: [
{
property: 'Host',
type: 'header'
},
{
property: 'Authorization',
type: 'header'
},
{
property: 'X-Amz-Date',
type: 'header'
},
{
property: 'X-Amz-Security-Token',
type: 'header'
},
{
property: 'X-Amz-Content-Sha256',
type: 'header'
},
{
property: 'X-Amz-Security-Token',
type: 'url.param'
},
{
property: 'X-Amz-Expires',
type: 'url.param'
},
{
property: 'X-Amz-Date',
type: 'url.param'
},
{
property: 'X-Amz-Algorithm',
type: 'url.param'
},
{
property: 'X-Amz-Credential',
type: 'url.param'
},
{
property: 'X-Amz-SignedHeaders',
type: 'url.param'
},
{
property: 'X-Amz-Signature',
type: 'url.param'
}
]
},
/**
* Initializes an item (fetches all required parameters, etc.) before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Checks the item, and fetches any parameters that are not already provided.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
done(null, true);
},
/**
* Verifies whether the request was successful after being sent.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Generates the signature and adds auth data to the request as additional headers/query params.
* AWS v4 auth mandates that a content type header be present in each request.
*
* @param {Request} request request to add auth data
* @param {Object} params data required for auth
* @param {Object} params.credentials Should contain the AWS credentials, "accessKeyId" and "secretAccessKey"
* @param {String} params.host Contains the host name for the request
* @param {String} params.path Contains the complete path, with query string as well, e.g: /something/kane?hi=ho
* @param {String} params.service The name of the AWS service
* @param {String} params.region AWS region
* @param {String} params.method Request method
* @param {String} params.body Stringified request body
* @param {Object} params.headers Each key should be a header key, and the value should be a header value
* @param {Boolean} params.signQuery Add auth data to query params if true, otherwise add it to headers
*/
addAuthDataToRequest: function (request, params) {
var signedData = aws4.sign(params, params.credentials);
if (params.signQuery) {
_.forEach(sdk.Url.parse(signedData.path).query, function (param) {
// only add additional AWS specific params to request
if (_.startsWith(param.key, X_AMZ_PREFIX) && !request.url.query.has(param.key)) {
param.system = true;
request.url.query.add(param);
}
});
}
_.forEach(signedData.headers, function (value, key) {
request.upsertHeader({
key: key,
value: value,
system: true
});
});
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var self = this,
params = auth.get([
'accessKey',
'secretKey',
'sessionToken',
'service',
'region',
'addAuthDataToQuery'
]),
url = urlEncoder.toNodeUrl(request.url),
dataToSign;
// Clean up the request (if needed)
request.removeHeader('Authorization', {ignoreCase: true});
request.removeHeader('X-Amz-Date', {ignoreCase: true});
request.removeHeader('X-Amz-Security-Token', {ignoreCase: true});
request.removeHeader('X-Amz-Content-Sha256', {ignoreCase: true});
// Not removing `X-Amz-Expires` from params here, allowing the user to override the
// default value
request.removeQueryParams([
'X-Amz-Security-Token',
'X-Amz-Date',
'X-Amz-Algorithm',
'X-Amz-Credential',
'X-Amz-SignedHeaders',
'X-Amz-Signature'
]);
dataToSign = {
credentials: {
accessKeyId: params.accessKey,
secretAccessKey: params.secretKey,
sessionToken: params.sessionToken || undefined
},
host: url.host,
path: url.path, // path = pathname + query
service: params.service || 'execute-api', // AWS API Gateway is the default service.
region: params.region || 'us-east-1',
method: request.method,
body: undefined, // no need to give body since we are setting 'X-Amz-Content-Sha256' header
headers: _.transform(request.getHeaders({enabled: true}), function (accumulator, value, key) {
accumulator[key] = value;
}, {}),
signQuery: params.addAuthDataToQuery
};
// Removed the code that added a content-type header when the request did not have one, because
// aws4 does not require a content-type header. It is only mandatory to include the content-type header in the
// signature calculation if it is present in the request.
// Refer: https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html#canonical-request
// body hash is not required when adding auth data to query params
// @see: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
if (params.addAuthDataToQuery) {
self.addAuthDataToRequest(request, dataToSign);
return done();
}
// The aws4 module can't calculate the body hash for a body backed by a ReadStream,
// so we calculate it ourselves and set the 'X-Amz-Content-Sha256' header, which will be used by the aws4
// module to calculate the signature.
computeBodyHash(request.body, 'sha256', 'hex', function (bodyHash) {
if (bodyHash) {
request.upsertHeader({
key: BODY_HASH_HEADER,
value: bodyHash,
system: true
});
dataToSign.headers[BODY_HASH_HEADER] = bodyHash;
}
self.addAuthDataToRequest(request, dataToSign);
return done();
});
}
};
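
For reference, the shape of the object this handler builds (dataToSign above) and hands to the aws4 module. A minimal standalone sketch of that call; the host, path and credentials below are placeholders:

var aws4 = require('aws4');

var signed = aws4.sign({
    host: 'abc123.execute-api.us-east-1.amazonaws.com',
    path: '/prod/items?limit=10',   // pathname + query string
    service: 'execute-api',         // default service used by the handler above
    region: 'us-east-1',
    method: 'GET',
    headers: {}                     // enabled request headers go here
}, {
    accessKeyId: 'AKIA-EXAMPLE',    // placeholder
    secretAccessKey: 'example'      // placeholder
});

// aws4 adds Host, X-Amz-Date and Authorization headers, which the handler
// then upserts onto the outgoing request (see addAuthDataToRequest above).
console.log(Object.keys(signed.headers)); // e.g. [ 'Host', 'X-Amz-Date', 'Authorization' ]
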

node_modules/postman-runtime/lib/authorizer/basic.js generated vendored Normal file

@@ -0,0 +1,77 @@
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'basic',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Verifies whether the request has valid basic auth credentials (which is always).
* Sanitizes the auth parameters if needed.
*
* @todo - add support for prompting a user for basic auth credentials if not already provided
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
done(null, true);
},
/**
* Verifies whether the basic auth succeeded.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var username = auth.get('username') || '',
password = auth.get('password') || '';
request.removeHeader('Authorization', {ignoreCase: true});
request.addHeader({
key: 'Authorization',
value: 'Basic ' + Buffer.from(`${username}:${password}`, 'utf8').toString('base64'),
system: true
});
return done();
}
};
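
The Authorization value produced by sign() above is simply base64('username:password') with a 'Basic ' prefix; a worked example with placeholder credentials:

var value = 'Basic ' + Buffer.from('user:pass', 'utf8').toString('base64');
console.log(value); // Basic dXNlcjpwYXNz
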

node_modules/postman-runtime/lib/authorizer/bearer.js generated vendored Normal file

@@ -0,0 +1,83 @@
var BEARER_AUTH_PREFIX = 'Bearer ';
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'bearer',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done();
},
/**
* Verifies whether the request has required parameters
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
return done(null, Boolean(auth.get('token')));
},
/**
* Verifies whether the auth succeeded
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Signs the request
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var token = auth.get('token');
if (!token) {
return done(); // Nothing to do if required parameters are not present.
}
// @TODO Should we support adding to query params and/or body also?
// According to the RFC#6750 they are supported but not recommended!
request.removeHeader('Authorization', {ignoreCase: true});
request.addHeader({
key: 'Authorization',
value: BEARER_AUTH_PREFIX + token,
system: true
});
return done();
}
};

node_modules/postman-runtime/lib/authorizer/digest.js generated vendored Normal file

@@ -0,0 +1,498 @@
var _ = require('lodash'),
crypto = require('crypto'),
urlEncoder = require('postman-url-encoder'),
RequestBody = require('postman-collection').RequestBody,
bodyBuilder = require('../requester/core-body-builder'),
EMPTY = '',
ONE = '00000001',
DISABLE_RETRY_REQUEST = 'disableRetryRequest',
WWW_AUTHENTICATE = 'www-authenticate',
DIGEST_PREFIX = 'Digest ',
QOP = 'qop',
AUTH = 'auth',
COLON = ':',
QUOTE = '"',
SESS = '-sess',
AUTH_INT = 'auth-int',
AUTHORIZATION = 'Authorization',
MD5_SESS = 'MD5-sess',
ASCII_SOURCE = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
ASCII_SOURCE_LENGTH = ASCII_SOURCE.length,
USERNAME_EQUALS_QUOTE = 'username="',
REALM_EQUALS_QUOTE = 'realm="',
NONCE_EQUALS_QUOTE = 'nonce="',
URI_EQUALS_QUOTE = 'uri="',
ALGORITHM_EQUALS_QUOTE = 'algorithm="',
CNONCE_EQUALS_QUOTE = 'cnonce="',
RESPONSE_EQUALS_QUOTE = 'response="',
OPAQUE_EQUALS_QUOTE = 'opaque="',
QOP_EQUALS = 'qop=',
NC_EQUALS = 'nc=',
ALGO = {
MD5: 'MD5',
MD5_SESS: 'MD5-sess',
SHA_256: 'SHA-256',
SHA_256_SESS: 'SHA-256-sess',
SHA_512_256: 'SHA-512-256',
SHA_512_256_SESS: 'SHA-512-256-sess'
},
AUTH_PARAMETERS = [
'algorithm',
'username',
'realm',
'password',
'method',
'nonce',
'nonceCount',
'clientNonce',
'opaque',
'qop',
'uri'
],
nonceRegex = /nonce="([^"]*)"/,
realmRegex = /realm="([^"]*)"/,
qopRegex = /qop="([^"]*)"/,
opaqueRegex = /opaque="([^"]*)"/,
_extractField,
SHA512_256,
nodeCrypto;
// Current Electron version(7.2.3) in Postman app uses OpenSSL 1.1.0
// which don't support `SHA-512-256`. Use external `js-sha512` module
// to handle this case.
if (!_.includes(crypto.getHashes(), 'sha512-256')) {
SHA512_256 = require('js-sha512').sha512_256;
nodeCrypto = crypto;
// create a wrapper class with similar interface to Node's crypto and use jsSHA
// to support SHA512-256 algorithm
crypto = function () {
this._hash = SHA512_256.create();
};
_.assign(crypto.prototype, {
update: function (data) {
this._hash.update(data);
return this;
},
digest: function () {
// we only need 'hex' digest for this auth
return this._hash.hex();
}
});
_.assign(crypto, {
createHash: function (hashAlgo) {
// return hash from js-sha for SHA512-256
if (hashAlgo === 'sha512-256') {
return new crypto();
}
// return Node's hash otherwise
return nodeCrypto.createHash(hashAlgo);
}
});
}
/**
* Generates a random string of given length
*
* @todo Move this to util.js. After moving use that for hawk auth too
* @param {Number} length
*/
function randomString (length) {
length = length || 6;
var result = [],
i;
for (i = 0; i < length; i++) {
result[i] = ASCII_SOURCE[(Math.random() * ASCII_SOURCE_LENGTH) | 0];
}
return result.join(EMPTY);
}
/**
* Extracts a Digest Auth field from a WWW-Authenticate header value using a given regexp.
*
* @param {String} string
* @param {RegExp} regexp
* @private
*/
_extractField = function (string, regexp) {
var match = string.match(regexp);
return match ? match[1] : EMPTY;
};
/**
* Returns the 'www-authenticate' header for Digest auth. Since a server can support more than one auth-scheme,
* there can be more than one header with the same key, so we need to loop over and check each one.
*
* @param {VariableList} headers
* @private
*/
function _getDigestAuthHeader (headers) {
return headers.find(function (property) {
return (property.key.toLowerCase() === WWW_AUTHENTICATE) && (_.startsWith(property.value, DIGEST_PREFIX));
});
}
/**
* Returns hex encoded hash of given data using given algorithm.
*
* @param {String} data string to calculate hash
* @param {String} algorithm hash algorithm
* @returns {String} hex encoded hash of given data
*/
function getHash (data, algorithm) {
return crypto.createHash(algorithm).update(data || EMPTY).digest('hex');
}
/**
* Calculates body hash with given algorithm and digestEncoding.
*
* @param {RequestBody} body Request body
* @param {String} algorithm Hash algorithm to use
* @param {String} digestEncoding Encoding of the hash
* @param {Function} callback Callback function that will be called with body hash
*/
function computeBodyHash (body, algorithm, digestEncoding, callback) {
if (!(algorithm && digestEncoding)) { return callback(); }
var hash = crypto.createHash(algorithm),
originalReadStream,
rawBody,
graphqlBody,
urlencodedBody;
// if body is not available, return hash of empty string
if (!body || body.isEmpty()) {
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.raw) {
rawBody = bodyBuilder.raw(body.raw).body;
hash.update(rawBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.urlencoded) {
urlencodedBody = bodyBuilder.urlencoded(body.urlencoded).form;
urlencodedBody = urlEncoder.encodeQueryString(urlencodedBody);
hash.update(urlencodedBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.file) {
originalReadStream = _.get(body, 'file.content');
if (!originalReadStream) {
return callback();
}
return originalReadStream.cloneReadStream(function (err, clonedStream) {
if (err) { return callback(); }
clonedStream.on('data', function (chunk) {
hash.update(chunk);
});
clonedStream.on('end', function () {
callback(hash.digest(digestEncoding));
});
});
}
if (body.mode === RequestBody.MODES.graphql) {
graphqlBody = bodyBuilder.graphql(body.graphql).body;
hash.update(graphqlBody);
return callback(hash.digest(digestEncoding));
}
// @todo: Figure out a way to calculate hash for formdata body type.
// ensure that callback is called if body.mode doesn't match with any of the above modes
return callback();
}
/**
* All the auth definition parameters excluding username and password should be stored and reused.
* @todo The current implementation would fail for the case when two requests to two different hosts inherit the same
* auth. In that case a retry would not be attempted for the second request (since all the parameters would be present
* in the auth definition, though invalid).
*
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'digest',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
},
{
property: 'nonce',
type: 'auth'
},
{
property: 'realm',
type: 'auth'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Checks whether the given item has all the required parameters in its request.
* Sanitizes the auth parameters if needed.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
// ensure that all dynamic parameter values are present in the parameters
// if even one is absent, we return false.
done(null, Boolean(auth.get('nonce') && auth.get('realm')));
},
/**
* Verifies whether the request was successfully authorized after being sent.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
if (auth.get(DISABLE_RETRY_REQUEST) || !response) {
return done(null, true);
}
var code,
realm,
nonce,
qop,
opaque,
authHeader,
authParams = {};
code = response.code;
authHeader = _getDigestAuthHeader(response.headers);
// If code is forbidden or unauthorized, and an auth header exists,
// we can extract the realm & the nonce, and replay the request.
// todo: add response.is4XX, response.is5XX, etc in the SDK.
if ((code === 401 || code === 403) && authHeader) {
nonce = _extractField(authHeader.value, nonceRegex);
realm = _extractField(authHeader.value, realmRegex);
qop = _extractField(authHeader.value, qopRegex);
opaque = _extractField(authHeader.value, opaqueRegex);
authParams.nonce = nonce;
authParams.realm = realm;
opaque && (authParams.opaque = opaque);
qop && (authParams.qop = qop);
if (authParams.qop || auth.get(QOP)) {
authParams.clientNonce = randomString(8);
authParams.nonceCount = ONE;
}
// if all the auth parameters sent by server were already present in auth definition then we do not retry
if (_.every(authParams, function (value, key) { return auth.get(key); })) {
return done(null, true);
}
auth.set(authParams);
return done(null, false);
}
done(null, true);
},
/**
* Computes the Digest Authentication header from the given parameters.
*
* @param {Object} params
* @param {String} params.algorithm
* @param {String} params.username
* @param {String} params.realm
* @param {String} params.password
* @param {String} params.method
* @param {String} params.nonce
* @param {String} params.nonceCount
* @param {String} params.clientNonce
* @param {String} params.opaque
* @param {String} params.qop
* @param {String} params.uri
* @returns {String}
*/
computeHeader: function (params) {
var algorithm = params.algorithm,
hashAlgo = params.hashAlgo,
username = params.username,
realm = params.realm,
password = params.password,
method = params.method,
nonce = params.nonce,
nonceCount = params.nonceCount,
clientNonce = params.clientNonce,
opaque = params.opaque,
qop = params.qop,
uri = params.uri,
// RFC defined terms, http://tools.ietf.org/html/rfc2617#section-3
A0,
A1,
A2,
hashA1,
hashA2,
reqDigest,
headerParams;
if (_.endsWith(algorithm, SESS)) {
A0 = getHash(username + COLON + realm + COLON + password, hashAlgo);
A1 = A0 + COLON + nonce + COLON + clientNonce;
}
else {
A1 = username + COLON + realm + COLON + password;
}
if (qop === AUTH_INT) {
A2 = method + COLON + uri + COLON + params.bodyhash;
}
else {
A2 = method + COLON + uri;
}
hashA1 = getHash(A1, hashAlgo);
hashA2 = getHash(A2, hashAlgo);
if (qop === AUTH || qop === AUTH_INT) {
reqDigest = getHash([hashA1, nonce, nonceCount, clientNonce, qop, hashA2].join(COLON), hashAlgo);
}
else {
reqDigest = getHash([hashA1, nonce, hashA2].join(COLON), hashAlgo);
}
headerParams = [USERNAME_EQUALS_QUOTE + username + QUOTE,
REALM_EQUALS_QUOTE + realm + QUOTE,
NONCE_EQUALS_QUOTE + nonce + QUOTE,
URI_EQUALS_QUOTE + uri + QUOTE
];
algorithm && headerParams.push(ALGORITHM_EQUALS_QUOTE + algorithm + QUOTE);
if (qop === AUTH || qop === AUTH_INT) {
headerParams.push(QOP_EQUALS + qop);
}
if (qop === AUTH || qop === AUTH_INT || algorithm === MD5_SESS) {
nonceCount && headerParams.push(NC_EQUALS + nonceCount);
headerParams.push(CNONCE_EQUALS_QUOTE + clientNonce + QUOTE);
}
headerParams.push(RESPONSE_EQUALS_QUOTE + reqDigest + QUOTE);
opaque && headerParams.push(OPAQUE_EQUALS_QUOTE + opaque + QUOTE);
return DIGEST_PREFIX + headerParams.join(', ');
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var self = this,
params = auth.get(AUTH_PARAMETERS),
url = urlEncoder.toNodeUrl(request.url),
header;
if (!params.username || !params.realm) {
return done(); // Nothing to do if required parameters are not present.
}
request.removeHeader(AUTHORIZATION, {ignoreCase: true});
params.method = request.method;
params.uri = url.path;
switch (params.algorithm) {
case ALGO.SHA_256:
case ALGO.SHA_256_SESS:
params.hashAlgo = 'sha256';
break;
case ALGO.MD5:
case ALGO.MD5_SESS:
case EMPTY:
case undefined:
case null:
params.algorithm = params.algorithm || ALGO.MD5;
params.hashAlgo = 'md5';
break;
case ALGO.SHA_512_256:
case ALGO.SHA_512_256_SESS:
params.hashAlgo = 'sha512-256';
break;
default:
return done(new Error(`Unsupported digest algorithm: ${params.algorithm}`));
}
// calculate body hash for qop='auth-int'
if (params.qop === AUTH_INT) {
return computeBodyHash(request.body, params.hashAlgo, 'hex', function (bodyhash) {
params.bodyhash = bodyhash;
header = self.computeHeader(params);
request.addHeader({
key: AUTHORIZATION,
value: header,
system: true
});
return done();
});
}
header = self.computeHeader(params);
request.addHeader({
key: AUTHORIZATION,
value: header,
system: true
});
return done();
}
};
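
A worked sketch of the calculation computeHeader performs for algorithm=MD5, qop=auth, using the well-known RFC 2617 example values (not taken from this commit):

var crypto = require('crypto');

function md5 (str) { return crypto.createHash('md5').update(str).digest('hex'); }

var username = 'Mufasa', realm = 'testrealm@host.com', password = 'Circle Of Life',
    method = 'GET', uri = '/dir/index.html',
    nonce = 'dcd98b7102dd2f0e8b11d0f600bfb0c093', nc = '00000001', cnonce = '0a4f113b',
    HA1 = md5(username + ':' + realm + ':' + password),          // A1 for non '-sess' algorithms
    HA2 = md5(method + ':' + uri),                               // A2 for qop=auth
    response = md5([HA1, nonce, nc, cnonce, 'auth', HA2].join(':'));

// This is the value placed in response="..." of the Digest header;
// with the RFC 2617 example values it is 6629fae49393a05397450978507c4ef1.
console.log(response);
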

node_modules/postman-runtime/lib/authorizer/edgegrid.js generated vendored Normal file

@@ -0,0 +1,316 @@
/**
* @fileOverview
*
* Implements the EdgeGrid authentication method.
* Specification document: https://developer.akamai.com/legacy/introduction/Client_Auth.html
* Sample implementation by Akamai: https://github.com/akamai/AkamaiOPEN-edgegrid-node
*/
var _ = require('lodash'),
uuid = require('uuid/v4'),
crypto = require('crypto'),
sdk = require('postman-collection'),
RequestBody = sdk.RequestBody,
urlEncoder = require('postman-url-encoder'),
bodyBuilder = require('../requester/core-body-builder'),
EMPTY = '',
COLON = ':',
UTC_OFFSET = '+0000',
ZERO = '0',
DATE_TIME_SEPARATOR = 'T',
TAB = '\t',
SPACE = ' ',
SLASH = '/',
STRING = 'string',
SIGNING_ALGORITHM = 'EG1-HMAC-SHA256 ',
AUTHORIZATION = 'Authorization',
/**
* Returns current timestamp in the format described in EdgeGrid specification (yyyyMMddTHH:mm:ss+0000)
*
* @returns {String} UTC timestamp in format yyyyMMddTHH:mm:ss+0000
*/
getTimestamp = function () {
var date = new Date();
return date.getUTCFullYear() +
_.padStart(date.getUTCMonth() + 1, 2, ZERO) +
_.padStart(date.getUTCDate(), 2, ZERO) +
DATE_TIME_SEPARATOR +
_.padStart(date.getUTCHours(), 2, ZERO) +
COLON +
_.padStart(date.getUTCMinutes(), 2, ZERO) +
COLON +
_.padStart(date.getUTCSeconds(), 2, ZERO) +
UTC_OFFSET;
},
/**
* Creates a String containing a tab delimited set of headers.
*
* @param {String[]} headersToSign Headers to include in signature
* @param {Object} headers Request headers
* @returns {String} Canonicalized headers
*/
canonicalizeHeaders = function (headersToSign, headers) {
var formattedHeaders = [],
headerValue;
headersToSign.forEach(function (headerName) {
if (typeof headerName !== STRING) { return; }
// trim the header name to remove extra spaces from user input
headerName = headerName.trim().toLowerCase();
headerValue = headers[headerName];
// should not include empty headers as per the specification
if (typeof headerValue !== STRING || headerValue === EMPTY) { return; }
formattedHeaders.push(`${headerName}:${headerValue.trim().replace(/\s+/g, SPACE)}`);
});
return formattedHeaders.join(TAB);
},
/**
* Returns base64 encoding of the SHA256 HMAC of given data signed with given key
*
* @param {String} data Data to sign
* @param {String} key Key to use while signing the data
* @returns {String} Base64 encoded signature
*/
base64HmacSha256 = function (data, key) {
var encrypt = crypto.createHmac('sha256', key);
encrypt.update(data);
return encrypt.digest('base64');
},
/**
* Calculates body hash with given algorithm and digestEncoding.
*
* @param {RequestBody} body Request body
* @param {String} algorithm Hash algorithm to use
* @param {String} digestEncoding Encoding of the hash
* @param {Function} callback Callback function that will be called with body hash
*/
computeBodyHash = function (body, algorithm, digestEncoding, callback) {
if (!(body && algorithm && digestEncoding) || body.isEmpty()) { return callback(); }
var hash = crypto.createHash(algorithm),
originalReadStream,
rawBody,
urlencodedBody,
graphqlBody;
if (body.mode === RequestBody.MODES.raw) {
rawBody = bodyBuilder.raw(body.raw).body;
hash.update(rawBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.urlencoded) {
urlencodedBody = bodyBuilder.urlencoded(body.urlencoded).form;
urlencodedBody = urlEncoder.encodeQueryString(urlencodedBody);
hash.update(urlencodedBody);
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.file) {
originalReadStream = _.get(body, 'file.content');
if (!originalReadStream) {
return callback();
}
return originalReadStream.cloneReadStream(function (err, clonedStream) {
if (err) { return callback(); }
clonedStream.on('data', function (chunk) {
hash.update(chunk);
});
clonedStream.on('end', function () {
callback(hash.digest(digestEncoding));
});
});
}
if (body.mode === RequestBody.MODES.graphql) {
graphqlBody = bodyBuilder.graphql(body.graphql).body;
hash.update(graphqlBody);
return callback(hash.digest(digestEncoding));
}
// @todo: Figure out a way to calculate hash for formdata body type.
// ensure that callback is called if body.mode doesn't match with any of the above modes
return callback();
};
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'edgegrid',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
}
]
},
/**
* Initializes an item (fetches all required parameters, etc.) before the actual authorization step.
*
* @param {AuthInterface} auth AuthInterface instance created with request auth
* @param {Response} response Response of intermediate request (if any)
* @param {AuthHandlerInterface~authInitHookCallback} done Callback function called with error as first argument
*/
init: function (auth, response, done) {
done(null);
},
/**
* Checks the item, and fetches any parameters that are not already provided.
*
* @param {AuthInterface} auth AuthInterface instance created with request auth
* @param {AuthHandlerInterface~authPreHookCallback} done Callback function called with error, success and request
*/
pre: function (auth, done) {
// only check required auth params here
done(null, Boolean(auth.get('accessToken') && auth.get('clientToken') && auth.get('clientSecret')));
},
/**
* Verifies whether the request was successful after being sent.
*
* @param {AuthInterface} auth AuthInterface instance created with request auth
* @param {Response} response Response of the request
* @param {AuthHandlerInterface~authPostHookCallback} done Callback function called with error and success
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Generates the signature, and returns the Authorization header.
*
* @param {Object} params Auth parameters to use in header calculation
* @param {String} params.accessToken Access token provided by service provider
* @param {String} params.clientToken Client token provided by service provider
* @param {String} params.clientSecret Client secret provided by service provider
* @param {String} params.nonce Nonce to include in authorization header
* @param {String} params.timestamp Timestamp as defined in protocol specification
* @param {String} [params.bodyHash] Base64-encoded SHA256 hash of request body for POST request
* @param {Object[]} params.headers Request headers
* @param {String[]} params.headersToSign Ordered list of headers to include in signature
* @param {String} params.method Request method
* @param {Url} params.url Node's URL object
* @returns {String} Authorization header
*/
computeHeader: function (params) {
var authHeader = SIGNING_ALGORITHM,
signingKey = base64HmacSha256(params.timestamp, params.clientSecret),
dataToSign;
authHeader += `client_token=${params.clientToken};`;
authHeader += `access_token=${params.accessToken};`;
authHeader += `timestamp=${params.timestamp};`;
authHeader += `nonce=${params.nonce};`;
dataToSign = [
params.method,
// trim to convert 'http:' from Node's URL object to 'http'
_.trimEnd(params.url.protocol, COLON),
params.baseURL || params.url.host,
params.url.path || SLASH,
canonicalizeHeaders(params.headersToSign, params.headers),
params.bodyHash || EMPTY,
authHeader
].join(TAB);
return authHeader + 'signature=' + base64HmacSha256(dataToSign, signingKey);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth AuthInterface instance created with request auth
* @param {Request} request Request to be sent
* @param {AuthHandlerInterface~authSignHookCallback} done Callback function
*/
sign: function (auth, request, done) {
var params = auth.get([
'accessToken',
'clientToken',
'clientSecret',
'baseURL',
'nonce',
'timestamp',
'headersToSign'
]),
url = urlEncoder.toNodeUrl(request.url),
self = this;
if (!(params.accessToken && params.clientToken && params.clientSecret)) {
return done(); // Nothing to do if required parameters are not present.
}
request.removeHeader(AUTHORIZATION, {ignoreCase: true});
// Extract host from provided baseURL.
params.baseURL = params.baseURL && urlEncoder.toNodeUrl(params.baseURL).host;
params.nonce = params.nonce || uuid();
params.timestamp = params.timestamp || getTimestamp();
params.url = url;
params.method = request.method;
// ensure that headers are case-insensitive as specified in the documentation
params.headers = request.getHeaders({enabled: true, ignoreCase: true});
if (typeof params.headersToSign === STRING) {
params.headersToSign = params.headersToSign.split(',');
}
else if (!_.isArray(params.headersToSign)) {
params.headersToSign = [];
}
// only calculate body hash for POST requests according to specification
if (request.method === 'POST') {
return computeBodyHash(request.body, 'sha256', 'base64', function (bodyHash) {
params.bodyHash = bodyHash;
request.addHeader({
key: AUTHORIZATION,
value: self.computeHeader(params),
system: true
});
return done();
});
}
request.addHeader({
key: AUTHORIZATION,
value: self.computeHeader(params),
system: true
});
return done();
}
};
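
The EdgeGrid-specific pieces above are the timestamp format and the two-step HMAC: the client secret signs the timestamp to derive a signing key, which then signs the tab-joined request data. A minimal sketch with placeholder host, path and tokens:

var crypto = require('crypto');

function hmacB64 (data, key) { return crypto.createHmac('sha256', key).update(data).digest('base64'); }

var timestamp = '20210908T12:01:19+0000',           // format produced by getTimestamp(): yyyyMMddTHH:mm:ss+0000
    clientSecret = 'fake-client-secret',            // placeholder
    signingKey = hmacB64(timestamp, clientSecret),  // per-request signing key
    dataToSign = [
        'GET',                                      // method
        'https',                                    // protocol without ':'
        'akab-host.luna.akamaiapis.net',            // baseURL host (placeholder)
        '/resource',                                // path
        '',                                         // canonicalized headers (none here)
        '',                                         // body hash (GET has none)
        'EG1-HMAC-SHA256 client_token=ct;access_token=at;timestamp=' + timestamp + ';nonce=n1;'
    ].join('\t');

// computeHeader() appends this to the auth header as signature=<value>
console.log('signature=' + hmacB64(dataToSign, signingKey));
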

node_modules/postman-runtime/lib/authorizer/hawk.js generated vendored Normal file

@@ -0,0 +1,264 @@
var url = require('url'),
_ = require('lodash'),
crypto = require('crypto'),
Hawk = require('postman-request/lib/hawk'),
RequestBody = require('postman-collection').RequestBody,
bodyBuilder = require('../requester/core-body-builder'),
urlEncoder = require('postman-url-encoder'),
ASCII_SOURCE = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
ASCII_SOURCE_LENGTH = ASCII_SOURCE.length,
AUTHORIZATION = 'Authorization',
EMPTY = '';
/**
* Generates a random string of given length (useful for nonce generation, etc).
*
* @param {Number} length
*/
function randomString (length) {
length = length || 6;
var result = [],
i;
for (i = 0; i < length; i++) {
result[i] = ASCII_SOURCE[(Math.random() * ASCII_SOURCE_LENGTH) | 0];
}
return result.join(EMPTY);
}
/**
* Calculates body hash with given algorithm and digestEncoding.
* REFER: https://github.com/postmanlabs/postman-request/blob/master/lib/hawk.js#L12
*
* @param {RequestBody} body
* @param {String} algorithm
* @param {String} digestEncoding
* @param {String} contentType
* @param {Function} callback
*/
function computeBodyHash (body, algorithm, digestEncoding, contentType, callback) {
if (!(body && algorithm && digestEncoding) || body.isEmpty()) { return callback(); }
var hash = crypto.createHash(algorithm),
originalReadStream,
rawBody,
urlencodedBody,
graphqlBody;
hash.update('hawk.1.payload\n');
hash.update((contentType ? contentType.split(';')[0].trim().toLowerCase() : '') + '\n');
if (body.mode === RequestBody.MODES.raw) {
rawBody = bodyBuilder.raw(body.raw).body;
hash.update(rawBody);
hash.update('\n');
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.urlencoded) {
urlencodedBody = bodyBuilder.urlencoded(body.urlencoded).form;
urlencodedBody = urlEncoder.encodeQueryString(urlencodedBody);
hash.update(urlencodedBody);
hash.update('\n');
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.file) {
originalReadStream = _.get(body, 'file.content');
if (!originalReadStream) {
return callback();
}
return originalReadStream.cloneReadStream(function (err, clonedStream) {
if (err) { return callback(); }
clonedStream.on('data', function (chunk) {
hash.update(chunk);
});
clonedStream.on('end', function () {
hash.update('\n');
callback(hash.digest(digestEncoding));
});
});
}
if (body.mode === RequestBody.MODES.graphql) {
graphqlBody = bodyBuilder.graphql(body.graphql).body;
hash.update(graphqlBody);
hash.update('\n');
return callback(hash.digest(digestEncoding));
}
// @todo: Figure out a way to calculate hash for formdata body type.
// ensure that callback is called if body.mode doesn't match with any of the above modes
return callback();
}
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'hawk',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
},
{
property: 'nonce',
type: 'auth'
},
{
property: 'timestamp',
type: 'auth'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Checks the item, and fetches any parameters that are not already provided.
* Sanitizes the auth parameters if needed.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
!auth.get('nonce') && auth.set('nonce', randomString(6));
!_.parseInt(auth.get('timestamp')) && auth.set('timestamp', Math.floor(Date.now() / 1e3));
done(null, true);
},
/**
* Verifies whether the request was successfully authorized after being sent.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Computes signature and Auth header for a request.
*
* @param {Object} params
* @param {Object} params.credentials Contains hawk auth credentials, "id", "key" and "algorithm"
* @param {String} params.nonce
* @param {String} params.ext Extra data that may be associated with the request.
* @param {String} params.app Application ID used in Oz authorization protocol
* @param {String} params.dlg Delegation information (used in the Oz protocol)
* @param {String} params.user User id
* @param {String} params.url Complete request URL
* @param {String} params.method Request method
*
* @returns {*}
*/
computeHeader: function (params) {
return Hawk.header(url.parse(params.url), params.method, params);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var params = auth.get([
'authId',
'authKey',
'algorithm',
'nonce',
'timestamp',
'extraData',
'app',
'delegation',
'user',
'includePayloadHash'
]),
contentType = request.headers.get('content-type'),
self = this,
signRequest = function (bodyHash) {
// force toString to add a protocol to the URL.
var url = urlEncoder.toNodeUrl(request.url),
result = self.computeHeader({
credentials: {
id: params.authId,
key: params.authKey,
algorithm: params.algorithm
},
nonce: params.nonce,
timestamp: params.timestamp,
ext: params.extraData,
app: params.app,
dlg: params.delegation,
user: params.user,
url: url.href,
method: request.method,
hash: bodyHash
});
request.addHeader({
key: AUTHORIZATION,
value: result,
system: true
});
return done();
};
if (!params.authId || !params.authKey) {
return done(); // Nothing to do if required parameters are not present.
}
request.removeHeader(AUTHORIZATION, {ignoreCase: true});
// @note: Payload verification is optional in hawk auth according to the specification (see link below). If the user
// opts in for payload verification, the `Content-Type` header must be specified explicitly, otherwise
// authentication might fail because we automatically add the `Content-Type` header after auth handlers, which
// is not accounted for while calculating the payload hash for hawk auth.
// documentation: https://github.com/hapijs/hawk#payload-validation
// issue: https://github.com/postmanlabs/postman-app-support/issues/6550
//
// @todo: Change flow of auto adding `Content-Type` header to happen before auth handlers
if (!params.includePayloadHash) {
return signRequest(); // sign request without calculating payload hash
}
computeBodyHash(request.body, params.algorithm, 'base64', contentType, signRequest);
}
};
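
The payload hash computed by computeBodyHash above frames the body exactly as the Hawk payload-validation spec describes: 'hawk.1.payload', the lowercased content type without parameters, and the body, each followed by a newline. A small standalone sketch with a made-up JSON body:

var crypto = require('crypto');

var hash = crypto.createHash('sha256');        // algorithm comes from the hawk credentials
hash.update('hawk.1.payload\n');
hash.update('application/json\n');             // 'application/json; charset=utf-8' -> 'application/json'
hash.update('{"type":"https"}');               // raw request body (placeholder)
hash.update('\n');
console.log(hash.digest('base64'));            // value passed as the `hash` option to Hawk.header()
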

node_modules/postman-runtime/lib/authorizer/index.js generated vendored Normal file

@@ -0,0 +1,239 @@
var _ = require('lodash'),
sdk = require('postman-collection'),
createAuthInterface = require('./auth-interface'),
AUTH_TYPE_PROP = '__auth_type',
AuthLoader,
authorizeRequest;
/**
* This object manages loading and finding Handlers for auth.
*
* @type AuthLoader
*/
AuthLoader = {
/**
* Houses list of available Authentication handlers.
*
* @property {Object}
*/
handlers: {},
/**
* Finds the Handler for an Auth type.
*
* @param name
*
* @returns {AuthHandler}
*/
getHandler: function (name) {
return AuthLoader.handlers[name];
},
/**
* Adds a Handler for use with given Auth type.
*
* @param Handler
* @param name
*/
addHandler: function (Handler, name) {
if (!_.isFunction(Handler.init)) {
throw new Error('The handler for "' + name + '" does not have an "init" function, which is necessary');
}
if (!_.isFunction(Handler.pre)) {
throw new Error('The handler for "' + name + '" does not have a "pre" function, which is necessary');
}
if (!_.isFunction(Handler.post)) {
throw new Error('The handler for "' + name + '" does not have a "post" function, which is necessary');
}
if (!_.isFunction(Handler.sign)) {
throw new Error('The handler for "' + name + '" does not have a "sign" function, which is necessary');
}
Object.defineProperty(Handler, AUTH_TYPE_PROP, {
value: name,
configurable: false,
enumerable: false,
writable: false
});
AuthLoader.handlers[name] = Handler;
},
/**
* Removes the Handler for the Auth type.
*
* @param name
*/
removeHandler: function (name) {
AuthLoader.handlers[name] && (delete AuthLoader.handlers[name]);
}
};
// Create a Handler from each Signer that the SDK provides. Basically, we augment the signers with extra
// helper functions which take over the job of preparing a request for signing.
_.forEach({
noauth: require('./noauth'),
awsv4: require('./aws4'),
basic: require('./basic'),
bearer: require('./bearer'),
digest: require('./digest'),
hawk: require('./hawk'),
oauth1: require('./oauth1'),
oauth2: require('./oauth2'),
ntlm: require('./ntlm'),
apikey: require('./apikey'),
edgegrid: require('./edgegrid')
}, AuthLoader.addHandler);
/**
* Creates a copy of request, with the appropriate auth headers or parameters added.
*
* @note This function does not take care of resolving variables.
*
* @param {Request} request
* @param done
*
* @returns {Request}
*/
authorizeRequest = function (request, done) {
if (!request.auth) {
return done();
}
var clonedReq = new sdk.Request(request.toJSON()),
auth = clonedReq.auth,
authInterface = createAuthInterface(auth),
handler = AuthLoader.getHandler(auth.type);
if (handler) {
handler.sign(authInterface, clonedReq, function () { return done(null, clonedReq); });
}
else {
return done(new Error('runtime~authorizeRequest: could not find handler for auth type ' + auth.type));
}
};
module.exports = {
AuthLoader: AuthLoader,
authorizeRequest: authorizeRequest
};
// Interface
/**
* Interface for implementing auth handlers
*
* @interface AuthHandlerInterface
*/
// Interface functions
/**
* Defines the behaviour of an Auth Handler. This allows any changes the Handler will make
* to be statically analysed ahead of time.
*
* @member {AuthHandlerInterface~AuthManifest} AuthHandlerInterface#manifest
*/
/**
* This hook decides whether all the required parameters are present in the auth or not.
* What happens next is dependent upon how the `done` callback is called.
* Check {@link AuthHandlerInterface~authPreHookCallback} for all the possible ways the callback can be called.
*
* @function
* @name AuthHandlerInterface#pre
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
* Callback function which takes error, success, and request as arguments
*/
/**
* This hook is called with the response from the intermediate request, which was requested from the
* [pre]{@link AuthHandlerInterface#pre} hook.
* Here the `auth` can be modified using the response. After this [pre]{@link AuthHandlerInterface#pre} hook will be
* called again to verify the required parameters.
*
* @function
* @name AuthHandlerInterface#init
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done Callback function which takes error as the only argument
*/
/**
* This hook signs the `request` using the `auth`.
*
* @function
* @name AuthHandlerInterface#sign
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done Callback function which takes error as the only argument
*/
/**
* This hook is called after the request is made. It receives the response using which it can determine whether
* it was a failure or success. It can also modify the `auth` and ask to replay the `request`.
* For this it has to call the [done]{@link AuthHandlerInterface~authPostHookCallback} callback with `success` as false.
*
* @function
* @name AuthHandlerInterface#post
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done Callback function which takes error and success as arguments
*/
// Callbacks
/**
* This callback is called in the `pre` hook of the auth handler
* Depending on what parameters are passed in this callback, one of the following flows will be executed:
* 1. return (err): The request will be stopped and the error will be bubbled up
* 2. return (null, true): The request will be signed and sent
* 3. return (null, false): The request will be sent without being signed
* 4. return (null, false, `request`):
* - send the intermediate request
* - invoke the auth's [init]{@link AuthHandlerInterface#init} hook with the response of the intermediate request
* - invoke the auth's [pre]{@link AuthHandlerInterface#pre} hook
* @callback AuthHandlerInterface~authPreHookCallback
* @param {?Error} err
* @param {Boolean} success Defines whether the [pre]{@link AuthHandlerInterface#pre} hook was successful.
* @param {Request~definition|String} [request] It can be either request definition or request URL
*/
/**
* This callback is called in the `init` hook of the auth handler
* @callback AuthHandlerInterface~authInitHookCallback
* @param {?Error} err
*/
/**
* This callback is called in the `sign` hook of the auth handler
* @callback AuthHandlerInterface~authSignHookCallback
* @param {?Error} err
*/
/**
* This callback is called in the `post` hook of the auth handler
* @callback AuthHandlerInterface~authPostHookCallback
* @param {?Error} err
* @param {Boolean} success Defines whether the request was successful or not. If not, it will be replayed.
*/
/**
* Structure of an Auth Manifest. See {@link AuthHandlerInterface#manifest} for description.
*
* @typedef {Object} AuthHandlerInterface~AuthManifest
*
* @property {Object} info
* @property {String} info.name
* @property {String} info.version
* @property {Array<Object>} updates
*/
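
For reference, a minimal sketch of using this module on its own. The deep require path into postman-runtime internals and the example URL/token are assumptions for illustration; custom handlers can be registered the same way via AuthLoader.addHandler, provided they implement all four hooks:

var runtimeAuth = require('postman-runtime/lib/authorizer'),
    sdk = require('postman-collection');

var request = new sdk.Request({
    url: 'https://example.com/',
    method: 'GET',
    auth: { type: 'bearer', bearer: [{ key: 'token', value: 'abc123' }] }
});

// authorizeRequest clones the request, looks up the 'bearer' handler registered above
// and invokes its sign() hook; the original request is left untouched.
runtimeAuth.authorizeRequest(request, function (err, signedClone) {
    if (err) { throw err; }
    console.log(signedClone.getHeaders()); // { Authorization: 'Bearer abc123' }
});
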

node_modules/postman-runtime/lib/authorizer/noauth.js generated vendored Normal file

@@ -0,0 +1,62 @@
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
* @todo runtime needs to make sure AuthHandler
* cannot mutate any property on Request that it has not declared on the manifest.
*/
manifest: {
info: {
name: 'noauth',
version: '1.0.0'
},
updates: []
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Checks whether the given item has all the required parameters in its request.
* Sanitizes the auth parameters if needed.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
done(null, true);
},
/**
* Verifies whether the request was successfully authorized after being sent.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
return done();
}
};

node_modules/postman-runtime/lib/authorizer/ntlm.js generated vendored Normal file

@@ -0,0 +1,276 @@
/**
* @fileOverview
*
* Implements the NTLM over HTTP specification: [MS-NTHT] https://msdn.microsoft.com/en-us/library/cc237488.aspx
* Also see [MS-NLMP]: https://msdn.microsoft.com/en-us/library/cc236621.aspx
*
* @note NTLM supports a number of different variations, where an actual TCP connection is signed etc. This file
* does _not_ implement those cases.
*/
var ntlmUtil = require('httpntlm').ntlm,
_ = require('lodash'),
EMPTY = '',
NTLM = 'NTLM',
STATE = 'state',
NEGOTIATE = 'negotiate',
NTLM_HEADER = 'ntlmHeader',
AUTHORIZATION = 'Authorization',
WWW_AUTHENTICATE = 'www-authenticate',
DISABLE_RETRY_REQUEST = 'disableRetryRequest',
NTLM_PARAMETERS = {
DOMAIN: 'domain',
WORKSTATION: 'workstation',
USERNAME: 'username',
PASSWORD: 'password'
},
STATES = {
INITIALIZED: 'INITIALIZED',
T1_MSG_CREATED: 'T1_MSG_CREATED',
T3_MSG_CREATED: 'T3_MSG_CREATED'
};
/**
* Parses the username to separate username and domain. It can handle two formats:
* - Down-Level Logon name format `DOMAIN\USERNAME`
* - User Principal Name format `USERNAME@DOMAIN`
*
* @private
* @param {String} username - Username string to parse from
* @return {Object} - An object with `username` and `domain` fields, which are `strings`.
*/
function parseParametersFromUsername (username) {
var dllParams,
upnParams;
if (!(username && typeof username === 'string')) {
return {
username: EMPTY,
domain: EMPTY
};
}
dllParams = username.split('\\');
upnParams = username.split('@');
// username should be either of the two formats, not both
if (dllParams.length > 1 && upnParams.length > 1) {
return {
username,
domain: EMPTY
};
}
// try to parse from "down level logon" format
if (dllParams.length === 2 && dllParams[0] && dllParams[1]) {
return {
username: dllParams[1],
domain: dllParams[0]
};
}
// try to parse from "user principal name" format
if (upnParams.length === 2 && upnParams[0] && upnParams[1]) {
return {
username: upnParams[0],
domain: upnParams[1]
};
}
return {
username,
domain: EMPTY
};
}
/**
* Check if `WWW-Authenticate` header has NTLM challenge.
*
* @private
* @param {*} headers - Postman headers instance
* @returns {Boolean}
*/
function hasNTLMChallenge (headers) {
// Case 1: multiple headers
// - WWW-Authenticate: NTLM
// - WWW-Authenticate: Negotiate
if (headers.has(WWW_AUTHENTICATE, NTLM) || headers.has(WWW_AUTHENTICATE, NEGOTIATE)) {
return true;
}
// Case 2: single header
// - WWW-Authenticate: Negotiate, NTLM
return String(headers.get(WWW_AUTHENTICATE)).includes(NTLM);
}
/**
* NTLM auth while authenticating requires negotiateMessage (type 1) and authenticateMessage (type 3) to be stored.
* It also needs to know which stage it is in (INITIALIZED, T1_MSG_CREATED or T3_MSG_CREATED).
* After the first successful authentication, it just relies on the TCP connection, no other state is needed.
* @todo Currently we don't close the connection, so there is no way to de-authenticate.
*
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'ntlm',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Ensures the NTLM handshake state is initialized before the request is sent.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
!auth.get(STATE) && auth.set(STATE, STATES.INITIALIZED);
done(null, true);
},
/**
* Verifies whether the NTLM auth succeeded and, if not, advances the type 1 / type 3 handshake.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
if (auth.get(DISABLE_RETRY_REQUEST)) {
return done(null, true);
}
var state = auth.get(STATE),
domain = auth.get(NTLM_PARAMETERS.DOMAIN) || EMPTY,
workstation = auth.get(NTLM_PARAMETERS.WORKSTATION) || EMPTY,
username = auth.get(NTLM_PARAMETERS.USERNAME) || EMPTY,
password = auth.get(NTLM_PARAMETERS.PASSWORD) || EMPTY,
negotiateMessage, // type 1
challengeMessage, // type 2
authenticateMessage, // type 3
ntlmType2Header,
parsedParameters;
if (response.code !== 401 && response.code !== 403) {
return done(null, true);
}
// we try to extract domain from username if not specified.
if (!domain) {
parsedParameters = parseParametersFromUsername(username) || {};
username = parsedParameters.username;
domain = parsedParameters.domain;
}
if (state === STATES.INITIALIZED) {
// Nothing to do if the server does not ask us for auth in the first place.
if (!hasNTLMChallenge(response.headers)) {
return done(null, true);
}
// Create a type 1 message to send to the server
negotiateMessage = ntlmUtil.createType1Message({
domain: domain,
workstation: workstation
});
// Add the type 1 message as the auth header
auth.set(NTLM_HEADER, negotiateMessage);
// Update the state
auth.set(STATE, STATES.T1_MSG_CREATED);
// ask runtime to replay the request
return done(null, false);
}
else if (state === STATES.T1_MSG_CREATED) {
// At this point, we can assume that the type 1 message was sent to the server
// there can be multiple headers present with key `www-authenticate`.
// iterate to get the one which has the NTLM hash. if multiple
// headers have the NTLM hash, use the first one.
ntlmType2Header = response.headers.find(function (header) {
return String(header.key).toLowerCase() === WWW_AUTHENTICATE &&
header.valueOf().startsWith('NTLM ');
});
if (!ntlmType2Header) {
return done(new Error('ntlm: server did not send NTLM type 2 message'));
}
challengeMessage = ntlmUtil.parseType2Message(ntlmType2Header.valueOf(), _.noop);
if (!challengeMessage) {
return done(new Error('ntlm: server did not correctly process authentication request'));
}
authenticateMessage = ntlmUtil.createType3Message(challengeMessage, {
domain: domain,
workstation: workstation,
username: username,
password: password
});
// Add the type 3 message (created above) as the auth header
auth.set(NTLM_HEADER, authenticateMessage);
auth.set(STATE, STATES.T3_MSG_CREATED);
// ask runtime to replay the request
return done(null, false);
}
else if (state === STATES.T3_MSG_CREATED) {
// Means we have tried to authenticate, so we should stop here without worrying about anything
return done(null, true);
}
// We are in an undefined state
return done(null, true);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var ntlmHeader = auth.get(NTLM_HEADER);
request.removeHeader(AUTHORIZATION, {ignoreCase: true});
ntlmHeader && request.addHeader({
key: AUTHORIZATION,
value: ntlmHeader,
system: true
});
return done();
}
};

494
node_modules/postman-runtime/lib/authorizer/oauth1.js generated vendored Normal file
View File

@@ -0,0 +1,494 @@
var _ = require('lodash'),
crypto = require('crypto'),
oAuth1 = require('node-oauth1'),
urlEncoder = require('postman-url-encoder'),
RequestBody = require('postman-collection').RequestBody,
bodyBuilder = require('../requester/core-body-builder'),
EMPTY = '',
RSA = 'RSA',
HYPHEN = '-',
PROTOCOL_HTTP = 'http',
PROTOCOL_SEPARATOR = '://',
HTTPS_PORT = '443',
HTTP_PORT = '80',
OAUTH1_PARAMS = {
oauthConsumerKey: 'oauth_consumer_key',
oauthToken: 'oauth_token',
oauthSignatureMethod: 'oauth_signature_method',
oauthTimestamp: 'oauth_timestamp',
oauthNonce: 'oauth_nonce',
oauthVersion: 'oauth_version',
oauthSignature: 'oauth_signature',
oauthCallback: 'oauth_callback',
oauthVerifier: 'oauth_verifier',
oauthBodyHash: 'oauth_body_hash'
};
/**
* Returns a OAuth1.0-a compatible representation of the request URL, also called "Base URL".
* For details, http://oauth.net/core/1.0a/#anchor13
*
* todo: should we ignore the auth parameters of the URL or not? (the standard does not mention them)
* we currently ignore them.
*
* @private
* @param {Url} url - Node's URL object
* @returns {String}
*/
function getOAuth1BaseUrl (url) {
var port = url.port ? url.port : undefined,
host = ((port === HTTP_PORT ||
port === HTTPS_PORT ||
port === undefined) && url.hostname) || url.host,
path = url.path,
// trim to convert 'http:' from Node's URL object to 'http'
protocol = _.trimEnd(url.protocol || PROTOCOL_HTTP, PROTOCOL_SEPARATOR);
protocol = (_.endsWith(protocol, PROTOCOL_SEPARATOR) ? protocol : protocol + PROTOCOL_SEPARATOR);
return protocol.toLowerCase() + host.toLowerCase() + path;
}
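// illustrative (hypothetical URL): 'HTTPS://Example.COM:443/photos' yields the base URL
// 'https://example.com/photos' -- the default port is dropped and the scheme/host are lower-cased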
/**
* Query parameters are encoded with WHATWG encoding in the request. OAuth1.0
* requires the query params to be encoded with the RFC-3986 standard. This
* function decodes the query parameters and encodes them to the required RFC-3986
* standard. For details: https://oauth.net/core/1.0a/#encoding_parameters
*
* @param {Request} request - request to update query parameters
* @param {Object} url - Node.js like url object
*/
function updateQueryParamEncoding (request, url) {
// early bailout if no query is set.
if (!url.query) {
return;
}
const queryParams = oAuth1.decodeForm(url.query);
// clear all query parameters
request.url.query.clear();
_.forEach(queryParams, function (param) {
request.url.query.add({
key: param[0] && oAuth1.percentEncode(param[0]),
value: param[1] && oAuth1.percentEncode(param[1])
});
});
}
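// illustrative sketch: a query value such as `hello world!` ends up as `hello%20world%21` under
// RFC-3986 percent-encoding, whereas the WHATWG encoder would have left the `!` untouched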
/**
* Calculates body hash with given algorithm and digestEncoding.
*
* @param {RequestBody} body Request body
* @param {String} algorithm Hash algorithm to use
* @param {String} digestEncoding Encoding of the hash
* @param {Function} callback Callback function that will be called with body hash
*/
function computeBodyHash (body, algorithm, digestEncoding, callback) {
if (!(algorithm && digestEncoding)) { return callback(); }
var hash = crypto.createHash(algorithm),
originalReadStream,
rawBody,
graphqlBody;
// if body is not available, return hash of empty string
if (!body || body.isEmpty()) {
return callback(hash.digest(digestEncoding));
}
if (body.mode === RequestBody.MODES.raw) {
rawBody = bodyBuilder.raw(body.raw).body;
hash.update(rawBody);
return callback(hash.digest(digestEncoding));
}
// unlike other auths (e.g. AWS/HAWK), the hash for a url-encoded body is not calculated here
// because it is not required for OAuth 1.0
if (body.mode === RequestBody.MODES.file) {
originalReadStream = _.get(body, 'file.content');
if (!originalReadStream) {
return callback();
}
return originalReadStream.cloneReadStream(function (err, clonedStream) {
if (err) { return callback(); }
clonedStream.on('data', function (chunk) {
hash.update(chunk);
});
clonedStream.on('end', function () {
callback(hash.digest(digestEncoding));
});
});
}
if (body.mode === RequestBody.MODES.graphql) {
graphqlBody = bodyBuilder.graphql(body.graphql).body;
hash.update(graphqlBody);
return callback(hash.digest(digestEncoding));
}
// @todo: Figure out a way to calculate hash for formdata body type.
// ensure that callback is called if body.mode doesn't match with any of the above modes
return callback();
}
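// illustrative usage (mirrors the sign() flow below): for signatureMethod 'HMAC-SHA1' the hash
// algorithm is 'SHA1', so the body hash is computed as
//   computeBodyHash(request.body, 'SHA1', 'base64', function (bodyHash) { /* becomes oauth_body_hash */ });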
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'oauth1',
version: '1.0.0'
},
updates: [
{
property: 'Authorization',
type: 'header'
},
{
property: OAUTH1_PARAMS.oauthConsumerKey,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthToken,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthCallback,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthVerifier,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthBodyHash,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthSignatureMethod,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthTimestamp,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthNonce,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthVersion,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthSignature,
type: 'url.param'
},
{
property: OAUTH1_PARAMS.oauthConsumerKey,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthToken,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthCallback,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthVerifier,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthSignatureMethod,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthTimestamp,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthNonce,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthVersion,
type: 'body.urlencoded'
},
{
property: OAUTH1_PARAMS.oauthSignature,
type: 'body.urlencoded'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Verifies whether the request has valid OAuth 1.0 credentials (which is always).
* Sanitizes the auth parameters if needed.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
done(null, true);
},
/**
* Verifies whether the OAuth 1.0 auth succeeded.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Generates and adds oAuth1 data to the request. This function modifies the
* request passed in the argument.
*
* @param {Request} request - request to add oauth1 parameters
* @param {Object} params - oauth data to generate signature
* @param {Object} protocolProfileBehavior - Protocol profile behaviors
* @param {Function} done - callback function
*/
addAuthDataToRequest: function (request, params, protocolProfileBehavior, done) {
var url = urlEncoder.toNodeUrl(request.url),
signatureParams,
urlencodedBody,
bodyParams,
allParams,
signature,
message,
header,
accessor = {
consumerSecret: params.consumerSecret || EMPTY,
tokenSecret: params.tokenSecret || EMPTY,
privateKey: params.privateKey || EMPTY
},
disableUrlEncoding = protocolProfileBehavior && protocolProfileBehavior.disableUrlEncoding;
signatureParams = [
{system: true, key: OAUTH1_PARAMS.oauthConsumerKey, value: params.consumerKey},
{system: true, key: OAUTH1_PARAMS.oauthToken, value: params.token},
{system: true, key: OAUTH1_PARAMS.oauthSignatureMethod, value: params.signatureMethod},
{system: true, key: OAUTH1_PARAMS.oauthTimestamp, value: params.timestamp},
{system: true, key: OAUTH1_PARAMS.oauthNonce, value: params.nonce},
{system: true, key: OAUTH1_PARAMS.oauthVersion, value: params.version}
];
// bodyHash, callback and verifier parameters are part of extensions of the original OAuth1 spec.
// So we only include those in signature if they are non-empty, ignoring the addEmptyParamsToSign setting.
// Otherwise it causes problem for servers that don't support the respective OAuth1 extensions.
// Issue: https://github.com/postmanlabs/postman-app-support/issues/8737
if (params.bodyHash) {
signatureParams.push({system: true, key: OAUTH1_PARAMS.oauthBodyHash, value: params.bodyHash});
}
if (params.callback) {
signatureParams.push({system: true, key: OAUTH1_PARAMS.oauthCallback, value: params.callback});
}
if (params.verifier) {
signatureParams.push({system: true, key: OAUTH1_PARAMS.oauthVerifier, value: params.verifier});
}
// filter empty signature parameters
signatureParams = _.filter(signatureParams, function (param) {
return params.addEmptyParamsToSign || param.value;
});
urlencodedBody = request.body &&
request.body.mode === RequestBody.MODES.urlencoded &&
request.body.urlencoded;
// Body params only need to be included if they are URL encoded.
// http://oauth.net/core/1.0a/#anchor13
bodyParams = urlencodedBody ? urlencodedBody.filter(function (param) {
return !param.disabled;
}) : [];
allParams = [].concat(signatureParams, bodyParams);
message = {
action: getOAuth1BaseUrl(url),
method: request.method,
parameters: _.map(allParams, function (param) {
return [param.key, param.value];
})
};
try {
signature = oAuth1.SignatureMethod.sign(message, accessor);
}
catch (err) {
// handle invalid private key errors for RSA signatures
return done(err);
}
// Update the encoding for query parameters to RFC-3986 in accordance with the
// OAuth1.0a specification: https://oauth.net/core/1.0a/#encoding_parameters
// disableUrlEncoding option should be respected in authorization flow as well
if (disableUrlEncoding !== true) {
updateQueryParamEncoding(request, url);
}
signatureParams.push({system: true, key: OAUTH1_PARAMS.oauthSignature, value: signature});
// Add signature params to the request. The OAuth specification says
// that we should add parameters in the following order of preference:
// 1. Auth Header
// 2. Body parameters
// 3. Query parameters
//
// http://oauth.net/core/1.0/#consumer_req_param
if (params.addParamsToHeader) {
header = oAuth1.getAuthorizationHeader(params.realm, _.map(signatureParams, function (param) {
return [param.key, param.value];
}), params.disableHeaderEncoding);
request.addHeader({
key: 'Authorization',
value: header,
system: true
});
}
else if ((/PUT|POST/).test(request.method) && urlencodedBody) {
_.forEach(signatureParams, function (param) {
urlencodedBody.add(param);
});
}
else if (disableUrlEncoding === true) {
// disableUrlEncoding option should be respected in authorization flow as well
request.addQueryParams(signatureParams);
}
else {
_.forEach(signatureParams, function (param) {
request.url.query.add({
key: param.key && oAuth1.percentEncode(param.key),
value: param.value && oAuth1.percentEncode(param.value),
system: true
});
});
}
done();
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var self = this,
params = auth.get([
'consumerKey',
'consumerSecret',
'token',
'tokenSecret',
'privateKey',
'signatureMethod',
'callback',
'verifier',
'timestamp',
'nonce',
'version',
'realm',
'includeBodyHash',
'addParamsToHeader',
'addEmptyParamsToSign',
'disableHeaderEncoding'
]),
urlencodedBody = request.body,
signatureAlgo,
hashAlgo,
protocolProfileBehavior = auth._protocolProfileBehavior;
// extract hash and signature algorithm from signatureMethod
// signature methods are in this format: '<signatureAlgo>-<hashAlgo>' e.g. RSA-SHA1
hashAlgo = _.split(params.signatureMethod, HYPHEN);
signatureAlgo = _.upperCase(hashAlgo[0]);
hashAlgo = hashAlgo[1];
if (!params.consumerKey ||
(signatureAlgo !== RSA && !params.consumerSecret) ||
(signatureAlgo === RSA && !params.privateKey)) {
return done(); // Nothing to do if required parameters are not present.
}
// before this: urlencodedBody = request.body
// after this: urlencodedBody = request.body.urlencoded or undefined
urlencodedBody = (urlencodedBody &&
urlencodedBody.mode === RequestBody.MODES.urlencoded
) ? urlencodedBody.urlencoded : undefined;
// Remove existing headers and params (if any)
request.removeHeader('Authorization');
request.removeQueryParams(_.values(OAUTH1_PARAMS));
urlencodedBody && urlencodedBody.remove(function (param) {
return _.includes(_.values(OAUTH1_PARAMS), param.key);
});
// Generate a new nonce and timestamp
params.nonce = params.nonce || oAuth1.nonce(11).toString();
params.timestamp = params.timestamp || oAuth1.timestamp().toString();
// Ensure that empty parameters are not added to the signature
if (!params.addEmptyParamsToSign) {
params = _.reduce(params, function (accumulator, value, key) {
if (_.isString(value) && (value.trim() === EMPTY)) {
return accumulator;
}
accumulator[key] = value;
return accumulator;
}, {});
}
// Don't include the body hash for url-encoded bodies (or when not requested), as defined in the specification
// @see: https://tools.ietf.org/id/draft-eaton-oauth-bodyhash-00.html#when_to_include
if (urlencodedBody || !(params.includeBodyHash && hashAlgo)) {
return self.addAuthDataToRequest(request, params, protocolProfileBehavior, done);
}
computeBodyHash(request.body, hashAlgo, 'base64', function (bodyHash) {
params.bodyHash = bodyHash;
return self.addAuthDataToRequest(request, params, protocolProfileBehavior, done);
});
}
};

133
node_modules/postman-runtime/lib/authorizer/oauth2.js generated vendored Normal file
View File

@@ -0,0 +1,133 @@
var _ = require('lodash'),
HEADER = 'header',
QUERY_PARAMS = 'queryParams',
BEARER = 'bearer',
MAC = 'mac',
AUTHORIZATION = 'Authorization',
ACCESS_TOKEN = 'access_token',
AUTHORIZATION_PREFIX = 'Bearer',
OAUTH2_PARAMETERS = [
'accessToken',
'addTokenTo',
'tokenType',
'headerPrefix'
];
/**
* @implements {AuthHandlerInterface}
*/
module.exports = {
/**
* @property {AuthHandlerInterface~AuthManifest}
*/
manifest: {
info: {
name: 'oauth2',
version: '1.0.0'
},
updates: [
{
property: AUTHORIZATION,
type: 'header'
},
{
property: ACCESS_TOKEN,
type: 'url.param'
}
]
},
/**
* Initializes an item (extracts parameters from intermediate requests if any, etc)
* before the actual authorization step.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authInitHookCallback} done
*/
init: function (auth, response, done) {
done(null);
},
/**
* Verifies whether the request has an OAuth 2.0 access token.
* Sanitizes the auth parameters if needed.
*
* @param {AuthInterface} auth
* @param {AuthHandlerInterface~authPreHookCallback} done
*/
pre: function (auth, done) {
done(null, Boolean(auth.get('accessToken')));
},
/**
* Verifies whether the OAuth 2.0 auth succeeded.
*
* @param {AuthInterface} auth
* @param {Response} response
* @param {AuthHandlerInterface~authPostHookCallback} done
*/
post: function (auth, response, done) {
done(null, true);
},
/**
* Signs a request.
*
* @param {AuthInterface} auth
* @param {Request} request
* @param {AuthHandlerInterface~authSignHookCallback} done
*/
sign: function (auth, request, done) {
var params = auth.get(OAUTH2_PARAMETERS),
tokenType;
// Validation
if (!params.accessToken) {
return done(); // Nothing to do if required parameters are not present.
}
// Defaults
params.addTokenTo = params.addTokenTo || HEADER; // Add token to header by default
params.tokenType = params.tokenType || BEARER; // Use `Bearer` token type by default
params.headerPrefix = _.isNil(params.headerPrefix) ?
AUTHORIZATION_PREFIX : _.trim(String(params.headerPrefix));
// add a space after prefix only if there is any prefix
params.headerPrefix && (params.headerPrefix += ' ');
// Some servers send 'Bearer' while others send 'bearer'
tokenType = _.toLower(params.tokenType);
// @TODO Add support for HMAC
if (tokenType === MAC) {
return done();
}
// treat every token type (other than MAC) as a bearer token
// clean conflicting headers and query params
// @todo: we should be able to get conflicting params from auth manifest
// and clear them before the sign step for any auth
request.removeHeader(AUTHORIZATION, {ignoreCase: true});
request.removeQueryParams([ACCESS_TOKEN]);
if (params.addTokenTo === QUERY_PARAMS) {
request.addQueryParams({
key: ACCESS_TOKEN,
value: params.accessToken,
system: true
});
}
else if (params.addTokenTo === HEADER) {
request.addHeader({
key: AUTHORIZATION,
value: params.headerPrefix + params.accessToken,
system: true
});
}
return done();
}
};

166
node_modules/postman-runtime/lib/backpack/index.js generated vendored Normal file
View File

@@ -0,0 +1,166 @@
var _ = require('lodash'),
meetExpectations,
backpack;
/**
* ensure the specified keys are functions in subject
*
* @param {Object} subject
* @param {Array} expectations
* @param {Array=} [defaults]
* @returns {Object}
*/
meetExpectations = function (subject, expectations, defaults) {
// provided that the subject is an object, ensure that every expected key on it is a function (defaulting to noop)
// eslint-disable-next-line lodash/prefer-lodash-chain
_.isObject(subject) && _.union(defaults, expectations).forEach(function (expect) {
!_.isFunction(subject[expect]) && (subject[expect] = _.noop);
});
return subject;
};
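// illustrative: meetExpectations({ done: onDone }, ['error'], ['success']) returns the same object
// with `error` and `success` set to _.noop, since only `done` was provided as a function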
module.exports = backpack = {
/**
* Ensures that the given argument is a callable.
*
* @param {*} arg
* @param {Object=} ctx
* @returns {boolean|*}
*/
ensure: function (arg, ctx) {
return (typeof arg === 'function') && (ctx ? arg.bind(ctx) : arg) || undefined;
},
/**
* accept the callback parameter and convert it into a consistent object interface
*
* @param {Function|Object} cb
* @param {Array} [expect=]
* @returns {Object}
*
* @todo - write tests
*/
normalise: function (cb, expect) {
if (_.isFunction(cb) && cb.__normalised) {
return meetExpectations(cb, expect);
}
var userback, // this var will be populated and returned
// keep a reference of all initial callbacks sent by user
callback = (_.isFunction(cb) && cb) || (_.isFunction(cb && cb.done) && cb.done),
callbackError = _.isFunction(cb && cb.error) && cb.error,
callbackSuccess = _.isFunction(cb && cb.success) && cb.success;
// create master callback that calls these user provided callbacks
userback = _.assign(function (err) {
// if common callback is defined, call that
callback && callback.apply(this, arguments);
// for special error and success, call them if they are user defined
if (err) {
callbackError && callbackError.apply(this, arguments);
}
else {
// remove the extra error param before calling success
callbackSuccess && callbackSuccess.apply(this, (Array.prototype.shift.call(arguments), arguments));
}
}, _.isPlainObject(cb) && cb, { // override error, success and done
error: function () {
return userback.apply(this, arguments);
},
success: function () {
// inject null to arguments and call the main callback
userback.apply(this, (Array.prototype.unshift.call(arguments, null), arguments));
},
done: function () {
return userback.apply(this, arguments);
},
__normalised: true
});
return meetExpectations(userback, expect);
},
/**
* Converts a callback into a function that can be called multiple times; the original callback is actually
* invoked only once every flag in the given set has reported a value (or immediately on error)
*
* @param {Array} flags
* @param {Function} callback
* @param {Array} args
* @param {Number} ms
* @returns {Function}
*/
multiback: function (flags, callback, args, ms) {
var status = {},
sealed;
// ensure that the callback times out after a while
callback = backpack.timeback(callback, ms, null, function () {
sealed = true;
});
return function (err, flag, value) {
if (sealed) { return; } // do not proceed if it is sealed
status[flag] = value;
if (err) { // on error we directly call the callback and seal subsequent calls
sealed = true;
status = null;
callback.call(status, err);
return;
}
// if any flag is not defined, we exit. when all flags hold a value, we know that the end callback has to be
// executed.
for (var i = 0, ii = flags.length; i < ii; i++) {
if (!status.hasOwnProperty(flags[i])) { return; }
}
sealed = true;
status = null;
callback.apply(status, args);
};
},
/**
* Ensures that a callback is executed within a specific time.
*
* @param {Function} callback
* @param {Number=} [ms]
* @param {Object=} [scope]
* @param {Function=} [when] - function executed right before callback is called with timeout. one can do cleanup
* stuff here
* @returns {Function}
*/
timeback: function (callback, ms, scope, when) {
ms = Number(ms);
// if no timeout duration is specified, just return the callback function and exit. this is because we do
// not need to track a timeout of 0ms
if (!ms) {
return callback;
}
var sealed = false,
irq = setTimeout(function () { // irq = interrupt request
sealed = true;
irq = null;
when && when.call(scope || this);
callback.call(scope || this, new Error('callback timed out'));
}, ms);
return function () {
// if sealed, it means that timeout has elapsed and we accept no future callback
if (sealed) { return undefined; }
// otherwise we clear timeout and allow the callback to be executed. note that we do not seal the function
// since we should allow multiple callback calls.
irq && (irq = clearTimeout(irq));
return callback.apply(scope || this, arguments);
};
}
};

5
node_modules/postman-runtime/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
module.exports = {
Runner: require('./runner'),
Requester: require('./requester').Requester,
version: require('./version')
};

View File

@@ -0,0 +1,786 @@
// Browser Request
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/* eslint-disable */
var _ = require('lodash');
var parseHeadersString = require('postman-collection').Header.parse;
request.log = {
'trace': noop, 'debug': noop, 'info': noop, 'warn': noop, 'error': noop
}
var CORS_ERROR_CODE = 'ERR_PM_CORS'; // Custom error code for CORS errors
var MIXED_CONTENT_ERROR_CODE = 'ERR_PM_MIXED_CONTENT'; // Custom error code for mixed content error
var DEFAULT_TIMEOUT = 3 * 60 * 1000 // 3 minutes
// The body is ignored if the request method is GET or HEAD.
// Refer: https://xhr.spec.whatwg.org/#the-send()-method
var METHODS_WITHOUT_BODY = {
'GET': true,
'HEAD': true
};
// Refer: https://developer.mozilla.org/en-US/docs/Glossary/Forbidden_header_name
var FORBIDDEN_HEADERS = {
'accept-charset': true,
'accept-encoding': true,
'access-control-request-headers': true,
'access-control-request-method': true,
connection: true,
'content-length': true,
cookie: true,
cookie2: true,
date: true,
dnt: true,
expect: true,
'feature-policy': true,
host: true,
'keep-alive': true,
origin: true,
referer: true,
te: true,
trailer: true,
'transfer-encoding': true,
upgrade: true,
via: true
};
var IS_LOCALHOST = {
'localhost': true,
'127.0.0.1': true,
'127.1': true,
'[::1]': true
};
function forEachAsync (items, fn, cb) {
!cb && (cb = function () { /* (ಠ_ಠ) */ })
if (!(Array.isArray(items) && fn)) { return cb() }
var index = 0
var totalItems = items.length
function next (err) {
if (err || index >= totalItems) {
return cb(err)
}
try {
fn.call(items, items[index++], next)
} catch (error) {
return cb(error)
}
}
if (!totalItems) { return cb() }
next()
}
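// illustrative usage with a hypothetical async `saveItem(item, next)`:
//   forEachAsync(['a', 'b'], function (item, next) { saveItem(item, next) }, function (err) { /* all done */ })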
//
// request
//
function request(originalRequest, options, onStart, callback) {
var options_onResponse = options.onResponse; // Save this for later.
var XHR = _.get(options, ['agents', options.url && options.url.protocol.slice(0, -1), 'agentClass']) || XMLHttpRequest;
if(typeof options === 'string')
options = {'uri':options};
else
options = _.clone(options); // Use a duplicate for mutating.
options.onResponse = options_onResponse // And put it back.
if (options.verbose) request.log = getLogger();
if(options.url) {
options.uri = options.url && options.url.href || options.url;
delete options.url;
}
if(!options.uri && options.uri !== "")
return callback(new Error("options.uri is a required argument"));
if(typeof options.uri != "string")
return callback(new Error("options.uri must be a string"));
options.onStart = onStart
options.callback = callback
options.method = options.method || 'GET';
options.headers = _.reduce(options.headers || {}, function (accumulator, value, key) {
if (!XHR._allowForbiddenHeaders && isForbiddenHeader(key)) {
// mutate original request and options as these will be passed in the
// request and response callbacks.
originalRequest.headers.remove(key);
delete options.headers[key];
return accumulator;
}
accumulator[key] = value;
return accumulator;
}, {});
options.body = options.body || null
options.timeout = options.timeout || request.DEFAULT_TIMEOUT
if(options.headers.host)
console.warn("Request: Options.headers.host is not supported");
if(options.json) {
options.headers.accept = options.headers.accept || 'application/json'
if(options.method !== 'GET')
options.headers['content-type'] = 'application/json'
if(typeof options.json !== 'boolean')
options.body = JSON.stringify(options.json)
else if(typeof options.body !== 'string')
options.body = JSON.stringify(options.body)
}
//BEGIN QS Hack
var serialize = function(obj) {
var str = [];
for(var p in obj)
if (obj.hasOwnProperty(p)) {
if (_.isArray(obj[p])) {
_.forEach(obj[p], function (value) {
str.push(encodeURIComponent(p) + "=" + encodeURIComponent(value));
});
}
else {
str.push(encodeURIComponent(p) + "=" + encodeURIComponent(obj[p]));
}
}
return str.join("&");
}
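// illustrative: serialize({a: 1, b: ['x', 'y']}) -> 'a=1&b=x&b=y'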
if(options.qs){
var qs = (typeof options.qs == 'string')? options.qs : serialize(options.qs);
if(options.uri.indexOf('?') !== -1){ //existing get params
options.uri = options.uri+'&'+qs;
}else{ //no get params
options.uri = options.uri+'?'+qs;
}
}
//END QS Hack
//BEGIN FORM Hack
var multipart = function (data) {
if (!Array.isArray(data)) { return; }
var i,
ii,
formParam,
formData = new FormData();
for (i = 0, ii = data.length; i < ii; i++) {
if (!(formParam = data[i])) { continue; }
if (Array.isArray(formParam.value)) {
formParam.value.forEach(function (value) {
formData.append(formParam.key, value);
});
}
else {
formData.append(formParam.key, formParam.value);
}
}
return {
body: formData
};
};
if(options.form){
if(typeof options.form == 'string') {
console.warn('form name unsupported');
}
if(XHR._allowBodyInGET || !METHODS_WITHOUT_BODY[options.method]) {
var encoding = (options.encoding || 'application/x-www-form-urlencoded').toLowerCase();
if (!options.headers['content-type'] && !options.headers['Content-Type']) {
options.headers['content-type'] = encoding;
}
switch(encoding){
case 'application/x-www-form-urlencoded':
options.body = serialize(options.form).replace(/%20/g, "+");
break;
case 'multipart/form-data':
var multi = multipart(options.form);
//options.headers['content-length'] = multi.length;
options.body = multi.body;
options.headers['content-type'] = multi.type;
break;
default : console.warn('unsupported encoding:'+encoding);
}
}
}
if (options.formData && (XHR._allowBodyInGET || !METHODS_WITHOUT_BODY[options.method])) {
var multipartBody = multipart(options.formData);
//options.headers['content-length'] = multipartBody.length;
options.body = multipartBody.body;
multipartBody.type && (options.headers['content-type'] = multipartBody.type);
}
//END FORM Hack
// If onResponse is boolean true, call back immediately when the response is known,
// not when the full request is complete.
options.onResponse = options.onResponse || noop
if(options.onResponse === true) {
options.onResponse = callback
options.callback = noop
}
// XXX Browsers do not like this.
//if(options.body)
// options.headers['content-length'] = options.body.length;
// HTTP basic authentication
if(!options.headers.authorization && options.auth)
options.headers.authorization = 'Basic ' + b64_enc(options.auth.username + ':' + options.auth.password);
// Query cookie jar if available
if ((typeof (options.jar && options.jar.getCookieString) === 'function')) {
options.jar.getCookieString(options.uri, function (_, cookies) {
if (cookies && cookies.length) {
options.cookiesFromJar = cookies;
}
run_xhr(XHR, originalRequest, options)
})
}
else {
return run_xhr(XHR, originalRequest, options)
}
}
var req_seq = 0
function run_xhr(XHR, originalRequest, options) {
var xhr = new XHR(options)
, timed_out = false
, is_cors = is_crossDomain(options.uri)
, supports_cors = ('withCredentials' in xhr)
req_seq += 1
xhr.seq_id = req_seq
xhr.id = req_seq + ': ' + options.method + ' ' + options.uri
xhr._id = xhr.id // I know I will type "_id" from habit all the time.
if(is_cors && !supports_cors) {
// This should never happen in our app
var cors_err = new Error('Browser does not support cross-origin request: ' + options.uri);
cors_err.code = CORS_ERROR_CODE;
cors_err.cors = 'unsupported';
options.callback(cors_err, xhr);
return xhr;
}
xhr.timeoutTimer = setTimeout(too_late, options.timeout)
function too_late() {
timed_out = true
var er = new Error('ETIMEDOUT')
er.code = 'ETIMEDOUT'
er.duration = options.timeout
request.log.error('Timeout', { 'id':xhr._id, 'milliseconds':options.timeout })
return options.callback(er, xhr)
}
// Some states can be skipped over, so remember what is still incomplete.
var did = {'response':false, 'loading':false, 'end':false, 'onStart': false}
xhr.onreadystatechange = on_state_change
xhr.open(options.method, options.uri, true) // asynchronous
if (is_cors) {
xhr.withCredentials = !! options.withCredentials
}
(options.encoding === null) && (xhr.responseType = "arraybuffer");
xhr.send(options.body)
return xhr
function on_state_change(event) {
if(timed_out)
return request.log.debug('Ignoring timed out state change', {'state':xhr.readyState, 'id':xhr.id})
request.log.debug('State change', {'state':xhr.readyState, 'id':xhr.id, 'timed_out':timed_out})
if(xhr.readyState === XHR.OPENED) {
request.log.debug('Request started', { 'id': xhr.id });
var cookies = [],
onInvalidHeader = function (key, error) {
error = new Error(`Header "${key}" contains invalid characters`);
// Do not process this request further.
did.response = true
did.loading = true
did.end = true
options.callback(error, xhr)
};
for (var key in options.headers) {
if (!options.headers.hasOwnProperty(key)) {
continue;
}
// Save all the cookies and add them at the end as a single combined Cookie header
if (String(key).toLowerCase() === 'cookie') {
cookies.push(options.headers[key]);
continue;
}
try {
if (Array.isArray(options.headers[key])) {
_.forEach(options.headers[key], function (eachValue) {
xhr.setRequestHeader(key, eachValue);
});
}
else {
xhr.setRequestHeader(key, options.headers[key]);
}
} catch (error) {
onInvalidHeader(key, error)
}
}
// Add `Cookie` header if cookies are present
if (cookies.length || options.cookiesFromJar) {
try {
var cookieString = cookies.join('; ') + (options.cookiesFromJar || '');
xhr.setRequestHeader('Cookie', cookieString);
// Also update the original request header for console logs
originalRequest.headers.upsert({
key: 'Cookie',
value: cookieString
});
} catch (error) {
onInvalidHeader('Cookie', error)
}
}
}
else if(xhr.readyState === XHR.HEADERS_RECEIVED)
on_response()
else if(xhr.readyState === XHR.LOADING) {
on_response()
on_loading()
}
else if(xhr.readyState === XHR.DONE) {
on_response()
on_loading()
on_end()
}
}
function on_response() {
if(did.response)
return
did.response = true
request.log.debug('Got response', {'id':xhr.id, 'status':xhr.status})
clearTimeout(xhr.timeoutTimer)
xhr.statusCode = xhr.status // Node request compatibility
// Construct postman-request compatible debug object
!xhr.request && (xhr.request = {});
xhr.request._debug = xhr._debugData || [{
request: {
method: options.method,
href: options.uri,
headers: originalRequest.headers.toJSON(),
httpVersion: '1.1'
},
response: {
statusCode: xhr.statusCode,
headers: parseHeadersString(xhr.getAllResponseHeaders()),
httpVersion: '1.1'
}
}];
if (xhr.statusCode === 0 && xhr._error) {
// Do not process this request further.
did.loading = true
did.end = true
return options.callback(xhr._error, xhr);
}
// Detect mixed content failure
if (xhr.statusCode === 0 && is_mixedContent(options.uri)) {
var mixedContent_err = new Error('Mixed Content request rejected: ' + options.uri);
mixedContent_err.code = MIXED_CONTENT_ERROR_CODE;
// Do not process this request further.
did.loading = true
did.end = true
return options.callback(mixedContent_err, xhr)
}
// Detect failed CORS requests.
if(is_cors && xhr.statusCode == 0) {
var cors_err = new Error('CORS request rejected: ' + options.uri);
cors_err.code = CORS_ERROR_CODE;
cors_err.cors = 'rejected';
// Do not process this request further.
did.loading = true
did.end = true
return options.callback(cors_err, xhr)
}
function done () {
// Trigger onStart before callback
did.onStart = true
options.onStart(xhr)
options.onResponse(null, xhr)
// Due to the weird dependency of `onStart` and `callback` order,
// we ensure that callback is not called before onStart.
// This happens only if we are waiting for cookies to be added into the cookie jar.
typeof did.callback === 'function' && did.callback();
}
// We are all done here if the cookie jar is not available
if (!(typeof (options.jar && options.jar.setCookie) === 'function')) {
return done();
}
// Add cookies into the jar
var addCookie = function (cookie, cb) {
options.jar.setCookie(cookie, options.uri, {ignoreError: true}, function () {
cb()
})
},
getSetCookieHeaders = function (headersString) {
var cookies = [];
(parseHeadersString(headersString) || []).filter(function (header) {
if (String(header && header.key).toLowerCase() === 'set-cookie') {
cookies.push(header.value);
}
});
return cookies;
},
cookies = getSetCookieHeaders(xhr.getAllResponseHeaders());
if (!(cookies && cookies.length)) {
return done();
}
forEachAsync(cookies, addCookie, function () {
done()
})
}
function on_loading() {
if(did.loading)
return
did.loading = true
request.log.debug('Response body loading', {'id':xhr.id})
// TODO: Maybe simulate "data" events by watching xhr.responseText
}
function on_end() {
if(did.end)
return
did.end = true
request.log.debug('Request done', {'id':xhr.id})
xhr.body = (options.encoding === null) ? xhr.response : xhr.responseText;
if(options.json) {
try {
xhr.body = (xhr.responseText) ? JSON.parse(xhr.responseText) : xhr.responseText;
}
catch (er) {
return options.callback(er, xhr)
}
}
// Call the final callback if `onStart` is already called
if (did.onStart) {
options.callback(null, xhr, xhr.body, xhr.request && xhr.request._debug)
}
// otherwise, save the callback which will be triggered later in the `done` function
else {
did.callback = options.callback.bind(this, null, xhr, xhr.body, xhr.request && xhr.request._debug)
}
}
} // request
request.withCredentials = false;
request.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT;
var shortcuts = [
'get',
'post',
'put',
'head',
'del',
'options',
'trace',
'copy',
'lock',
'mkcol',
'move',
'purge',
'propfind',
'proppatch',
'unlock',
'report',
'mkactivity',
'checkout',
'merge',
'm-search',
'notify',
'subscribe',
'unsubscribe',
'patch',
'search'
];
var shortcutsToMethods = {
'del': 'delete'
};
//
// defaults
//
request.defaults = function(options, requester) {
var def = function (method) {
var d = function (params, callback) {
if(typeof params === 'string')
params = {'uri': params};
else {
params = JSON.parse(JSON.stringify(params));
}
for (var i in options) {
if (params[i] === undefined) params[i] = options[i]
}
return method(params, callback)
}
return d
}
var de = def(request)
shortcuts.forEach(function (method) {
de[method] = def(request[method])
})
return de
}
//
// HTTP method shortcuts
//
shortcuts.forEach(function(shortcut) {
var method = shortcutsToMethods[shortcut] || shortcut;
method = method.toUpperCase();
var func = shortcut.toLowerCase();
request[func] = function(opts) {
if(typeof opts === 'string')
opts = {'method':method, 'uri':opts};
else {
opts = JSON.parse(JSON.stringify(opts));
opts.method = method;
}
var args = [opts].concat(Array.prototype.slice.apply(arguments, [1]));
return request.apply(this, args);
}
})
//
// CouchDB shortcut
//
request.couch = function(options, callback) {
if(typeof options === 'string')
options = {'uri':options}
// Just use the request API to do JSON.
options.json = true
if(options.body)
options.json = options.body
delete options.body
callback = callback || noop
var xhr = request(options, couch_handler)
return xhr
function couch_handler(er, resp, body) {
if(er)
return callback(er, resp, body)
if((resp.statusCode < 200 || resp.statusCode > 299) && body.error) {
// The body is a Couch JSON object indicating the error.
er = new Error('CouchDB error: ' + (body.error.reason || body.error.error))
for (var key in body)
er[key] = body[key]
return callback(er, resp, body);
}
return callback(er, resp, body);
}
}
//
// Utility
//
function noop() {}
function getLogger() {
var logger = {}
, levels = ['trace', 'debug', 'info', 'warn', 'error']
, level, i
for(i = 0; i < levels.length; i++) {
level = levels[i]
logger[level] = noop
if(typeof console !== 'undefined' && console && console[level])
logger[level] = formatted(console, level)
}
return logger
}
function formatted(obj, method) {
return formatted_logger
function formatted_logger(str, context) {
if(typeof context === 'object')
str += ' ' + JSON.stringify(context)
return obj[method].call(obj, str)
}
}
function window_location () {
// jQuery #8138, IE may throw an exception when accessing
// a field from window.location if document.domain has been set
var ajaxLocation
try { ajaxLocation = location.href }
catch (e) {
// Use the href attribute of an A element since IE will modify it given document.location
ajaxLocation = document.createElement( "a" );
ajaxLocation.href = "";
ajaxLocation = ajaxLocation.href;
}
return ajaxLocation
}
// Return whether a URL is a cross-domain request.
function is_crossDomain(url) {
var rurl = /^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/
, ajaxLocation = window_location()
, ajaxLocParts = rurl.exec(ajaxLocation.toLowerCase()) || []
, parts = rurl.exec(url.toLowerCase() )
var result = !!(
parts &&
( parts[1] != ajaxLocParts[1]
|| parts[2] != ajaxLocParts[2]
|| (parts[3] || (parts[1] === "http:" ? 80 : 443)) != (ajaxLocParts[3] || (ajaxLocParts[1] === "http:" ? 80 : 443))
)
)
//console.debug('is_crossDomain('+url+') -> ' + result)
return result
}
function is_mixedContent (url) {
var rurl = /^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/
, ajaxLocation = window_location()
, ajaxLocParts = rurl.exec(ajaxLocation.toLowerCase()) || []
, parts = rurl.exec(url.toLowerCase() )
return parts[1] != ajaxLocParts[1] && !IS_LOCALHOST[parts[2]]
}
// MIT License from http://phpjs.org/functions/base64_encode:358
function b64_enc (data) {
// Encodes string using MIME base64 algorithm
var b64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
var o1, o2, o3, h1, h2, h3, h4, bits, i = 0, ac = 0, enc="", tmp_arr = [];
if (!data) {
return data;
}
// assume utf8 data
// data = this.utf8_encode(data+'');
do { // pack three octets into four hexets
o1 = data.charCodeAt(i++);
o2 = data.charCodeAt(i++);
o3 = data.charCodeAt(i++);
bits = o1<<16 | o2<<8 | o3;
h1 = bits>>18 & 0x3f;
h2 = bits>>12 & 0x3f;
h3 = bits>>6 & 0x3f;
h4 = bits & 0x3f;
// use hexets to index into b64, and append result to encoded string
tmp_arr[ac++] = b64.charAt(h1) + b64.charAt(h2) + b64.charAt(h3) + b64.charAt(h4);
} while (i < data.length);
enc = tmp_arr.join('');
switch (data.length % 3) {
case 1:
enc = enc.slice(0, -2) + '==';
break;
case 2:
enc = enc.slice(0, -1) + '=';
break;
}
return enc;
}
// Check if the given header name is forbidden, i.e., cannot be modified programmatically.
// Refer: https://developer.mozilla.org/en-US/docs/Glossary/Forbidden_header_name
// @note The User-Agent header is no longer forbidden. However,
// Chrome will silently drop the header: https://bugs.chromium.org/p/chromium/issues/detail?id=571722
function isForbiddenHeader (headerName) {
headerName = String(headerName).toLowerCase();
return FORBIDDEN_HEADERS[headerName] ||
headerName.startsWith('proxy-') ||
headerName.startsWith('sec-');
}
// ensure that the .jar() function is available
request.jar = _.noop;
module.exports = request;

View File

@@ -0,0 +1,303 @@
/**
* @fileOverview
*
* This module consists of all request body transformer functions for the supported request body modes.
* Ideally, this should one day move to a function in the SDK, something like request.getNodeRequestOptions()
*
*
* _
* ( ) ,,,,,
* \\ . . ,
* \\ | - D ,
* (._) \__- | ,
* | |..
* \\|_ , ,---- _ |----.
* \__ ( ( / ) _
* | \/ \. ' _.| \ ( )
* | \ /( / /\_ \ //
* \ / ( / / ) //
* ( , / / , (_.)
* |......\ | \,
* / / ) \---
* /___/___^//
*/
var _ = require('lodash'),
CONTENT_TYPE_HEADER_KEY = 'Content-Type',
/**
* Map content-type to respective body language.
*
* @private
* @type {Object}
*/
CONTENT_TYPE_LANGUAGE = {
'html': 'text/html',
'text': 'text/plain',
'json': 'application/json',
'javascript': 'application/javascript',
'xml': 'application/xml'
},
STRING = 'string',
E = '',
oneNormalizedHeader,
// the following two are reducer functions. we keep it defined here to avoid redefinition upon each parse
urlEncodedBodyReducer,
formDataBodyReducer;
/**
* Find the enabled header with the given name.
*
* @todo Add this helper in Collection SDK.
*
* @private
* @param {HeaderList} headers
* @param {String} name
* @returns {Header|undefined}
*/
oneNormalizedHeader = function oneNormalizedHeader (headers, name) {
var i,
header;
// get all headers with `name`
headers = headers.reference[name.toLowerCase()];
if (Array.isArray(headers)) {
// traverse the headers list in reverse direction in order to find the last enabled
for (i = headers.length - 1; i >= 0; i--) {
header = headers[i];
if (header && !header.disabled) {
return header;
}
}
// bail out if no enabled header was found
return;
}
// return the single enabled header
if (headers && !headers.disabled) {
return headers;
}
};
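// illustrative: for a disabled `Content-Type: text/plain` header followed by an enabled
// `Content-Type: application/json` header, oneNormalizedHeader(headers, 'Content-Type')
// returns the enabled application/json header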
/**
* Reduces postman SDK url encoded form definition (flattened to array) into Node compatible body options
*
* @param {Object} form - url encoded form params accumulator
* @param {Object} param - url encoded form param
*
* @returns {Object}
*/
urlEncodedBodyReducer = function (form, param) {
if (!param || param.disabled) {
return form;
}
var key = param.key,
value = param.value;
// add the parameter to the form while accounting for duplicate values
if (!form.hasOwnProperty(key)) {
form[key] = value;
return form;
}
// at this point, we know that form has duplicate, so we need to accumulate it in an array
if (!Array.isArray(form[key])) {
form[key] = [form[key]];
}
form[key].push(value); // finally push the duplicate and return
return form;
};
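// illustrative: reducing [{key: 'a', value: '1'}, {key: 'a', value: '2'}, {key: 'b', value: '3'}]
// yields the Node form options {a: ['1', '2'], b: '3'}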
/**
* Reduces postman SDK multi-part form definition (flattened to array) into Node compatible body options
*
* @param {Array} data - multi-part form params accumulator
* @param {Object} param - multi-part form param
*
* @returns {Array}
*/
formDataBodyReducer = function (data, param) {
if (!param || param.disabled) {
return data;
}
var formParam = {
key: param.key,
value: param.value
},
options; // left undefined by default and set to an object only where needed; saves checking the object's key length
// make sure that value is either string or read stream otherwise it'll cause error in postman-request
if (param.type !== 'file' && typeof formParam.value !== STRING) {
try {
formParam.value = JSON.stringify(formParam.value);
}
catch (err) {
formParam.value = E;
}
}
// make sure `filename` param is sent for every file without `value`
// so that `filename=""` is added to content-disposition header in form data
if (param.type === 'file' && !formParam.value && typeof param.fileName !== 'string') {
param.fileName = E;
formParam.value = E; // make sure value is not null/undefined ever
}
// if the param has a truthy content type, set it on the options. we are assuming that a
// blank string will not be considered an accepted content type.
if (param.contentType && typeof param.contentType === STRING) {
(options || (options = {})).contentType = param.contentType;
}
// additionally parse the file name and length if sent
// @note: Add support for fileName & fileLength option in Schema & SDK.
// The filepath property overrides filename and may contain a relative path.
if (typeof param.fileName === STRING) { (options || (options = {})).filename = param.fileName; }
if (typeof param.fileLength === 'number') { (options || (options = {})).knownLength = param.fileLength; }
// if options were set, add them to formParam
options && (formParam.options = options);
data.push(formParam);
return data;
};
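// illustrative: a text param {key: 'meta', value: {id: 1}, contentType: 'application/json'} reduces to
// {key: 'meta', value: '{"id":1}', options: {contentType: 'application/json'}}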
/**
* This module exposes functions that are named corresponding to Postman collection body modes. It accepts the body
* definition, usually like `request.body.raw` where mode is `raw` and returns its equivalent structure that needs to be
* sent to node request module
*/
module.exports = {
/**
* @param {Object} content - request body content
* @param {Request} [request] - request object
* @returns {Object}
*/
raw: function (content, request) {
var contentLanguage = _.get(request, 'body.options.raw.language', 'text');
// Add `Content-Type` header from body options if not set already
if (request && !oneNormalizedHeader(request.headers, CONTENT_TYPE_HEADER_KEY)) {
request.headers.add({
key: CONTENT_TYPE_HEADER_KEY,
value: CONTENT_TYPE_LANGUAGE[contentLanguage] || CONTENT_TYPE_LANGUAGE.text,
system: true
});
}
if (typeof content !== STRING) {
content = JSON.stringify(content);
}
return {
body: content
};
},
/**
* @param {Object} content - request body content
* @returns {Object}
*/
urlencoded: function (content) {
if (content && _.isFunction(content.all)) { content = content.all(); } // flatten the body content
return {
form: _.reduce(content, urlEncodedBodyReducer, {})
};
},
/**
* @param {Object} content - request body content
* @returns {Object}
*/
formdata: function (content) {
if (content && _.isFunction(content.all)) { content = content.all(); } // flatten the body content
return {
formData: _.reduce(content, formDataBodyReducer, [])
};
},
/**
* @param {Object} content - request body content
* @returns {Object}
*/
file: function (content) {
return {
body: content && content.content
};
},
/**
* @param {Object} content - request body content
* @param {Request} [request] - Request object
* @returns {Object}
*/
graphql: function (content, request) {
var body;
// implicitly add `Content-Type` header if not set already
if (request && !oneNormalizedHeader(request.headers, CONTENT_TYPE_HEADER_KEY)) {
request.headers.add({
key: CONTENT_TYPE_HEADER_KEY,
value: CONTENT_TYPE_LANGUAGE.json,
system: true
});
}
// if `variables` is an object, just stringify the entire content
if (content && typeof content.variables !== STRING) {
// if any property of graphql is undefined, it will not get stringified
// as a result, if no content object's properties are present then the
// result will be a blank object being sent.
// note that this behavior has to be imitated later when we are
// receiving variables as string
return {
body: JSON.stringify({
query: content.query,
operationName: content.operationName,
variables: content.variables
})
};
}
// otherwise, traverse the graphql properties and generate the
// stringified content. This avoids parsing the variables.
body = [];
if (content.hasOwnProperty('query') && (typeof content.query === STRING)) {
body.push('"query":' + JSON.stringify(content.query));
}
if (content.hasOwnProperty('operationName') && (typeof content.operationName === STRING)) {
body.push('"operationName":' + JSON.stringify(content.operationName));
}
if (content.hasOwnProperty('variables') && (typeof content.variables === STRING) &&
// even though users are free to send even malformed json string, the case of empty string has to be
// specially disallowed since in most default cases if a text editor is used to accept this data, it will
// send a blank string for an empty text-editor state and that would be an error flow. That implies majority
// default use case will become error flow and handling for the same has to be also coded in every other
// place where runtime is used.
(content.variables !== E)) {
body.push('"variables":' + content.variables); // already a stringified JSON
}
return {
body: '{' + body.join(',') + '}' // note that [] body = {} ¯\_(ツ)_/¯
};
}
};

769
node_modules/postman-runtime/lib/requester/core.js generated vendored Normal file
View File

@@ -0,0 +1,769 @@
var dns = require('dns'),
constants = require('constants'),
_ = require('lodash'),
uuid = require('uuid/v4'),
sdk = require('postman-collection'),
urlEncoder = require('postman-url-encoder'),
Socket = require('net').Socket,
requestBodyBuilders = require('./core-body-builder'),
version = require('../../package.json').version,
LOCAL_IPV6 = '::1',
LOCAL_IPV4 = '127.0.0.1',
LOCALHOST = 'localhost',
SOCKET_TIMEOUT = 500,
COLON = ':',
STRING = 'string',
HOSTS_TYPE = {
HOST_IP_MAP: 'hostIpMap'
},
HTTPS = 'https',
HTTPS_DEFAULT_PORT = 443,
HTTP_DEFAULT_PORT = 80,
S_CONNECT = 'connect',
S_ERROR = 'error',
S_TIMEOUT = 'timeout',
SSL_OP_NO = 'SSL_OP_NO_',
ERROR_ADDRESS_RESOLVE = 'NETERR: getaddrinfo ENOTFOUND ',
/**
* List of request methods without body.
*
* @private
* @type {Object}
*
* @note hash is used to reduce the lookup cost
* these methods are picked from the app, which don't support body.
* @todo move this list to SDK for parity.
*/
METHODS_WITHOUT_BODY = {
get: true,
copy: true,
head: true,
purge: true,
unlock: true
},
/**
* List of request options with their corresponding protocol profile behavior property name;
*
* @private
* @type {Object}
*/
PPB_OPTS = {
// enable or disable certificate verification
strictSSL: 'strictSSL',
// maximum number of redirects to follow (default: 10)
maxRedirects: 'maxRedirects',
// controls redirect behavior
// keeping the same convention as Newman
followRedirect: 'followRedirects',
followAllRedirects: 'followRedirects',
// retain `authorization` header when a redirect happens to a different hostname
followAuthorizationHeader: 'followAuthorizationHeader',
// redirect with the original HTTP method (default: redirects with GET)
followOriginalHttpMethod: 'followOriginalHttpMethod',
// removes the `referer` header when a redirect happens (default: false)
// @note `referer` header set in the initial request will be preserved during redirect chain
removeRefererHeader: 'removeRefererHeaderOnRedirect'
},
/**
* System headers which can be removed before sending the request if set
* in disabledSystemHeaders protocol profile behavior.
*
*
* @private
* @type {Array}
*/
ALLOWED_BLACKLIST_HEADERS = ['content-type', 'content-length', 'accept-encoding', 'connection'],
/**
* Find the enabled header with the given name.
*
* @todo Add this helper in Collection SDK.
*
* @private
* @param {HeaderList} headers
* @param {String} name
* @returns {Header|undefined}
*/
oneNormalizedHeader = function oneNormalizedHeader (headers, name) {
var i,
header;
// get all headers with `name`
headers = headers.reference[name.toLowerCase()];
if (Array.isArray(headers)) {
// traverse the headers list in reverse direction in order to find the last enabled
for (i = headers.length - 1; i >= 0; i--) {
header = headers[i];
if (header && !header.disabled) {
return header;
}
}
// bail out if no enabled header was found
return;
}
// return the single enabled header
if (headers && !headers.disabled) {
return headers;
}
},
/**
* Add static system headers if they are not disabled using `disabledSystemHeaders`
* protocol profile behavior.
* Add the system headers provided as requester configuration.
*
* @note Don't traverse the user-provided `disabledSystemHeaders` object; only the runtime's own list of
* system headers is checked against it, which also avoids security issues.
*
* @private
* @param {Request} request
* @param {Object} options
* @param {Object} disabledHeaders
* @param {Object} systemHeaders
*/
addSystemHeaders = function (request, options, disabledHeaders, systemHeaders) {
var key,
headers = request.headers;
[
{key: 'User-Agent', value: `PostmanRuntime/${version}`},
{key: 'Accept', value: '*/*'},
{key: 'Cache-Control', value: 'no-cache'},
{key: 'Postman-Token', value: uuid()},
{key: 'Host', value: options.url && options.url.host},
{key: 'Accept-Encoding', value: 'gzip, deflate, br'},
{key: 'Connection', value: 'keep-alive'}
].forEach(function (header) {
key = header.key.toLowerCase();
// add system header only if,
// 1. there's no user added header
// 2. not disabled using disabledSystemHeaders
!disabledHeaders[key] && !oneNormalizedHeader(headers, key) &&
headers.add({
key: header.key,
value: header.value,
system: true
});
});
for (key in systemHeaders) {
if (systemHeaders.hasOwnProperty(key)) {
// upsert instead of add to replace user-defined headers also
headers.upsert({
key: key,
value: systemHeaders[key],
system: true
});
}
}
},
/**
* Helper function to extract top level domain for the given hostname
*
* @private
*
* @param {String} hostname
* @returns {String}
*/
getTLD = function (hostname) {
if (!hostname) {
return '';
}
hostname = String(hostname);
return hostname.substring(hostname.lastIndexOf('.') + 1);
},
/**
* Abstracts out the logic for domain resolution
*
* @param options
* @param hostLookup
* @param hostLookup.type
* @param hostLookup.hostIpMap
* @param hostname
* @param callback
*/
_lookup = function (options, hostLookup, hostname, callback) {
var hostIpMap,
resolvedFamily = 4,
resolvedAddr;
// first we try to resolve the hostname using hosts file configuration
hostLookup && hostLookup.type === HOSTS_TYPE.HOST_IP_MAP &&
(hostIpMap = hostLookup[HOSTS_TYPE.HOST_IP_MAP]) && (resolvedAddr = hostIpMap[hostname]);
if (resolvedAddr) {
// since we only get a string for the resolved ip address, we manually find its family (4 or 6)
// there will be at least one `:` in an IPv6 address (https://en.wikipedia.org/wiki/IPv6_address#Representation)
resolvedAddr.indexOf(COLON) !== -1 && (resolvedFamily = 6); // eslint-disable-line lodash/prefer-includes
// returning error synchronously causes uncaught error because listeners are not attached to error events
// on socket yet
return setImmediate(function () {
callback(null, resolvedAddr, resolvedFamily);
});
}
// no hosts file configuration provided or no match found. Falling back to normal dns lookup
return dns.lookup(hostname, options, callback);
},
/**
* Tries to make a TCP connection to the given host and port. If successful, the connection is immediately
* destroyed.
*
* @param host
* @param port
* @param callback
*/
connect = function (host, port, callback) {
var socket = new Socket(),
called,
done = function (type) {
if (!called) {
callback(type === S_CONNECT ? null : true); // eslint-disable-line callback-return
called = true;
this.destroy();
}
};
socket.setTimeout(SOCKET_TIMEOUT, done.bind(socket, S_TIMEOUT));
socket.once('connect', done.bind(socket, S_CONNECT));
socket.once('error', done.bind(socket, S_ERROR));
socket.connect(port, host);
socket = null;
},
/**
* Override DNS lookups in Node, to handle localhost as a special case.
* Chrome tries connecting to IPv6 by default, so we try the same thing.
*
* @param lookupOptions
* @param lookupOptions.port
* @param lookupOptions.network
* @param lookupOptions.network.restrictedAddresses
* @param lookupOptions.network.hostLookup
* @param lookupOptions.network.hostLookup.type
* @param lookupOptions.network.hostLookup.hostIpMap
* @param hostname
* @param options
* @param callback
*/
lookup = function (lookupOptions, hostname, options, callback) {
var self = this,
lowercaseHost = hostname && hostname.toLowerCase(),
networkOpts = lookupOptions.network || {},
hostLookup = networkOpts.hostLookup;
// do dns.lookup if hostname is not one of:
// - localhost
// - *.localhost
if (getTLD(lowercaseHost) !== LOCALHOST) {
return _lookup(options, hostLookup, lowercaseHost, function (err, addr, family) {
if (err) { return callback(err); }
return callback(self.isAddressRestricted(addr, networkOpts) ?
new Error(ERROR_ADDRESS_RESOLVE + hostname) : null, addr, family);
});
}
// Try checking if we can connect to IPv6 localhost ('::1')
connect(LOCAL_IPV6, lookupOptions.port, function (err) {
// use IPv4 if we cannot connect to IPv6
if (err) { return callback(null, LOCAL_IPV4, 4); }
callback(null, LOCAL_IPV6, 6);
});
},
/**
* Helper function to return postman-request compatible URL parser which
* respects the `disableUrlEncoding` protocol profile behavior.
*
* @private
* @param {Boolean} disableUrlEncoding
* @returns {Object}
*/
urlParser = function (disableUrlEncoding) {
return {
parse: function (urlToParse) {
return urlEncoder.toNodeUrl(urlToParse, disableUrlEncoding);
},
resolve: function (base, relative) {
if (typeof base === STRING) {
// @note we parse base URL here to respect `disableUrlEncoding`
// option even though resolveNodeUrl() accepts it as a string
base = urlEncoder.toNodeUrl(base, disableUrlEncoding);
}
return urlEncoder.resolveNodeUrl(base, relative);
}
};
},
/**
* Resolves given property with protocol profile behavior.
* Returns protocolProfileBehavior value if the given property is present.
* Else, returns value defined in default options.
*
* @param {String} property - Property name to look for
* @param {Object} defaultOpts - Default request options
* @param {Object} protocolProfileBehavior - Protocol profile behaviors
* @returns {*} - Resolved request option value
*/
resolveWithProtocolProfileBehavior = function (property, defaultOpts, protocolProfileBehavior) {
// bail out if property or defaultOpts is not defined
if (!(property && defaultOpts)) { return; }
if (protocolProfileBehavior && protocolProfileBehavior.hasOwnProperty(property)) {
return protocolProfileBehavior[property];
}
return defaultOpts[property];
};
module.exports = {
/**
* Creates a node request compatible options object from a request.
*
* @param request
* @param defaultOpts
* @param defaultOpts.agents
* @param defaultOpts.network
* @param defaultOpts.keepAlive
* @param defaultOpts.timeout
* @param defaultOpts.strictSSL
* @param defaultOpts.cookieJar The cookie jar to use (if any).
* @param defaultOpts.followRedirects
* @param defaultOpts.followOriginalHttpMethod
* @param defaultOpts.maxRedirects
* @param defaultOpts.maxResponseSize
* @param defaultOpts.implicitCacheControl
* @param defaultOpts.implicitTraceHeader
* @param defaultOpts.removeRefererHeaderOnRedirect
* @param defaultOpts.timings
* @param protocolProfileBehavior
* @returns {{}}
*/
getRequestOptions: function (request, defaultOpts, protocolProfileBehavior) {
!defaultOpts && (defaultOpts = {});
!protocolProfileBehavior && (protocolProfileBehavior = {});
var options = {},
networkOptions = defaultOpts.network || {},
self = this,
bodyParams,
useWhatWGUrlParser = defaultOpts.useWhatWGUrlParser,
disableUrlEncoding = protocolProfileBehavior.disableUrlEncoding,
disabledSystemHeaders = protocolProfileBehavior.disabledSystemHeaders || {},
// the system headers provided in requester configuration
systemHeaders = defaultOpts.systemHeaders || {},
url = useWhatWGUrlParser ? urlEncoder.toNodeUrl(request.url, disableUrlEncoding) :
urlEncoder.toLegacyNodeUrl(request.url.toString(true)),
isSSL = _.startsWith(url.protocol, HTTPS),
isTunnelingProxy = request.proxy && (request.proxy.tunnel || isSSL),
header,
reqOption,
portNumber,
behaviorName,
port = url && url.port,
hostname = url && url.hostname && url.hostname.toLowerCase(),
proxyHostname = request.proxy && request.proxy.host;
// resolve all *.localhost to localhost itself
// RFC: 6761 section 6.3 (https://tools.ietf.org/html/rfc6761#section-6.3)
if (getTLD(hostname) === LOCALHOST) {
// @note setting hostname to localhost ensures that we override lookup function
hostname = LOCALHOST;
}
if (getTLD(proxyHostname) === LOCALHOST) {
proxyHostname = LOCALHOST;
}
options.url = url;
options.method = request.method;
options.timeout = defaultOpts.timeout;
options.gzip = true;
options.brotli = true;
options.time = defaultOpts.timings;
options.verbose = defaultOpts.verbose;
options.agents = defaultOpts.agents;
options.extraCA = defaultOpts.extendedRootCA;
options.ignoreProxyEnvironmentVariables = defaultOpts.ignoreProxyEnvironmentVariables;
// Disable encoding of URL in postman-request in order to use pre-encoded URL object returned from
// toNodeUrl() function of postman-url-encoder
options.disableUrlEncoding = true;
// Ensures that "request" creates URL encoded formdata or querystring as
// foo=bar&foo=baz instead of foo[0]=bar&foo[1]=baz
options.useQuerystring = true;
// set encoding to null so that the response is a stream
options.encoding = null;
// Re-encode status message using `utf8` character encoding in postman-request.
// This is done to correctly represent status messages with characters that lie outside
// the range of `latin1` encoding (which is the default encoding in which status message is returned)
options.statusMessageEncoding = 'utf8';
// eslint-disable-next-line guard-for-in
for (reqOption in PPB_OPTS) {
behaviorName = PPB_OPTS[reqOption];
options[reqOption] = resolveWithProtocolProfileBehavior(behaviorName, defaultOpts, protocolProfileBehavior);
}
// set cookie jar if not disabled
if (!protocolProfileBehavior.disableCookies) {
options.jar = defaultOpts.cookieJar || true;
}
// use the server's cipher suite order instead of the client's during negotiation
if (protocolProfileBehavior.tlsPreferServerCiphers) {
options.honorCipherOrder = true;
}
// the SSL and TLS protocol versions to be disabled during negotiation
if (Array.isArray(protocolProfileBehavior.tlsDisabledProtocols)) {
protocolProfileBehavior.tlsDisabledProtocols.forEach(function (protocol) {
// since secureOptions doesn't support TLSv1_3 before Node 14
// @todo remove the if condition when we drop support for Node 12
if (protocol === 'TLSv1_3' && !constants[SSL_OP_NO + protocol]) {
options.maxVersion = 'TLSv1.2';
}
else {
options.secureOptions |= constants[SSL_OP_NO + protocol];
}
});
}
// order of cipher suites that the SSL server profile uses to establish a secure connection
if (Array.isArray(protocolProfileBehavior.tlsCipherSelection)) {
options.ciphers = protocolProfileBehavior.tlsCipherSelection.join(':');
}
if (typeof defaultOpts.maxResponseSize === 'number') {
options.maxResponseSize = defaultOpts.maxResponseSize;
}
// Request body may return different options depending on the type of the body.
// @note getRequestBody may add system headers based on intent
bodyParams = self.getRequestBody(request, protocolProfileBehavior);
// Disable 'Cache-Control' and 'Postman-Token' based on global options
// @note this also makes 'cache-control' and 'postman-token' part of `disabledSystemHeaders`
!defaultOpts.implicitCacheControl && (disabledSystemHeaders['cache-control'] = true);
!defaultOpts.implicitTraceHeader && (disabledSystemHeaders['postman-token'] = true);
// Add additional system headers to the request instance
addSystemHeaders(request, options, disabledSystemHeaders, systemHeaders);
// Don't add `Host` header if disabled using disabledSystemHeaders
// @note This can't be part of the `blacklistHeaders` option because `setHost` is
// a Node.js http.request option that specifies whether or not the Host header
// is added automatically.
if (disabledSystemHeaders.host) {
header = oneNormalizedHeader(request.headers, 'host');
// only possible with AWS auth
header && header.system && (header.disabled = true);
// set `setHost` to false if there's no host header defined by the user
// or the present Host header was added by the system.
(!header || header.system) && (options.setHost = false);
}
// Set `allowContentTypeOverride` if the Content-Type header is disabled;
// this allows the content-type to be overridden (if invalid) for form-data
// and urlencoded request bodies.
if (disabledSystemHeaders['content-type']) {
options.allowContentTypeOverride = true;
}
options.blacklistHeaders = [];
ALLOWED_BLACKLIST_HEADERS.forEach(function (headerKey) {
if (!disabledSystemHeaders[headerKey]) { return; } // not disabled
header = oneNormalizedHeader(request.headers, headerKey);
// content-type added by body helper
header && header.system && (header.disabled = true);
// blacklist only if it's missing or part of system added headers
(!header || header.system) && options.blacklistHeaders.push(headerKey);
// @note for non-GET requests, if no 'content-length' is set, the request body
// is assumed to be chunked and a 'transfer-encoding' header is added.
// Here, we ensure that blacklisting 'content-length' also blacklists the
// 'transfer-encoding' header.
if (headerKey === 'content-length') {
header = oneNormalizedHeader(request.headers, 'transfer-encoding');
(!header || header.system) && options.blacklistHeaders.push('transfer-encoding');
}
});
// Finally, get headers object
options.headers = request.getHeaders({enabled: true, sanitizeKeys: true});
// override URL parser to WhatWG URL parser
if (useWhatWGUrlParser) {
options.urlParser = urlParser(disableUrlEncoding);
}
// override DNS lookup
if (networkOptions.restrictedAddresses || hostname === LOCALHOST ||
(!isTunnelingProxy && proxyHostname === LOCALHOST) || networkOptions.hostLookup) {
// Use proxy port for localhost resolution in case of non-tunneling proxy
// because the request will be sent to proxy server by postman-request
if (request.proxy && !isTunnelingProxy) {
portNumber = Number(request.proxy.port);
}
// Otherwise, use request's port
else {
portNumber = Number(port) || (isSSL ? HTTPS_DEFAULT_PORT : HTTP_DEFAULT_PORT);
}
_.isFinite(portNumber) && (options.lookup = lookup.bind(this, {
port: portNumber,
network: networkOptions
}));
}
_.assign(options, bodyParams, {
// @note these common agent options can be overridden by specifying
// custom http/https agents using requester option `agents`
agentOptions: {
keepAlive: defaultOpts.keepAlive
}
});
return options;
},
/**
* Processes a request body and puts it in a format compatible with
* the "request" library.
*
* @todo: Move this to the SDK.
* @param request - Request object
* @param protocolProfileBehavior - Protocol profile behaviors
*
* @returns {Object}
*/
getRequestBody: function (request, protocolProfileBehavior) {
if (!(request && request.body)) {
return;
}
var i,
property,
requestBody = request.body,
requestBodyType = requestBody.mode,
requestMethod = (typeof request.method === STRING) ? request.method.toLowerCase() : undefined,
bodyIsEmpty = requestBody.isEmpty(),
bodyIsDisabled = requestBody.disabled,
bodyContent = requestBody[requestBodyType],
// flag to decide body pruning for METHODS_WITHOUT_BODY
// @note this will be `true` even if protocolProfileBehavior is undefined
pruneBody = protocolProfileBehavior ? !protocolProfileBehavior.disableBodyPruning : true;
// early bailout for empty or disabled body (this area has some legacy shenanigans)
if (bodyIsEmpty || bodyIsDisabled) {
return;
}
// body is empty if all the params in urlencoded and formdata body are disabled
// @todo update Collection SDK isEmpty method to account for this
if (sdk.PropertyList.isPropertyList(bodyContent)) {
bodyIsEmpty = true;
for (i = bodyContent.members.length - 1; i >= 0; i--) {
property = bodyContent.members[i];
// bail out if a single enabled property is present
if (property && !property.disabled) {
bodyIsEmpty = false;
break;
}
}
// bail out if body is empty
if (bodyIsEmpty) {
return;
}
}
// bail out if request method doesn't support body and pruneBody is true.
if (METHODS_WITHOUT_BODY[requestMethod] && pruneBody) {
return;
}
// even if the body is not empty, we do not know how to parse it if the body type is unknown
//
// @note to support additional body types beyond formdata, urlencoded, etc., add them to
// the builder module
if (!requestBodyBuilders.hasOwnProperty(requestBodyType)) {
return;
}
return requestBodyBuilders[requestBodyType](bodyContent, request);
},
/**
* Returns a JSON representation compatible with Node's request library; it also contains
* information about the actual request that was sent.
*
* @param rawResponse Can be an XHR response or a Node request compatible response.
* @param requestOptions Options that were used to send the request.
* @param responseBody Body as a string.
*/
jsonifyResponse: function (rawResponse, requestOptions, responseBody) {
if (!rawResponse) {
return;
}
var responseJSON;
if (rawResponse.toJSON) {
responseJSON = rawResponse.toJSON();
responseJSON.request && _.assign(responseJSON.request, {
data: requestOptions.form || requestOptions.formData || requestOptions.body || {},
uri: { // @todo remove this
href: requestOptions.url && requestOptions.url.href || requestOptions.url
},
url: requestOptions.url && requestOptions.url.href || requestOptions.url
});
rawResponse.rawHeaders &&
(responseJSON.headers = this.arrayPairsToObject(rawResponse.rawHeaders) || responseJSON.headers);
return responseJSON;
}
responseBody = responseBody || '';
// @todo drop support or isolate XHR requester in v8
// XHR :/
return {
statusCode: rawResponse.status,
body: responseBody,
headers: _.transform(sdk.Header.parse(rawResponse.getAllResponseHeaders()), function (acc, header) {
if (acc[header.key]) {
!Array.isArray(acc[header.key]) && (acc[header.key] = [acc[header.key]]);
acc[header.key].push(header.value);
}
else {
acc[header.key] = header.value;
}
}, {}),
request: {
method: requestOptions.method || 'GET',
headers: requestOptions.headers,
uri: { // @todo remove this
href: requestOptions.url && requestOptions.url.href || requestOptions.url
},
url: requestOptions.url && requestOptions.url.href || requestOptions.url,
data: requestOptions.form || requestOptions.formData || requestOptions.body || {}
}
};
},
/**
* ArrayBuffer to String
*
* @param {ArrayBuffer} buffer
* @returns {String}
*/
arrayBufferToString: function (buffer) {
var str = '',
uArrayVal = new Uint8Array(buffer),
i,
ii;
for (i = 0, ii = uArrayVal.length; i < ii; i++) {
str += String.fromCharCode(uArrayVal[i]);
}
return str;
},
/**
* Converts an array of sequential pairs to an object.
*
* @param arr
* @returns {{}}
*
* @example
* ['a', 'b', 'c', 'd'] ====> {a: 'b', c: 'd' }
*/
arrayPairsToObject: function (arr) {
if (!_.isArray(arr)) {
return;
}
var obj = {},
key,
val,
i,
ii;
for (i = 0, ii = arr.length; i < ii; i += 2) {
key = arr[i];
val = arr[i + 1];
if (_.has(obj, key)) {
!_.isArray(obj[key]) && (obj[key] = [obj[key]]);
obj[key].push(val);
}
else {
obj[key] = val;
}
}
return obj;
},
/**
* Checks if a given host or IP has been restricted in the options.
*
* @param {String} host
* @param {Object} networkOptions
* @param {Array<String>} networkOptions.restrictedAddresses
*
* @returns {Boolean}
*/
isAddressRestricted: function (host, networkOptions) {
return networkOptions.restrictedAddresses &&
networkOptions.restrictedAddresses[(host && host.toLowerCase())];
}
};
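
// A minimal usage sketch for `getRequestOptions` above; the URL and the disabled
// header are illustrative values and `postman-collection` is assumed to be
// resolvable from this module. Runs only when this file is executed directly.
if (require.main === module) {
    var exampleRequest = new (require('postman-collection').Request)({
            url: 'https://example.com/get',
            method: 'GET'
        }),
        exampleOptions = module.exports.getRequestOptions(exampleRequest, {timings: true}, {
            // drop the implicit Accept-Encoding system header
            disabledSystemHeaders: {'accept-encoding': true}
        });

    console.log(exampleOptions.method, exampleOptions.url.href, Object.keys(exampleOptions.headers));
}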

334
node_modules/postman-runtime/lib/requester/dry-run.js generated vendored Normal file
View File

@@ -0,0 +1,334 @@
/* istanbul ignore file */
// @todo
// 1. Return with annotations like (overridden headers, auth headers etc.)
// 2. Utilize requester (core.js) methods for dryRun
// 3. Add tests
const _ = require('lodash'),
async = require('async'),
mime = require('mime-types'),
urlEncoder = require('postman-url-encoder'),
Request = require('postman-collection').Request,
authorizeRequest = require('../authorizer').authorizeRequest,
authHandlers = require('../authorizer').AuthLoader.handlers,
version = require('../../package.json').version,
CALCULATED_AT_RUNTIME = '<calculated when request is sent>',
COOKIE = 'Cookie',
FUNCTION = 'function',
CONTENT_TYPE = 'Content-Type',
DEFAULT_MIME_TYPE = 'application/octet-stream',
CONTENT_TYPE_URLENCODED = 'application/x-www-form-urlencoded',
CONTENT_TYPE_FORMDATA = 'multipart/form-data; boundary=' + CALCULATED_AT_RUNTIME,
CONTENT_TYPE_LANGUAGE = {
'html': 'text/html',
'text': 'text/plain',
'json': 'application/json',
'javascript': 'application/javascript',
'xml': 'application/xml'
},
BODY_MODE = {
raw: 'raw',
file: 'file',
graphql: 'graphql',
formdata: 'formdata',
urlencoded: 'urlencoded'
};
/**
* Check if request body is empty and also handles disabled params for urlencoded
* and formdata bodies.
*
* @todo Update Collection SDK isEmpty method to account for this.
*
* @private
* @param {RequestBody} body
* @returns {Boolean}
*/
function bodyIsEmpty (body) {
if (!body || body.disabled || body.isEmpty()) {
return true;
}
var i,
param,
mode = body.mode;
if (!(mode === BODY_MODE.formdata || mode === BODY_MODE.urlencoded)) {
return false;
}
for (i = body[mode].members.length - 1; i >= 0; i--) {
param = body[mode].members[i];
// bail out if a single enabled param is present
if (param && !param.disabled) {
return false;
}
}
return true;
}
/**
* Add new System header.
*
* @param {object} headers
* @param {String} key
* @param {String} value
*/
function addSystemHeader (headers, key, value) {
headers.add({
key: key,
value: value,
system: true
});
}
/**
* Authorize the given request.
*
* @private
* @param {Request} request
* @param {Function} callback
*/
function setAuthorization (request, callback) {
authorizeRequest(request, function (err, clonedRequest) {
// @note authorizeRequest returns a cloned request.
!clonedRequest && (clonedRequest = new Request(request.toJSON()));
if (err) {
return callback(null, clonedRequest);
}
var auth = request.auth,
authType = auth && auth.type,
manifest = _.get(authHandlers, [authType, 'manifest']),
headers = _.get(clonedRequest, 'headers.reference') || {},
queryParams = _.get(clonedRequest, 'url.query.reference') || {},
bodyParams = _.get(clonedRequest, 'body.urlencoded.reference') || {},
propertyList,
propertyKey,
property;
if (authType === 'apikey' && (auth = auth.apikey)) {
propertyKey = String(auth.get('key')).toLowerCase();
propertyList = auth.get('in') === 'query' ? queryParams : headers;
if ((property = propertyList[propertyKey])) {
Array.isArray(property) && (property = property[property.length - 1]);
property.auth = true;
}
return callback(null, clonedRequest);
}
if (!(manifest && manifest.updates)) {
return callback(null, clonedRequest);
}
manifest.updates.forEach(function (update) {
propertyKey = update.property;
switch (update.type) {
case 'header':
propertyKey = propertyKey.toLowerCase();
propertyList = headers;
break;
case 'url.param':
propertyList = queryParams;
break;
case 'body.urlencoded':
propertyList = bodyParams;
break;
default: return;
}
if ((property = propertyList[propertyKey])) {
Array.isArray(property) && (property = property[property.length - 1]);
property.auth = true;
}
});
callback(null, clonedRequest);
});
}
/**
* Adds Content-Type header based on selected request body.
*
* @private
* @param {Request} request
* @param {Function} callback
*/
function setContentType (request, callback) {
// bail out if body is empty
if (bodyIsEmpty(request.body)) {
return callback(null, request);
}
var headers = request.headers,
contentLanguage;
switch (request.body.mode) {
case BODY_MODE.raw:
contentLanguage = _.get(request, 'body.options.raw.language', 'text');
addSystemHeader(headers, CONTENT_TYPE, CONTENT_TYPE_LANGUAGE[contentLanguage] ||
CONTENT_TYPE_LANGUAGE.text);
break;
case BODY_MODE.urlencoded:
addSystemHeader(headers, CONTENT_TYPE, CONTENT_TYPE_URLENCODED);
break;
case BODY_MODE.formdata:
addSystemHeader(headers, CONTENT_TYPE, CONTENT_TYPE_FORMDATA);
break;
case BODY_MODE.graphql:
addSystemHeader(headers, CONTENT_TYPE, CONTENT_TYPE_LANGUAGE.json);
break;
case BODY_MODE.file:
addSystemHeader(headers, CONTENT_TYPE,
mime.lookup(request.body.file && request.body.file.src) || DEFAULT_MIME_TYPE);
break;
default: break;
}
addSystemHeader(headers, 'Content-Length', CALCULATED_AT_RUNTIME);
callback(null, request);
}
/**
* Adds Cookie header for the given request url.
*
* @private
* @param {Request} request
* @param {Object} cookieJar
* @param {Function} callback
*/
function setCookie (request, cookieJar, callback) {
// bail out if not a valid instance of CookieJar
if (!(cookieJar && cookieJar.getCookieString)) {
return callback(null, request);
}
// @note don't pass request.url instance to force re-parsing of the URL
cookieJar.getCookieString(urlEncoder.toNodeUrl(request.url.toString()), function (err, cookies) {
if (err) {
return callback(null, request);
}
if (cookies && cookies.length) {
addSystemHeader(request.headers, COOKIE, cookies);
}
callback(null, request);
});
}
/**
* A helper method to dry run the given request instance.
* It returns the cloned request instance with the system added properties.
*
* @param {Request} request
* @param {Object} options
* @param {Object} options.cookieJar
* @param {Object} options.protocolProfileBehavior
* @param {Function} done
*/
function dryRun (request, options, done) {
if (!done && typeof options === FUNCTION) {
done = options;
options = {};
}
if (!Request.isRequest(request)) {
return done(new Error('Invalid Request instance'));
}
!options && (options = {});
var cookieJar = options.cookieJar,
implicitCacheControl = options.implicitCacheControl,
implicitTraceHeader = options.implicitTraceHeader,
disabledSystemHeaders = _.get(options.protocolProfileBehavior, 'disabledSystemHeaders') || {},
disableCookies = _.get(options.protocolProfileBehavior, 'disableCookies');
async.waterfall([
function setAuthorizationHeaders (next) {
setAuthorization(request, next);
},
function setContentTypeHeader (request, next) {
setContentType(request, next);
},
function setContentLength (request, next) {
var headers = request.headers,
header = headers.one('content-length');
// bail out if header added by body helper
if (header && header.system) {
return next(null, request);
}
switch (String(request.method).toUpperCase()) {
case 'GET':
case 'HEAD':
case 'TRACE':
case 'DELETE':
case 'CONNECT':
case 'OPTIONS':
break;
default:
addSystemHeader(headers, 'Content-Length', '0');
break;
}
next(null, request);
},
function setCookieHeader (request, next) {
if (disableCookies || !cookieJar) {
return next(null, request);
}
setCookie(request, cookieJar, next);
},
function setStaticHeaders (request, next) {
var headers = request.headers;
// remove header added by auth helpers
headers.remove(function (header) {
return header.system && header.key.toLowerCase() === 'host';
});
addSystemHeader(headers, 'User-Agent', 'PostmanRuntime/' + version);
addSystemHeader(headers, 'Accept', '*/*');
addSystemHeader(headers, 'Accept-Encoding', 'gzip, deflate, br');
addSystemHeader(headers, 'Host', CALCULATED_AT_RUNTIME);
addSystemHeader(headers, 'Connection', 'keep-alive');
implicitCacheControl && addSystemHeader(headers, 'Cache-Control', 'no-cache');
implicitTraceHeader && addSystemHeader(headers, 'Postman-Token', CALCULATED_AT_RUNTIME);
next(null, request);
},
function disableSystemHeaders (request, next) {
var headersReference = request.headers.reference,
header;
_.forEach(disabledSystemHeaders, function (disabled, headerKey) {
if (!disabled) { return; }
if ((header = headersReference[headerKey.toLowerCase()])) {
Array.isArray(header) && (header = header[header.length - 1]);
header.system && (header.disabled = true);
}
});
next(null, request);
}
], function (err, request) {
if (err) { return done(err); }
done(null, request);
});
}
module.exports = dryRun;
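
// A minimal usage sketch for `dryRun` above; the request definition is an
// illustrative value. Runs only when this file is executed directly.
if (require.main === module) {
    dryRun(new Request({
        url: 'https://example.com/post',
        method: 'POST',
        body: {mode: 'raw', raw: '{"hello":"world"}', options: {raw: {language: 'json'}}}
    }), {implicitCacheControl: true, implicitTraceHeader: true}, function (err, clonedRequest) {
        if (err) { return console.error(err); }

        // logs the request headers, including the system-added Content-Type,
        // Content-Length, User-Agent, Accept, Host and Connection headers
        console.log(clonedRequest.headers.toJSON());
    });
}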

4
node_modules/postman-runtime/lib/requester/index.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
module.exports = {
Requester: require('./requester').Requester,
RequesterPool: require('./requester-pool').RequesterPool
};

View File

@@ -0,0 +1,94 @@
var _ = require('lodash'),
async = require('async'),
requests = require('postman-request'),
/**
* Sets the proxy and tunnel settings on the request options
*
* @param request
* @param options
* @param cb
*/
setProxy = function (request, options, cb) {
var proxyConfig;
if ((proxyConfig = _.get(request, 'proxy'))) {
options.proxy = proxyConfig.getProxyUrl();
// TODO: Use tri-state var for tunnel in SDK and update here
// for now determine the tunnel value from the URL unless explicitly set to true
options.tunnel = proxyConfig.tunnel ? true : request.url.protocol === 'https';
}
// if proxy is not set, postman-request implicitly falls back to the proxy
// environment variables. To opt out of this, set the `ignoreProxyEnvironmentVariables`
// requester option.
// Setting proxy to `false` opts out of the implicit proxy configuration
// from those environment variables.
if (!options.proxy && options.ignoreProxyEnvironmentVariables) {
options.proxy = false;
}
cb(null, request, options);
},
/**
* Gets the certificate from the request
* and merges it into the options provided
*
* @param request
* @param options
* @param cb
*/
setCertificate = function (request, options, cb) {
var certificate,
isSSL = request.url.protocol === 'https',
hasCertificate = request.certificate;
// exit if the protocol is not https
// or the request has no certificate attached
if (!isSSL || !hasCertificate) {
return cb(null, options);
}
certificate = request.certificate;
if (!certificate) { return cb(null, options); }
_.assign(options, {
pfx: _.get(certificate, 'pfx.value'),
key: _.get(certificate, 'key.value'),
cert: _.get(certificate, 'cert.value'),
passphrase: certificate.passphrase
});
cb(null, options);
};
// Enable support for extending root CAs.
// Refer: https://github.com/postmanlabs/postman-request/pull/35
// @todo trigger console warning (using callback) if not enabled.
requests.enableNodeExtraCACerts();
module.exports = function (request, options, onStart, callback) {
var req = {};
async.waterfall([
function (next) {
setProxy(request, options, next);
},
function (request, options, next) {
setCertificate(request, options, next);
}
], function (err, options) {
if (err) { return callback(err); }
var request = requests(options, callback);
// todo: this is a hack to ensure that we can abort requests from the app before they're complete.
req.abort = request.abort.bind(request);
// emit responseStart event
request.on('response', onStart);
});
return req;
};
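
// A minimal usage sketch for the wrapper above: build postman-request compatible
// options via ./core and fire the request. The URL is an illustrative placeholder
// and the call needs network access. Runs only when this file is executed directly.
if (require.main === module) {
    var exampleCore = require('./core'),
        ExampleRequest = require('postman-collection').Request,
        exampleRequest = new ExampleRequest({url: 'https://example.com/get', method: 'GET'}),
        exampleOptions = exampleCore.getRequestOptions(exampleRequest, {timeout: 5000});

    module.exports(exampleRequest, exampleOptions, function () {
        console.log('response started');
    }, function (err, response) {
        if (err) { return console.error(err.message); }
        console.log('status:', response && response.statusCode);
    });
}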

View File

@@ -0,0 +1,69 @@
var _ = require('lodash'),
Requester = require('./requester').Requester,
RequestCookieJar = require('postman-request').jar,
STRING = 'string',
FUNCTION = 'function',
RequesterPool; // fn
RequesterPool = function (options, callback) {
var self = this,
extendedRootCA,
fileResolver = options && options.fileResolver;
_.assign((self.options = {}), {
timeout: _.min([
_.get(options, 'timeout.request'),
_.get(options, 'timeout.global')
]), // validated later inside requester
timings: _.get(options, 'requester.timings', true),
verbose: _.get(options, 'requester.verbose', false),
keepAlive: _.get(options, 'requester.keepAlive', true),
agents: _.get(options, 'requester.agents'), // http(s).Agent instances
cookieJar: _.get(options, 'requester.cookieJar'), // default set later in this constructor
strictSSL: _.get(options, 'requester.strictSSL'),
maxResponseSize: _.get(options, 'requester.maxResponseSize'),
// @todo drop support in v8
useWhatWGUrlParser: _.get(options, 'requester.useWhatWGUrlParser', false),
followRedirects: _.get(options, 'requester.followRedirects', true),
followOriginalHttpMethod: _.get(options, 'requester.followOriginalHttpMethod'),
maxRedirects: _.get(options, 'requester.maxRedirects'),
implicitCacheControl: _.get(options, 'requester.implicitCacheControl', true),
implicitTraceHeader: _.get(options, 'requester.implicitTraceHeader', true),
systemHeaders: _.get(options, 'requester.systemHeaders', {}),
removeRefererHeaderOnRedirect: _.get(options, 'requester.removeRefererHeaderOnRedirect'),
ignoreProxyEnvironmentVariables: _.get(options, 'ignoreProxyEnvironmentVariables'),
network: _.get(options, 'network', {})
});
// create a cookie jar if one is not provided
if (!self.options.cookieJar) {
self.options.cookieJar = RequestCookieJar();
}
if (fileResolver && typeof fileResolver.readFile === FUNCTION &&
typeof (extendedRootCA = _.get(options, 'requester.extendedRootCA')) === STRING) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
fileResolver.readFile(extendedRootCA, function (err, caCerts) {
if (err) {
// @todo trigger console error
}
else {
// set extendedRootCA option
self.options.extendedRootCA = caCerts;
}
return callback();
});
}
else {
return callback();
}
};
RequesterPool.prototype.create = function (trace, callback) {
return Requester.create(trace, this.options, callback);
};
module.exports.RequesterPool = RequesterPool;
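
// A minimal usage sketch for `RequesterPool` above; the requester options shown
// are illustrative. Runs only when this file is executed directly.
if (require.main === module) {
    var examplePool = new RequesterPool({
        requester: {timings: true, maxRedirects: 10}
    }, function () {
        console.log('requester pool ready');
    });

    // hand out a requester for a (hypothetical) trace descriptor
    examplePool.create({type: 'http', source: 'example'}, function (err, exampleRequester) {
        console.log(err || 'created requester, maxRedirects: ' + exampleRequester.options.maxRedirects);
    });
}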

494
node_modules/postman-runtime/lib/requester/requester.js generated vendored Normal file
View File

@@ -0,0 +1,494 @@
var _ = require('lodash'),
core = require('./core'),
Emitter = require('events'),
inherits = require('inherits'),
now = require('performance-now'),
sdk = require('postman-collection'),
requests = require('./request-wrapper'),
dryRun = require('./dry-run'),
RESPONSE_START_EVENT_BASE = 'response.start.',
RESPONSE_END_EVENT_BASE = 'response.end.',
RESPONSE_START = 'responseStart',
RESPONSE_END = 'response',
ERROR_RESTRICTED_ADDRESS = 'NETERR: getaddrinfo ENOTFOUND ',
/**
* Headers which get overwritten by the requester.
*
* @private
* @const
* @type {Object}
*/
OVERWRITTEN_HEADERS = {
cookie: true, // cookies get appended with `;`
'content-length': true
},
/**
* Creates an SDK-compatible cookie from a tough-cookie compatible cookie.
*
* @param cookie
* @returns {Object}
*/
toPostmanCookie = function (cookie) {
var expires = cookie.expiryTime();
cookie.toJSON && (cookie = cookie.toJSON());
return new sdk.Cookie({
name: cookie.key,
value: cookie.value,
expires: Number.isFinite(expires) ? new Date(expires) : null,
maxAge: cookie.maxAge,
domain: cookie.domain,
path: cookie.path,
secure: cookie.secure,
httpOnly: cookie.httpOnly,
hostOnly: cookie.hostOnly,
extensions: cookie.extensions
});
},
/**
* This method is used in conjunction with _.transform to convert multi-value headers
* into multiple single-value headers
*
* @param {Array} acc
* @param {Array|String} val
* @param {String} key
* @return {Object}
*/
transformMultiValueHeaders = function (acc, val, key) {
var i, ii;
if (Array.isArray(val)) {
for (i = 0, ii = val.length; i < ii; i++) {
acc.push({
key: key,
value: val[i]
});
}
}
else {
acc.push({
key: key,
value: val
});
}
},
/**
* Calculate the request timings offset by adding the runtime overhead, which
* helps determine the time taken to prepare and process the request.
*
* @param {Number} runtimeTimer - Runtime request start HR time
* @param {Number} requestTimer - Request start HR time
* @param {Object} timings - Request timings offset
* @returns {Object}
*/
calcTimingsOffset = function (runtimeTimer, requestTimer, timings) {
if (!(runtimeTimer && requestTimer && timings)) { return; }
// runtime + postman-request initialization time
var initTime = requestTimer - runtimeTimer,
offset = {
request: initTime
};
// add initialization overhead to request offsets
_.forOwn(timings, function (value, key) {
offset[key] = value + initTime;
});
// total time taken by runtime to get the response
// @note if offset.end is missing, that means request is not complete.
// this is used to calculate timings on responseStart.
if (offset.end) {
offset.done = now() - runtimeTimer;
}
return offset;
},
Requester;
/**
* Creates a new Requester, which is used to make HTTP(s) requests.
*
* @param trace
* @param options
* @param {Boolean} [options.keepAlive=true] Optimizes HTTP connections by keeping them alive, so that new requests
* to the same host are made over the same underlying TCP connection.
* @param {CookieJar} [options.cookieJar] A cookie jar to use with Node requests.
* @param {Boolean} [options.strictSSL]
* @param {Boolean} [options.followRedirects=true] If false, returns a 301/302 as the response code
* instead of following the redirect
* @note `options.keepAlive` is only supported in Node.
* @note `options.cookieJar` is only supported in Node.
*
* @constructor
*/
inherits(Requester = function (trace, options) {
this.options = options || {};
// protect the timeout value from being non-numeric or infinite
if (!_.isFinite(this.options.timeout)) {
this.options.timeout = undefined;
}
this.trace = trace;
Requester.super_.call(this);
}, Emitter);
_.assign(Requester.prototype, /** @lends Requester.prototype */ {
/**
* Perform an HTTP request.
*
* @param {String} id
* @param {Request} request
* @param {Object} protocolProfileBehavior
* @param {Function} callback
*/
request: function (id, request, protocolProfileBehavior, callback) {
var self = this,
hostname,
cookieJar,
requestOptions,
networkOptions = self.options.network || {},
startTime = Date.now(),
startTimer = now(), // high-resolution time
cookies = [],
responseHeaders = [],
responseJSON = {},
// keep track of `responseStart` and `response` triggers
_responseStarted = false,
_responseEnded = false,
_responseData = {},
// Refer: https://github.com/postmanlabs/postman-runtime/blob/v7.14.0/docs/history.md
getExecutionHistory = function (debugInfo) {
var history = {
execution: {
verbose: Boolean(requestOptions.verbose),
sessions: {},
data: []
}
},
executionData = [],
requestSessions = {};
if (!Array.isArray(debugInfo)) {
return history;
}
// prepare history from request debug data
debugInfo.forEach(function (debugData) {
if (!debugData) { return; }
// @todo cache connection sessions and fetch reused session
// from the requester pool.
if (debugData.session && !requestSessions[debugData.session.id]) {
requestSessions[debugData.session.id] = debugData.session.data;
}
executionData.push({
request: debugData.request,
response: debugData.response,
timings: debugData.timings && {
// runtime start time
start: startTime,
// request start time
requestStart: debugData.timingStart,
// offsets calculated are relative to runtime start time
offset: calcTimingsOffset(startTimer, debugData.timingStartTimer, debugData.timings)
},
session: debugData.session && {
id: debugData.session.id,
// is connection socket reused
reused: debugData.session.reused
}
});
});
// update history object
history.execution.data = executionData;
history.execution.sessions = requestSessions;
return history;
},
/**
* Add the missing/system headers in the request object
*
* @param {Object[]} headers
*/
addMissingRequestHeaders = function (headers) {
_.forEach(headers, function (header) {
var lowerCasedKey = header.key.toLowerCase();
// update headers which gets overwritten by the requester
if (OVERWRITTEN_HEADERS[lowerCasedKey]) {
if (Array.isArray(_.get(request.headers, ['reference', lowerCasedKey]))) {
request.headers.remove(header.key);
}
request.headers.upsert({
key: header.key,
value: header.value,
system: true
});
}
});
},
/**
* Helper function to trigger `callback` and complete the request function
*
* @param {Error} error - error while requesting
* @param {Response} response - SDK Response instance
* @param {Object} history - Request-Response History
*/
onEnd = function (error, response, history) {
self.emit(RESPONSE_END_EVENT_BASE + id, error, self.trace.cursor,
self.trace, response, request, cookies, history);
return callback(error, response, request, cookies, history);
},
/**
* Helper function to keep track of the `responseStart` and `response`
* triggers and make sure they are emitted in the correct order.
*
* @todo fix requester control flow to remove this hack!
* this is required because CookieJar.getCookies is an async method
* and by the time it completes, postman-request may have ended the request,
* which affects request post-send helpers because the `response.start` event
* is not emitted on time and the shared variables `cookies`, `responseJSON`,
* and `responseHeaders` are initialized in the onStart function.
*
* @param {String} trigger - trigger name
* @param {Response} response - SDK Response instance
* @param {Object} history - Request-Response History
*/
onComplete = function (trigger, response, history) {
if (trigger === RESPONSE_START) {
// set flag for responseStart callback
_responseStarted = true;
// if response is ended, end the response using cached data
if (_responseEnded) {
onEnd(null, _responseData.response, _responseData.history);
}
// bail out and wait for response end if not ended already
return;
}
// if response started, don't wait and end the response
if (_responseStarted) {
onEnd(null, response, history);
return;
}
// wait for responseStart and cache response callback data
_responseEnded = true;
_responseData = {
response: response,
history: history
};
},
/**
* Helper function to trigger `responseStart` callback and
* - transform postman-request response instance to SDK Response
* - filter cookies
* - filter response headers
* - add missing request headers
*
* @param {Object} response - Postman-Request response instance
*/
onStart = function (response) {
var responseStartEventName = RESPONSE_START_EVENT_BASE + id,
executionData,
initialRequest,
finalRequest,
sdkResponse,
history,
done = function () {
// emit the response.start event which eventually
// triggers responseStart callback
self.emit(responseStartEventName, null, sdkResponse, request, cookies, history);
// trigger completion of responseStart
onComplete(RESPONSE_START);
};
// @todo get rid of jsonifyResponse
responseJSON = core.jsonifyResponse(response, requestOptions);
// transform response headers to SDK compatible HeaderList
responseHeaders = _.transform(responseJSON.headers, transformMultiValueHeaders, []);
// initialize SDK Response instance
sdkResponse = new sdk.Response({
status: response && response.statusMessage,
code: responseJSON.statusCode,
header: responseHeaders
});
// prepare history from request debug data
history = getExecutionHistory(_.get(response, 'request._debug'));
// get the initial and final (on redirect) request from history
executionData = _.get(history, 'execution.data') || [];
initialRequest = _.get(executionData, '[0].request') || {};
finalRequest = executionData.length > 1 ?
// get final redirect
_.get(executionData, [executionData.length - 1, 'request']) :
// no redirects
initialRequest;
// add missing request headers so that they get bubbled up into the UI
addMissingRequestHeaders(initialRequest.headers);
// pull out cookies from the cookie jar, and make them chrome compatible.
if (cookieJar && _.isFunction(cookieJar.getCookies)) {
// get cookies set for the final request URL
cookieJar.getCookies(finalRequest.href, function (err, cookiesFromJar) {
if (err) {
return done();
}
cookies = _.transform(cookiesFromJar, function (acc, cookie) {
acc.push(toPostmanCookie(cookie));
}, []);
cookies = new sdk.CookieList(null, cookies);
done();
});
}
else {
cookies = new sdk.CookieList(null, []);
done();
}
};
// at this point the request could have come from collection, auth or sandbox
// we can't trust the integrity of this request
// bail out if request url is empty
if (!(request && request.url && request.url.toString && request.url.toString())) {
return onEnd(new Error('runtime:extensions~request: request url is empty'));
}
cookieJar = self.options.cookieJar;
requestOptions = core.getRequestOptions(request, self.options, protocolProfileBehavior);
// update url with the final encoded url
// @note this mutates the request object which will be passed in request
// and response callbacks
request.url.update(requestOptions.url.href);
hostname = request.url.getHost();
// check if host is on the `restrictedAddresses`
if (networkOptions.restrictedAddresses && core.isAddressRestricted(hostname, networkOptions)) {
return onEnd(new Error(ERROR_RESTRICTED_ADDRESS + hostname));
}
return requests(request, requestOptions, onStart, function (err, res, resBody, debug) {
// prepare history from request debug data
var history = getExecutionHistory(debug),
responseTime,
response;
if (err) {
// bubble up http errors
// @todo - Should we send an empty sdk Response here?
//
// Sending `history` object even in case of error
return onEnd(err, undefined, history);
}
// Calculate the time taken for us to get the response.
responseTime = Date.now() - startTime;
if (res && res.timings) {
// update response time to actual response end time
// of the final request in the redirect chain.
responseTime = Math.ceil(res.timings.end);
}
if (resBody && resBody instanceof ArrayBuffer) {
resBody = Buffer.from(resBody);
}
// Response in the SDK format
// @todo reuse same response instance used for responseStart callback
response = new sdk.Response({
code: responseJSON.statusCode,
status: res && res.statusMessage,
header: responseHeaders,
stream: resBody,
responseTime: responseTime
});
onComplete(RESPONSE_END, response, history);
});
},
/**
* Removes all current event listeners on the requester, and makes it ready for garbage collection :).
*
* @param {Function=} cb - Optional callback to be called on disposal
*
* @todo - In the future, when the requester manages its own connections etc, close them all here.
*/
dispose: function (cb) {
// This is safe for us because we do not wait on events (i.e., no part of Runtime ever waits on
// any event to occur). We rely on callbacks for that, choosing to use events only as a way of
// streaming information outside runtime.
this.removeAllListeners();
_.isFunction(cb) && cb();
}
});
_.assign(Requester, /** @lends Requester */ {
/**
* Asynchronously create a new requester.
*
* @param trace
* @param trace.type - type of requester to return (for now, just http)
* @param trace.source - information about who needs this requester, e.g Auth, etc.
* @param trace.cursor - the cursor
* @param options
* @param callback
* @returns {*}
*/
create: function (trace, options, callback) {
return callback(null, new Requester(trace, options));
},
/**
* A helper method to dry run the given request instance.
* It returns the cloned request instance with the system added properties.
*
* @param {Request} request
* @param {Object} options
* @param {Object} options.cookieJar
* @param {Object} options.protocolProfileBehavior
* @param {Object} options.implicitCacheControl
* @param {Object} options.implicitTraceHeader
* @param {Function} done
*/
dryRun
});
module.exports.Requester = Requester;
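
// A minimal usage sketch for `Requester` above; the trace, id and URL are
// illustrative values and the request needs network access. Runs only when this
// file is executed directly.
if (require.main === module) {
    Requester.create({type: 'http', source: 'example', cursor: {}}, {timeout: 10000}, function (err, requester) {
        if (err) { return console.error(err); }

        var exampleRequest = new sdk.Request({url: 'https://example.com/get', method: 'GET'});

        requester.request('example-id', exampleRequest, {}, function (requestErr, response) {
            console.log(requestErr ? requestErr.message : (response.code + ' in ' + response.responseTime + 'ms'));
            requester.dispose();
        });
    });
}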

View File

@@ -0,0 +1,65 @@
var _ = require('lodash'),
sdk = require('postman-collection'),
/**
* @const
* @type {string}
*/
FUNCTION = 'function',
SAFE_CONTEXT_PROPERTIES = ['replayState', 'coords'];
/**
* Creates a context object to be used with `http-request.command` extension.
*
* @function createItemContext
*
* @param {Object} payload
* @param {Item} payload.item
* @param {Object} [payload.coords]
* @param {Object} [defaults]
* @param {Object} [defaults.replayState]
* @param {Object} [defaults.coords]
*
* @returns {ItemContext}
*/
module.exports = function (payload, defaults) {
// extract properties from defaults that can/should be reused in new context
var context = defaults ? _.pick(defaults, SAFE_CONTEXT_PROPERTIES) : {};
// set cursor to context
!context.coords && (context.coords = payload.coords);
// save original item for reference
context.originalItem = payload.item;
// we clone item from the payload, so that we can make any changes we need there, without mutating the
// collection
context.item = new sdk.Item(payload.item.toJSON());
// get a reference to the Auth instance from the item, so changes are synced back
context.auth = context.originalItem.getAuth();
// Make sure run is not errored out if older version of collection SDK is used.
// @todo remove this safety check in the next release
if (typeof context.originalItem.getProtocolProfileBehaviorResolved === FUNCTION) {
// get protocolProfileBehavior for the item, also inherited from parent
context.protocolProfileBehavior = context.originalItem.getProtocolProfileBehaviorResolved();
}
else {
// get protocolProfileBehavior for the item
context.protocolProfileBehavior = context.originalItem.protocolProfileBehavior;
}
/**
* @typedef {Object} ItemContext
* @property {Object} coords - current cursor
* @property {Item} originalItem - reference to the item in the collection
* @property {Item} item - Holds a copy of the item given in the payload, so that it can be manipulated
* as necessary
* @property {RequestAuthBase|undefined} auth - If present, is the instance of Auth in the collection, which
* is changed as necessary using intermediate requests, etc.
* @property {ReplayState} replayState - has context on number of replays(if any) for this request
*/
return context;
};
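
// A minimal usage sketch for the context factory above; the item definition and
// cursor coordinates are illustrative values. Runs only when this file is
// executed directly.
if (require.main === module) {
    var exampleItem = new sdk.Item({
            name: 'Example request',
            request: {url: 'https://example.com/get', method: 'GET'}
        }),
        exampleContext = module.exports({item: exampleItem, coords: {position: 0, iteration: 0}});

    // `exampleContext.item` is a clone that can be mutated without touching `exampleContext.originalItem`
    console.log(exampleContext.item.name, exampleContext.coords);
}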

376
node_modules/postman-runtime/lib/runner/cursor.js generated vendored Normal file
View File

@@ -0,0 +1,376 @@
var _ = require('lodash'),
uuid = require('uuid'),
Cursor;
/**
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
* @constructor
*/
Cursor = function RunCursor (length, cycles, position, iteration, ref) { // eslint-disable-line func-name-matching
this.length = Cursor.validate(length, 0);
this.position = Cursor.validate(position, 0, this.length);
this.cycles = Cursor.validate(cycles, 1, 1);
this.iteration = Cursor.validate(iteration, 0, this.cycles);
this.ref = ref || uuid.v4();
};
_.assign(Cursor.prototype, {
/**
* Load the cursor state from a state object or another Cursor instance
*
* @param {Object} state
* @param {Number} [state.length=0]
* @param {Number} [state.cycles=1]
* @param {Number} [state.position=0]
* @param {Number} [state.iteration=0]
* @param {String} [state.ref]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
load: function (state, callback, scope) {
!state && (state = {});
(state instanceof Cursor) && (state = state.current());
this.reset(state.length, state.cycles, state.position, state.iteration, state.ref, callback, scope);
},
/**
* Reset the cursor state (length, cycles, position, iteration and ref)
*
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
reset: function (length, cycles, position, iteration, ref, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// validate parameter defaults
_.isNil(length) && (length = this.length);
_.isNil(cycles) && (cycles = this.cycles);
_.isNil(position) && (position = this.position);
_.isNil(iteration) && (iteration = this.iteration);
_.isNil(ref) && (ref = this.ref);
// use the constructor to set the values
Cursor.call(this, length, cycles, position, iteration, ref);
// send before and after values to the callback
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Update length and cycle bounds
*
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
bounds: function (length, cycles, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// validate parameter defaults
_.isNil(length) && (length = this.length);
_.isNil(cycles) && (cycles = this.cycles);
// use the constructor to set the values
Cursor.call(this, length, cycles, this.position, this.iteration);
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Set everything to minimum dimension
*
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
zero: function (callback, scope) {
var coords = _.isFunction(callback) && this.current();
this.position = 0;
this.iteration = 0;
// send before and after values to the callback
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Set everything to minimum dimension and reset length and cycle bounds
*
* @param {Function} [callback] - receives `(err:Error, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
clear: function (callback, scope) {
var coords = _.isFunction(callback) && this.current();
this.position = 0;
this.iteration = 0;
this.cycles = 1;
this.length = 0;
return coords && callback.call(scope || this, null, this.current(), coords);
},
/**
* Seek to a specified position and iteration
*
* @param {Number} [position]
* @param {Number} [iteration]
* @param {Function} [callback] - receives `(err:Error, changed:Boolean, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
seek: function (position, iteration, callback, scope) {
var coords = _.isFunction(callback) && this.current();
// null or undefined implies using the existing seek position
_.isNil(position) && (position = this.position);
_.isNil(iteration) && (iteration = this.iteration);
// make the pointers stay within boundary
if ((position >= this.length) || (iteration >= this.cycles) || (position < 0) || (iteration < 0) ||
isNaN(position) || isNaN(iteration)) {
return coords &&
callback.call(scope || this, new Error('runcursor: seeking out of bounds: ' + [position, iteration]));
}
// floor the numbers
position = ~~position;
iteration = ~~iteration;
// set the new positions
this.position = Cursor.validate(position, 0, this.length);
this.iteration = Cursor.validate(iteration, 0, this.cycles);
// finally execute the callback with the seek position
return coords && callback.call(scope || this, null, this.hasChanged(coords), this.current(), coords);
},
/**
* Seek one forward
*
* @param {Function} [callback] - receives `(err:Error, changed:Boolean, coords:Object, previous:Object)`
* @param {Object} [scope]
*/
next: function (callback, scope) {
var position = this.position,
iteration = this.iteration,
coords;
// increment position
position += 1;
// check if we need to increment cycle
if (position >= this.length) {
// set position to 0 and increment iteration
position = 0;
iteration += 1;
if (iteration >= this.cycles) {
coords = _.isFunction(callback) && this.current();
coords.eof = true;
return coords && callback.call(scope || this, null, false, coords, coords);
}
coords && (coords.cr = true);
}
// finally handover the new coordinates to seek function
return this.seek(position, iteration, callback, scope);
},
/**
* Tentative Cursor status, if we do `.next()`
*
* @param {Object} coords
*
* @returns {Object}
*/
whatnext: function (coords) {
var base = {
ref: this.ref,
length: this.length,
cycles: this.cycles
},
position,
iteration;
if (!_.isObject(coords)) {
return _.assign(base, {eof: true, bof: true, empty: this.empty()});
}
if (!this.length) {
return _.assign(base, {eof: true, bof: true, empty: true});
}
position = coords.position;
iteration = coords.iteration;
// increment position
position += 1;
// check if we need to increment cycle
if (position >= this.length) {
// set position to 0 and increment iteration
position = 0;
iteration += 1;
if (iteration >= this.cycles) {
return _.assign(base, {
position: this.length - 1,
iteration: iteration - 1,
eof: true
});
}
return _.assign(base, {
position: position,
iteration: iteration,
cr: true
});
}
return _.assign(base, {position: position, iteration: iteration});
},
/**
* Check whether the current position and iteration differ from the specified coordinates
*
* @param {Object} coords
* @returns {Boolean}
*/
hasChanged: function (coords) {
return _.isObject(coords) && !((this.position === coords.position) && (this.iteration === coords.iteration));
},
/**
* Current Cursor state
*
* @returns {Object}
*/
current: function () {
return {
position: this.position,
iteration: this.iteration,
length: this.length,
cycles: this.cycles,
empty: this.empty(),
eof: this.eof(),
bof: this.bof(),
cr: this.cr(),
ref: this.ref
};
},
/**
* Is the current position going to trigger a new iteration on `.next`?
*
* @returns {Boolean}
*/
cr: function () {
return !this.length || (this.position >= this.length);
},
/**
* @returns {Boolean}
*/
eof: function () {
return !this.length || (this.position >= this.length) && (this.iteration >= this.cycles);
},
/**
* @returns {Boolean}
*/
bof: function () {
return !this.length || ((this.position === 0) && (this.iteration === 0));
},
/**
* @returns {Boolean}
*/
empty: function () {
return !this.length;
},
/**
* @returns {Object}
*/
valueOf: function () {
return this.current();
},
clone: function () {
return new Cursor(this.length, this.cycles, this.position, this.iteration);
}
});
_.assign(Cursor, {
/**
* @param {Number} [length=0]
* @param {Number} [cycles=1]
* @param {Number} [position=0]
* @param {Number} [iteration=0]
* @param {String} [ref]
*
* @returns {Number}
*/
create: function (length, cycles, position, iteration, ref) {
return new Cursor(length, cycles, position, iteration, ref);
},
/**
* @param {Object|Cursor} obj
* @param {Object} [bounds]
* @param {Number} [bounds.length]
* @param {Number} [bounds.cycles]
*
* @returns {Cursor}
*/
box: function (obj, bounds) {
// already a Cursor, do nothing
if (obj instanceof Cursor) {
bounds && obj.bounds(bounds.length, bounds.cycles);
return obj;
}
// nothing to box, create a blank Cursor
if (!_.isObject(obj)) { return new Cursor(bounds && bounds.length, bounds && bounds.cycles); }
// load Cursor values from object
return new Cursor((bounds || obj).length, (bounds || obj).cycles, obj.position, obj.iteration, obj.ref);
},
/**
* @private
*
* @param {Number} num
* @param {Number} min - lower bound
* @param {Number} [max]
*
* @returns {Number}
*/
validate: function (num, min, max) {
if (typeof num !== 'number' || num < min) {
return min;
}
if (num === Infinity) {
return _.isNil(max) ? min : max;
}
return num;
}
});
module.exports = Cursor;
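
// A minimal usage sketch for `Cursor` above: walk 2 items across 2 iterations.
// Runs only when this file is executed directly.
if (require.main === module) {
    var exampleCursor = Cursor.create(2, 2),
        step = function () {
            exampleCursor.next(function (err, changed, coords) {
                if (err) { return console.error(err); }

                console.log('position %d, iteration %d, eof: %s', coords.position, coords.iteration, !!coords.eof);

                // keep stepping until the cursor reports end of run
                coords.eof || step();
            });
        };

    console.log('start:', exampleCursor.current());
    step();
}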

View File

@@ -0,0 +1,105 @@
var _ = require('lodash'),
util = require('../util'),
backpack = require('../../backpack');
module.exports = {
/**
* All the events that this extension triggers
* @type {Array}
*/
triggers: ['pause', 'resume', 'abort'],
prototype: /** @lends Run.prototype */ {
/**
* Pause a run
*
* @param {Function} callback
*/
pause: function (callback) {
callback = backpack.ensure(callback, this);
if (this.paused) { return callback && callback(new Error('run: already paused')); }
// schedule the pause command as an interrupt and flag that the run is pausing
this.paused = true;
this.interrupt('pause', null, callback);
},
/**
* Resume a paused run
*
* @param {Function} callback
*/
resume: function (callback) {
callback = backpack.ensure(callback, this);
if (!this.paused) { return callback && callback(new Error('run: not paused')); }
// clear the paused flag and fire the callback that was stored when the run was paused
this.paused = false;
setTimeout(function () {
this.__resume();
delete this.__resume;
this.triggers.resume(null, this.state.cursor.current());
}.bind(this), 0);
callback && callback();
},
/**
* Aborts a run
*
* @param {boolean} [summarise=true]
* @param {function} callback
*/
abort: function (summarise, callback) {
if (_.isFunction(summarise) && !callback) {
callback = summarise;
summarise = true;
}
this.interrupt('abort', {
summarise: summarise
}, callback);
_.isFunction(this.__resume) && this.resume();
}
},
process: /** @lends Run.commands */ {
pause: function (userback, payload, next) {
// trigger the secondary callbacks
this.triggers.pause(null, this.state.cursor.current());
// tuck away the command completion callback in the run object so that it can be used during resume
this.__resume = next;
// execute the userback sent as part of the command and do so in a try block to ensure it does not hamper
// the process tick
var error = util.safeCall(userback, this);
// if there is an error executing the userback, then and only then raise the error (which stops the run)
if (error) {
return next(error);
}
},
/**
* @param {Function} userback
* @param {Object} payload
* @param {Boolean} payload.summarise
* @param {Function} next
*/
abort: function (userback, payload, next) {
// clear instruction pool and as such there will be nothing next to execute
this.pool.clear();
this.triggers.abort(null, this.state.cursor.current());
// execute the userback sent as part of the command and do so in a try block to ensure it does not hamper
// the process tick
backpack.ensure(userback, this) && userback();
next(null);
}
}
};
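
A hedged sketch of how these commands are meant to be called from outside, assuming `run` is a Run instance handed back by Runner#run (see index.js further down in this diff); the callback shapes follow the code above:

run.pause(function (err) {
    if (err) { return console.error(err); }
    // the run is now interrupted; the 'pause' trigger has fired with the current cursor

    setTimeout(function () {
        run.resume(function (err) {
            console.log(err ? err.message : 'run resumed');
        });
    }, 1000);
});

// abort() accepts (summarise, callback) or just a callback, as the polymorphic check above shows
// run.abort(function () { console.log('run aborted'); });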


@@ -0,0 +1,62 @@
var _ = require('lodash');
module.exports = {
init: function (done) {
done();
},
triggers: ['waitStateChange'],
prototype: {
/**
* @param {Function} fn - function to execute
* @param {Object} options
* @param {String} options.source
* @param {Number} options.time
* @param {Object} options.cursor
* @param {Function} next
* @private
*/
queueDelay: function (fn, options, next) {
var time = _.isFinite(options.time) ? parseInt(options.time, 10) : 0;
// if the time is a valid and finite time, we queue the delay command
if (time > 0) {
this.queue('delay', {
cursor: options.cursor,
source: options.source,
time: time
}).done(fn);
}
// otherwise, we do not delay and simply execute the function that was supposed to be called post delay
else {
fn();
}
next();
}
},
process: {
/**
* @param {Object} payload
* @param {Number} payload.time
* @param {Object} payload.cursor
* @param {String} payload.source
* @param {Function} next
*/
delay: function (payload, next) {
var cursor = payload.cursor || this.state.cursor.current();
this.waiting = true; // set flag
// trigger the waiting state change event
this.triggers.waitStateChange(null, cursor, true, payload.time, payload.source);
setTimeout((function () {
this.waiting = false; // unset flag
this.triggers.waitStateChange(null, cursor, false, payload.time, payload.source);
next();
}).bind(this), payload.time || 0);
}
}
};
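
A small sketch of how another run command might lean on queueDelay; the `somethingSlow` processor name and `payload.delay` field are made up for illustration, but the argument shape matches the item and waterfall commands later in this diff:

module.exports = {
    process: {
        somethingSlow: function (payload, next) {
            this.queueDelay(function () {
                // runs after the queued 'delay' instruction completes (or immediately when time is 0)
                console.log('delayed work done');
            }, {
                time: payload.delay,      // hypothetical field; non-finite or zero means no delay
                source: 'somethingSlow',
                cursor: payload.coords
            }, next);                     // note: next() is called right away, the delay itself is queued
        }
    }
};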


@@ -0,0 +1,530 @@
var _ = require('lodash'),
uuid = require('uuid'),
async = require('async'),
util = require('../util'),
sdk = require('postman-collection'),
sandbox = require('postman-sandbox'),
serialisedError = require('serialised-error'),
ToughCookie = require('tough-cookie').Cookie,
createItemContext = require('../create-item-context'),
ASSERTION_FAILURE = 'AssertionFailure',
SAFE_CONTEXT_VARIABLES = ['_variables', 'environment', 'globals', 'collectionVariables', 'cookies', 'data',
'request', 'response'],
EXECUTION_REQUEST_EVENT_BASE = 'execution.request.',
EXECUTION_RESPONSE_EVENT_BASE = 'execution.response.',
EXECUTION_ASSERTION_EVENT_BASE = 'execution.assertion.',
EXECUTION_ERROR_EVENT_BASE = 'execution.error.',
EXECUTION_COOKIES_EVENT_BASE = 'execution.cookies.',
COOKIES_EVENT_STORE_ACTION = 'store',
COOKIE_STORE_PUT_METHOD = 'putCookie',
COOKIE_STORE_UPDATE_METHOD = 'updateCookie',
FILE = 'file',
REQUEST_BODY_MODE_FILE = 'file',
REQUEST_BODY_MODE_FORMDATA = 'formdata',
getCookieDomain, // fn
postProcessContext, // fn
sanitizeFiles; // fn
postProcessContext = function (execution, failures) { // function determines whether the event needs to abort
var error;
if (failures && failures.length) {
error = new Error(failures.join(', '));
error.name = ASSERTION_FAILURE;
}
return error ? serialisedError(error, true) : undefined;
};
/**
* Removes files in Request body if any.
*
* @private
*
* @param {Request~definition} request Request JSON representation to be sanitized
* @param {Function} callback function invoked with error, request and sanitisedFiles.
* sanitisedFiles is the list of files removed from request.
*
* @note this function mutates the request
* @todo remove files path from request.certificate
*/
sanitizeFiles = function (request, callback) {
if (!request) {
return callback(new Error('Could not complete pm.sendRequest. Request is empty.'));
}
var sanitisedFiles = [];
// do nothing if request body is empty
if (!request.body) {
// send request as such
return callback(null, request, sanitisedFiles);
}
// in case of request body mode is file, we strip it out
if (request.body.mode === REQUEST_BODY_MODE_FILE) {
sanitisedFiles.push(_.get(request, 'body.file.src'));
request.body = null; // mutate the request for body
}
// if body is form-data then we deep dive into the data items and remove the entries that have file data
else if (request.body.mode === REQUEST_BODY_MODE_FORMDATA) {
// eslint-disable-next-line lodash/prefer-immutable-method
_.remove(request.body.formdata, function (param) {
// blank param and non-file param is removed
if (!param || param.type !== FILE) { return false; }
// at this point the param needs to be removed
sanitisedFiles.push(param.src);
return true;
});
}
return callback(null, request, sanitisedFiles);
};
/**
* Fetch domain name from CookieStore event arguments.
*
* @private
* @param {String} fnName - CookieStore method name
* @param {Array} args - CookieStore method arguments
* @returns {String|Undefined} - Domain name
*/
getCookieDomain = function (fnName, args) {
if (!(fnName && args)) {
return;
}
var domain;
switch (fnName) {
case 'findCookie':
case 'findCookies':
case 'removeCookie':
case 'removeCookies':
domain = args[0];
break;
case 'putCookie':
case 'updateCookie':
domain = args[0] && args[0].domain;
break;
default:
return;
}
return domain;
};
/**
* Script execution extension of the runner.
* This module exposes processors for executing scripts before and after requests. Essentially, the processors are
* themselves not aware of other processors and simply allow running a script and then queueing a processor as defined in the
* payload.
*
* Adds options
* - stopOnScriptError:Boolean [false]
* - host:Object [undefined]
*/
module.exports = {
init: function (done) {
var run = this;
// if this run object already has a host, we do not need to create one.
if (run.host) {
return done();
}
// @todo - remove this when chrome app and electron host creation is offloaded to runner
// @todo - can this be removed now in runtime v4?
if (run.options.host && run.options.host.external === true) {
run.host = run.options.host.instance;
return done();
}
sandbox.createContext(_.merge({
timeout: _(run.options.timeout).pick(['script', 'global']).values().min()
// debug: true
}, run.options.host), function (err, context) {
if (err) { return done(err); }
// store the host in run object for future use and move on
run.host = context;
context.on('console', function () {
run.triggers.console.apply(run.triggers, arguments);
});
context.on('error', function () {
run.triggers.error.apply(run.triggers, arguments);
});
context.on('execution.error', function () {
run.triggers.exception.apply(run.triggers, arguments);
});
context.on('execution.assertion', function () {
run.triggers.assertion.apply(run.triggers, arguments);
});
done();
});
},
/**
* This lists the name of the events that the script processors are likely to trigger
*
* @type {Array}
*/
triggers: ['beforeScript', 'script', 'assertion', 'exception', 'console'],
process: {
/**
* This processors job is to do the following:
* - trigger event by its name
* - execute all scripts that the event listens to and return execution results
*
* @param {Object} payload
* @param {String} payload.name
* @param {Item} payload.item
* @param {Object} [payload.context]
* @param {Cursor} [payload.coords]
* @param {Number} [payload.scriptTimeout] - The millisecond timeout for the current running script.
* @param {Array.<String>} [payload.trackContext]
* @param {Boolean} [payload.stopOnScriptError] - if set to true, then a synchronous error encountered during
* execution of a script will stop executing any further scripts
* @param {Boolean} [payload.abortOnFailure]
* @param {Boolean} [payload.stopOnFailure]
* @param {Function} next
*
* @note - in order to raise trigger for the entire event, ensure your extension has registered the triggers
*/
event: function (payload, next) {
var item = payload.item,
eventName = payload.name,
cursor = payload.coords,
// the payload can have a list of variables to track from the context post execution, ensure that
// those are accurately set
track = _.isArray(payload.trackContext) && _.isObject(payload.context) &&
// ensure that only those variables that are defined in the context are synced
payload.trackContext.filter(function (variable) {
return _.isObject(payload.context[variable]);
}),
stopOnScriptError = (_.has(payload, 'stopOnScriptError') ? payload.stopOnScriptError :
this.options.stopOnScriptError),
abortOnError = (_.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError),
// @todo: find a better home for this option processing
abortOnFailure = payload.abortOnFailure,
stopOnFailure = payload.stopOnFailure,
events;
// @todo: find a better place to code this so that event is not aware of such options
if (abortOnFailure) {
abortOnError = true;
}
// validate the payload
if (!eventName) {
return next(new Error('runner.extension~events: event payload is missing the event name.'));
}
if (!item) {
return next(new Error('runner.extension~events: event payload is missing the triggered item.'));
}
// get the list of events to be executed
// includes events in parent as well
events = item.events.listeners(eventName, {excludeDisabled: true});
// call the "before" event trigger by its event name.
// at this point, the one who queued this event must ensure that the trigger for it is defined in its
// 'trigger' interface
this.triggers[_.camelCase('before-' + eventName)](null, cursor, events, item);
// with all the event listeners in place, we now iterate over them and execute their scripts. post execution,
// we accumulate the results in order to be passed on to the event callback trigger.
async.mapSeries(events, function (event, next) {
// in case the event has no script we bail out early
if (!event.script) {
return next(null, {event: event});
}
// get access to the script from the event.
var script = event.script,
executionId = uuid(),
assertionFailed = [],
asyncScriptError,
// create copy of cursor so we don't leak script ids outside `event.command`
// and across scripts
scriptCursor = _.clone(cursor);
// store the execution id in script
script._lastExecutionId = executionId; // please don't use it anywhere else!
// if we can find an id on script or event we add them to the cursor
// so logs and errors can be traced back to the script they came from
event.id && (scriptCursor.eventId = event.id);
event.script.id && (scriptCursor.scriptId = event.script.id);
// trigger the "beforeScript" callback
this.triggers.beforeScript(null, scriptCursor, script, event, item);
// add event listener to trap all assertion events, but only if needed, to avoid needlessly accumulating
// stuff in memory.
(abortOnFailure || stopOnFailure) &&
this.host.on(EXECUTION_ASSERTION_EVENT_BASE + executionId, function (scriptCursor, assertions) {
_.forEach(assertions, function (assertion) {
assertion && !assertion.passed && assertionFailed.push(assertion.name);
});
});
// store the error event, but only if needed, because the callback of host.execute()
// does not surface execution errors for async scripts
(abortOnError || stopOnScriptError) &&
// only store first async error in case of multiple errors
this.host.once(EXECUTION_ERROR_EVENT_BASE + executionId, function (scriptCursor, error) {
if (error && !(error instanceof Error)) {
error = new Error(error.message || error);
}
asyncScriptError = error;
// @todo: Figure out a way to abort the script execution here as soon as we get an error.
// We can send `execution.abort.` event to sandbox for this, but currently it silently
// terminates the script execution without triggering the callback.
});
this.host.on(EXECUTION_COOKIES_EVENT_BASE + executionId,
function (eventId, action, fnName, args) {
// only the 'store' action is supported; might need to support
// more cookie actions in next 2 years ¯\_(ツ)_/¯
if (action !== COOKIES_EVENT_STORE_ACTION) { return; }
var self = this,
dispatchEvent = EXECUTION_COOKIES_EVENT_BASE + executionId,
cookieJar = _.get(self, 'requester.options.cookieJar'),
cookieStore = cookieJar && cookieJar.store,
cookieDomain;
if (!cookieStore) {
return self.host.dispatch(dispatchEvent, eventId, 'CookieStore: no store found');
}
if (typeof cookieStore[fnName] !== 'function') {
return self.host.dispatch(dispatchEvent, eventId,
`CookieStore: invalid method name '${fnName}'`);
}
!Array.isArray(args) && (args = []);
// set expected args length to make sure callback is always called
args.length = cookieStore[fnName].length - 1;
// there's no way the cookie store can identify the difference
// between regular and programmatic access. So, for now,
// we check for programmatic access using the cookieJar
// helper method and emit the default empty value for that
// method.
// @note we don't emit an access-denied error here because
// that might block users' use-cases while accessing
// cookies for a sub-domain.
cookieDomain = getCookieDomain(fnName, args);
if (cookieJar && typeof cookieJar.allowProgrammaticAccess === 'function' &&
!cookieJar.allowProgrammaticAccess(cookieDomain)) {
return self.host.dispatch(dispatchEvent, eventId,
`CookieStore: programmatic access to "${cookieDomain}" is denied`);
}
// deserialize the plain cookie object into a ToughCookie instance
if (fnName === COOKIE_STORE_PUT_METHOD && args[0]) {
args[0] = ToughCookie.fromJSON(args[0]);
}
if (fnName === COOKIE_STORE_UPDATE_METHOD && args[0] && args[1]) {
args[0] = ToughCookie.fromJSON(args[0]);
args[1] = ToughCookie.fromJSON(args[1]);
}
// add store method's callback argument
args.push(function (err, res) {
// serialize error message
if (err && err instanceof Error) {
err = err.message || String(err);
}
self.host.dispatch(dispatchEvent, eventId, err, res);
});
try {
cookieStore[fnName].apply(cookieStore, args);
}
catch (error) {
self.host.dispatch(dispatchEvent, eventId,
`runtime~CookieStore: error executing "${fnName}"`);
}
}.bind(this));
this.host.on(EXECUTION_REQUEST_EVENT_BASE + executionId,
function (scriptCursor, id, requestId, request) {
// remove files in request body if any
sanitizeFiles(request, function (err, request, sanitisedFiles) {
if (err) {
return this.host.dispatch(EXECUTION_RESPONSE_EVENT_BASE + id, requestId, err);
}
var nextPayload;
// if request is sanitized send a warning
if (!_.isEmpty(sanitisedFiles)) {
this.triggers.console(scriptCursor, 'warn',
'uploading files from scripts is not allowed');
}
nextPayload = {
item: new sdk.Item({request: request}),
coords: scriptCursor,
// @todo - get script type from the sandbox
source: 'script',
// abortOnError makes sure request command bubbles errors
// so we can pass it on to the callback
abortOnError: true
};
// create context for executing this request
nextPayload.context = createItemContext(nextPayload);
this.immediate('httprequest', nextPayload).done(function (result) {
this.host.dispatch(
EXECUTION_RESPONSE_EVENT_BASE + id,
requestId,
null,
result && result.response,
// @todo get cookies from result.history or pass PostmanHistory
// instance once it is fully supported
result && {cookies: result.cookies}
);
}).catch(function (err) {
this.host.dispatch(EXECUTION_RESPONSE_EVENT_BASE + id, requestId, err);
});
}.bind(this));
}.bind(this));
// finally execute the script
this.host.execute(event, {
id: executionId,
// debug: true,
timeout: payload.scriptTimeout, // @todo: Expose this as a property in Collection SDK's Script
cursor: scriptCursor,
context: _.pick(payload.context, SAFE_CONTEXT_VARIABLES),
serializeLogs: _.get(this, 'options.script.serializeLogs'),
// legacy options
legacy: {
_itemId: item.id,
_itemName: item.name
}
}, function (err, result) {
this.host.removeAllListeners(EXECUTION_REQUEST_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_ASSERTION_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_RESPONSE_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_COOKIES_EVENT_BASE + executionId);
this.host.removeAllListeners(EXECUTION_ERROR_EVENT_BASE + executionId);
// Handle async errors as well.
// If there was an error running the script itself, that takes precedence
if (!err && asyncScriptError) {
err = asyncScriptError;
}
// electron IPC does not bubble errors to the browser process, so we serialize it here.
err && (err = serialisedError(err, true));
// if it is defined that certain variables are to be synced back to result, we do the same
track && result && track.forEach(function (variable) {
if (!(_.isObject(result[variable]) && payload.context[variable])) { return; }
var contextVariable = payload.context[variable],
mutations = result[variable].mutations;
// bail out if there are no mutations
if (!mutations) {
return;
}
// ensure that variable scope is treated accordingly
if (_.isFunction(contextVariable.applyMutation)) {
mutations = new sdk.MutationTracker(result[variable].mutations);
mutations.applyOn(contextVariable);
}
// @todo: unify the non variable scope flows and consume diff always
// and drop sending the full variable scope from sandbox
else {
util.syncObject(contextVariable, result[variable]);
}
});
// Get the failures. If there was an error running the script itself, that takes precedence
if (!err && (abortOnFailure || stopOnFailure)) {
err = postProcessContext(result, assertionFailed); // also use async assertions
}
// Ensure that we have SDK instances, not serialized plain objects.
// @todo - should this be handled by the sandbox?
result && result._variables && (result._variables = new sdk.VariableScope(result._variables));
result && result.environment && (result.environment = new sdk.VariableScope(result.environment));
result && result.globals && (result.globals = new sdk.VariableScope(result.globals));
result && result.collectionVariables &&
(result.collectionVariables = new sdk.VariableScope(result.collectionVariables));
result && result.request && (result.request = new sdk.Request(result.request));
// @note Since postman-sandbox@3.5.2, response object is not included in the execution result.
// Refer: https://github.com/postmanlabs/postman-sandbox/pull/512
// Adding back here to avoid breaking change in `script` callback.
// @todo revisit script callback args in runtime v8.
result && payload.context && payload.context.response &&
(result.response = new sdk.Response(payload.context.response));
// persist the pm.variables for the next script
result && result._variables &&
(payload.context._variables = new sdk.VariableScope(result._variables));
// persist the pm.variables for the next request
result && result._variables && (this.state._variables = new sdk.VariableScope(result._variables));
// persist the mutated request in payload context,
// @note this will be used for the next prerequest script or
// upcoming commands(request, httprequest).
result && result.request && (payload.context.request = result.request);
// now that this script is done executing, we trigger the event and move to the next script
this.triggers.script(err || null, scriptCursor, result, script, event, item);
// move to next script and pass on the results for accumulation
next(((stopOnScriptError || abortOnError || stopOnFailure) && err) ? err : null, _.assign({
event: event,
script: script,
result: result
}, err && {error: err})); // we use assign here to avoid needless error property
}.bind(this));
}.bind(this), function (err, results) {
// trigger the event completion callback
this.triggers[eventName](null, cursor, results, item);
next((abortOnError && err) ? err : null, results, err);
}.bind(this));
}
}
};
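
A hedged sketch of how an 'event' instruction gets queued by another command (the item command later in this diff does exactly this); `item`, `cursor`, `globals` and `environment` are assumed to exist in the calling scope:

this.queue('event', {
    name: 'test',                                 // listeners are collected via item.events.listeners()
    item: item,                                   // sdk.Item whose scripts should run
    coords: cursor.current(),
    context: { globals: globals, environment: environment, data: {} },
    trackContext: ['globals', 'environment'],     // scopes whose mutations are synced back post execution
    stopOnScriptError: true
}).done(function (executions, executionError) {
    // executions is the array accumulated by async.mapSeries above: {event, script, result, error?}
});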


@@ -0,0 +1,211 @@
var _ = require('lodash'),
async = require('async'),
uuid = require('uuid'),
// These are functions which a request passes through _before_ being sent. They take care of stuff such as
// variable resolution, loading of files, etc.
prehelpers = require('../request-helpers-presend'),
// Similarly, these run after the request, and have the power to dictate whether a request should be re-queued
posthelpers = require('../request-helpers-postsend'),
ReplayController = require('../replay-controller'),
RequesterPool = require('../../requester').RequesterPool,
RESPONSE_START_EVENT_BASE = 'response.start.',
RESPONSE_END_EVENT_BASE = 'response.end.';
module.exports = {
init: function (done) {
// Request timeouts are applied by the requester, so add them to requester options (if any).
// create a requester pool
this.requester = new RequesterPool(this.options, done);
},
// the http trigger is actually directly triggered by the requester
// todo - figure out whether we should trigger it from here rather than the requester.
triggers: ['beforeRequest', 'request', 'responseStart', 'io'],
process: {
/**
* @param {Object} payload
* @param {Item} payload.item
* @param {Object} payload.data
* @param {Object} payload.context
* @param {VariableScope} payload.globals
* @param {VariableScope} payload.environment
* @param {Cursor} payload.coords
* @param {Boolean} payload.abortOnError
* @param {String} payload.source
* @param {Function} next
*
* @todo validate payload
*/
httprequest: function (payload, next) {
var abortOnError = _.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError,
self = this,
context;
context = payload.context;
// generates a unique id for each http request
// a collection request can have multiple http requests
_.set(context, 'coords.httpRequestId', payload.httpRequestId || uuid());
// Run the helper functions
async.applyEachSeries(prehelpers, context, self, function (err) {
var xhr,
aborted,
item = context.item,
beforeRequest,
afterRequest,
safeNext;
// finish up current command
safeNext = function (error, finalPayload) {
// the error is passed twice to allow control between aborting the error vs just
// bubbling it up
return next((error && abortOnError) ? error : null, finalPayload, error);
};
// Helper function which calls the beforeRequest trigger.
beforeRequest = function (err) {
self.triggers.beforeRequest(err, context.coords, item.request, payload.item, {
httpRequestId: context.coords && context.coords.httpRequestId,
abort: function () {
!aborted && xhr && xhr.abort();
aborted = true;
}
});
};
// Helper function to call the afterRequest trigger.
afterRequest = function (err, response, request, cookies, history) {
self.triggers.request(err, context.coords, response, request, payload.item, cookies, history);
};
// Ensure that this is called.
beforeRequest(null);
if (err) {
// Since we encountered an error before even attempting to send the request, we bubble it up
// here.
afterRequest(err, undefined, item.request);
return safeNext(
err,
{request: item.request, coords: context.coords, item: context.originalItem}
);
}
if (aborted) {
return next(new Error('runtime: request aborted'));
}
self.requester.create({
type: 'http',
source: payload.source,
cursor: context.coords
}, function (err, requester) {
if (err) { return next(err); } // this should never happen
var requestId = uuid(),
replayOptions;
// eslint-disable-next-line max-len
requester.on(RESPONSE_START_EVENT_BASE + requestId, function (err, response, request, cookies, history) {
// we could have also added the response to the set of responses in the cloned item,
// but then, we would have to iterate over all of them, which seems unnecessary
context.response = response;
// run the post request helpers, which need to use the response, assigned above
async.applyEachSeries(posthelpers, context, self, function (error, options) {
if (error) {
return;
}
// find the first helper that requested a replay
replayOptions = _.find(options, {replay: true});
// bail out if we know that request will be replayed.
if (replayOptions) {
return;
}
// bail out if it's a pm.sendRequest
// @todo find a better way of identifying scripts
// @note don't use source='script'. Script requests
// can trigger `*.auth` source requests as well.
if (context.coords && context.coords.scriptId) {
return;
}
// trigger responseStart only for collection request.
// if there are replays, this will be triggered for the last request in the replay chain.
self.triggers.responseStart(err, context.coords, response, request, payload.item, cookies,
history);
});
});
requester.on(RESPONSE_END_EVENT_BASE + requestId, self.triggers.io.bind(self.triggers));
// eslint-disable-next-line max-len
xhr = requester.request(requestId, item.request, context.protocolProfileBehavior, function (err, res, req, cookies, history) {
err = err || null;
var nextPayload = {
response: res,
request: req,
item: context.originalItem,
cookies: cookies,
coords: context.coords,
history: history
},
replayController;
// trigger the request event.
// @note - we give the _original_ item in this trigger, so someone can do reference
// checking. Not sure if we should do that or not, but that's how it is.
// Don't break it.
afterRequest(err, res, req, cookies, history);
// Dispose off the requester, we don't need it anymore.
requester.dispose();
// do not process replays if there was an error
if (err) {
return safeNext(err, nextPayload);
}
// request replay logic
if (replayOptions) {
// prepare for replay
replayController = new ReplayController(context.replayState, self);
// replay controller invokes callback no. 1 when replaying the request
// invokes callback no. 2 when replay count has exceeded maximum limit
// @note: errors in replayed requests are passed to callback no. 1
return replayController.requestReplay(context,
context.item,
{source: replayOptions.helper},
// new payload with response from replay is sent to `next`
function (err, payloadFromReplay) { safeNext(err, payloadFromReplay); },
// replay was stopped, move on with older payload
function (err) {
// warn users that maximum retries have exceeded
// but don't bubble up the error with the request
self.triggers.console(context.coords, 'warn', (err.message || err));
safeNext(null, nextPayload);
}
);
}
// finish up for any other request
return safeNext(err, nextPayload);
});
});
});
}
}
};
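
A hedged sketch of dispatching a one-off 'httprequest' the way the script extension earlier in this diff does for pm.sendRequest; `sdk`, `createItemContext` and `cursor` are assumed to be in scope as they are there:

var payload = {
    item: new sdk.Item({ request: 'https://postman-echo.com/get' }),
    coords: cursor.current(),
    source: 'script',
    abortOnError: true      // bubbles request errors so the caller can handle them
};

payload.context = createItemContext(payload);

this.immediate('httprequest', payload)
    .done(function (result) {
        console.log(result && result.response && result.response.code);
    })
    .catch(function (err) {
        console.error(err);
    });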


@@ -0,0 +1,277 @@
var _ = require('lodash'),
uuid = require('uuid'),
Response = require('postman-collection').Response,
visualizer = require('../../visualizer'),
/**
* List of request properties which can be mutated via pre-request
*
* @private
* @const
* @type {String[]}
*/
ALLOWED_REQUEST_MUTATIONS = ['url', 'method', 'headers', 'body'],
extractVisualizerData,
getResponseJSON;
/**
* Returns visualizer data from the latest execution result.
*
* @param {Array} prereqExecutions - pre-script executions results
* @param {Array} testExecutions - test-script executions results
* @returns {Object|undefined} - visualizer data
*/
extractVisualizerData = function (prereqExecutions, testExecutions) {
var visualizerData,
i;
if (_.isArray(testExecutions)) {
// loop through the test executions in reverse order to return data from latest execution
for (i = testExecutions.length - 1; i >= 0; i--) {
visualizerData = _.get(testExecutions[i], 'result.return.visualizer');
if (visualizerData) {
return visualizerData;
}
}
}
if (_.isArray(prereqExecutions)) {
// extract visualizer data from pre-request script results if it is not found earlier
for (i = prereqExecutions.length - 1; i >= 0; i--) {
visualizerData = _.get(prereqExecutions[i], 'result.return.visualizer');
if (visualizerData) {
return visualizerData;
}
}
}
};
/**
* Convert response into a JSON serializable object.
* The stream property is converted to base64 string for performance reasons.
*
* @param {Object} response - SDK Response instance
* @returns {Object}
*/
getResponseJSON = function (response) {
if (!Response.isResponse(response)) {
return;
}
return {
id: response.id,
code: response.code,
status: response.status,
header: response.headers && response.headers.toJSON(),
stream: response.stream && {
type: 'Base64',
data: response.stream.toString('base64')
},
responseTime: response.responseTime
};
};
/**
* Add options
* stopOnError:Boolean
* @type {Object}
*/
module.exports = {
init: function (done) {
// @todo - code item global timeout and delay here
done();
},
triggers: ['beforeItem', 'item', 'beforePrerequest', 'prerequest', 'beforeTest', 'test'],
process: {
/**
* @param {Function=} callback
* @param {Object} payload
* @param {Function} next
* @todo validate payload
*/
item: function (callback, payload, next) {
// adjust for polymorphic instructions
if (!next && _.isFunction(payload) && !_.isFunction(callback)) {
next = payload;
payload = callback;
callback = null;
}
var item = payload.item,
originalRequest = item.request.clone(),
coords = payload.coords,
data = _.isObject(payload.data) ? payload.data : {},
environment = _.isObject(payload.environment) ? payload.environment : {},
globals = _.isObject(payload.globals) ? payload.globals : {},
collectionVariables = _.isObject(payload.collectionVariables) ? payload.collectionVariables : {},
_variables = _.isObject(payload._variables) ? payload._variables : {},
stopOnError = _.has(payload, 'stopOnError') ? payload.stopOnError : this.options.stopOnError,
// @todo: this is mostly coded in event extension and we are
// still not sure whether that is the right place for it to be.
abortOnFailure = this.options.abortOnFailure,
stopOnFailure = this.options.stopOnFailure,
delay = _.get(this.options, 'delay.item'),
ctxTemplate;
// validate minimum parameters required for the command to work
if (!(item && coords)) {
return next(new Error('runtime: item execution is missing required parameters'));
}
// store a common uuid in the coords
coords.ref = uuid.v4();
// here we code to queue prerequest script, then make a request and then execute test script
this.triggers.beforeItem(null, coords, item);
this.queueDelay(function () {
// create the context object for scripts to run
ctxTemplate = {
collectionVariables: collectionVariables,
_variables: _variables,
globals: globals,
environment: environment,
data: data,
request: item.request
};
// @todo make it less nested by coding Instruction.thenQueue
this.queue('event', {
name: 'prerequest',
item: item,
coords: coords,
context: ctxTemplate,
trackContext: ['globals', 'environment', 'collectionVariables'],
stopOnScriptError: stopOnError,
stopOnFailure: stopOnFailure
}).done(function (prereqExecutions, prereqExecutionError) {
// if stop on error is marked and script executions had an error,
// do not proceed with more commands, instead we bail out
if ((stopOnError || stopOnFailure) && prereqExecutionError) {
this.triggers.item(null, coords, item); // @todo - should this trigger receive error?
return callback && callback.call(this, prereqExecutionError, {
prerequest: prereqExecutions
});
}
// update allowed request mutation properties with the mutated context
// @note from this point forward, make sure this mutated
// request instance is used for upcoming commands.
ALLOWED_REQUEST_MUTATIONS.forEach(function (property) {
if (_.has(ctxTemplate, ['request', property])) {
item.request[property] = ctxTemplate.request[property];
}
// update property's parent reference
if (item.request[property] && typeof item.request[property].setParent === 'function') {
item.request[property].setParent(item.request);
}
});
this.queue('request', {
item: item,
globals: ctxTemplate.globals,
environment: ctxTemplate.environment,
collectionVariables: ctxTemplate.collectionVariables,
_variables: ctxTemplate._variables,
data: ctxTemplate.data,
coords: coords,
source: 'collection'
}).done(function (result, requestError) {
!result && (result = {});
var request = result.request,
response = result.response,
cookies = result.cookies;
if ((stopOnError || stopOnFailure) && requestError) {
this.triggers.item(null, coords, item); // @todo - should this trigger receive error?
return callback && callback.call(this, requestError, {
request: request
});
}
// also the test object requires the updated request object (since auth helpers may modify it)
request && (ctxTemplate.request = request);
// @note convert response instance to plain object.
// we want to avoid calling Response.toJSON() which triggers toJSON on Response.stream buffer.
// Because that increases the size of stringified object by 3 times.
// Also, that increases the total number of tokens (buffer.data) whereas Buffer.toString
// generates a single string that is easier to stringify and send over the UVM bridge.
response && (ctxTemplate.response = getResponseJSON(response));
// set cookies for this transaction
cookies && (ctxTemplate.cookies = cookies);
// the context template also has a test object to store assertions
ctxTemplate.tests = {}; // @todo remove
this.queue('event', {
name: 'test',
item: item,
coords: coords,
context: ctxTemplate,
trackContext: ['tests', 'globals', 'environment', 'collectionVariables'],
stopOnScriptError: stopOnError,
abortOnFailure: abortOnFailure,
stopOnFailure: stopOnFailure
}).done(function (testExecutions, testExecutionError) {
var visualizerData = extractVisualizerData(prereqExecutions, testExecutions),
visualizerResult;
if (visualizerData) {
visualizer.processTemplate(visualizerData.template,
visualizerData.data,
visualizerData.options,
function (err, processedTemplate) {
visualizerResult = {
// bubble up the errors while processing template through visualizer result
error: err,
// add processed template and data to visualizer result
processedTemplate: processedTemplate,
data: visualizerData.data
};
// trigger an event saying that item has been processed
this.triggers.item(null, coords, item, visualizerResult);
}.bind(this));
}
else {
// trigger an event saying that item has been processed
// @todo - should this trigger receive error?
this.triggers.item(null, coords, item, null);
}
// reset mutated request with original request instance
// @note request mutations are not persisted across iterations
item.request = originalRequest;
callback && callback.call(this, ((stopOnError || stopOnFailure) && testExecutionError) ?
testExecutionError : null, {
prerequest: prereqExecutions,
request: request,
response: response,
test: testExecutions
});
});
});
});
}.bind(this), {
time: delay,
source: 'item',
cursor: coords
}, next);
}
}
};
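
A hedged sketch of how the waterfall command (later in this diff) queues an 'item' instruction; the iteration data row shown is illustrative:

this.queue('item', {
    item: this.state.items[coords.position],
    coords: coords,
    data: { username: 'example' },                 // current iteration's data row
    environment: this.state.environment,
    globals: this.state.globals,
    collectionVariables: this.state.collectionVariables,
    _variables: this.state._variables
}, function (executionError, executions) {
    // executions -> { prerequest, request, response, test } as assembled by the callback above
});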


@@ -0,0 +1,100 @@
var _ = require('lodash'),
sdk = require('postman-collection'),
createItemContext = require('../create-item-context'),
/**
* Resolve variables in item and auth in context.
*
* @param {ItemContext} context
* @param {Item} [context.item]
* @param {RequestAuth} [context.auth]
* @param {Object} payload
* @param {VariableScope} payload._variables
* @param {Object} payload.data
* @param {VariableScope} payload.environment
* @param {VariableScope} payload.collectionVariables
* @param {VariableScope} payload.globals
*/
resolveVariables = function (context, payload) {
if (!(context.item && context.item.request)) { return; }
// @todo - resolve variables in a more graceful way
var variableDefinitions = [
// extract the variable list from variable scopes
// @note: this is the order of precedence for variable resolution - don't change it
payload._variables.values,
payload.data,
payload.environment.values,
payload.collectionVariables.values,
payload.globals.values
],
urlString = context.item.request.url.toString(),
item,
auth;
// @todo - no need to sync variables when SDK starts supporting resolution from scope directly
// @todo - avoid resolving the entire item as this unnecessarily resolves URL
item = context.item = new sdk.Item(context.item.toObjectResolved(null,
variableDefinitions, {ignoreOwnVariables: true}));
auth = context.auth;
// resolve variables in URL string
if (urlString) {
// @note this adds support for resolving nested variables as the URL parser doesn't support them well.
urlString = sdk.Property.replaceSubstitutions(urlString, variableDefinitions);
// Re-parse the URL from the resolved string
item.request.url = new sdk.Url(urlString);
}
// resolve variables in auth
auth && (context.auth = new sdk.RequestAuth(auth.toObjectResolved(null,
variableDefinitions, {ignoreOwnVariables: true})));
};
module.exports = {
init: function (done) {
done();
},
triggers: ['response'],
process: {
request: function (payload, next) {
var abortOnError = _.has(payload, 'abortOnError') ? payload.abortOnError : this.options.abortOnError,
// helper function to trigger the `response` callback and complete the command
complete = function (err, nextPayload) {
// nextPayload will be empty for unhandled errors
// trigger `response` callback
// nextPayload.response will be empty for error flows
// the `item` argument is resolved and mutated here
nextPayload && this.triggers.response(err, nextPayload.coords, nextPayload.response,
nextPayload.request, nextPayload.item, nextPayload.cookies, nextPayload.history);
// the error is passed twice to allow control between aborting the error vs just
// bubbling it up
return next(err && abortOnError ? err : null, nextPayload, err);
}.bind(this),
context = createItemContext(payload);
// resolve variables in item and auth
resolveVariables(context, payload);
// add context for use, after resolution
payload.context = context;
// we do not queue `httprequest` instruction here,
// queueing will unblock the item command to prepare for the next `event` instruction
// at this moment request is not fulfilled, and we want to block it
this.immediate('httprequest', payload)
.done(function (nextPayload, err) {
// change signature to error first
complete(err, nextPayload);
})
.catch(complete);
}
}
};
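
The precedence list in resolveVariables above (local pm.variables, then iteration data, then environment, collection and globals) can be illustrated with the same SDK helper it uses; treat the exact output as an assumption that earlier entries in the list win, which is what the @note above implies:

var sdk = require('postman-collection'),
    locals = new sdk.VariableScope();

locals.set('id', '42');

var resolved = sdk.Property.replaceSubstitutions('{{host}}/users/{{id}}', [
    locals.values,                               // pm.variables (highest precedence)
    { host: 'https://data.example', id: '7' },   // iteration data row
    new sdk.VariableScope().values               // environment (empty here)
]);

// expected: 'https://data.example/users/42' -> id comes from locals, host from the data row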


@@ -0,0 +1,239 @@
var _ = require('lodash'),
Cursor = require('../cursor'),
VariableScope = require('postman-collection').VariableScope,
prepareLookupHash,
extractSNR,
getIterationData;
/**
* Returns a hash of IDs and Names of items in an array
*
* @param {Array} items
* @returns {Object}
*/
prepareLookupHash = function (items) {
var hash = {
ids: {},
names: {},
obj: {}
};
_.forEach(items, function (item, index) {
if (item) {
item.id && (hash.ids[item.id] = index);
item.name && (hash.names[item.name] = index);
}
});
return hash;
};
extractSNR = function (executions, previous) {
var snr = previous || {};
_.isArray(executions) && executions.forEach(function (execution) {
_.has(_.get(execution, 'result.return'), 'nextRequest') && (
(snr.defined = true),
(snr.value = execution.result.return.nextRequest)
);
});
return snr;
};
/**
* Returns the data for the given iteration
*
* @function getIterationData
* @param {Array} data - The data array containing all iterations' data
* @param {Number} iteration - The iteration to get data for
* @return {Any} - The data for the iteration
*/
getIterationData = function (data, iteration) {
// if iteration has a corresponding data element use that
if (iteration < data.length) {
return data[iteration];
}
// otherwise use the last data element
return data[data.length - 1];
};
/**
* Adds options
* disableSNR:Boolean
*
* @type {Object}
*/
module.exports = {
init: function (done) {
var state = this.state;
// ensure that the environment, globals and collectionVariables are in VariableScope instance format
state.environment = VariableScope.isVariableScope(state.environment) ? state.environment :
new VariableScope(state.environment);
state.globals = VariableScope.isVariableScope(state.globals) ? state.globals :
new VariableScope(state.globals);
state.collectionVariables = VariableScope.isVariableScope(state.collectionVariables) ?
state.collectionVariables : new VariableScope(state.collectionVariables);
state._variables = new VariableScope();
// ensure that the items and iteration data set is in place
!_.isArray(state.items) && (state.items = []);
!_.isArray(state.data) && (state.data = []);
!_.isObject(state.data[0]) && (state.data[0] = {});
// if the location in state is already normalised then go ahead and queue iteration, else normalise the
// location
state.cursor = Cursor.box(state.cursor, { // we pass bounds to ensure there is no stale state
cycles: this.options.iterationCount,
length: state.items.length
});
this.waterfall = state.cursor; // copy the location object to instance for quick access
// queue the iteration command on start
this.queue('waterfall', {
coords: this.waterfall.current(),
static: true,
start: true
});
// clear the variable that is supposed to store item name and id lookup hash for easy setNextRequest
this.snrHash = null; // we populate it in the first SNR call
done();
},
triggers: ['beforeIteration', 'iteration'],
process: {
/**
* This processor simply queues scripts and requests in a linear chain.
*
* @param {Object} payload
* @param {Object} payload.coords
* @param {Boolean} [payload.static=false]
* @param {Function} next
*/
waterfall: function (payload, next) {
// we procure the coordinates that we have to pick the item and data from. the data is picked per iteration (see getIterationData)
var coords = payload.static ? payload.coords : this.waterfall.whatnext(payload.coords),
item = this.state.items[coords.position],
delay;
// if there is nothing to process, we bail out from here, even before we enter the iteration cycle
if (coords.empty) {
return next();
}
if (payload.stopRunNow) {
this.triggers.iteration(null, payload.coords);
return next();
}
// if it is a beginning of a run, we need to raise events for iteration start
if (payload.start) {
this.triggers.beforeIteration(null, coords);
}
// if this is a new iteration, we close the previous one and start new
if (coords.cr) {
// getting the iteration delay here ensures that delay is only called between two iterations
delay = _.get(this.options, 'delay.iteration', 0);
this.triggers.iteration(null, payload.coords);
this.triggers.beforeIteration(null, coords);
}
// if this is end of waterfall, it is an end of iteration and also end of run
if (coords.eof) {
this.triggers.iteration(null, coords);
return next();
}
this.queueDelay(function () {
this.queue('item', {
item: item,
coords: coords,
data: getIterationData(this.state.data, coords.iteration),
environment: this.state.environment,
globals: this.state.globals,
collectionVariables: this.state.collectionVariables,
_variables: this.state._variables
}, function (executionError, executions) {
var snr = {},
nextCoords,
seekingToStart,
stopRunNow,
stopOnFailure = this.options.stopOnFailure;
if (!executionError) {
// extract set next request
snr = extractSNR(executions.prerequest);
snr = extractSNR(executions.test, snr);
}
if (!this.options.disableSNR && snr.defined) {
// prepare the snr lookup hash if it is not already provided
// @todo - figure out a way to reset this post run complete
!this.snrHash && (this.snrHash = prepareLookupHash(this.state.items));
// if it is null, we do not proceed further and move on
// see if a request is found in the hash and then reset the coords position to the lookup
// value.
(snr.value !== null) && (snr.position = // eslint-disable-next-line no-nested-ternary
this.snrHash[_.has(this.snrHash.ids, snr.value) ? 'ids' :
(_.has(this.snrHash.names, snr.value) ? 'names' : 'obj')][snr.value]);
snr.valid = _.isNumber(snr.position);
}
nextCoords = _.clone(coords);
if (snr.valid) {
// if the position was detected, we set the position to the one previous to the desired location
// this ensures that the next call to .whatnext() will return the desired position.
nextCoords.position = snr.position - 1;
}
else {
// if snr was requested, but not valid, we stop this iteration.
// stopping an iteration is equivalent to seeking the last position of the current
// iteration, so that the next call to .whatnext() will automatically move to the next
// iteration.
(snr.defined || executionError) && (nextCoords.position = nextCoords.length - 1);
// If we need to stop on a run, we set the stop flag to true.
(stopOnFailure && executionError) && (stopRunNow = true);
}
// @todo - do this in unhacky way
if (nextCoords.position === -1) {
nextCoords.position = 0;
seekingToStart = true;
}
this.waterfall.seek(nextCoords.position, nextCoords.iteration, function (err, chngd, coords) {
// this condition should never arise, so better throw error when this happens
if (err) {
throw err;
}
this.queue('waterfall', {
coords: coords,
static: seekingToStart,
stopRunNow: stopRunNow
});
}, this);
});
}.bind(this), {
time: delay,
source: 'iteration',
cursor: coords
}, next);
}
}
};
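
The two private helpers at the top of this file are easy to reason about in isolation; the calls below are illustrative only, since neither function is exported:

// extractSNR picks the last nextRequest value returned by any script execution
var snr = extractSNR([
    { result: { return: {} } },
    { result: { return: { nextRequest: 'Get Token' } } }
]);
// snr -> { defined: true, value: 'Get Token' }

// getIterationData falls back to the last data row once iterations outnumber rows
getIterationData([{ a: 1 }, { a: 2 }], 0); // -> { a: 1 }
getIterationData([{ a: 1 }, { a: 2 }], 5); // -> { a: 2 }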


@@ -0,0 +1,296 @@
var sdk = require('postman-collection'),
ItemGroup = sdk.ItemGroup,
Item = sdk.Item,
DEFAULT_LOOKUP_STRATEGY = 'idOrName',
INVALID_LOOKUP_STRATEGY_ERROR = 'runtime~extractRunnableItems: Invalid entrypoint lookupStrategy',
/**
* Accumulate all items in order if entry point is a collection/folder.
* If an item is passed returns an array with that item.
*
* @param {ItemGroup|Item} node
*
* @returns {Array<Item>}
*
* @todo: Possibly add mapItem to sdk.ItemGroup?
*/
flattenNode = function (node) {
var items = [];
// bail out
if (!node) { return items; }
if (ItemGroup.isItemGroup(node)) {
node.forEachItem(function (item) { items.push(item); });
}
else if (Item.isItem(node)) {
items.push(node);
}
return items;
},
/**
* Finds an item or item group based on id or name.
*
* @param {ItemGroup} itemGroup
* @param {?String} match
*
* @returns {Item|ItemGroup|undefined}
*/
findItemOrGroup = function (itemGroup, match) {
if (!itemGroup || !itemGroup.items) { return; }
var matched;
// lookup match on own children
itemGroup.items.each(function (itemOrGroup) {
if (itemOrGroup.id === match || itemOrGroup.name === match) {
matched = itemOrGroup;
return false; // exit the loop
}
});
// if there is no match on own children, start lookup on grand children
!matched && itemGroup.items.each(function (itemOrGroup) {
matched = findItemOrGroup(itemOrGroup, match);
if (matched) { return false; } // exit the loop
});
return matched;
},
/**
* Finds items based on multiple ids or names provided.
*
* @param {ItemGroup} itemGroup - Composite list of Item or ItemGroup.
* @param {Object} entrypointSubset - Entry-points reference passed across multiple recursive calls.
* @param {Boolean} _continueAccumulation - Flag used to decide whether to accumulate items or not.
* @param {Object} _accumulatedItems - Found Items or ItemGroups.
* @returns {Object} Found Items or ItemGroups.
*/
findItemsOrGroups = function (itemGroup, entrypointSubset, _continueAccumulation, _accumulatedItems) {
!_accumulatedItems && (_accumulatedItems = {members: [], reference: {}});
if (!itemGroup || !itemGroup.items) { return _accumulatedItems; }
var match;
itemGroup.items.each(function (item) {
// bail out if all entry-points are found.
if (!Object.keys(entrypointSubset).length) { return false; }
// lookup for item.id in entrypointSubset and if not found, lookup by item.name.
if (!(match = entrypointSubset[item.id] && item.id)) {
match = entrypointSubset[item.name] && item.name;
}
if (match) {
// only accumulate items that were not already tracked via their parent entrypoint.
if (_continueAccumulation) {
_accumulatedItems.members.push(item);
_accumulatedItems.reference[match] = item;
}
// delete looked-up entrypoint.
delete entrypointSubset[match];
}
// recursive call to find nested entry-points. To make sure all provided entry-points got tracked.
// _continueAccumulation flag will be `false` for children if their parent entrypoint is found.
return findItemsOrGroups(item, entrypointSubset, !match, _accumulatedItems);
});
return _accumulatedItems;
},
/**
* Finds an item or group from a path. The path should be an array of ids from the parent chain.
*
* @param {Collection} collection
* @param {Object} options
* @param {String} options.execute
* @param {?Array<String>} [options.path]
* @param {Function} callback
*/
lookupByPath = function (collection, options, callback) {
var lookupPath,
lastMatch = collection,
lookupOptions = options || {},
i,
ii;
// path can be empty, if item/group is at the top level
lookupPath = lookupOptions.path || [];
// push execute id to the path
options.execute && (lookupPath.push(options.execute));
// go down the lookup path
for (i = 0, ii = lookupPath.length; (i < ii) && lastMatch; i++) {
lastMatch = lastMatch.items && lastMatch.items.one(lookupPath[i]);
}
callback && callback(null, flattenNode(lastMatch), lastMatch);
},
/**
* Finds an item or group on a collection with a matching id or name.
*
* @param {Collection} collection
* @param {Object} options
* @param {String} [options.execute]
* @param {Function} callback
*/
lookupByIdOrName = function (collection, options, callback) {
var match = options.execute,
matched;
if (!match) { return callback(null, []); }
// do a recursive lookup
matched = findItemOrGroup(collection, match);
callback(null, flattenNode(matched), matched);
},
/**
* Finds items or item groups in a collection with matching list of ids or names.
*
* @note runnable items follows the order in which the items are defined in the collection
*
* @param {Collection} collection
* @param {Object} options
* @param {Array<String>} [options.execute]
* @param {Function} callback
*/
lookupByMultipleIdOrName = function (collection, options, callback) {
var entrypoints = options.execute,
preserveOrder = options.preserveOrder,
entrypointLookup = {},
runnableItems = [],
items,
i,
ii;
if (!(Array.isArray(entrypoints) && entrypoints.length)) {
return callback(null, []);
}
// add temp reference for faster lookup of entry-point name/id.
// entry-points with same name/id will be ignored.
for (i = 0, ii = entrypoints.length; i < ii; i++) {
entrypointLookup[entrypoints[i]] = true;
}
items = findItemsOrGroups(collection, entrypointLookup, true);
// Extracting the items and folders by the order in which they appear as folder/request arguments,
// only if specified in the entrypoint.preserveOrder
if (preserveOrder) {
items.members = entrypoints.map(function (ref) {
return items.reference[ref];
});
}
// at this point, we should have traversed all items mentioned in the entrypoint and created a linear
// subset of items. However, if entries still remain in the lookup object, it implies that extra items
// were present in the user input for which no corresponding items exist in the collection. As such,
// we need to bail out if any of the given entry-points is not found.
if (Object.keys(entrypointLookup).length) {
return callback(null, []);
}
// extract runnable items from the searched items.
for (i = 0, ii = items.members.length; i < ii; i++) {
runnableItems = runnableItems.concat(flattenNode(items.members[i]));
}
callback(null, runnableItems, collection);
},
/**
* Finds items or item groups in a collection with matching list of ids or names.
*
* @note runnable items follows the order of entrypoints
*
* @param {Collection} collection
* @param {Object} options
* @param {Array<String>} [options.execute]
* @param {Function} callback
*/
lookupByOrder = function (collection, options, callback) {
var entrypoints = options.execute,
entrypointLookup = {},
runnableItems = [],
items,
i,
ii;
if (!(Array.isArray(entrypoints) && entrypoints.length)) {
return callback(null, []);
}
// add temp reference for faster lookup of entry-point name/id.
// entry-points with same name/id will be ignored.
for (i = 0, ii = entrypoints.length; i < ii; i++) {
entrypointLookup[entrypoints[i]] = true;
}
items = findItemsOrGroups(collection, entrypointLookup, true);
// at this point, we should have traversed all items mentioned in the entrypoint and created a linear
// subset of items. However, if entries still remain in the lookup object, it implies that extra items
// were present in the user input for which no corresponding items exist in the collection. As such,
// we need to bail out if any of the given entry-points is not found.
if (Object.keys(entrypointLookup).length) {
return callback(null, []);
}
// extract runnable items from the searched items.
entrypoints.forEach(function (entrypoint) {
runnableItems = runnableItems.concat(flattenNode(items.reference[entrypoint]));
});
callback(null, runnableItems, collection);
},
lookupStrategyMap = {
path: lookupByPath,
idOrName: lookupByIdOrName,
followOrder: lookupByOrder,
multipleIdOrName: lookupByMultipleIdOrName
},
/**
* Extracts all the items on a collection starting from the entrypoint.
*
* @param {Collection} collection
* @param {?Object} [entrypoint]
* @param {String} [entrypoint.execute] id of item or group to execute (can be name when used with `idOrName`)
* @param {Array<String>} [entrypoint.path] path leading to the item or group selected (only for `path` strategy)
* @param {String} [entrypoint.lookupStrategy=idOrName] strategy to use for entrypoint lookup [idOrName, path]
* @param {Boolean} [entrypoint.preserveOrder] option to preserve the order of folder/items when specified.
* @param {Function} callback
*/
extractRunnableItems = function (collection, entrypoint, callback) {
var lookupFunction,
lookupStrategy;
// if no entrypoint is specified, flatten the entire collection
if (!entrypoint) { return callback(null, flattenNode(collection), collection); }
lookupStrategy = entrypoint.lookupStrategy || DEFAULT_LOOKUP_STRATEGY;
// lookup entry using given strategy
// eslint-disable-next-line no-cond-assign
(lookupFunction = lookupStrategyMap[lookupStrategy]) ?
lookupFunction(collection, entrypoint, callback) :
callback(new Error(INVALID_LOOKUP_STRATEGY_ERROR)); // eslint-disable-line callback-return
};
module.exports = {
extractRunnableItems: extractRunnableItems
};
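
A hedged end-to-end sketch of the exported lookup; the require path mirrors where this file sits in the package and the tiny collection is illustrative:

var sdk = require('postman-collection'),
    extractRunnableItems = require('postman-runtime/lib/runner/extract-runnable-items').extractRunnableItems,
    collection = new sdk.Collection({
        item: [{
            name: 'Smoke',
            item: [{ name: 'ping', request: 'https://postman-echo.com/get' }]
        }]
    });

extractRunnableItems(collection, { execute: 'Smoke', lookupStrategy: 'idOrName' },
    function (err, runnableItems, entrypoint) {
        // runnableItems -> [Item 'ping'], entrypoint -> the 'Smoke' ItemGroup
        console.log(err, runnableItems.map(function (item) { return item.name; }));
    });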

136
node_modules/postman-runtime/lib/runner/index.js generated vendored Normal file

@@ -0,0 +1,136 @@
var _ = require('lodash'),
backpack = require('../backpack'),
Run = require('./run'),
extractRunnableItems = require('./extract-runnable-items').extractRunnableItems,
Runner,
defaultTimeouts = {
global: 3 * 60 * 1000, // 3 minutes
request: Infinity,
script: Infinity
};
/**
* @typedef {Object} runCallback
* @property {Function} [done]
* @property {Function} [error]
* @property {Function} [success]
*/
/**
* @constructor
*
* @param {Object} [options]
*/
Runner = function PostmanCollectionRunner (options) { // eslint-disable-line func-name-matching
this.options = _.assign({}, options);
};
_.assign(Runner.prototype, {
/**
* Prepares `run` config by combining `runner` config with given run options.
*
* @param {Object} [options]
* @param {Object} [options.timeout]
* @param {Object} [options.timeout.global]
* @param {Object} [options.timeout.request]
* @param {Object} [options.timeout.script]
*/
prepareRunConfig: function (options) {
// combine runner config and make a copy
var runOptions = _.merge(_.omit(options, ['environment', 'globals', 'data']), this.options.run) || {};
// start timeout sanitization
!runOptions.timeout && (runOptions.timeout = {});
_.mergeWith(runOptions.timeout, defaultTimeouts, function (userTimeout, defaultTimeout) {
// non numbers, Infinity and missing values are set to default
if (!_.isFinite(userTimeout)) { return defaultTimeout; }
// 0 and negative numbers are set to Infinity, which only leaves positive numbers
return userTimeout > 0 ? userTimeout : Infinity;
});
return runOptions;
},
/**
* Runs a collection or a folder.
*
* @param {Collection} collection
* @param {Object} [options]
* @param {Array.<Item>} options.items
* @param {Array.<Object>} [options.data]
* @param {Object} [options.globals]
* @param {Object} [options.environment]
* @param {Number} [options.iterationCount]
* @param {CertificateList} [options.certificates]
* @param {ProxyConfigList} [options.proxies]
* @param {Array} [options.data]
* @param {Object} [options.entrypoint]
* @param {String} [options.entrypoint.execute] ID of the item-group to be run.
* Can be Name if `entrypoint.lookupStrategy` is `idOrName`
* @param {String} [options.entrypoint.lookupStrategy=idOrName] strategy to lookup the entrypoint [idOrName, path]
* @param {Array<String>} [options.entrypoint.path] path to lookup
* @param {Object} [options.run] Run-specific options, such as options related to the host
*
* @param {Function} callback
*/
run: function (collection, options, callback) {
var self = this,
runOptions = this.prepareRunConfig(options);
callback = backpack.normalise(callback);
!_.isObject(options) && (options = {});
// @todo make the extract runnables interface better defined and documented
// - give the ownership of error to each strategy lookup functions
// - think about moving these codes into an extension command prior to waterfall
// - the third argument in callback that returns control is ambiguous and can be removed if error is controlled
// by each lookup function.
// - the interface can be further broken down to have the "flattenNode" action be made common and not be
// required to be coded in each lookup strategy
//
// serialise the items into a linear array based on the lookup strategy provided as input
extractRunnableItems(collection, options.entrypoint, function (err, runnableItems, entrypoint) {
if (err || !runnableItems) { return callback(new Error('Error fetching run items')); }
// Bail out only if: abortOnError is set and the returned entrypoint is invalid
if (options.abortOnError && !entrypoint) {
// eslint-disable-next-line max-len
return callback(new Error(`Unable to find a folder or request: ${_.get(options, 'entrypoint.execute')}`));
}
// ensure data is an array
!_.isArray(options.data) && (options.data = [{}]);
// get iterationCount from data if not set
if (!runOptions.iterationCount) {
runOptions.iterationCount = options.data.length;
}
return callback(null, (new Run({
items: runnableItems,
data: options.data,
environment: options.environment,
globals: _.has(options, 'globals') ? options.globals : self.options.globals,
// @todo Move to item level to support Item and ItemGroup variables
collectionVariables: collection.variables,
certificates: options.certificates,
proxies: options.proxies
}, runOptions)));
});
}
});
_.assign(Runner, {
/**
* Expose Run instance for testability
*
* @type {Run}
*/
Run: Run
});
module.exports = Runner;
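A minimal sketch (not part of the vendored file) of the timeout sanitization that prepareRunConfig performs above, assuming postman-runtime is installed locally: non-finite or missing values fall back to the defaults, while zero and negative numbers are coerced to Infinity.
var Runner = require('postman-runtime').Runner,
    runner = new Runner();
// prepareRunConfig is an internal helper; calling it directly here is purely illustrative
console.log(runner.prepareRunConfig({timeout: {global: 0, request: 5000, script: 'oops'}}).timeout);
// -> { global: Infinity, request: 5000, script: Infinity }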

205
node_modules/postman-runtime/lib/runner/instruction.js generated vendored Normal file
View File

@@ -0,0 +1,205 @@
/**
 * An instruction is a self-contained piece of information that can be created and later executed. {@link Run}
* instance uses this as the values of the `Run.next` queue.
*
* @module Run~Instructions
*/
var _ = require('lodash'),
Timings = require('./timings'),
arrayProtoSlice = Array.prototype.slice,
arrayProtoUnshift = Array.prototype.unshift,
pool; // function
/**
* Create a new instruction pool
*
* @param {Object.<Function>} processors - hash of all command processor functions
* @returns {InstructionPool}
*/
pool = function (processors) {
!_.isObject(processors) && (processors = {});
/**
* Create a new instruction to be executed later
*
* @constructor
*
* @param {String} name - name of the instruction. This is useful for later lookup of the `processor` function when
* deserialising this object
* @param {Object} [payload] - a **JSON compatible** object that will be forwarded as the 2nd last parameter to the
* processor.
* @param {Array} [args] - all the arguments that need to be passed to the processor are in this array
* @private
* @example
* var inst = Instruction.create(function (arg1, payload, next) {
* console.log(payload);
* next(null, 'hello-on-execute with ' + arg1);
* }, 'sample-instruction', {
* payloadData1: 'value'
* }, ['one-arg']);
*
* // now, when we execute, the payload is logged and the message is as expected
* inst.execute(function (err, message) {
* console.log(message);
* });
*
*/
var Instruction = function (name, payload, args) {
var processor = processors[name];
if (!_.isString(name) || !_.isFunction(processor)) {
throw new Error('run-instruction: invalid construction');
}
// ensure that payload is an object so that data storage can be done. also ensure arguments is an array
!_.isObject(payload) && (payload = {});
!_.isArray(args) && (args = []);
_.assign(this, /** @lends Instruction.prototype */ {
/**
* @type {String}
*/
action: name,
/**
* @type {Object}
*/
payload: payload,
/**
* @type {Array}
*/
in: args,
/**
* @type {Timings}
*/
timings: Timings.create(),
/**
* @private
* @type {Function}
*/
_processor: processor
});
// record the timing when this instruction was created
this.timings.record('created');
};
/**
* Shortcut to `new Instruction(...);`
*
* @param {Function} processor
* @param {String} name
* @param {Object} [payload]
* @param {Array} [args]
*
* @returns {Instruction}
*/
Instruction.create = function (processor, name, payload, args) {
return new Instruction(processor, name, payload, args);
};
/**
* Store all thenable items
*
* @type {Array}
*/
Instruction._queue = [];
/**
* Executes an instruction with previously saved payload and arguments
*
* @param {Function} callback
* @param {*} [scope]
*
* @todo: use timeback and control it via options sent during pool creation as an option
*/
Instruction.prototype.execute = function (callback, scope) {
!scope && (scope = this);
var params = _.clone(this.in),
sealed = false,
doneAndSpread = function (err) {
if (sealed) {
console.error('__postmanruntime_fatal_debug: instruction.execute callback called twice');
if (err) {
console.error(err);
}
return;
}
sealed = true;
this.timings.record('end');
var args = arrayProtoSlice.call(arguments);
arrayProtoUnshift.call(args, scope);
if (err) { // in case it errored, we do not process any thenables
_.isArray(this._catch) && _.invokeMap(this._catch, _.apply, scope, arguments);
}
else {
// call all the `then` stuff and then the main callback
_.isArray(this._done) && _.invokeMap(this._done, _.apply, scope, _.tail(arguments));
}
setTimeout(callback.bind.apply(callback, args), 0);
}.bind(this);
// add two additional arguments at the end of the arguments saved - i.e. the payload and a function to call the
// callback asynchronously
params.push(this.payload, doneAndSpread);
this.timings.record('start');
// run the processor in a try block to avoid causing stalled runs
try {
this._processor.apply(scope, params);
}
catch (e) {
doneAndSpread(e);
}
};
Instruction.prototype.done = function (callback) {
(this._done || (this._done = [])).push(callback);
return this;
};
Instruction.prototype.catch = function (callback) {
(this._catch || (this._catch = [])).push(callback);
return this;
};
Instruction.clear = function () {
_.forEach(Instruction._queue, function (instruction) {
delete instruction._done;
});
Instruction._queue.length = 0;
};
Instruction.shift = function () {
return Instruction._queue.shift.apply(Instruction._queue, arguments);
};
Instruction.unshift = function () {
return Instruction._queue.unshift.apply(Instruction._queue, arguments);
};
Instruction.push = function () {
return Instruction._queue.push.apply(Instruction._queue, arguments);
};
return Instruction;
};
module.exports = {
pool: pool
};
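A minimal sketch (not part of the vendored file) of how the pool above is consumed; the `greet` processor and the relative require path are assumptions for illustration. It mirrors the three-argument create(name, payload, args) call used by run.js.
var Instruction = require('./instruction').pool({
    // hypothetical processor: receives the forwarded args, then the payload, then the done callback
    greet: function (who, payload, next) {
        next(null, payload.greeting + ', ' + who);
    }
});
var instruction = Instruction.create('greet', {greeting: 'hello'}, ['world']);
instruction.execute(function (err, message) {
    console.log(err, message); // null 'hello, world'
});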

View File

@@ -0,0 +1,88 @@
var _ = require('lodash'),
createItemContext = require('./create-item-context'),
// total number of replays allowed
MAX_REPLAY_COUNT = 3,
ReplayController;
/**
* Handles replay logic with replayState from context.
* Makes sure request replays do not go into an infinite loop.
*
* @param {ReplayState} replayState
* @param {Run} run
*
* @constructor
*/
ReplayController = function ReplayController (replayState, run) {
// store state
this.count = replayState ? replayState.count : 0;
this.run = run;
};
_.assign(ReplayController.prototype, /** @lends ReplayController.prototype */{
/**
* Sends a request in the item. This takes care of limiting the total number of replays for a request.
*
* @param {Object} context
* @param {Request} item
* @param {Object} desiredPayload a partial payload to use for the replay request
* @param {Function} success this callback is invoked when the replay controller has sent the request
* @param {Function} failure this callback is invoked when the replay controller decides not to send the request
*/
requestReplay: function (context, item, desiredPayload, success, failure) {
// max retries exceeded
if (this.count >= MAX_REPLAY_COUNT) {
return failure(new Error('runtime: maximum intermediate request limit exceeded'));
}
// update replay count state
this.count++;
// update replay state to context
context.replayState = this.getReplayState();
// construct payload for request
var payload = _.defaults({
item: item,
// abortOnError makes sure request command bubbles errors
// so we can pass it on to the callback
abortOnError: true
}, desiredPayload);
// create item context from the new item
payload.context = createItemContext(payload, context);
this.run.immediate('httprequest', payload)
.done(function (response) {
success(null, response);
})
.catch(success);
},
/**
* Returns a serialized version of current ReplayController
*
* @returns {ReplayState}
*/
getReplayState: function () {
/**
* Defines the current replay state of a request.
*
* By replay state, we mean the number of requests sent
* as part of one collection request. These can be intermediate requests,
* or replays of the same collection request.
*
* @typedef {Object} ReplayState
*
* @property {Number} count total number of requests, including Collection requests and replays
*/
return {
count: this.count
};
}
});
module.exports = ReplayController;
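A short hedged sketch (relative require path assumed) of how the replay budget above is enforced: once the recorded count reaches MAX_REPLAY_COUNT, the failure callback is invoked instead of another request being sent, so the run object is never touched on this branch.
var ReplayController = require('./replay-controller'),
    exhausted = new ReplayController({count: 3}, null); // run is not needed for this code path
exhausted.requestReplay({}, null, {}, function () {}, function (err) {
    console.log(err.message); // runtime: maximum intermediate request limit exceeded
});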

View File

@@ -0,0 +1,61 @@
var _ = require('lodash'),
AuthLoader = require('../authorizer/index').AuthLoader,
createAuthInterface = require('../authorizer/auth-interface'),
DOT_AUTH = '.auth';
module.exports = [
// Post authorization.
function (context, run, done) {
// if no response is provided, there's nothing to do, and probably means that the request errored out
// let the actual request command handle whatever needs to be done.
if (!context.response) { return done(); }
// bail out if there is no auth
if (!(context.auth && context.auth.type)) { return done(); }
var auth = context.auth,
originalAuth = context.originalItem.getAuth(),
originalAuthParams = originalAuth && originalAuth.parameters(),
authHandler = AuthLoader.getHandler(auth.type),
authInterface = createAuthInterface(auth);
// bail out if there is no matching auth handler for the type
if (!authHandler) {
run.triggers.console(context.coords, 'warn', 'runtime: could not find a handler for auth: ' + auth.type);
return done();
}
// invoke `post` on the Auth
authHandler.post(authInterface, context.response, function (err, success) {
// sync all auth system parameters to the original auth
originalAuthParams && auth.parameters().each(function (param) {
param && param.system && originalAuthParams.upsert({key: param.key, value: param.value, system: true});
});
// sync auth state back to item request
_.set(context, 'item.request.auth', auth);
// there was an error in auth post hook
// warn the user but don't bubble it up
if (err) {
run.triggers.console(
context.coords,
'warn',
'runtime~' + auth.type + '.auth: there was an error validating auth: ' + (err.message || err),
err
);
return done();
}
// auth was verified
if (success) { return done(); }
// request a replay of request
done(null, {replay: true, helper: auth.type + DOT_AUTH});
});
}
];

View File

@@ -0,0 +1,383 @@
var _ = require('lodash'),
async = require('async'),
util = require('./util'),
sdk = require('postman-collection'),
createAuthInterface = require('../authorizer/auth-interface'),
AuthLoader = require('../authorizer/index').AuthLoader,
ReplayController = require('./replay-controller'),
DOT_AUTH = '.auth';
module.exports = [
// File loading
function (context, run, done) {
if (!context.item) { return done(new Error('Nothing to resolve files for.')); }
var triggers = run.triggers,
cursor = context.coords,
resolver = run.options.fileResolver,
request = context.item && context.item.request,
mode,
data;
if (!request) { return done(new Error('No request to send.')); }
// if body is disabled then skip loading files.
// @todo this may cause problem if body is enabled/disabled programmatically from pre-request script.
if (request.body && request.body.disabled) { return done(); }
// todo: add helper functions in the sdk to do this cleanly for us
mode = _.get(request, 'body.mode');
data = _.get(request, ['body', mode]);
// if there is no mode specified, or no data for the specified mode we cannot resolve anything!
// @note that if the source is not readable, there is no point reading anything, yet we need to warn that the file
// upload was not done. Hence we have to proceed even when the source is unreadable
if (!data) { // we do not need to check `mode` here since false mode returns no `data`
return done();
}
// in this block, we simply use async.waterfall to ensure that all form of file reading is async. essentially,
// we first determine the data mode and based on it pass the waterfall functions.
async.waterfall([async.constant(data), {
// form data parsing simply "enriches" all form parameters having file data type by replacing / setting the
// value as a read stream
formdata: function (formdata, next) {
// ensure that we only process the file type
async.eachSeries(_.filter(formdata.all(), {type: 'file'}), function (formparam, callback) {
if (!formparam || formparam.disabled) {
return callback(); // disabled params will be filtered in body-builder.
}
var paramIsComposite = Array.isArray(formparam.src),
onLoadError = function (err, disableParam) {
// triggering a warning message for the user
triggers.console(cursor, 'warn',
`Form param \`${formparam.key}\`, file load error: ${err.message || err}`);
// set disabled, it will be filtered in body-builder
disableParam && (formparam.disabled = true);
};
// handle missing file src
if (!formparam.src || (paramIsComposite && !formparam.src.length)) {
onLoadError(new Error('missing file source'), false);
return callback();
}
// handle form param with a single file
// @note we are handling single file first so that we do not need to hit additional complexity of
// handling multiple files while the majority use-case would be to handle single file.
if (!paramIsComposite) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, formparam.src, function (err, stream) {
if (err) {
onLoadError(err, true);
}
else {
formparam.value = stream;
}
callback();
});
return;
}
// handle form param with multiple files
// @note we use map-limit here instead of free-form map in order to avoid choking the file system
// with many parallel descriptor access.
async.mapLimit(formparam.src, 10, function (src, next) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, src, function (err, stream) {
if (err) {
// @note don't throw error or disable param if one of the src fails to load
onLoadError(err);
return next(); // swallow the error
}
next(null, {src: src, value: stream});
});
}, function (err, results) {
if (err) {
onLoadError(err, true);
return done();
}
_.forEach(results, function (result) {
// Insert individual param above the current formparam
result && formdata.insert(new sdk.FormParam(_.assign(formparam.toJSON(), result)),
formparam);
});
// remove the current formparam after exploding src
formdata.remove(formparam);
done();
});
}, next);
},
// file data
file: function (filedata, next) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
util.createReadStream(resolver, filedata.src, function (err, stream) {
if (err) {
triggers.console(cursor, 'warn', 'Binary file load error: ' + (err.message || err));
filedata.value = null; // ensure this does not mess with requester
delete filedata.content; // @todo - why content?
}
else {
filedata.content = stream;
}
next();
});
}
}[mode] || async.constant()], function (err) {
// just as a precaution, show the error in the console. each resolver should anyway handle its own console
// warnings.
// @todo - get cursor here.
err && triggers.console(cursor, 'warn', 'file data resolution error: ' + (err.message || err));
done(null); // absorb the error since a console warning has been triggered
});
},
// Authorization
function (context, run, done) {
// validate all stuff. don't ask.
if (!context.item) { return done(new Error('runtime: nothing to authorize.')); }
// bail out if there is no auth
if (!(context.auth && context.auth.type)) { return done(null); }
// get auth handler
var auth = context.auth,
authType = auth.type,
originalAuth = context.originalItem.getAuth(),
originalAuthParams = originalAuth && originalAuth.parameters(),
authHandler = AuthLoader.getHandler(authType),
authPreHook,
authInterface,
authSignHook = function () {
try {
authHandler.sign(authInterface, context.item.request, function (err) {
// handle all types of errors in one place, see catch block
if (err) { throw err; }
done();
});
}
catch (err) {
// handles synchronous and asynchronous errors in auth.sign
run.triggers.console(context.coords,
'warn',
'runtime~' + authType + '.auth: could not sign the request: ' + (err.message || err),
err
);
// swallow the error, we've warned the user
done();
}
};
// bail out if there is no matching auth handler for the type
if (!authHandler) {
run.triggers.console(context.coords, 'warn', 'runtime: could not find a handler for auth: ' + auth.type);
return done();
}
authInterface = createAuthInterface(auth, context.protocolProfileBehavior);
/**
* We go through the `pre` request send validation for the auth. In this step one of three things can happen
*
* If the Auth `pre` hook
* 1. gives a go, we sign the request and proceed to send the request.
* 2. gives a no go, we don't sign the request, but proceed to send the request.
* 3. gives a no go, with an intermediate request,
* a. we suspend current request, send the intermediate request
* b. invoke Auth `init` hook with the response of the intermediate request
* c. invoke Auth `pre` hook, and repeat from 1
*/
authPreHook = function () {
authHandler.pre(authInterface, function (err, success, request) {
// there was an error in pre hook of auth
if (err) {
// warn the user
run.triggers.console(context.coords,
'warn',
'runtime~' + authType + '.auth: could not validate the request: ' + (err.message || err),
err
);
// swallow the error, we've warned the user
return done();
}
// sync all auth system parameters to the original auth
originalAuthParams && auth.parameters().each(function (param) {
param && param.system &&
originalAuthParams.upsert({key: param.key, value: param.value, system: true});
});
// authHandler gave a go, sign the request
if (success) { return authSignHook(); }
// auth gave a no go, but no intermediate request
if (!request) { return done(); }
// prepare for sending intermediate request
var replayController = new ReplayController(context.replayState, run),
item = new sdk.Item({request: request});
// auth handler gave a no go, and an intermediate request.
// make the intermediate request; its response is passed to the `init` hook
replayController.requestReplay(context,
item,
// marks the auth as source for intermediate request
{source: auth.type + DOT_AUTH},
function (err, response) {
// errors for intermediate requests are passed to request callback
// passing it here will add it to original request as well, so don't do it
if (err) { return done(); }
// pass the response to Auth `init` hook
authHandler.init(authInterface, response, function (error) {
if (error) {
// warn about the err
run.triggers.console(context.coords, 'warn', 'runtime~' + authType + '.auth: ' +
'could not initialize auth: ' + (error.message || error), error);
// swallow the error, we've warned the user
return done();
}
// schedule back to pre hook
authPreHook();
});
},
function (err) {
// warn users that maximum retries have exceeded
if (err) {
run.triggers.console(
context.coords, 'warn', 'runtime~' + authType + '.auth: ' + (err.message || err)
);
}
// but don't bubble up the error with the request
done();
}
);
});
};
// start by calling the pre hook of the auth
authPreHook();
},
// Proxy lookup
function (context, run, done) {
var proxies = run.options.proxies,
request = context.item.request,
url;
if (!request) { return done(new Error('No request to resolve proxy for.')); }
url = request.url && request.url.toString();
async.waterfall([
// try resolving custom proxies before falling-back to system proxy
function (cb) {
if (_.isFunction(_.get(proxies, 'resolve'))) {
return cb(null, proxies.resolve(url));
}
return cb(null, undefined);
},
// fallback to system proxy
function (config, cb) {
if (config) {
return cb(null, config);
}
return _.isFunction(run.options.systemProxy) ? run.options.systemProxy(url, cb) : cb(null, undefined);
}
], function (err, config) {
if (err) {
run.triggers.console(context.coords, 'warn', 'proxy lookup error: ' + (err.message || err));
}
config && (request.proxy = sdk.ProxyConfig.isProxyConfig(config) ? config : new sdk.ProxyConfig(config));
return done();
});
},
// Certificate lookup + reading from whichever file resolver is provided
function (context, run, done) {
var request,
pfxPath,
keyPath,
certPath,
fileResolver,
certificate;
// A. Check if we have the file resolver
fileResolver = run.options.fileResolver;
if (!fileResolver) { return done(); } // No point going ahead
// B. Ensure we have the request
request = _.get(context.item, 'request');
if (!request) { return done(new Error('No request to resolve certificates for.')); }
// C. See if any cert should be sent, by performing a URL matching
certificate = run.options.certificates && run.options.certificates.resolveOne(request.url);
if (!certificate) { return done(); }
// D. Fetch the paths
// @todo: check why aren't we reading ca file (why are we not supporting ca file)
pfxPath = _.get(certificate, 'pfx.src');
keyPath = _.get(certificate, 'key.src');
certPath = _.get(certificate, 'cert.src');
// E. Read from the path, and add the values to the certificate, also associate
// the certificate with the current request.
async.mapValues({
pfx: pfxPath,
key: keyPath,
cert: certPath
}, function (value, key, next) {
// bail out if value is not defined
// @todo add test with server which only accepts cert file
if (!value) { return next(); }
// eslint-disable-next-line security/detect-non-literal-fs-filename
fileResolver.readFile(value, function (err, data) {
// Swallow the error after triggering a warning message for the user.
err && run.triggers.console(context.coords, 'warn',
`certificate "${key}" load error: ${(err.message || err)}`);
next(null, data);
});
}, function (err, fileContents) {
if (err) {
// Swallow the error after triggering a warning message for the user.
run.triggers.console(context.coords, 'warn', 'certificate load error: ' + (err.message || err));
return done();
}
if (fileContents) {
!_.isNil(fileContents.pfx) && _.set(certificate, 'pfx.value', fileContents.pfx);
!_.isNil(fileContents.key) && _.set(certificate, 'key.value', fileContents.key);
!_.isNil(fileContents.cert) && _.set(certificate, 'cert.value', fileContents.cert);
(fileContents.cert || fileContents.key || fileContents.pfx) && (request.certificate = certificate);
}
done();
});
}
];
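A hedged sketch of the proxy fallback order implemented in the proxy lookup step above (the URL and proxy values are hypothetical): a custom ProxyConfigList is consulted first via resolve(), and systemProxy is only asked when that yields nothing.
var sdk = require('postman-collection'),
    proxies = new sdk.ProxyConfigList({}, [{match: 'https://example.com/*', host: 'proxy.local', port: 8080}]),
    config = proxies.resolve('https://example.com/get');
console.log(config ? config.host + ':' + config.port : 'no custom proxy, fall back to systemProxy(url, cb)');
// -> proxy.local:8080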

236
node_modules/postman-runtime/lib/runner/run.js generated vendored Normal file
View File

@@ -0,0 +1,236 @@
var _ = require('lodash'),
async = require('async'),
backpack = require('../backpack'),
Instruction = require('./instruction'),
Run; // constructor
/**
* The run object is the primary way to interact with a run in progress. It allows controlling the run (pausing,
* starting, etc) and holds references to the helpers, such as requesters and authorizer.
*
* @param state
* @param options
*
* @property {Requester} requester
* @constructor
*/
Run = function PostmanCollectionRun (state, options) { // eslint-disable-line func-name-matching
_.assign(this, /** @lends Run.prototype */ {
/**
* @private
* @type {Object}
* @todo: state also holds the host for now (if any).
*/
state: _.assign({}, state),
/**
* @private
* @type {InstructionPool}
*/
pool: Instruction.pool(Run.commands),
/**
* @private
* @type {Object}
*/
stack: {},
/**
* @private
* @type {Object}
*/
options: options || {}
});
};
_.assign(Run.prototype, {
// eslint-disable-next-line jsdoc/check-param-names
/**
* @param {String} action
* @param {Object} [payload]
* @param {*} [args...]
*/
queue: function (action, payload) {
// extract the arguments that are to be forwarded to the processor
return this._schedule(action, payload, _.slice(arguments, 2), false);
},
// eslint-disable-next-line jsdoc/check-param-names
/**
* @param {String} action
* @param {Object} [payload]
* @param {*} [args...]
*/
interrupt: function (action, payload) {
// extract the arguments that are to be forwarded to the processor
return this._schedule(action, payload, _.slice(arguments, 2), true);
},
// eslint-disable-next-line jsdoc/check-param-names
/**
* Suspends current instruction and executes the given instruction.
*
* This method explicitly chooses not to handle errors, to allow the caller to catch errors and continue execution
* without terminating the instruction queue. However, it is up to the caller to make sure errors are handled,
* or they will go unhandled.
*
* @param {String} action
* @param {Object} payload
* @param {*} [args...]
*/
immediate: function (action, payload) {
var scope = this,
instruction = this.pool.create(action, payload, _.slice(arguments, 2));
// we directly execute this instruction instead of queueing it.
setTimeout(function () {
// we do not have a callback, hence we send _.noop. we could have made the callback in .execute optional, but
// that would suppress design-time bugs in the majority use case, so we avoided it.
instruction.execute(_.noop, scope);
}, 0);
return instruction;
},
/**
* @param {Function|Object} callback
*/
start: function (callback) {
// @todo add `when` parameter to backpack.normalise
callback = backpack.normalise(callback, Object.keys(Run.triggers));
// cannot start run if it is already running
if (this.triggers) {
return callback(new Error('run: already running'));
}
var timeback = callback;
if (_.isFinite(_.get(this.options, 'timeout.global'))) {
timeback = backpack.timeback(callback, this.options.timeout.global, this, function () {
this.pool.clear();
});
}
// invoke all the initialiser functions one after another and if it has any error then abort with callback.
async.series(_.map(Run.initialisers, function (initializer) {
return initializer.bind(this);
}.bind(this)), function (err) {
if (err) { return callback(err); }
// save the normalised callbacks as triggers
this.triggers = callback;
this.triggers.start(null, this.state.cursor.current()); // @todo may throw error if cursor absent
this._process(timeback);
}.bind(this));
},
/**
* @private
* @param {Object|Cursor} cursor
* @return {Item}
*/
resolveCursor: function (cursor) {
if (!cursor || !Array.isArray(this.state.items)) { return; }
return this.state.items[cursor.position];
},
/**
* @private
*
* @param {String} action
* @param {Object} [payload]
* @param {Array} [args]
* @param {Boolean} [immediate]
*/
_schedule: function (action, payload, args, immediate) {
var instruction = this.pool.create(action, payload, args);
// based on whether the immediate flag is set, add to the top or bottom of the instruction queue.
(immediate ? this.pool.unshift : this.pool.push)(instruction);
return instruction;
},
_process: function (callback) {
// extract the command from the queue
var instruction = this.pool.shift();
// if there is nothing to process, exit
if (!instruction) {
callback(null, this.state.cursor.current());
return;
}
instruction.execute(function (err) {
return err ? callback(err, this.state.cursor.current()) : this._process(callback); // process recursively
}, this);
}
});
_.assign(Run, {
/**
* Stores all events that runner triggers
*
* @type {Object}
*/
triggers: {
start: true
},
/**
* stores all execution commands
* @enum {Function}
*
* @note commands are loaded by flattening the modules in the `./commands` directory
*/
commands: {},
/**
* Functions executed with commands on start
* @type {Array}
*/
initialisers: []
});
// commands are loaded by flattening the modules in the `./commands` directory
Run.commands = _.transform({
'control.command': require('./extensions/control.command'),
'event.command': require('./extensions/event.command'),
'httprequest.command': require('./extensions/http-request.command'),
'request.command': require('./extensions/request.command'),
'waterfall.command': require('./extensions/waterfall.command'),
'item.command': require('./extensions/item.command'),
'delay.command': require('./extensions/delay.command')
}, function (all, extension) {
// extract the prototype from the command interface
_.has(extension, 'prototype') && _.forOwn(extension.prototype, function (value, prop) {
if (Run.prototype.hasOwnProperty(prop)) {
throw new Error('run: duplicate command prototype extension ' + prop);
}
Run.prototype[prop] = value;
});
// put the triggers in a box
_.has(extension, 'triggers') && _.isArray(extension.triggers) && _.forEach(extension.triggers, function (name) {
name && (Run.triggers[name] = true);
});
// we add the processors to the processor list
_.has(extension, 'process') && _.forOwn(extension.process, function (command, name) {
if (!_.isFunction(command)) { return; }
if (all.hasOwnProperty(name)) {
throw new Error('run: duplicate command processor ' + name);
}
// finally add the command function to the accumulator
all[name] = command;
});
// add the initialisation functions
_.has(extension, 'init') && _.isFunction(extension.init) && Run.initialisers.push(extension.init);
});
module.exports = Run;
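A minimal end-to-end sketch (not part of the vendored file): the Run instance handed back by Runner#run is started with an object of trigger callbacks. Only `start` is declared in this file; other names such as `done` come from backpack normalisation and the command extensions loaded above, and the echo URL is just an example.
var sdk = require('postman-collection'),
    Runner = require('postman-runtime').Runner;
new Runner().run(new sdk.Collection({item: [{request: 'https://postman-echo.com/get'}]}), {}, function (err, run) {
    if (err) { throw err; }
    run.start({
        start: function (e, cursor) { console.log('started, iterations planned:', cursor.cycles); },
        done: function (e) { console.log(e ? 'failed: ' + e.message : 'finished'); }
    });
});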

69
node_modules/postman-runtime/lib/runner/timings.js generated vendored Normal file
View File

@@ -0,0 +1,69 @@
/**
* All timing related functions within the runner are maintained in this module. Things like recording time with a label,
* computing elapsed time between two labels, etc. all go in here.
* @module Run~Timer
*/
var /**
* @const
* @type {string}
*/
NUMBER = 'number',
Timings; // constructor
/**
* An instance of a timer can record times with a label associated with it.
*
* @constructor
* @private
* @param {Object.<Number>} records create the timer instance with one or more labels and their timestamp.
*/
Timings = function Timings (records) {
for (var prop in records) {
this[prop] = parseInt(records[prop], 10);
}
};
/**
* Create a new instance of the timer. Equivalent to doing new {@link Timings}(records:Object.<Number>);
*
* @param {Object.<Number>} records
* @returns {Timings}
*/
Timings.create = function (records) {
return new Timings(records);
};
/**
* Record the current time with the label specified.
*
* @param {String} label
* @returns {Number}
*
* @example
* var t = new Timings();
* t.record('start');
*
* console.log(t.toObject()); // logs {start: 1246333 }
*/
Timings.prototype.record = function (label) {
return (this[label] = Date.now());
};
/**
* Serialise a timing instance to an Object that can later be used as a source to recreate another timing instance.
*
* @returns {Object.<Number>}
*/
Timings.prototype.toObject = function () {
var obj = {},
prop;
for (prop in this) {
(typeof this[prop] === NUMBER) && (obj[prop] = this[prop]);
}
return obj;
};
module.exports = Timings;
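A small usage sketch (relative require path assumed): two labels are recorded and the elapsed time is derived from the serialised object, which carries only the numeric properties.
var Timings = require('./timings'),
    t = Timings.create({queued: Date.now()});
t.record('start');
t.record('end');
console.log(t.toObject());                                // { queued: ..., start: ..., end: ... }
console.log(t.toObject().end - t.toObject().start, 'ms'); // elapsed time between the two labels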

165
node_modules/postman-runtime/lib/runner/util.js generated vendored Normal file
View File

@@ -0,0 +1,165 @@
var /**
* @const
* @type {string}
*/
FUNCTION = 'function',
/**
* @const
* @type {string}
*/
STRING = 'string',
createReadStream; // function
/**
* Create readable stream for given file as well as detect possible file
* read issues.
*
* @param {Object} resolver - External file resolver module
* @param {String} fileSrc - File path
* @param {Function} callback - Final callback
*
* @note This function is defined at the file's root because it needs to be
* trapped within a closure in order to attach the stream clone functionality.
* This keeps the footprint smaller in case we have a memory leak.
*/
createReadStream = function (resolver, fileSrc, callback) {
var readStream;
// check for the existence of the file before creating read stream.
// eslint-disable-next-line security/detect-non-literal-fs-filename
resolver.stat(fileSrc, function (err, stats) {
if (err) {
// overwrite `ENOENT: no such file or directory` error message. Most likely the case.
err.code === 'ENOENT' && (err.message = `"${fileSrc}", no such file`);
return callback(err);
}
// check for a valid file.
if (stats && typeof stats.isFile === FUNCTION && !stats.isFile()) {
return callback(new Error(`"${fileSrc}", is not a file`));
}
// check read permissions for user.
// octal `400` signifies 'user permissions'. [4 0 0] -> [u g o]
// `4` signifies 'read permission'. [4] -> [1 0 0] -> [r w x]
if (stats && !(stats.mode & 0o400)) {
return callback(new Error(`"${fileSrc}", read permission denied`));
}
// @note Handle all the errors before `createReadStream` to avoid listening on stream error event.
// listening on error requires listening on end event as well. which will make this sync.
// @note In form-data mode stream error will be handled in postman-request but bails out ongoing request.
// eslint-disable-next-line security/detect-non-literal-fs-filename
readStream = resolver.createReadStream(fileSrc);
// We might have to read the file before making the actual request
// e.g, while calculating body hash during AWS auth or redirecting form-data params
// So, this method wraps the `createReadStream` function with fixed arguments.
// This makes sure that we don't have to pass `fileResolver` to
// internal modules (like auth plugins) for security reasons.
readStream.cloneReadStream = function (callback) {
// eslint-disable-next-line security/detect-non-literal-fs-filename
return createReadStream(resolver, fileSrc, callback);
};
callback(null, readStream);
});
};
/**
* Utility functions that are required to be re-used throughout the runner
* @module Runner~util
* @private
*
* @note Do not put module logic or business logic related functions here.
* The functions here are purely decoupled and low-level functions.
*/
module.exports = {
/**
* This function allows one to call another function by wrapping it within a try-catch block.
* The first parameter is the function itself, followed by the scope in which this function is to be executed.
* Parameters from the third onwards are blindly forwarded to the function being called
*
* @param {Function} fn
* @param {*} ctx
*
* @returns {Error} If there was an error executing the function, the error is returned.
* Note that if the function called here is asynchronous, its errors will not be returned (for obvious reasons!)
*/
safeCall: function (fn, ctx) {
// extract the arguments that are to be forwarded to the function to be called
var args = Array.prototype.slice.call(arguments, 2);
try {
(typeof fn === FUNCTION) && fn.apply(ctx || global, args);
}
catch (err) {
return err;
}
},
/**
* Copies attributes from source object to destination object.
*
* @param dest
* @param src
*
* @return {Object}
*/
syncObject: function (dest, src) {
var prop;
// update or add values from src
for (prop in src) {
if (src.hasOwnProperty(prop)) {
dest[prop] = src[prop];
}
}
// remove values that no longer exist
for (prop in dest) {
if (dest.hasOwnProperty(prop) && !src.hasOwnProperty(prop)) {
delete dest[prop];
}
}
return dest;
},
/**
* Create readable stream for given file as well as detect possible file
* read issues. The resolver also attaches a clone function to the stream
* so that the stream can be restarted any time.
*
* @param {Object} resolver - External file resolver module
* @param {Function} resolver.stat - Resolver method to check for existence and permissions of file
* @param {Function} resolver.createReadStream - Resolver method for creating read stream
* @param {String} fileSrc - File path
* @param {Function} callback - Final callback
*
*/
createReadStream: function (resolver, fileSrc, callback) {
// bail out if resolver not found.
if (!resolver) {
return callback(new Error('file resolver not supported'));
}
// bail out if resolver is not supported.
if (typeof resolver.stat !== FUNCTION || typeof resolver.createReadStream !== FUNCTION) {
return callback(new Error('file resolver interface mismatch'));
}
// bail out if file source is invalid or empty string.
if (!fileSrc || typeof fileSrc !== STRING) {
return callback(new Error('invalid or missing file source'));
}
// now that things are sanitized and validated, we hand over to the
// stream reading utility function that does the heavy lifting of
// calling the resolver to return the stream
return createReadStream(resolver, fileSrc, callback);
}
};
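A hedged usage sketch: Node's fs module already satisfies the stat/createReadStream interface checked above, so it can act as the file resolver here; the file path is hypothetical.
var fs = require('fs'),
    util = require('./util'); // assuming the script sits next to this file in lib/runner
util.createReadStream(fs, './body.json', function (err, stream) {
    if (err) { return console.error(err.message); } // e.g. '"./body.json", no such file'
    stream.pipe(process.stdout);
});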

121
node_modules/postman-runtime/lib/version.js generated vendored Normal file
View File

@@ -0,0 +1,121 @@
var _ = require('lodash'),
RUNTIME_PACKAGE = require('../package.json'),
/**
* Set of postman packages to include while generating the dependency object
*
* @const
* @type {Object.<Boolean>}
*/
PACKAGES_TO_CONSIDER = {
'chai-postman': true,
'postman-collection': true,
'postman-request': true,
'postman-sandbox': true,
'uniscope': true,
'uvm': true
},
/**
* Generate object containing version and dependencies of Runtime or given module.
*
* @param {String} [moduleName] - Module name
* @param {Boolean} [deepDependencies=true] - Do depth dependencies traversal or stop at root
* @param {Object} [moduleData={}] - Object to store module data
* @returns {Object}
*
* @example <caption>Returned Object Structure</caption>
* var runtime = require('postman-runtime');
* runtime.version();
* {
* version: '7.6.1',
* dependencies: {
* 'postman-collection' : {
* version: '3.4.2',
* dependencies: {
* 'postman-request': {
* version: '2.88.1-postman.5'
* }
* }
* },
* 'postman-request': {
* version: '2.88.1-postman.5'
* }
* }
* }
*/
getVersionData = function (moduleName, deepDependencies, moduleData) {
// Set default values of function arguments if not given
// @note Argument moduleData is undefined if function is called directly.
// Otherwise moduleData will contain object if called recursively.
!moduleData && (moduleData = {});
// Include nested dependencies in moduleData object only if deepDependencies=true.
// Return only direct dependencies of module if deepDependencies=false.
(deepDependencies === undefined) && (deepDependencies = true);
var version,
// Runtime's package.json is considered by default
packageJson = RUNTIME_PACKAGE;
// bail out if either dependency not in PACKAGES_TO_CONSIDER
// or not Runtime's dependency
if (
moduleName &&
!(
PACKAGES_TO_CONSIDER[moduleName] &&
(
_.has(packageJson, ['dependencies', moduleName]) ||
_.has(packageJson, ['devDependencies', moduleName])
)
)
) {
return;
}
// if module name is given in function argument, consider that module's package.json instead of default
if (moduleName) {
// eslint-disable-next-line security/detect-non-literal-require
packageJson = require(require.resolve(`${moduleName}/package.json`));
}
// set version of dependency given as function argument or Runtime
moduleData.version = packageJson.version;
moduleData.dependencies = {};
_.forEach(PACKAGES_TO_CONSIDER, function (value, key) {
// if key is normal dependency
if (_.has(packageJson, ['dependencies', key])) {
version = packageJson.dependencies[key];
}
// else if key is devDependency
else if (_.has(packageJson, ['devDependencies', key])) {
version = packageJson.devDependencies[key];
}
// skip if key is not listed as dependency in packageJson
else {
return;
}
// include dependency in module data
moduleData.dependencies[key] = {
version: version,
dependencies: {}
};
// recursive call to include deep-dependency
if (!moduleName && deepDependencies) {
getVersionData(key, deepDependencies, moduleData.dependencies[key]);
}
// delete if no deep-dependency found
if (!_.size(moduleData.dependencies[key].dependencies)) {
delete moduleData.dependencies[key].dependencies;
}
});
return moduleData;
};
module.exports = getVersionData;
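A quick sketch of consuming the exported function (this is what the runtime.version() call shown in the example above delegates to): called without arguments it describes runtime itself, with a module name it describes that dependency, and it returns undefined for modules outside PACKAGES_TO_CONSIDER.
var version = require('postman-runtime/lib/version');
console.log(version().version);                   // runtime's own version
console.log(Object.keys(version().dependencies)); // e.g. [ 'postman-collection', 'postman-request', ... ]
console.log(version('left-pad'));                 // undefined - not a considered package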

31
node_modules/postman-runtime/lib/visualizer/index.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
const Handlebars = require('handlebars');
module.exports = {
/**
* Hydrate the given template with the given data and produce the final HTML to render in the visualizer
*
* @param {String} template - handlebars template as a string
* @param {Object} userData - data provided by user
* @param {Object} options - options for processing the template
* @param {Function} callback - callback called with errors and processed template
*/
processTemplate: function (template, userData, options, callback) {
// bail out if there is no valid template to process
if (typeof template !== 'string') {
return callback(new Error(`Invalid template. Template must be of type string, found ${typeof template}`));
}
var compiledTemplate = Handlebars.compile(template, options),
processedTemplate;
try {
// hydrate the template with provided data
processedTemplate = compiledTemplate(userData);
}
catch (err) {
return callback(err);
}
return callback(null, processedTemplate);
}
};
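A hedged usage sketch of the template hydration above (the template and data are made up for illustration):
var visualizer = require('postman-runtime/lib/visualizer');
visualizer.processTemplate('<h1>{{title}}</h1>', {title: 'Weekly report'}, {}, function (err, html) {
    console.log(err, html); // null '<h1>Weekly report</h1>'
});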