172
node_modules/mocha/lib/nodejs/buffered-worker-pool.js
generated
vendored
Normal file
@@ -0,0 +1,172 @@
/**
 * A wrapper around a third-party child process worker pool implementation.
 * Used by {@link module:buffered-runner}.
 * @private
 * @module buffered-worker-pool
 */

'use strict';

const serializeJavascript = require('serialize-javascript');
const workerpool = require('workerpool');
const {deserialize} = require('./serializer');
const debug = require('debug')('mocha:parallel:buffered-worker-pool');
const {createInvalidArgumentTypeError} = require('../errors');

const WORKER_PATH = require.resolve('./worker.js');

/**
 * A mapping of Mocha `Options` objects to serialized values.
 *
 * This is helpful because we tend to send the same options over and over
 * via IPC.
 * @type {WeakMap<Options,string>}
 */
let optionsCache = new WeakMap();

/**
 * These options are passed into the [workerpool](https://npm.im/workerpool) module.
 * @type {Partial<WorkerPoolOptions>}
 */
const WORKER_POOL_DEFAULT_OPTS = {
  // use child processes, not worker threads!
  workerType: 'process',
  // ensure the same flags sent to `node` for this `mocha` invocation are passed
  // along to children
  forkOpts: {execArgv: process.execArgv},
  maxWorkers: workerpool.cpus - 1
};

/**
 * A wrapper around a third-party worker pool implementation.
 * @private
 */
class BufferedWorkerPool {
  /**
   * Creates an underlying worker pool instance; determines max worker count
   * @param {Partial<WorkerPoolOptions>} [opts] - Options
   */
  constructor(opts = {}) {
    const maxWorkers = Math.max(
      1,
      typeof opts.maxWorkers === 'undefined'
        ? WORKER_POOL_DEFAULT_OPTS.maxWorkers
        : opts.maxWorkers
    );

    /* istanbul ignore next */
    if (workerpool.cpus < 2) {
      // TODO: decide whether we should warn
      debug(
        'not enough CPU cores available to run multiple jobs; avoid --parallel on this machine'
      );
    } else if (maxWorkers >= workerpool.cpus) {
      // TODO: decide whether we should warn
      debug(
        '%d concurrent job(s) requested, but only %d core(s) available',
        maxWorkers,
        workerpool.cpus
      );
    }
    /* istanbul ignore next */
    debug(
      'run(): starting worker pool of max size %d, using node args: %s',
      maxWorkers,
      process.execArgv.join(' ')
    );

    this.options = {...WORKER_POOL_DEFAULT_OPTS, ...opts, maxWorkers};
    this._pool = workerpool.pool(WORKER_PATH, this.options);
  }

  /**
   * Terminates all workers in the pool.
   * @param {boolean} [force] - Whether to force-kill workers. By default, lets workers finish their current task before termination.
   * @private
   * @returns {Promise<void>}
   */
  async terminate(force = false) {
    /* istanbul ignore next */
    debug('terminate(): terminating with force = %s', force);
    return this._pool.terminate(force);
  }

  /**
   * Adds a test file run to the worker pool queue for execution by a worker process.
   *
   * Handles serialization/deserialization.
   *
   * @param {string} filepath - Filepath of test
   * @param {Options} [options] - Options for Mocha instance
   * @private
   * @returns {Promise<SerializedWorkerResult>}
   */
  async run(filepath, options = {}) {
    if (!filepath || typeof filepath !== 'string') {
      throw createInvalidArgumentTypeError(
        'Expected a non-empty filepath',
        'filepath',
        'string'
      );
    }
    const serializedOptions = BufferedWorkerPool.serializeOptions(options);
    const result = await this._pool.exec('run', [filepath, serializedOptions]);
    return deserialize(result);
  }

  /**
   * Returns stats about the state of the worker processes in the pool.
   *
   * Used for debugging.
   *
   * @private
   */
  stats() {
    return this._pool.stats();
  }

  /**
   * Instantiates a {@link WorkerPool}.
   * @private
   */
  static create(...args) {
    return new BufferedWorkerPool(...args);
  }

  /**
   * Given Mocha options object `opts`, serialize into a format suitable for
   * transmission over IPC.
   *
   * @param {Options} [opts] - Mocha options
   * @private
   * @returns {string} Serialized options
   */
  static serializeOptions(opts = {}) {
    if (!optionsCache.has(opts)) {
      const serialized = serializeJavascript(opts, {
        unsafe: true, // this means we don't care about XSS
        ignoreFunction: true // do not serialize functions
      });
      optionsCache.set(opts, serialized);
      /* istanbul ignore next */
      debug(
        'serializeOptions(): serialized options %O to: %s',
        opts,
        serialized
      );
    }
    return optionsCache.get(opts);
  }

  /**
   * Resets internal cache of serialized options objects.
   *
   * For testing/debugging
   * @private
   */
  static resetOptionsCache() {
    optionsCache = new WeakMap();
  }
}

exports.BufferedWorkerPool = BufferedWorkerPool;
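A minimal usage sketch of the pool wrapper above, not part of the vendored file: the test file path and worker count are invented, and error handling is omitted. It shows the create/run/terminate flow that the parallel runner performs.

const {BufferedWorkerPool} = require('./buffered-worker-pool');

async function runOneFile() {
  // hypothetical worker count; the real runner forwards `options.jobs` here
  const pool = BufferedWorkerPool.create({maxWorkers: 2});
  try {
    // serializes the options, hands the file to a worker process, and
    // deserializes the result received back over IPC
    const result = await pool.run('./test/example.spec.js', {timeout: 2000});
    console.log('failures:', result.failureCount, 'events:', result.events.length);
  } finally {
    await pool.terminate();
  }
}

runOneFile();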
15
node_modules/mocha/lib/nodejs/file-unloader.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
'use strict';

/**
 * This module should not be in the browser bundle, so it's here.
 * @private
 * @module
 */

/**
 * Deletes a file from the `require` cache.
 * @param {string} file - File
 */
exports.unloadFile = file => {
  delete require.cache[require.resolve(file)];
};
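A short sketch of when the helper above matters, using an invented file path: Node caches required modules, so a later run (for example in watch mode) would otherwise see the stale copy.

const {unloadFile} = require('./file-unloader');

const file = require.resolve('./some.spec.js'); // hypothetical test file
require(file);    // first run populates require.cache
unloadFile(file); // evict it so the next require() re-executes the file
require(file);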
137
node_modules/mocha/lib/nodejs/growl.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
'use strict';

/**
 * Desktop Notifications module.
 * @module Growl
 */

const os = require('os');
const path = require('path');
const {sync: which} = require('which');
const {EVENT_RUN_END} = require('../runner').constants;
const {isBrowser} = require('../utils');

/**
 * @summary
 * Checks if Growl notification support seems likely.
 *
 * @description
 * Glosses over the distinction between an unsupported platform
 * and one that lacks prerequisite software installations.
 *
 * @public
 * @see {@link https://github.com/tj/node-growl/blob/master/README.md|Prerequisite Installs}
 * @see {@link Mocha#growl}
 * @see {@link Mocha#isGrowlCapable}
 * @return {boolean} whether Growl notification support can be expected
 */
exports.isCapable = () => {
  if (!isBrowser()) {
    return getSupportBinaries().reduce(
      (acc, binary) => acc || Boolean(which(binary, {nothrow: true})),
      false
    );
  }
  return false;
};

/**
 * Implements desktop notifications as a pseudo-reporter.
 *
 * @public
 * @see {@link Mocha#_growl}
 * @param {Runner} runner - Runner instance.
 */
exports.notify = runner => {
  runner.once(EVENT_RUN_END, () => {
    display(runner);
  });
};

/**
 * Displays the notification.
 *
 * @private
 * @param {Runner} runner - Runner instance.
 */
const display = runner => {
  const growl = require('growl');
  const stats = runner.stats;
  const symbol = {
    cross: '\u274C',
    tick: '\u2705'
  };
  let _message;
  let message;
  let title;

  if (stats.failures) {
    _message = `${stats.failures} of ${stats.tests} tests failed`;
    message = `${symbol.cross} ${_message}`;
    title = 'Failed';
  } else {
    _message = `${stats.passes} tests passed in ${stats.duration}ms`;
    message = `${symbol.tick} ${_message}`;
    title = 'Passed';
  }

  // Send notification
  const options = {
    image: logo(),
    name: 'mocha',
    title
  };
  growl(message, options, onCompletion);
};

/**
 * @summary
 * Callback for result of attempted Growl notification.
 *
 * @description
 * Despite its appearance, this is <strong>not</strong> an Error-first
 * callback -- all parameters are populated regardless of success.
 *
 * @private
 * @callback Growl~growlCB
 * @param {*} err - Error object, or <code>null</code> if successful.
 */
function onCompletion(err) {
  if (err) {
    // As notifications are tangential to our purpose, just log the error.
    const message =
      err.code === 'ENOENT' ? 'prerequisite software not found' : err.message;
    console.error('notification error:', message);
  }
}

/**
 * Returns Mocha logo image path.
 *
 * @private
 * @return {string} Pathname of Mocha logo
 */
const logo = () => {
  return path.join(__dirname, '..', 'assets', 'mocha-logo-96.png');
};

/**
 * @summary
 * Gets platform-specific Growl support binaries.
 *
 * @description
 * Somewhat brittle dependency on `growl` package implementation, but it
 * rarely changes.
 *
 * @private
 * @see {@link https://github.com/tj/node-growl/blob/master/lib/growl.js#L28-L126|setupCmd}
 * @return {string[]} names of Growl support binaries
 */
const getSupportBinaries = () => {
  const binaries = {
    Darwin: ['terminal-notifier', 'growlnotify'],
    Linux: ['notify-send', 'growl'],
    Windows_NT: ['growlnotify.exe']
  };
  return binaries[os.type()] || [];
};
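An illustrative sketch of how the module above is wired up, assuming a constructed Mocha instance named `mocha` and an installed notification binary; it mirrors how the `growl` option attaches the pseudo-reporter to a run.

const growl = require('./growl');

if (growl.isCapable()) {
  const runner = mocha.run(); // `mocha` is assumed to exist in this sketch
  growl.notify(runner);       // shows a notification once EVENT_RUN_END fires
}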
433
node_modules/mocha/lib/nodejs/parallel-buffered-runner.js
generated
vendored
Normal file
@@ -0,0 +1,433 @@
/**
 * A test Runner that uses a {@link module:buffered-worker-pool}.
 * @module parallel-buffered-runner
 * @private
 */

'use strict';

const allSettled = require('@ungap/promise-all-settled').bind(Promise);
const Runner = require('../runner');
const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants;
const debug = require('debug')('mocha:parallel:parallel-buffered-runner');
const {BufferedWorkerPool} = require('./buffered-worker-pool');
const {setInterval, clearInterval} = global;
const {createMap, constants} = require('../utils');
const {MOCHA_ID_PROP_NAME} = constants;
const {createFatalError} = require('../errors');

const DEFAULT_WORKER_REPORTER = require.resolve(
  './reporters/parallel-buffered'
);

/**
 * List of options to _not_ serialize for transmission to workers
 */
const DENY_OPTIONS = [
  'globalSetup',
  'globalTeardown',
  'parallel',
  'p',
  'jobs',
  'j'
];

/**
 * Outputs a debug statement with worker stats
 * @param {BufferedWorkerPool} pool - Worker pool
 */
/* istanbul ignore next */
const debugStats = pool => {
  const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats();
  debug(
    '%d/%d busy workers; %d idle; %d tasks queued',
    busyWorkers,
    totalWorkers,
    idleWorkers,
    pendingTasks
  );
};

/**
 * The interval at which we will display stats for worker processes in debug mode
 */
const DEBUG_STATS_INTERVAL = 5000;

const ABORTED = 'ABORTED';
const IDLE = 'IDLE';
const ABORTING = 'ABORTING';
const RUNNING = 'RUNNING';
const BAILING = 'BAILING';
const BAILED = 'BAILED';
const COMPLETE = 'COMPLETE';

const states = createMap({
  [IDLE]: new Set([RUNNING, ABORTING]),
  [RUNNING]: new Set([COMPLETE, BAILING, ABORTING]),
  [COMPLETE]: new Set(),
  [ABORTED]: new Set(),
  [ABORTING]: new Set([ABORTED]),
  [BAILING]: new Set([BAILED, ABORTING]),
  [BAILED]: new Set([COMPLETE, ABORTING])
});

/**
 * This `Runner` delegates test runs to worker threads. Does not execute any
 * {@link Runnable}s by itself!
 * @public
 */
class ParallelBufferedRunner extends Runner {
  constructor(...args) {
    super(...args);

    let state = IDLE;
    Object.defineProperty(this, '_state', {
      get() {
        return state;
      },
      set(newState) {
        if (states[state].has(newState)) {
          state = newState;
        } else {
          throw new Error(`invalid state transition: ${state} => ${newState}`);
        }
      }
    });

    this._workerReporter = DEFAULT_WORKER_REPORTER;
    this._linkPartialObjects = false;
    this._linkedObjectMap = new Map();

    this.once(Runner.constants.EVENT_RUN_END, () => {
      this._state = COMPLETE;
    });
  }

  /**
   * Returns a mapping function to enqueue a file in the worker pool and return results of its execution.
   * @param {BufferedWorkerPool} pool - Worker pool
   * @param {Options} options - Mocha options
   * @returns {FileRunner} Mapping function
   * @private
   */
  _createFileRunner(pool, options) {
    /**
     * Emits event and sets `BAILING` state, if necessary.
     * @param {Object} event - Event having `eventName`, maybe `data` and maybe `error`
     * @param {number} failureCount - Failure count
     */
    const emitEvent = (event, failureCount) => {
      this.emit(event.eventName, event.data, event.error);
      if (
        this._state !== BAILING &&
        event.data &&
        event.data._bail &&
        (failureCount || event.error)
      ) {
        debug('run(): nonzero failure count & found bail flag');
        // we need to let the events complete for this file, as the worker
        // should run any cleanup hooks
        this._state = BAILING;
      }
    };

    /**
     * Given an event, recursively find any objects in its data that have ID's, and create object references to already-seen objects.
     * @param {Object} event - Event having `eventName`, maybe `data` and maybe `error`
     */
    const linkEvent = event => {
      const stack = [{parent: event, prop: 'data'}];
      while (stack.length) {
        const {parent, prop} = stack.pop();
        const obj = parent[prop];
        let newObj;
        if (obj && typeof obj === 'object') {
          if (obj[MOCHA_ID_PROP_NAME]) {
            const id = obj[MOCHA_ID_PROP_NAME];
            newObj = this._linkedObjectMap.has(id)
              ? Object.assign(this._linkedObjectMap.get(id), obj)
              : obj;
            this._linkedObjectMap.set(id, newObj);
            parent[prop] = newObj;
          } else {
            throw createFatalError(
              'Object missing ID received in event data',
              obj
            );
          }
        }
        Object.keys(newObj).forEach(key => {
          const value = obj[key];
          if (value && typeof value === 'object' && value[MOCHA_ID_PROP_NAME]) {
            stack.push({obj: value, parent: newObj, prop: key});
          }
        });
      }
    };

    return async file => {
      debug('run(): enqueueing test file %s', file);
      try {
        const {failureCount, events} = await pool.run(file, options);

        if (this._state === BAILED) {
          // short-circuit after a graceful bail. if this happens,
          // some other worker has bailed.
          // TODO: determine if this is the desired behavior, or if we
          // should report the events of this run anyway.
          return;
        }
        debug(
          'run(): completed run of file %s; %d failures / %d events',
          file,
          failureCount,
          events.length
        );
        this.failures += failureCount; // can this ever be non-numeric?
        let event = events.shift();

        if (this._linkPartialObjects) {
          while (event) {
            linkEvent(event);
            emitEvent(event, failureCount);
            event = events.shift();
          }
        } else {
          while (event) {
            emitEvent(event, failureCount);
            event = events.shift();
          }
        }
        if (this._state === BAILING) {
          debug('run(): terminating pool due to "bail" flag');
          this._state = BAILED;
          await pool.terminate();
        }
      } catch (err) {
        if (this._state === BAILED || this._state === ABORTING) {
          debug(
            'run(): worker pool terminated with intent; skipping file %s',
            file
          );
        } else {
          // this is an uncaught exception
          debug('run(): encountered uncaught exception: %O', err);
          if (this.allowUncaught) {
            // still have to clean up
            this._state = ABORTING;
            await pool.terminate(true);
          }
          throw err;
        }
      } finally {
        debug('run(): done running file %s', file);
      }
    };
  }

  /**
   * Listen on `Process.SIGINT`; terminate pool if caught.
   * Returns the listener for later call to `process.removeListener()`.
   * @param {BufferedWorkerPool} pool - Worker pool
   * @returns {SigIntListener} Listener
   * @private
   */
  _bindSigIntListener(pool) {
    const sigIntListener = async () => {
      debug('run(): caught a SIGINT');
      this._state = ABORTING;

      try {
        debug('run(): force-terminating worker pool');
        await pool.terminate(true);
      } catch (err) {
        console.error(
          `Error while attempting to force-terminate worker pool: ${err}`
        );
        process.exitCode = 1;
      } finally {
        process.nextTick(() => {
          debug('run(): imminent death');
          this._state = ABORTED;
          process.kill(process.pid, 'SIGINT');
        });
      }
    };

    process.once('SIGINT', sigIntListener);

    return sigIntListener;
  }

  /**
   * Runs Mocha tests by creating a thread pool, then delegating work to the
   * worker threads.
   *
   * Each worker receives one file, and as workers become available, they take a
   * file from the queue and run it. The worker thread execution is treated like
   * an RPC--it returns a `Promise` containing serialized information about the
   * run. The information is processed as it's received, and emitted to a
   * {@link Reporter}, which is likely listening for these events.
   *
   * @param {Function} callback - Called with an exit code corresponding to
   * number of test failures.
   * @param {{files: string[], options: Options}} opts - Files to run and
   * command-line options, respectively.
   */
  run(callback, {files, options = {}} = {}) {
    /**
     * Listener on `Process.SIGINT` which tries to cleanly terminate the worker pool.
     */
    let sigIntListener;

    // assign the reporter the worker will use, which will be different than the
    // main process' reporter
    options = {...options, reporter: this._workerReporter};

    // This function should _not_ return a `Promise`; its parent (`Runner#run`)
    // returns this instance, so this should do the same. However, we want to make
    // use of `async`/`await`, so we use this IIFE.
    (async () => {
      /**
       * This is an interval that outputs stats about the worker pool every so often
       */
      let debugInterval;

      /**
       * @type {BufferedWorkerPool}
       */
      let pool;

      try {
        pool = BufferedWorkerPool.create({maxWorkers: options.jobs});

        sigIntListener = this._bindSigIntListener(pool);

        /* istanbul ignore next */
        debugInterval = setInterval(
          () => debugStats(pool),
          DEBUG_STATS_INTERVAL
        ).unref();

        // this is set for uncaught exception handling in `Runner#uncaught`
        // TODO: `Runner` should be using a state machine instead.
        this.started = true;
        this._state = RUNNING;

        this.emit(EVENT_RUN_BEGIN);

        options = {...options};
        DENY_OPTIONS.forEach(opt => {
          delete options[opt];
        });

        const results = await allSettled(
          files.map(this._createFileRunner(pool, options))
        );

        // note that pool may already be terminated due to --bail
        await pool.terminate();

        results
          .filter(({status}) => status === 'rejected')
          .forEach(({reason}) => {
            if (this.allowUncaught) {
              // yep, just the first one.
              throw reason;
            }
            // "rejected" will correspond to uncaught exceptions.
            // unlike the serial runner, the parallel runner can always recover.
            this.uncaught(reason);
          });

        if (this._state === ABORTING) {
          return;
        }

        this.emit(EVENT_RUN_END);
        debug('run(): completing with failure count %d', this.failures);
        callback(this.failures);
      } catch (err) {
        // this `nextTick` takes us out of the `Promise` scope, so the
        // exception will not be caught and returned as a rejected `Promise`,
        // which would lead to an `unhandledRejection` event.
        process.nextTick(() => {
          debug('run(): re-throwing uncaught exception');
          throw err;
        });
      } finally {
        clearInterval(debugInterval);
        process.removeListener('SIGINT', sigIntListener);
      }
    })();
    return this;
  }

  /**
   * Toggle partial object linking behavior; used for building object references from
   * unique ID's.
   * @param {boolean} [value] - If `true`, enable partial object linking, otherwise disable
   * @returns {Runner}
   * @chainable
   * @public
   * @example
   * // this reporter needs proper object references when run in parallel mode
   * class MyReporter {
   *   constructor(runner) {
   *     this.runner.linkPartialObjects(true)
   *       .on(EVENT_SUITE_BEGIN, suite => {
   *         // this Suite may be the same object...
   *       })
   *       .on(EVENT_TEST_BEGIN, test => {
   *         // ...as the `test.parent` property
   *       });
   *   }
   * }
   */
  linkPartialObjects(value) {
    this._linkPartialObjects = Boolean(value);
    return super.linkPartialObjects(value);
  }

  /**
   * If this class is the `Runner` in use, then this is going to return `true`.
   *
   * For use by reporters.
   * @returns {true}
   * @public
   */
  isParallelMode() {
    return true;
  }

  /**
   * Configures an alternate reporter for worker processes to use. Subclasses
   * using worker processes should implement this.
   * @public
   * @param {string} path - Absolute path to alternate reporter for worker processes to use
   * @returns {Runner}
   * @throws When in serial mode
   * @chainable
   */
  workerReporter(reporter) {
    this._workerReporter = reporter;
    return this;
  }
}

module.exports = ParallelBufferedRunner;

/**
 * Listener function intended to be bound to `Process.SIGINT` event
 * @private
 * @callback SigIntListener
 * @returns {Promise<void>}
 */

/**
 * A function accepting a test file path and returning the results of a test run
 * @private
 * @callback FileRunner
 * @param {string} filename - File to run
 * @returns {Promise<SerializedWorkerResult>}
 */
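A sketch of how this runner is driven, assuming `runner` is an already-constructed ParallelBufferedRunner (Mocha builds one itself when the `parallel` option is enabled); the file list is invented.

runner.run(
  failures => {
    process.exitCode = failures ? 1 : 0;
  },
  {
    files: ['test/a.spec.js', 'test/b.spec.js'], // hypothetical test files
    options: {jobs: 4} // becomes the pool's maxWorkers
  }
);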
165
node_modules/mocha/lib/nodejs/reporters/parallel-buffered.js
generated
vendored
Normal file
@@ -0,0 +1,165 @@
/**
 * "Buffered" reporter used internally by a worker process when running in parallel mode.
 * @module nodejs/reporters/parallel-buffered
 * @public
 */

'use strict';

/**
 * Module dependencies.
 */

const {
  EVENT_SUITE_BEGIN,
  EVENT_SUITE_END,
  EVENT_TEST_FAIL,
  EVENT_TEST_PASS,
  EVENT_TEST_PENDING,
  EVENT_TEST_BEGIN,
  EVENT_TEST_END,
  EVENT_TEST_RETRY,
  EVENT_DELAY_BEGIN,
  EVENT_DELAY_END,
  EVENT_HOOK_BEGIN,
  EVENT_HOOK_END,
  EVENT_RUN_END
} = require('../../runner').constants;
const {SerializableEvent, SerializableWorkerResult} = require('../serializer');
const debug = require('debug')('mocha:reporters:buffered');
const Base = require('../../reporters/base');

/**
 * List of events to listen to; these will be buffered and sent
 * when `Mocha#run` is complete (via {@link ParallelBuffered#done}).
 */
const EVENT_NAMES = [
  EVENT_SUITE_BEGIN,
  EVENT_SUITE_END,
  EVENT_TEST_BEGIN,
  EVENT_TEST_PENDING,
  EVENT_TEST_FAIL,
  EVENT_TEST_PASS,
  EVENT_TEST_RETRY,
  EVENT_TEST_END,
  EVENT_HOOK_BEGIN,
  EVENT_HOOK_END
];

/**
 * Like {@link EVENT_NAMES}, except we expect these events to only be emitted
 * by the `Runner` once.
 */
const ONCE_EVENT_NAMES = [EVENT_DELAY_BEGIN, EVENT_DELAY_END];

/**
 * The `ParallelBuffered` reporter is used by each worker process in "parallel"
 * mode, by default. Instead of reporting to `STDOUT`, etc., it retains a
 * list of events it receives and hands these off to the callback passed into
 * {@link Mocha#run}. That callback will then return the data to the main
 * process.
 * @public
 */
class ParallelBuffered extends Base {
  /**
   * Calls {@link ParallelBuffered#createListeners}
   * @param {Runner} runner
   */
  constructor(runner, opts) {
    super(runner, opts);

    /**
     * Retained list of events emitted from the {@link Runner} instance.
     * @type {BufferedEvent[]}
     * @public
     */
    this.events = [];

    /**
     * Map of `Runner` event names to listeners (for later teardown)
     * @public
     * @type {Map<string,EventListener>}
     */
    this.listeners = new Map();

    this.createListeners(runner);
  }

  /**
   * Returns a new listener which saves event data in memory to
   * {@link ParallelBuffered#events}. Listeners are indexed by `eventName` and stored
   * in {@link ParallelBuffered#listeners}. This is a defensive measure, so that we
   * don't a) leak memory or b) remove _other_ listeners that may not be
   * associated with this reporter.
   *
   * Subclasses could override this behavior.
   *
   * @public
   * @param {string} eventName - Name of event to create listener for
   * @returns {EventListener}
   */
  createListener(eventName) {
    const listener = (runnable, err) => {
      this.events.push(SerializableEvent.create(eventName, runnable, err));
    };
    return this.listeners.set(eventName, listener).get(eventName);
  }

  /**
   * Creates event listeners (using {@link ParallelBuffered#createListener}) for each
   * reporter-relevant event emitted by a {@link Runner}. This array is drained when
   * {@link ParallelBuffered#done} is called by {@link Runner#run}.
   *
   * Subclasses could override this behavior.
   * @public
   * @param {Runner} runner - Runner instance
   * @returns {ParallelBuffered}
   * @chainable
   */
  createListeners(runner) {
    EVENT_NAMES.forEach(evt => {
      runner.on(evt, this.createListener(evt));
    });
    ONCE_EVENT_NAMES.forEach(evt => {
      runner.once(evt, this.createListener(evt));
    });

    runner.once(EVENT_RUN_END, () => {
      debug('received EVENT_RUN_END');
      this.listeners.forEach((listener, evt) => {
        runner.removeListener(evt, listener);
        this.listeners.delete(evt);
      });
    });

    return this;
  }

  /**
   * Calls the {@link Mocha#run} callback (`callback`) with the test failure
   * count and the array of {@link BufferedEvent} objects. Resets the array.
   *
   * This is called directly by `Runner#run` and should not be called by any other consumer.
   *
   * Subclasses could override this.
   *
   * @param {number} failures - Number of failed tests
   * @param {Function} callback - The callback passed to {@link Mocha#run}.
   * @public
   */
  done(failures, callback) {
    callback(SerializableWorkerResult.create(this.events, failures));
    this.events = []; // defensive
  }
}

/**
 * Serializable event data from a `Runner`. Keys of the `data` property
 * beginning with `__` will be converted into a function which returns the value
 * upon deserialization.
 * @typedef {Object} BufferedEvent
 * @property {string} name - Event name
 * @property {object} data - Event parameters
 */

module.exports = ParallelBuffered;
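A sketch of the hand-off this reporter performs inside a worker, assuming a Mocha instance named `mocha` configured with it (much as worker.js later in this diff does): because the reporter defines `done`, the callback given to `Mocha#run` receives the buffered result rather than a bare failure count.

mocha.reporter(require.resolve('./parallel-buffered'));
mocha.run(result => {
  // `result` is the SerializableWorkerResult built by ParallelBuffered#done:
  // {failureCount, events, __type: 'SerializableWorkerResult'}
  console.log(result.failureCount, result.events.length);
});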
412
node_modules/mocha/lib/nodejs/serializer.js
generated
vendored
Normal file
@@ -0,0 +1,412 @@
/**
 * Serialization/deserialization classes and functions for communication between a main Mocha process and worker processes.
 * @module serializer
 * @private
 */

'use strict';

const {type} = require('../utils');
const {createInvalidArgumentTypeError} = require('../errors');
// this is not named `mocha:parallel:serializer` because it's noisy and it's
// helpful to be able to write `DEBUG=mocha:parallel*` and get everything else.
const debug = require('debug')('mocha:serializer');

const SERIALIZABLE_RESULT_NAME = 'SerializableWorkerResult';
const SERIALIZABLE_TYPES = new Set(['object', 'array', 'function', 'error']);

/**
 * The serializable result of a test file run from a worker.
 * @private
 */
class SerializableWorkerResult {
  /**
   * Creates instance props; of note, the `__type` prop.
   *
   * Note that the failure count is _redundant_ and could be derived from the
   * list of events; but since we're already doing the work, might as well use
   * it.
   * @param {SerializableEvent[]} [events=[]] - Events to eventually serialize
   * @param {number} [failureCount=0] - Failure count
   */
  constructor(events = [], failureCount = 0) {
    /**
     * The number of failures in this run
     * @type {number}
     */
    this.failureCount = failureCount;
    /**
     * All relevant events emitted from the {@link Runner}.
     * @type {SerializableEvent[]}
     */
    this.events = events;

    /**
     * Symbol-like value needed to distinguish when attempting to deserialize
     * this object (once it's been received over IPC).
     * @type {Readonly<"SerializableWorkerResult">}
     */
    Object.defineProperty(this, '__type', {
      value: SERIALIZABLE_RESULT_NAME,
      enumerable: true,
      writable: false
    });
  }

  /**
   * Instantiates a new {@link SerializableWorkerResult}.
   * @param {...any} args - Args to constructor
   * @returns {SerializableWorkerResult}
   */
  static create(...args) {
    return new SerializableWorkerResult(...args);
  }

  /**
   * Serializes each {@link SerializableEvent} in our `events` prop;
   * makes this object read-only.
   * @returns {Readonly<SerializableWorkerResult>}
   */
  serialize() {
    this.events.forEach(event => {
      event.serialize();
    });
    return Object.freeze(this);
  }

  /**
   * Deserializes a {@link SerializedWorkerResult} into something reporters can
   * use; calls {@link SerializableEvent.deserialize} on each item in its
   * `events` prop.
   * @param {SerializedWorkerResult} obj
   * @returns {SerializedWorkerResult}
   */
  static deserialize(obj) {
    obj.events.forEach(event => {
      SerializableEvent.deserialize(event);
    });
    return obj;
  }

  /**
   * Returns `true` if this is a {@link SerializedWorkerResult} or a
   * {@link SerializableWorkerResult}.
   * @param {*} value - A value to check
   * @returns {boolean} If true, it's deserializable
   */
  static isSerializedWorkerResult(value) {
    return (
      value instanceof SerializableWorkerResult ||
      (type(value) === 'object' && value.__type === SERIALIZABLE_RESULT_NAME)
    );
  }
}

/**
 * Represents an event, emitted by a {@link Runner}, which is to be transmitted
 * over IPC.
 *
 * Due to the contents of the event data, it's not possible to send them
 * verbatim. When received by the main process--and handled by reporters--these
 * objects are expected to contain {@link Runnable} instances. This class
 * provides facilities to perform the translation via serialization and
 * deserialization.
 * @private
 */
class SerializableEvent {
  /**
   * Constructs a `SerializableEvent`, throwing if we receive unexpected data.
   *
   * Practically, events emitted from `Runner` have a minimum of zero (0)
   * arguments-- (for example, {@link Runnable.constants.EVENT_RUN_BEGIN}) and a
   * maximum of two (2) (for example,
   * {@link Runnable.constants.EVENT_TEST_FAIL}, where the second argument is an
   * `Error`). The first argument, if present, is a {@link Runnable}. This
   * constructor's arguments adhere to this convention.
   * @param {string} eventName - A non-empty event name.
   * @param {any} [originalValue] - Some data. Corresponds to extra arguments
   * passed to `EventEmitter#emit`.
   * @param {Error} [originalError] - An error, if there's an error.
   * @throws If `eventName` is empty, or `originalValue` is a non-object.
   */
  constructor(eventName, originalValue, originalError) {
    if (!eventName) {
      throw createInvalidArgumentTypeError(
        'Empty `eventName` string argument',
        'eventName',
        'string'
      );
    }
    /**
     * The event name.
     * @memberof SerializableEvent
     */
    this.eventName = eventName;
    const originalValueType = type(originalValue);
    if (originalValueType !== 'object' && originalValueType !== 'undefined') {
      throw createInvalidArgumentTypeError(
        `Expected object but received ${originalValueType}`,
        'originalValue',
        'object'
      );
    }
    /**
     * An error, if present.
     * @memberof SerializableEvent
     */
    Object.defineProperty(this, 'originalError', {
      value: originalError,
      enumerable: false
    });

    /**
     * The raw value.
     *
     * We don't want this value sent via IPC; making it non-enumerable will do that.
     *
     * @memberof SerializableEvent
     */
    Object.defineProperty(this, 'originalValue', {
      value: originalValue,
      enumerable: false
    });
  }

  /**
   * In case you hated using `new` (I do).
   *
   * @param {...any} args - Args for {@link SerializableEvent#constructor}.
   * @returns {SerializableEvent} A new `SerializableEvent`
   */
  static create(...args) {
    return new SerializableEvent(...args);
  }

  /**
   * Used internally by {@link SerializableEvent#serialize}.
   * @ignore
   * @param {Array<object|string>} pairs - List of parent/key tuples to process; modified in-place. This JSDoc type is an approximation
   * @param {object} parent - Some parent object
   * @param {string} key - Key to inspect
   * @param {WeakSet<Object>} seenObjects - For avoiding circular references
   */
  static _serialize(pairs, parent, key, seenObjects) {
    let value = parent[key];
    if (seenObjects.has(value)) {
      parent[key] = Object.create(null);
      return;
    }
    let _type = type(value);
    if (_type === 'error') {
      // we need to reference the stack prop b/c it's lazily-loaded.
      // `__type` is necessary for deserialization to create an `Error` later.
      // `message` is apparently not enumerable, so we must handle it specifically.
      value = Object.assign(Object.create(null), value, {
        stack: value.stack,
        message: value.message,
        __type: 'Error'
      });
      parent[key] = value;
      // after this, set the result of type(value) to be `object`, and we'll throw
      // whatever other junk is in the original error into the new `value`.
      _type = 'object';
    }
    switch (_type) {
      case 'object':
        if (type(value.serialize) === 'function') {
          parent[key] = value.serialize();
        } else {
          // by adding props to the `pairs` array, we will process it further
          pairs.push(
            ...Object.keys(value)
              .filter(key => SERIALIZABLE_TYPES.has(type(value[key])))
              .map(key => [value, key])
          );
        }
        break;
      case 'function':
        // we _may_ want to dig in to functions for some assertion libraries
        // that might put a usable property on a function.
        // for now, just zap it.
        delete parent[key];
        break;
      case 'array':
        pairs.push(
          ...value
            .filter(value => SERIALIZABLE_TYPES.has(type(value)))
            .map((value, index) => [value, index])
        );
        break;
    }
  }

  /**
   * Modifies this object *in place* (for theoretical memory consumption &
   * performance reasons); serializes `SerializableEvent#originalValue` (placing
   * the result in `SerializableEvent#data`) and `SerializableEvent#error`.
   * Freezes this object. The result is an object that can be transmitted over
   * IPC.
   * If this quickly becomes unmaintainable, we will want to move towards immutable
   * objects post-haste.
   */
  serialize() {
    // given a parent object and a key, inspect the value and decide whether
    // to replace it, remove it, or add it to our `pairs` array to further process.
    // this is recursion in loop form.
    const originalValue = this.originalValue;
    const result = Object.assign(Object.create(null), {
      data:
        type(originalValue) === 'object' &&
        type(originalValue.serialize) === 'function'
          ? originalValue.serialize()
          : originalValue,
      error: this.originalError
    });

    const pairs = Object.keys(result).map(key => [result, key]);
    const seenObjects = new WeakSet();

    let pair;
    while ((pair = pairs.shift())) {
      SerializableEvent._serialize(pairs, ...pair, seenObjects);
      seenObjects.add(pair[0]);
    }

    this.data = result.data;
    this.error = result.error;

    return Object.freeze(this);
  }

  /**
   * Used internally by {@link SerializableEvent.deserialize}; creates an `Error`
   * from an `Error`-like (serialized) object
   * @ignore
   * @param {Object} value - An Error-like value
   * @returns {Error} Real error
   */
  static _deserializeError(value) {
    const error = new Error(value.message);
    error.stack = value.stack;
    Object.assign(error, value);
    delete error.__type;
    return error;
  }

  /**
   * Used internally by {@link SerializableEvent.deserialize}; recursively
   * deserializes an object in-place.
   * @param {object|Array} parent - Some object or array
   * @param {string|number} key - Some prop name or array index within `parent`
   */
  static _deserializeObject(parent, key) {
    if (key === '__proto__') {
      delete parent[key];
      return;
    }
    const value = parent[key];
    // keys beginning with `$$` are converted into functions returning the value
    // and renamed, stripping the `$$` prefix.
    // functions defined this way cannot be array members!
    if (type(key) === 'string' && key.startsWith('$$')) {
      const newKey = key.slice(2);
      parent[newKey] = () => value;
      delete parent[key];
      key = newKey;
    }
    if (type(value) === 'array') {
      value.forEach((_, idx) => {
        SerializableEvent._deserializeObject(value, idx);
      });
    } else if (type(value) === 'object') {
      if (value.__type === 'Error') {
        parent[key] = SerializableEvent._deserializeError(value);
      } else {
        Object.keys(value).forEach(key => {
          SerializableEvent._deserializeObject(value, key);
        });
      }
    }
  }

  /**
   * Deserialize value returned from a worker into something more useful.
   * Does not return the same object.
   * @todo do this in a loop instead of with recursion (if necessary)
   * @param {SerializedEvent} obj - Object returned from worker
   * @returns {SerializedEvent} Deserialized result
   */
  static deserialize(obj) {
    if (!obj) {
      throw createInvalidArgumentTypeError('Expected value', obj);
    }

    obj = Object.assign(Object.create(null), obj);

    if (obj.data) {
      Object.keys(obj.data).forEach(key => {
        SerializableEvent._deserializeObject(obj.data, key);
      });
    }

    if (obj.error) {
      obj.error = SerializableEvent._deserializeError(obj.error);
    }

    return obj;
  }
}

/**
 * "Serializes" a value for transmission over IPC as a message.
 *
 * If value is an object and has a `serialize()` method, call that method; otherwise return the object and hope for the best.
 *
 * @param {*} [value] - A value to serialize
 */
exports.serialize = function serialize(value) {
  const result =
    type(value) === 'object' && type(value.serialize) === 'function'
      ? value.serialize()
      : value;
  debug('serialized: %O', result);
  return result;
};

/**
 * "Deserializes" a "message" received over IPC.
 *
 * This could be expanded with other objects that need deserialization,
 * but at present time we only care about {@link SerializableWorkerResult} objects.
 *
 * @param {*} [value] - A "message" to deserialize
 */
exports.deserialize = function deserialize(value) {
  const result = SerializableWorkerResult.isSerializedWorkerResult(value)
    ? SerializableWorkerResult.deserialize(value)
    : value;
  debug('deserialized: %O', result);
  return result;
};

exports.SerializableEvent = SerializableEvent;
exports.SerializableWorkerResult = SerializableWorkerResult;

/**
 * The result of calling `SerializableEvent.serialize`, as received
 * by the deserializer.
 * @private
 * @typedef {Object} SerializedEvent
 * @property {object?} data - Optional serialized data
 * @property {object?} error - Optional serialized `Error`
 */

/**
 * The result of calling `SerializableWorkerResult.serialize` as received
 * by the deserializer.
 * @private
 * @typedef {Object} SerializedWorkerResult
 * @property {number} failureCount - Number of failures
 * @property {SerializedEvent[]} events - Serialized events
 * @property {"SerializedWorkerResult"} __type - Symbol-like to denote the type of object this is
 */
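A round-trip sketch for the helpers above (not from the vendored file; the event data is a stand-in for a serialized Runnable): an Error attached to an event crosses the IPC boundary as a plain object and is revived on the receiving side.

const {SerializableEvent, serialize} = require('./serializer');

const event = SerializableEvent.create(
  'fail',
  {title: 'a failing test'}, // stand-in for Runnable-derived data
  new TypeError('boom')
);
const wire = JSON.parse(JSON.stringify(serialize(event))); // roughly what IPC does
const revived = SerializableEvent.deserialize(wire);
console.log(revived.error instanceof Error); // => true
console.log(revived.error.message);          // => 'boom'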
151
node_modules/mocha/lib/nodejs/worker.js
generated
vendored
Normal file
@@ -0,0 +1,151 @@
/**
 * A worker process. Consumes {@link module:reporters/parallel-buffered} reporter.
 * @module worker
 * @private
 */

'use strict';

const {
  createInvalidArgumentTypeError,
  createInvalidArgumentValueError
} = require('../errors');
const workerpool = require('workerpool');
const Mocha = require('../mocha');
const {handleRequires, validateLegacyPlugin} = require('../cli/run-helpers');
const d = require('debug');
const debug = d.debug(`mocha:parallel:worker:${process.pid}`);
const isDebugEnabled = d.enabled(`mocha:parallel:worker:${process.pid}`);
const {serialize} = require('./serializer');
const {setInterval, clearInterval} = global;

let rootHooks;

if (workerpool.isMainThread) {
  throw new Error(
    'This script is intended to be run as a worker (by the `workerpool` package).'
  );
}

/**
 * Initializes some stuff on the first call to {@link run}.
 *
 * Handles `--require` and `--ui`. Does _not_ handle `--reporter`,
 * as only the `Buffered` reporter is used.
 *
 * **This function only runs once per worker**; it overwrites itself with a no-op
 * before returning.
 *
 * @param {Options} argv - Command-line options
 */
let bootstrap = async argv => {
  // globalSetup and globalTeardown do not run in workers
  const plugins = await handleRequires(argv.require, {
    ignoredPlugins: ['mochaGlobalSetup', 'mochaGlobalTeardown']
  });
  validateLegacyPlugin(argv, 'ui', Mocha.interfaces);

  rootHooks = plugins.rootHooks;
  bootstrap = () => {};
  debug('bootstrap(): finished with args: %O', argv);
};

/**
 * Runs a single test file in a worker thread.
 * @param {string} filepath - Filepath of test file
 * @param {string} [serializedOptions] - **Serialized** options. This string will be eval'd!
 * @see https://npm.im/serialize-javascript
 * @returns {Promise<{failures: number, events: BufferedEvent[]}>} - Test
 * failure count and list of events.
 */
async function run(filepath, serializedOptions = '{}') {
  if (!filepath) {
    throw createInvalidArgumentTypeError(
      'Expected a non-empty "filepath" argument',
      'file',
      'string'
    );
  }

  debug('run(): running test file %s', filepath);

  if (typeof serializedOptions !== 'string') {
    throw createInvalidArgumentTypeError(
      'run() expects second parameter to be a string which was serialized by the `serialize-javascript` module',
      'serializedOptions',
      'string'
    );
  }
  let argv;
  try {
    // eslint-disable-next-line no-eval
    argv = eval('(' + serializedOptions + ')');
  } catch (err) {
    throw createInvalidArgumentValueError(
      'run() was unable to deserialize the options',
      'serializedOptions',
      serializedOptions
    );
  }

  const opts = Object.assign({ui: 'bdd'}, argv, {
    // if this was true, it would cause infinite recursion.
    parallel: false,
    // this doesn't work in parallel mode
    forbidOnly: true,
    // it's useful for a Mocha instance to know if it's running in a worker process.
    isWorker: true
  });

  await bootstrap(opts);

  opts.rootHooks = rootHooks;

  const mocha = new Mocha(opts).addFile(filepath);

  try {
    await mocha.loadFilesAsync();
  } catch (err) {
    debug('run(): could not load file %s: %s', filepath, err);
    throw err;
  }

  return new Promise((resolve, reject) => {
    let debugInterval;
    /* istanbul ignore next */
    if (isDebugEnabled) {
      debugInterval = setInterval(() => {
        debug('run(): still running %s...', filepath);
      }, 5000).unref();
    }
    mocha.run(result => {
      // Runner adds these; if we don't remove them, we'll get a leak.
      process.removeAllListeners('uncaughtException');
      process.removeAllListeners('unhandledRejection');

      try {
        const serialized = serialize(result);
        debug(
          'run(): completed run with %d test failures; returning to main process',
          typeof result.failures === 'number' ? result.failures : 0
        );
        resolve(serialized);
      } catch (err) {
        // TODO: figure out exactly what the sad path looks like here.
        // rejection should only happen if an error is "unrecoverable"
        debug('run(): serialization failed; rejecting: %O', err);
        reject(err);
      } finally {
        clearInterval(debugInterval);
      }
    });
  });
}

// this registers the `run` function.
workerpool.worker({run});

debug('started worker process');

// for testing
exports.run = run;
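A sketch of the calling side, which is essentially what BufferedWorkerPool#run does: the worker's `run` is reached through workerpool's `exec`, and its second argument must be a string produced by serialize-javascript, not a plain options object. The test file path here is invented.

const serializeJavascript = require('serialize-javascript');
const workerpool = require('workerpool');

const pool = workerpool.pool(require.resolve('./worker.js'), {
  workerType: 'process'
});

pool
  .exec('run', [
    '/abs/path/to/example.spec.js', // hypothetical absolute test file path
    serializeJavascript({ui: 'bdd', timeout: 2000}, {unsafe: true})
  ])
  .then(result => {
    console.log('failures:', result.failureCount);
    return pool.terminate();
  });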