diff --git a/bin/codecept.js b/bin/codecept.js index b1e0a3737..1af6c5773 100755 --- a/bin/codecept.js +++ b/bin/codecept.js @@ -95,7 +95,7 @@ program.command('run [test]') .option('--features', 'run only *.feature files and skip tests') .option('--tests', 'run only JS test files and skip features') .option('-p, --plugins ', 'enable plugins, comma-separated') - + .option('--failed', 'to run failed/custom Tests') // mocha options .option('--colors', 'force enabling of colors') .option('--no-colors', 'force disabling of colors') @@ -133,6 +133,7 @@ program.command('run-workers ') .option('-p, --plugins ', 'enable plugins, comma-separated') .option('-O, --reporter-options ', 'reporter-specific options') .option('-R, --reporter ', 'specify the reporter to use') + .option('--failed', 'to run failed/custom Tests') .action(require('../lib/command/run-workers')); program.command('run-multiple [suites...]') diff --git a/docs/plugins.md b/docs/plugins.md index 538cfe864..1f9a9be2c 100644 --- a/docs/plugins.md +++ b/docs/plugins.md @@ -918,6 +918,52 @@ In the same manner additional services from webdriverio can be installed, enable - `config` +## reRunFailedTest +It stores failed scripts from the current execution in failedCases.json + +This plugin allows running +- only the scripts which failed in the previous execution +- any custom scripts provided by the user without any pattern (can be provided in failedCases.json) +- auto-retry of failed scripts from the current execution to detect flakiness + +```js +plugins: { + reRunFailedTest: { + enabled: true, + autoRetry: true + } +} +``` + +Run tests with the plugin enabled +```sh +npx codeceptjs run --plugins reRunFailedTest +``` + +#### Configuration +- autoRetry: automatically retry the failed scripts from the current execution after all scripts are completed + +### Options + + +| Param | Description | | ---------------- | ------------------------------------------------------------------------------ | | failed | Only executes the
failed/custom(selective) scripts present in failedCases.json | + +```sh +npx codeceptjs run --failed +``` +or +```sh +npx codeceptjs run-workers ${workerCount} --failed +``` + +#### Note: +The restart option must be set to true in order to use this plugin +```js +restart: true +``` + [1]: https://user-images.githubusercontent.com/220264/45676511-8e052800-bb3a-11e8-8cbb-db5f73de2add.png [2]: https://github.com/allure-framework/allure2/blob/master/plugins/screen-diff-plugin/README.md diff --git a/failedCases.json b/failedCases.json new file mode 100644 index 000000000..4a8ca554e --- /dev/null +++ b/failedCases.json @@ -0,0 +1 @@ +["test/login.js","test/logout.js"] \ No newline at end of file diff --git a/lib/command/run-workers.js b/lib/command/run-workers.js index c578c39e4..25d9eb0de 100644 --- a/lib/command/run-workers.js +++ b/lib/command/run-workers.js @@ -1,9 +1,10 @@ // For Node version >=10.5.0, have to use experimental flag -const { satisfyNodeVersion } = require('./utils'); +const { satisfyNodeVersion, getConfig } = require('./utils'); const { tryOrDefault } = require('../utils'); const output = require('../output'); const event = require('../event'); const Workers = require('../workers'); +const reRunFailedTest = require('../plugin/reRunFailedTest'); module.exports = async function (workerCount, options) { satisfyNodeVersion( @@ -13,6 +14,9 @@ module.exports = async function (workerCount, options) { process.env.profile = options.profile; + const configFile = options.config; + const Config = getConfig(configFile); + const { config: testConfig, override = '' } = options; const overrideConfigs = tryOrDefault(() => JSON.parse(override), {}); const by = options.suites ?
'suite' : 'test'; @@ -31,22 +35,26 @@ module.exports = async function (workerCount, options) { const workers = new Workers(numberOfWorkers, config); workers.overrideConfig(overrideConfigs); - workers.on(event.test.failed, (failedTest) => { - output.test.failed(failedTest); - }); - - workers.on(event.test.passed, (successTest) => { - output.test.passed(successTest); - }); - - workers.on(event.all.result, () => { - workers.printResults(); - }); - - try { - await workers.bootstrapAll(); - await workers.run(); - } finally { - await workers.teardownAll(); + if (Config.plugins && Config.plugins.reRunFailedTest && Config.plugins.reRunFailedTest.enabled === true) { + await reRunFailedTest(workers, { config: Config, options: config.options }, false); + } else { + workers.on(event.test.failed, (failedTest) => { + output.test.failed(failedTest); + }); + + workers.on(event.test.passed, (successTest) => { + output.test.passed(successTest); + }); + + workers.on(event.all.result, () => { + workers.printResults(); + }); + + try { + await workers.bootstrapAll(); + await workers.run(); + } finally { + await workers.teardownAll(); + } } }; diff --git a/lib/command/run.js b/lib/command/run.js index d70972e0e..741356811 100644 --- a/lib/command/run.js +++ b/lib/command/run.js @@ -3,6 +3,7 @@ const { } = require('./utils'); const Config = require('../config'); const Codecept = require('../codecept'); +const reRunFailedTest = require('../plugin/reRunFailedTest'); module.exports = async function (test, options) { // registering options globally to use in config @@ -22,9 +23,13 @@ module.exports = async function (test, options) { try { codecept.init(testRoot); - await codecept.bootstrap(); - codecept.loadTests(); - await codecept.run(test); + if (config.plugins && config.plugins.reRunFailedTest && config.plugins.reRunFailedTest.enabled === true) { + await reRunFailedTest(codecept, { options, config, testRoot }, true); + } else { + await codecept.bootstrap(); + codecept.loadTests(); + await 
codecept.run(test); + } } catch (err) { printError(err); process.exitCode = 1; diff --git a/lib/plugin/reRunFailedTest.js b/lib/plugin/reRunFailedTest.js new file mode 100644 index 000000000..9a40f1587 --- /dev/null +++ b/lib/plugin/reRunFailedTest.js @@ -0,0 +1,171 @@ +const event = require('../event'); +const { writeFailedTest, getFailedTest } = require('../reRunFailedTest'); +const container = require('../container'); +const output = require('../output'); + +const failedScripts = new Set(); +const failedScriptsId = new Set(); +const testScriptsName = new Set(); +let mochaStatsBackup = {}; + +const sequentialRun = async (codecept, options) => { + codecept.loadTests(); + if (options.options.failed) { + const testFiles = getFailedTest(); + for (let i = 0; i < testFiles.length; i++) { + if (!codecept.testFiles.includes(testFiles[i])) { + output.print(`Invalid Script Path${testFiles[i]}`); + testFiles.splice(i, 1); + i--; + } + } + if (testFiles.length > 0) { + output.print(`Failed Scripts from previous execution are ${testFiles}`); + await run(testFiles, false); + } else { + output.print('No valid failed scripts from previous execution'); + await writeFailedTest([]); + } + } else { + await run(codecept.testFiles, false); + } + if (options.config.plugins.reRunFailedTest.autoRetry === true) { + output.print('Auto Retrying Failed Scripts'); + const testFiles = getFailedTest(); + if (testFiles.length > 0) { + output.print('Failed Scripts from previous execution are ', testFiles); + await run(testFiles, true); + } + } +}; + +const run = (testFiles, retryFlag) => { + return new Promise((resolve, reject) => { + // @ts-ignore + container.createMocha(); + const mocha = container.mocha(); + testFiles.forEach((file) => { + delete require.cache[file]; + }); + mocha.files = testFiles; + const done = () => { + if (retryFlag === true) { + output.result(mocha._previousRunner.stats.passes, mocha._previousRunner.stats.failures, mocha._previousRunner.stats.pending, 
`${mocha._previousRunner.stats.duration || 0 / 1000}s`); + } + event.dispatcher.on(event.all.after, (test) => { + writeFailedTest(Array.from(failedScripts)); + }); + event.emit(event.all.result, this); + event.emit(event.all.after, this); + resolve(); + }; + try { + event.emit(event.all.before, this); + event.dispatcher.on(event.test.failed, (test) => { + failedScripts.add(test.file); + }); + mocha.run(() => { + if (retryFlag === false) { + mochaStatsBackup = mocha._previousRunner.stats; + } + if (retryFlag === true) { + mocha._previousRunner.stats.passes += mochaStatsBackup.passes; + } + done(); + }); + } catch (e) { + output.error(e.stack); + reject(e); + } + }); +}; + +const parallelRun = async (workers, options) => { + workers.on(event.test.failed, (failedTest) => { + const failTest = workers.testDetails.filter(t => t.id === failedTest.id); + failedScripts.add(failTest[0].file); + output.test.failed(failedTest); + }); + workers.on(event.test.passed, (successTest) => { + output.test.passed(successTest); + }); + workers.on(event.all.result, () => { + writeFailedTest(Array.from(failedScripts)); + printResults(workers); + }); + if (options.options.failed) { + workers = loadFailedScriptsForWorkers(workers); + } + try { + workers.numberOfWorkers = workers.workers.length; + await workers.bootstrapAll(); + await workers.run(); + if (options.config.plugins.reRunFailedTest.autoRetry === true) { + output.print('Auto Retrying Failed Scripts'); + workers = await loadFailedScriptsForWorkers(workers); + if (workers.workers.length > 0) { + workers.numberOfWorkers += workers.workers.length; + workers.finishedTests = {}; + workers.stats.failures = 0; + await workers.run(); + } + } + } finally { + await workers.teardownAll(); + } +}; + +const loadFailedScriptsForWorkers = (workers) => { + const testFiles = getFailedTest(); + if (testFiles.length > 0) { + for (let i = 0; i < workers.testDetails.length; i++) { + if (testFiles.includes(workers.testDetails[i].file)) { + 
failedScriptsId.add(workers.testDetails[i].id); + } + testScriptsName.add(workers.testDetails[i].file); + } + for (let i = 0; i < testFiles.length; i++) { + if (!testScriptsName.has(testFiles[i])) { + output.print(`Invalid Script Path ${testFiles[i]}`); + testFiles.splice(i, 1); + i--; + } + } + } + if (testFiles.length > 0) { + output.print('Failed Scripts from previous execution are ', testFiles); + for (let i = 0; i < workers.workers.length; i++) { + for (let j = 0; j < workers.workers[i].tests.length; j++) { + if (!failedScriptsId.has(workers.workers[i].tests[j])) { + workers.workers[i].tests.splice(j, 1); + j--; + } + } + if (workers.workers[i].tests.length === 0) { + workers.workers.splice(i, 1); + i--; + } + } + } else { + writeFailedTest([]); + workers.workers = []; + } + return workers; +}; + +const reRunFailedTest = async (config, options, sequentialFlag) => { + if (sequentialFlag === true) { + await sequentialRun(config, options); + } else if (sequentialFlag === false) { + await parallelRun(config, options); + } +}; + +module.exports = reRunFailedTest; + +const printResults = (workers) => { + workers.stats.end = new Date(); + workers.stats.duration = workers.stats.end - workers.stats.start; + output.print(); + output.result(workers.stats.passes, workers.stats.failures, workers.stats.pending, `${workers.stats.duration || 0 / 1000}s`); +}; diff --git a/lib/reRunFailedTest.js b/lib/reRunFailedTest.js new file mode 100644 index 000000000..70c91a166 --- /dev/null +++ b/lib/reRunFailedTest.js @@ -0,0 +1,29 @@ +const fs = require('fs'); +const { print } = require('./output'); + +const writeFailedTest = (failedTests) => { + if (failedTests.length !== 0) { + fs.writeFileSync('failedCases.json', JSON.stringify(failedTests)); + } else if (fs.existsSync('failedCases.json')) { + fs.unlinkSync('failedCases.json'); + } +}; + +exports.writeFailedTest = writeFailedTest; + +const getFailedTest = () => { + if (!fs.existsSync('failedCases.json')) { + print('There Are No 
Failed/Custom Scripts From Previous Execution'); + } else { + const failedTests = JSON.parse(fs.readFileSync('failedCases.json', 'utf8')); + if (failedTests.length === 0 || failedTests.toString() === '') { + print('There Are No Failed/Custom Scripts From Previous Execution'); + writeFailedTest([]); + } else { + return failedTests; + } + } + return []; +}; + +exports.getFailedTest = getFailedTest; diff --git a/lib/workers.js b/lib/workers.js index 437e82a26..1b8909279 100644 --- a/lib/workers.js +++ b/lib/workers.js @@ -176,6 +176,8 @@ class Workers extends EventEmitter { pending: 0, }; this.testGroups = []; + // contains all the tests objects + this.testDetails = []; createOutputDir(config.testConfig); if (numberOfWorkers) this._initWorkers(numberOfWorkers, config); @@ -241,6 +243,7 @@ class Workers extends EventEmitter { mocha.suite.eachTest((test) => { const i = groupCounter % groups.length; if (test) { + this.testDetails.push(test); const { id } = test; groups[i].push(id); groupCounter++; diff --git a/test/unit/reRunFailedTest_test.js b/test/unit/reRunFailedTest_test.js new file mode 100644 index 000000000..7d2eeb7d5 --- /dev/null +++ b/test/unit/reRunFailedTest_test.js @@ -0,0 +1,24 @@ +const { expect } = require('chai'); +const assert = require('assert'); +const sinon = require('sinon'); +const { getFailedTest, writeFailedTest } = require('../../lib/reRunFailedTest'); + +const options = { + failed: true, +}; + +describe('Get Failed/Custom Test', () => { + it('should exit the process as failedCases.json is either empty or does not exist', async () => { + await writeFailedTest([]); + sinon.stub(process, 'exit'); + await getFailedTest(options); + assert(process.exit.isSinonProxy); + }); + + it('should return the test scripts failed from previous execution', async () => { + const failedScripts = ['test/login.js', 'test/logout.js']; + await writeFailedTest(failedScripts); + const failedTests = getFailedTest(options); + expect(failedScripts).to.eql(failedTests); + 
}); +});