[WIP] feat: mark and skip flaky test #746

Closed · wants to merge 4 commits
1 change: 1 addition & 0 deletions .gitignore
@@ -8,3 +8,4 @@ tmp-*
.eslintcache
.ncu
package-lock.json
.vscode
28 changes: 21 additions & 7 deletions bin/ncu-ci.js
@@ -31,6 +31,7 @@ import auth from '../lib/auth.js';
import Request from '../lib/request.js';
import CLI from '../lib/cli.js';
import { hideBin } from 'yargs/helpers';
import { markFlakyTests } from '../lib/ci/ci_mark_flaky.js';

setVerbosityFromEnv();

@@ -82,7 +83,7 @@ const args = yargs(hideBin(process.argv))
.option('cache', {
default: false,
describe: 'Cache the responses from Jenkins in .ncu/cache/ under' +
' the node-core-utils installation directory'
' the node-core-utils installation directory'
})
.option('limit', {
default: 99,
@@ -91,13 +92,17 @@ const args = yargs(hideBin(process.argv))
.option('since <date>', {
type: 'string',
describe: 'Time since when the CI results should be queried'
}).option('mark-flaky', {
describe: 'Mark tests with inconsistent failures as flaky',
type: 'boolean',
default: false
}).check(argv => {
try {
// eslint-disable-next-line no-new
new Date(argv.since);
} catch {
throw new Error('--since <date> should be string that can ' +
'be parsed by new Date()');
'be parsed by new Date()');
}
return true;
});
@@ -200,7 +205,7 @@ const args = yargs(hideBin(process.argv))
.option('cache', {
default: false,
describe: 'Cache the responses from Jenkins in .ncu/cache/ under' +
' the node-core-utils installation directory'
' the node-core-utils installation directory'
})
.option('limit', {
default: 15,
@@ -227,6 +232,10 @@ const args = yargs(hideBin(process.argv))
.option('markdown <path>', {
type: 'string',
describe: 'Write the results as markdown to <path>'
}).option('mark-flaky', {
describe: 'Mark tests with inconsistent failures as flaky',
type: 'boolean',
default: false
}).check(argv => {
if (argv.markdown && commandKeys.includes(argv.markdown)) {
throw new Error('--markdown <path> did not specify a valid path');
@@ -275,11 +284,11 @@ class RunPRJobCommand {
if (!repo) {
validArgs = false;
cli.error('GitHub repository is missing, please set it via ncu-config ' +
'or pass it via the --repo option');
'or pass it via the --repo option');
}
if (!owner) {
cli.error('GitHub owner is missing, please set it via ncu-config ' +
'or pass it via the --owner option');
'or pass it via the --owner option');
validArgs = false;
}
if (!validArgs) {
@@ -379,7 +388,7 @@ class CICommand {
}
}

async aggregate() {} // noop
async aggregate() { } // noop

async serialize() {
const { argv, cli } = this;
@@ -428,6 +437,7 @@ class RateCommand extends CICommand {
class WalkCommand extends CICommand {
constructor(cli, request, argv) {
super(cli, request, argv);
this.markFlaky = argv.markFlaky;
if (argv.cache) {
jobCache.enable();
}
@@ -453,7 +463,11 @@ class WalkCommand extends CICommand {
return;
}
const aggregator = new FailureAggregator(cli, this.json);
this.json = aggregator.aggregate();
const aggregation = aggregator.aggregate();
if (this.markFlaky) {
await markFlakyTests(aggregation);
}
this.json = aggregation;
cli.log('');
cli.separator('Stats');
cli.log('');
2 changes: 2 additions & 0 deletions docs/ncu-ci.md
@@ -32,6 +32,8 @@ Options:
[boolean] [default: false]
--json <path> Write the results as json to <path> [string]
--markdown <path> Write the results as markdown to <path> [string]
--mark-flaky If running walk, mark tests with inconsistent failures as flaky.
[boolean] [default: false]
--help Show help [boolean]
```
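
For example, a run that walks recent CI results, aggregates the failures, and also marks flaky tests might look like this (a sketch; the exact job type and flags depend on your workflow):

```
ncu-ci walk commit --stats --mark-flaky
```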

229 changes: 229 additions & 0 deletions lib/ci/ci_mark_flaky.js
@@ -0,0 +1,229 @@
import _ from 'lodash';
import { createWriteStream } from 'node:fs';
import { appendFile, open, rename } from 'node:fs/promises';
import { Transform } from 'node:stream';
import { pipeline } from 'node:stream/promises';

// minimum number of PRs in which a test must have failed to be considered flaky
const FLAKY_TEST_PR_THRESHOLD = 5;

export async function markFlakyTests(aggregation) {
try {
const tests = getFlakyTests(aggregation);
// group tests by type (ex: parallel, pummel)
const groupedByType = _.groupBy(tests, ({ file }) => file.split('/', 1));

for (const [type, failedTests] of Object.entries(groupedByType)) {
await editStatusFile(type, failedTests);
}
} catch (error) {
console.error(error);
}
}

export function getFlakyTests(aggregation) {
const failedRuns = [];
const { JS_TEST_FAILURE } = aggregation;

for (const failedTest of JS_TEST_FAILURE) {
const { failures, prs } = failedTest;

// if the test has failed in fewer PRs than the threshold, do not consider it flaky
if (!failures || prs.length < FLAKY_TEST_PR_THRESHOLD) continue;

for (const failure of failures) {
const { builtOn, file } = failure;
if (!builtOn) continue;
const { system, architecture } = parseSystemArchitecture(builtOn);
failedRuns.push({
builtOn,
file,
system,
architecture,
written: false
});
}
}

return failedRuns;
}

function matchSystem(rawSystem) {
let system;
switch (true) {
case rawSystem.includes('container'):
system = 'docker';
break;
case rawSystem.includes('win'):
system = 'win32';
break;
case rawSystem.includes('fedora'):
case rawSystem.includes('ubuntu'):
case rawSystem.includes('rhel'):
case rawSystem.includes('debian'):
system = 'linux';
break;
case rawSystem.includes('macos'):
system = 'macos';
break;
case rawSystem.includes('solaris'):
case rawSystem.includes('smartos'):
system = 'solaris';
break;
case rawSystem.includes('freebsd'):
system = 'freebsd';
break;
case rawSystem.includes('aix72'):
system = 'aix';
break;
default:
system = rawSystem;
break;
}

return system;
}

function matchArchitecture(rawArchitecture) {
let architecture;
switch (true) {
case rawArchitecture.includes('arm64'):
architecture = 'arm64';
break;
case rawArchitecture.includes('arm'):
architecture = 'arm';
break;
case rawArchitecture.includes('s390x'):
case rawArchitecture.includes('ppc64'):
architecture = 'ibm';
break;
default:
architecture = rawArchitecture;
break;
}
return architecture;
}

function parseSystemArchitecture(builtOn) {
const buildInfos = builtOn.split('-');
const rawArchitecture = buildInfos[buildInfos.length - 2]; // second last element is architecture
const rawSystem = buildInfos[buildInfos.length - 3]; // third last element is os
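// e.g. a (hypothetical) Jenkins node name 'test-provider-win2019-x64-3' splits into
// ['test', 'provider', 'win2019', 'x64', '3'] -> system 'win32', architecture 'x64'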

return {
architecture: matchArchitecture(rawArchitecture),
system: matchSystem(rawSystem)
};
}

async function editStatusFile(type, failedTests) {
try {
const testsGroupedByHeader = _.groupBy(failedTests,
(f) => `[$system==${f.system} && $arch==${f.architecture}]`);
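// keys look like '[$system==linux && $arch==arm64]', matching .status section headers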
// assume the .status file exists
const fileName = `./test/${type}/${type}.status`;
const tmpFile = `${fileName}.tmp`;
const file = await open(fileName);

await pipeline(
file.readLines(),
new FlakyTestTransform(testsGroupedByHeader),
createWriteStream(tmpFile) // write output on a temp file
);

// if the header was not present we append it at the end of the file
await appendTestsWithNewHeader(tmpFile, testsGroupedByHeader);

// replace original file with tmp
await rename(tmpFile, fileName);
} catch (error) {
// the .status file might not exist or could not be processed
console.error(error);
}
}

function generateSkipFileLine(file) {
// keep only the filename, dropping the directory prefix (e.g. 'parallel/test-foo' -> 'test-foo')
const filename = file.split('/')[1];
return `${filename}: PASS, FLAKY`;
}

function appendTestsWithNewHeader(tmpFile, testsGroupedByHeader) {
const text = [];

for (const [header, failedTests] of Object.entries(testsGroupedByHeader)) {
// skip if there are no failedTests, or none of them still needs to be written
if (!failedTests?.length || !failedTests.some(f => f.written === false)) continue;

// add a blank line above the header for readability
text.push('', header);

// add test lines in a set to avoid duplicates
const newLines = new Set();
for (const failedTest of failedTests) {
// skip tests that were already written by the transform because their header was found
if (failedTest.written) continue;
newLines.add(generateSkipFileLine(failedTest.file));
}
text.push(...newLines);
}

return appendFile(tmpFile, text.join('\n'));
}

class FlakyTestTransform extends Transform {
constructor(testsGroupedByHeader) {
super();
this.testsGroupedByHeader = testsGroupedByHeader;
this.bufferedLines = [];
}

_transform(chunk, _encoding, callback) {
const chunkStringified = chunk.toString();

const isHeader = chunkStringified.startsWith('[');

if (
// if it's an empty line, pass through
chunkStringified === '' ||
// if it's not a header and we are not buffering, pass through
(!isHeader && !this.bufferedLines.length) ||
// if it's a header with no mapped tests, pass through
(isHeader && !(chunkStringified in this.testsGroupedByHeader))
) {
callback(null, chunkStringified + '\n');
return;
}

// not a header but a test line (e.g. test-tls-client-mindhsize: PASS, FLAKY)
if (!isHeader) {
this.bufferedLines.push(chunkStringified + '\n');
callback();
return;
}

// when we hit a new header, flush bufferedLines and reset
this.push(this.bufferedLinesToString());
this.bufferedLines = [];

this.bufferedLines.push(chunkStringified + '\n');

for (const failedTest of this.testsGroupedByHeader[chunkStringified]) {
// set written to true because we are buffering it
failedTest.written = true;
const skipFileLine = generateSkipFileLine(failedTest.file);
this.bufferedLines.push(skipFileLine + '\n');
}

callback();
}

bufferedLinesToString() {
return _.uniq(this.bufferedLines).join('');
}

_flush(callback) {
// Flush any remaining buffered lines
callback(null, this.bufferedLinesToString());
}
}
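
For reference, the kind of section this script appends to a `.status` file would look roughly like the following (a sketch; the test name is hypothetical, and the header depends on which platforms the failures were seen on):

```
[$system==linux && $arch==arm64]
test-example-flaky: PASS, FLAKY
```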