From f51908b1799d2c5f7060dbb56b93a16088159534 Mon Sep 17 00:00:00 2001
From: Erich Gubler
Date: Wed, 20 Dec 2023 17:49:45 -0500
Subject: [PATCH] WIP: feat(triage): add `PASS`ing count to per-platform
 analysis

TODO: Actually do the counting properly, and test it.
---
 moz-webgpu-cts/src/main.rs | 34 ++++++++++++++++++++++++++++++----
 1 file changed, 30 insertions(+), 4 deletions(-)

diff --git a/moz-webgpu-cts/src/main.rs b/moz-webgpu-cts/src/main.rs
index 49553ce..18191fb 100644
--- a/moz-webgpu-cts/src/main.rs
+++ b/moz-webgpu-cts/src/main.rs
@@ -805,9 +805,11 @@ fn run(cli: Cli) -> ExitCode {
     #[derive(Clone, Debug, Default)]
     struct PerPlatformAnalysis {
+        tests_fully_passing: TestSet,
         tests_with_runner_errors: TestSet,
         tests_with_disabled_or_skip: TestSet,
         tests_with_crashes: TestSet,
+        subtests_passing_by_test: SubtestByTestSet,
         subtests_with_failures_by_test: SubtestByTestSet,
         subtests_with_timeouts_by_test: SubtestByTestSet,
     }
 
@@ -935,13 +937,21 @@ fn run(cli: Cli) -> ExitCode {
     fn analyze_test_outcome<F>(
         test_name: &Arc<SectionName>,
         expectation: Expectation<TestOutcome>,
+        subtests: &BTreeMap<SectionName, Subtest>,
         mut receiver: F,
     ) where
         F: FnMut(&mut dyn FnMut(&mut PerPlatformAnalysis)),
     {
         for outcome in expectation.iter() {
             match outcome {
-                TestOutcome::Ok => (),
+                TestOutcome::Ok => receiver(&mut |analysis| {
+                    insert_in_test_set(
+                        &mut analysis.tests_fully_passing,
+                        test_name,
+                        expectation,
+                        outcome,
+                    )
+                }),
                 // We skip this because this test _should_ contain subtests with
                 // `TIMEOUT` and `NOTRUN`, so we shouldn't actually miss anything.
                 TestOutcome::Timeout => (),
@@ -974,13 +984,13 @@ fn run(cli: Cli) -> ExitCode {
         }
 
         let apply_to_all_platforms = |analysis: &mut Analysis, expectation| {
-            analyze_test_outcome(&test_name, expectation, |f| {
+            analyze_test_outcome(&test_name, expectation, &subtests, |f| {
                 analysis.for_each_platform_mut(f)
             })
         };
         let apply_to_specific_platforms = |analysis: &mut Analysis, platform, expectation| {
-            analyze_test_outcome(&test_name, expectation, |f| {
+            analyze_test_outcome(&test_name, expectation, &subtests, |f| {
                 analysis.for_platform_mut(platform, f)
             })
         };
 
@@ -1047,7 +1057,15 @@ fn run(cli: Cli) -> ExitCode {
             {
                 for outcome in expectation.iter() {
                     match outcome {
-                        SubtestOutcome::Pass => (),
+                        SubtestOutcome::Pass => receiver(&mut |analysis| {
+                            insert_in_subtest_by_test_set(
+                                &mut analysis.subtests_passing_by_test,
+                                test_name,
+                                subtest_name,
+                                expectation,
+                                outcome,
+                            )
+                        }),
                         SubtestOutcome::Timeout | SubtestOutcome::NotRun => {
                             receiver(&mut |analysis| {
                                 insert_in_subtest_by_test_set(
@@ -1140,13 +1158,20 @@ fn run(cli: Cli) -> ExitCode {
             OnZeroItem::Hide => false,
         };
 
         let PerPlatformAnalysis {
+            tests_fully_passing,
             tests_with_runner_errors,
             tests_with_disabled_or_skip,
             tests_with_crashes,
+            subtests_passing_by_test,
             subtests_with_failures_by_test,
             subtests_with_timeouts_by_test,
         } = analysis;
+
+        let tests_fully_passing = lazy_format!(
+            "{} test(s) permanently `PASS`ing",
+            tests_fully_passing.perma.len()
+        );
         let PermaAndIntermittent {
             perma: num_tests_with_perma_runner_errors,
             intermittent: num_tests_with_intermittent_runner_errors,
@@ -1321,6 +1346,7 @@ fn run(cli: Cli) -> ExitCode {
             item.map(|disp| disp as &dyn Display)
         }
         let sections = [
+            Some(Box::new(tests_fully_passing) as Box<dyn Display>),
             priority_section(
                 "HIGH",
                 [
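
Note on the remaining TODO: the new `subtests` parameter is threaded into
`analyze_test_outcome` but never read yet. Presumably, "counting properly"
means a test should only land in the perma-`PASS` bucket when, besides its own
expectation being exactly `OK`, every one of its subtests expects exactly
`PASS`. Below is a minimal, self-contained sketch of that subtest check;
`SectionName`, `Subtest`, and this `Expectation` are simplified stand-ins for
illustration, not the actual definitions in moz-webgpu-cts/src/main.rs.

    use std::collections::BTreeMap;

    // Stand-in for the real section-name type (assumed, simplified).
    type SectionName = String;

    #[derive(Clone, Copy, PartialEq, Eq)]
    #[allow(dead_code)]
    enum SubtestOutcome {
        Pass,
        Fail,
        Timeout,
        NotRun,
    }

    // The set of outcomes a subtest is expected to produce; more than one
    // expected outcome means the subtest is intermittent.
    struct Expectation<Out>(Vec<Out>);

    impl<Out: Copy + Eq> Expectation<Out> {
        // True when `outcome` is the *only* expected outcome.
        fn is_permanently(&self, outcome: Out) -> bool {
            matches!(self.0.as_slice(), [only] if *only == outcome)
        }
    }

    struct Subtest {
        expectation: Expectation<SubtestOutcome>,
    }

    // A test counts as fully passing only if every subtest is expected to
    // permanently `PASS`; a test with no subtests trivially qualifies.
    fn all_subtests_pass(subtests: &BTreeMap<SectionName, Subtest>) -> bool {
        subtests
            .values()
            .all(|subtest| subtest.expectation.is_permanently(SubtestOutcome::Pass))
    }

    fn main() {
        let mut subtests = BTreeMap::new();
        subtests.insert(
            "cmd_buf_usage".to_string(),
            Subtest {
                expectation: Expectation(vec![SubtestOutcome::Pass]),
            },
        );
        assert!(all_subtests_pass(&subtests));

        // An intermittent subtest disqualifies the test.
        subtests.insert(
            "texture_binding".to_string(),
            Subtest {
                expectation: Expectation(vec![SubtestOutcome::Pass, SubtestOutcome::Timeout]),
            },
        );
        assert!(!all_subtests_pass(&subtests));
    }

Wired into the patch, a guard along the lines of `if all_subtests_pass(subtests)`
around the `insert_in_test_set` call in the `TestOutcome::Ok` arm would keep
tests with flaky or failing subtests out of the fully-passing count.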