style: make flake8 happy
tomli380576 committed Aug 27, 2024
1 parent 776a9f2 commit 66bcf36
Showing 2 changed files with 77 additions and 32 deletions.
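Nearly every hunk below applies the same fix: argparse help strings and f-strings that exceeded flake8's line-length limit (E501, 79 characters by default) are wrapped using implicit string concatenation inside parentheses, and `== False` comparisons are replaced with `not (...)` (E712). A minimal sketch of the wrapping pattern, using a hypothetical argument and help text for illustration:

import argparse

p = argparse.ArgumentParser()

# A long help string on one line would trip E501. Adjacent string literals
# inside parentheses are concatenated at compile time, so splitting the text
# keeps every source line short without changing the rendered help.
# Remember a trailing space at the end of each fragment so the pieces do not
# run together when joined.
p.add_argument(
    "--path",
    required=True,
    help=(
        "The path to the directory to scan. "
        "Every log file in it is compared against the baseline."
    ),
)

print(p.format_help())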
55 changes: 38 additions & 17 deletions Tools/PC/c3-submission-helpers/cbwb-diffs.py
File mode changed: 100644 → 100755
@@ -5,6 +5,7 @@
import os
from collections import defaultdict
import argparse
from typing import Literal # noqa: F401


def inclusive_range(a: int, b: int):
@@ -21,23 +22,32 @@ def orange(s: str):

def parse_args():
p = argparse.ArgumentParser()
p.add_argument("-p", "--path", required=True, help="The path to session-share")
p.add_argument(
"-p", "--path", required=True, help="The path to session-share"
)
p.add_argument(
"-g",
"--group-by",
dest="group_by",
required=False,
default="index",
help="Whether to group by index or group by fail types. Accepts '-g index' or '-g log-name' ",
help=(
"Whether to group by index or group by fail types."
"Accepts '-g index' or '-g log-name' "
),
choices=["index", "log-name"],
)
p.add_argument(
"--ignore", help="ignore differences of these files", type=list
)
return p.parse_args()


def main():
args = parse_args()
print("Checking session share path", args.path)
args.suppress = ["fwts_klog_oops.log"]
# Add names of log files here to suppress comparing them
args.ignored_log_names = []

if args.group_by == "index":
main_by_index(args)
@@ -49,20 +59,23 @@ def main_by_index(args):
warm_fail_count = defaultdict(list)
cold_fail_count = defaultdict(list)
log_names = os.listdir(f"{args.path}/session-share/before_reboot/")
for suppressd in args.suppress:
log_names.remove(suppressd)
for ignored in args.ignored_log_names:
log_names.remove(ignored)

for i, log_name, cold_or_warm in itertools.product(
inclusive_range(1, 30),
log_names,
("warm", "cold"),
):
if (
if not (
cmp(
f"{args.path}/session-share/before_reboot/{log_name}",
f"{args.path}/session-share/{cold_or_warm}_reboot_cycle{i}/{log_name}",
(
f"{args.path}/session-share/"
f"{cold_or_warm}_reboot_cycle{i}/"
f"{log_name}"
),
)
== False
):
if cold_or_warm == "cold":
cold_fail_count[i].append(log_name)
@@ -82,36 +95,44 @@

def main_by_log_name(args):
log_names = os.listdir(f"{args.path}/session-share/before_reboot/")
out = {}
for suppressd in args.suppress:
log_names.remove(suppressd)
out = {} # type: dict[str, dict[Literal['cold', 'warm'], list[int]]]
for ignored in args.ignored_log_names:
log_names.remove(ignored)

for log_name in log_names:
out[log_name] = {
"cold": [],
"warm": [],
} # dict[Literal['cold', 'warm'], list[int]]
}

for i, log_name, cold_or_warm in itertools.product(
inclusive_range(1, 30),
log_names,
("warm", "cold"),
):
if (
if not (
cmp(
f"{args.path}/session-share/before_reboot/{log_name}",
f"{args.path}/session-share/{cold_or_warm}_reboot_cycle{i}/{log_name}",
(
f"{args.path}/session-share/"
f"{cold_or_warm}_reboot_cycle{i}/{log_name}"
),
)
== False
):
out[log_name][cold_or_warm].append(i)

for log_name in log_names:
if len(out[log_name]["cold"]) > 0:
print(f'{log_name} failed in these cold boot runs: {out[log_name]["cold"]}')
print(
f"{log_name} failed in these cold boot runs: "
f'{out[log_name]["cold"]}'
)

if len(out[log_name]["warm"]) > 0:
print(f'{log_name} failed in these warm boot runs: {out[log_name]["warm"]}')
print(
f"{log_name} failed in these warm boot runs: "
f'{out[log_name]["warm"]}'
)


if __name__ == "__main__":
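Beyond the wrapping, cbwb-diffs.py swaps `cmp(...) == False` for `not cmp(...)` (flake8 E712); the comparison loop itself is unchanged: every per-cycle log is compared against the before-reboot baseline and mismatches are recorded per cycle. A self-contained sketch of that logic, assuming `cmp` is `filecmp.cmp` (its import sits outside the shown hunks) and using a hypothetical session-share layout:

import itertools
import os
from collections import defaultdict
from filecmp import cmp  # assumption: the script's `cmp` is filecmp.cmp

session_share = "session-share"  # hypothetical root; the script builds it from args.path
log_names = os.listdir(f"{session_share}/before_reboot/")

cold_fails = defaultdict(list)  # cycle index -> log files that differ
warm_fails = defaultdict(list)

for i, log_name, cold_or_warm in itertools.product(
    range(1, 31),  # same range as the script's inclusive_range(1, 30)
    log_names,
    ("warm", "cold"),
):
    baseline = f"{session_share}/before_reboot/{log_name}"
    candidate = f"{session_share}/{cold_or_warm}_reboot_cycle{i}/{log_name}"
    # E712 fix: `not cmp(...)` replaces `cmp(...) == False`
    if not cmp(baseline, candidate):
        bucket = cold_fails if cold_or_warm == "cold" else warm_fails
        bucket[i].append(log_name)

for i in sorted(cold_fails):
    print(f"cold cycle {i}: {cold_fails[i]}")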
54 changes: 39 additions & 15 deletions Tools/PC/c3-submission-helpers/parse-suspend-30-logs.py
@@ -41,27 +41,35 @@ def parse_args() -> Input:
"-w",
"--write-individual-files",
action="store_true",
help="If specified, the logs will be split up into individual files in a directory specified with -d",
help=(
"If specified, the logs will be split up into individual files "
"in a directory specified with -d"
),
)
p.add_argument(
"-d",
"--directory",
dest="write_directory",
default="",
required=False,
help="Where to write the individual logs. If not specified and the -w flag is true, it will create a new local directory called {your original file name}-split",
help=(
"Where to write the individual logs. "
"If not specified and the -w flag is true, "
"it will create a new local directory called "
"{your original file name}-split"
),
)
p.add_argument(
"-v",
"--verbose",
help="Show individual line numbers of where the errors are in th input file",
help="Show line numbers of where the errors are in th input file",
dest="verbose",
action="store_true",
)
p.add_argument(
"-n",
"--num-runs",
help="Set the expected number of runs in the input file. Default is 90.",
help="Set the expected number of runs in the input file. Default=90.",
dest="num_runs",
required=False,
)
@@ -73,7 +81,10 @@ def parse_args() -> Input:

FailType = Literal["Critical", "High", "Medium", "Low", "Other"]

default_name = "attachment_files/com.canonical.certification__stress-tests_suspend-30-cycles-with-reboot-3-log-attach"
default_name = (
"attachment_files/com.canonical.certification__"
"stress-tests_suspend-30-cycles-with-reboot-3-log-attach"
)


def main():
@@ -84,13 +95,15 @@ def main():
print(f"{C.ok}Expecting 90 results{C.end}")

if args.write_individual_files:
print(f'Individual test results will be written to "{args.write_directory}"')
print(f'Individual results will be in "{args.write_directory}"')

file_in = None
if args.filename.endswith(".tar.xz"):
extracted = tarfile.open(args.filename).extractfile(default_name)
if extracted is None:
raise ValueError(f"Failed to extract {default_name} from {args.filename}")
raise ValueError(
f"Failed to extract {default_name} from {args.filename}"
)
file_in = io.TextIOWrapper(extracted)
else:
file_in = open(args.filename)
@@ -109,7 +122,7 @@ def main():

i = 0
while i < len(lines) and SECTION_BEGIN not in lines[i]:
i += 1 # scroll to the first section
i += 1 # scroll to the first section

while i < len(lines):
line = lines[i]
@@ -128,7 +141,9 @@
curr_result_lines.append(curr_line)

if curr_line.startswith("This test run on"):
# This test run on 13/08/24 at 01:10:22 on host Linux ubuntu 6.5.0-1027-oem
# Example:
# This test run on 13/08/24 at
# 01:10:22 on host Linux ubuntu 6.5.0-1027-oem
regex = r"This test run on (.*) at (.*) on host (.*)"
match_output = re.match(regex, curr_line)
if match_output:
@@ -142,13 +157,16 @@
if fail_count == "NONE":
continue
if args.verbose:
t = fail_type.lower()
print(
f"Line {i}, run {len(test_results) + 1} has "
f"{getattr(C, fail_type.lower())}{fail_type.lower()}{C.end} "
f"{getattr(C, t)}{t}{C.end} "
f"failures: {fail_count}"
)

failed_runs_by_type[fail_type].append(len(test_results) + 1)
failed_runs_by_type[fail_type].append(
len(test_results) + 1
)
i += 1

if args.write_individual_files:
@@ -158,9 +176,11 @@
f"{args.write_directory}/{len(test_results) + 1}.txt", "w"
) as f:
f.write(f"{' BEGIN METADATA ':*^80}\n\n")
f.write("\n".join(f"{k}: {v}" for k, v in curr_meta.items()))
f.write(
f"\n\n{f' END OF METADATA, BEGIN ORIGINAL OUTPUT ':*^80}\n\n"
"\n".join(f"{k}: {v}" for k, v in curr_meta.items())
)
f.write(
f"{' END OF METADATA, BEGIN ORIGINAL OUTPUT ':*^80}"
)
f.write("\n".join(curr_result_lines))

@@ -174,14 +194,18 @@
(
f"{C.ok}COUNT OK!{C.end}"
if n_results == EXPECTED_NUM_RESULTS
else f"Expected {EXPECTED_NUM_RESULTS} != {C.critical}{n_results}{C.end}"
else (
f"Expected {EXPECTED_NUM_RESULTS} "
f"!= {C.critical}{n_results}{C.end}"
)
),
)

for fail_type, runs in failed_runs_by_type.items():
if len(runs) != 0:
print(
f"Runs with {getattr(C, fail_type.lower())}{fail_type}{C.end} failures: {runs}"
f"Runs with {getattr(C, fail_type.lower())}"
f"{fail_type}{C.end} failures: {runs}"
)


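For context, the summary block at the end of parse-suspend-30-logs.py prints a count check and the failing runs per severity. A standalone sketch of that output, assuming `C` is a small class of ANSI color constants (only its attribute names appear in the hunks) and using made-up run numbers:

from collections import defaultdict


class C:  # assumed stand-in for the script's color helper; only these names appear above
    ok = "\033[92m"
    critical = "\033[91m"
    high = "\033[93m"
    medium = "\033[95m"
    low = "\033[94m"
    other = "\033[96m"
    end = "\033[0m"


EXPECTED_NUM_RESULTS = 90
failed_runs_by_type = defaultdict(list)
failed_runs_by_type["Critical"] += [3, 17]  # made-up example data
failed_runs_by_type["Low"] += [42]
n_results = 90

print(
    "Number of results:",
    n_results,
    (
        f"{C.ok}COUNT OK!{C.end}"
        if n_results == EXPECTED_NUM_RESULTS
        else (
            f"Expected {EXPECTED_NUM_RESULTS} "
            f"!= {C.critical}{n_results}{C.end}"
        )
    ),
)

for fail_type, runs in failed_runs_by_type.items():
    if runs:
        print(
            f"Runs with {getattr(C, fail_type.lower())}"
            f"{fail_type}{C.end} failures: {runs}"
        )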
