diff --git a/pytest-embedded-idf/pytest_embedded_idf/unity_tester.py b/pytest-embedded-idf/pytest_embedded_idf/unity_tester.py
index a57cc10b..b3a0b0ee 100644
--- a/pytest-embedded-idf/pytest_embedded_idf/unity_tester.py
+++ b/pytest-embedded-idf/pytest_embedded_idf/unity_tester.py
@@ -422,23 +422,61 @@ def run_all_single_board_cases(
         reset: bool = False,
         timeout: float = 30,
         run_ignore_cases: bool = False,
+        name: t.Optional[t.Union[str, list]] = None,
+        attributes: t.Optional[dict] = None,
     ) -> None:
         """
         Run all single board cases, including multi_stage cases, and normal cases

         Args:
-            group: test case group
-            reset: whether to perform a hardware reset before running a case
-            timeout: timeout. (Default: 30 seconds)
-            run_ignore_cases: run ignored test cases or not
+            group: Test case group, or multiple groups combined with '&' (and) and '|' (or). A group can be negated with a leading '!'.
+            reset: Whether to perform a hardware reset before running a test case.
+            timeout: Timeout in seconds. (Default: 30 seconds)
+            run_ignore_cases: Whether to run ignored test cases as well.
+            name: Test case name, or a list of test case names, to run.
+            attributes: Dictionary of attribute key/value pairs; a case runs only if all pairs match its attributes.
         """
+        if group is None:
+            group = []
+        if isinstance(group, str):
+            _or_conditions = group.split('|')
+            group: list[list[str]] = [[_and.strip() for _and in _or.split('&')] for _or in _or_conditions]
+        if isinstance(name, str):
+            name: list[str] = [name]
+
+        def validate_group(case_groups):
+            for _or in group:
+                for _and in _or:
+                    invert = _and.startswith('!')
+                    _and = _and.lstrip('!')
+                    result = _and in case_groups
+                    if invert:
+                        result = not result
+                    if not result:
+                        break
+                else:
+                    return True
+
+            return False
+
         for case in self.test_menu:
-            if not group or group in case.groups:
-                if not case.is_ignored or run_ignore_cases:
-                    if case.type == 'normal':
-                        self._run_normal_case(case, reset=reset, timeout=timeout)
-                    elif case.type == 'multi_stage':
-                        self._run_multi_stage_case(case, reset=reset, timeout=timeout)
+            should_run = False
+            if not group and not name and not attributes:
+                should_run = True
+            if group and validate_group(case.groups):
+                should_run = True
+            if name and case.name in name:
+                should_run = True
+            if attributes and all(case.attributes.get(k) == v for k, v in attributes.items()):
+                should_run = True
+
+            if not should_run:
+                continue
+            if not case.is_ignored or run_ignore_cases:
+                if case.type == 'normal':
+                    self._run_normal_case(case, reset=reset, timeout=timeout)
+                elif case.type == 'multi_stage':
+                    self._run_multi_stage_case(case, reset=reset, timeout=timeout)


 class _MultiDevTestDut:
diff --git a/pytest-embedded-idf/tests/test_idf.py b/pytest-embedded-idf/tests/test_idf.py
index cac6ef63..f19601bb 100644
--- a/pytest-embedded-idf/tests/test_idf.py
+++ b/pytest-embedded-idf/tests/test_idf.py
@@ -716,6 +716,90 @@ def test_dut_run_all_single_board_cases(dut):
     assert multi_stage.attrib['name'] == 'multiple_stages_test'


+def test_dut_run_all_single_board_cases_group(testdir):
+    testdir.makepyfile(r"""
+        def test_dut_run_all_single_board_cases(dut):
+            dut.run_all_single_board_cases(group="normal_case", timeout=10)
+    """)
+    testdir.runpytest(
+        '-s',
+        '--embedded-services', 'esp,idf',
+        '--app-path', os.path.join(testdir.tmpdir, 'unit_test_app_esp32'),
+        '--log-cli-level', 'DEBUG',
+        '--junitxml', 'report.xml',
+    )
+
+    junit_report = ET.parse('report.xml').getroot()[0]
+
+    assert junit_report.attrib['errors'] == '0'
+    assert junit_report.attrib['failures'] == '1'
+    assert junit_report.attrib['skipped'] == '0'
+    assert junit_report.attrib['tests'] == '1'
+
+
+def test_dut_run_all_single_board_cases_or_groups(testdir):
+    testdir.makepyfile(r"""
+        def test_dut_run_all_single_board_cases(dut):
+            dut.run_all_single_board_cases(group="normal_case|multi_stage", timeout=10)
+    """)
+    testdir.runpytest(
+        '-s',
+        '--embedded-services', 'esp,idf',
+        '--app-path', os.path.join(testdir.tmpdir, 'unit_test_app_esp32'),
+        '--log-cli-level', 'DEBUG',
+        '--junitxml', 'report.xml',
+    )
+
+    junit_report = ET.parse('report.xml').getroot()[0]
+
+    assert junit_report.attrib['errors'] == '0'
+    assert junit_report.attrib['failures'] == '1'
+    assert junit_report.attrib['skipped'] == '0'
+    assert junit_report.attrib['tests'] == '2'
+
+
+def test_dut_run_all_single_board_cases_invert_group(testdir):
+    testdir.makepyfile(r"""
+        def test_dut_run_all_single_board_cases(dut):
+            dut.run_all_single_board_cases(group="!normal_case", timeout=10)
+    """)
+    testdir.runpytest(
+        '-s',
+        '--embedded-services', 'esp,idf',
+        '--app-path', os.path.join(testdir.tmpdir, 'unit_test_app_esp32'),
+        '--log-cli-level', 'DEBUG',
+        '--junitxml', 'report.xml',
+    )
+
+    junit_report = ET.parse('report.xml').getroot()[0]
+
+    assert junit_report.attrib['errors'] == '0'
+    assert junit_report.attrib['failures'] == '0'
+    assert junit_report.attrib['skipped'] == '0'
+    assert junit_report.attrib['tests'] == '1'
+
+
+def test_dut_run_all_single_board_cases_by_names(testdir):
+    testdir.makepyfile(r"""
+        def test_dut_run_all_single_board_cases(dut):
+            dut.run_all_single_board_cases(name=["normal_case1", "multiple_stages_test"])
+    """)
+    testdir.runpytest(
+        '-s',
+        '--embedded-services', 'esp,idf',
+        '--app-path', os.path.join(testdir.tmpdir, 'unit_test_app_esp32'),
+        '--log-cli-level', 'DEBUG',
+        '--junitxml', 'report.xml',
+    )
+
+    junit_report = ET.parse('report.xml').getroot()[0]
+
+    assert junit_report.attrib['errors'] == '0'
+    assert junit_report.attrib['failures'] == '0'
+    assert junit_report.attrib['skipped'] == '0'
+    assert junit_report.attrib['tests'] == '2'
+
+
 def test_unity_test_case_runner(testdir):
     testdir.makepyfile(r"""
         def test_unity_test_case_runner(unity_tester):
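
For reference, a minimal usage sketch of the new filtering options on run_all_single_board_cases. The group expression, case names, and attribute values below are illustrative placeholders rather than cases from the unit test app; as the patched loop shows, the group, name, and attributes filters are OR-combined, while pairs inside a single attributes dict must all match.

    def test_filtered_cases(dut):
        # group expression: cases in 'psram' but not in 'qemu' (hypothetical group names)
        dut.run_all_single_board_cases(group='psram&!qemu', reset=True, timeout=60)
        # explicit case names (hypothetical names)
        dut.run_all_single_board_cases(name=['normal_case1', 'normal_case2'])
        # attribute filter: each key/value pair must compare equal to the case's
        # parsed attributes (hypothetical attribute key and value)
        dut.run_all_single_board_cases(attributes={'timeout': 60})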