diff --git a/src/cli/pytest_commands/consume.py b/src/cli/pytest_commands/consume.py index c5515e15fe..7b210f71b4 100644 --- a/src/cli/pytest_commands/consume.py +++ b/src/cli/pytest_commands/consume.py @@ -131,5 +131,4 @@ def cache(pytest_args: List[str], **kwargs) -> None: """Consume command to cache test fixtures.""" args = handle_consume_command_flags(pytest_args, is_hive=False) args += ["src/pytest_plugins/consume/test_cache.py"] - args += ["--cache-only"] sys.exit(pytest.main(args)) diff --git a/src/pytest_plugins/consume/consume.py b/src/pytest_plugins/consume/consume.py index 6fb2aab252..5e6a8b5616 100644 --- a/src/pytest_plugins/consume/consume.py +++ b/src/pytest_plugins/consume/consume.py @@ -85,6 +85,18 @@ def pytest_addoption(parser): # noqa: D103 f"Defaults to the following local directory: '{default_input_directory()}'." ), ) + consume_group.addoption( + "--cache-folder", + action="store", + dest="fixture_cache_folder", + default=CACHED_DOWNLOADS_DIRECTORY, + help=( + "Specify the path where the downloaded fixtures are cached. " + f"Defaults to the following directory: '{CACHED_DOWNLOADS_DIRECTORY}'." + ), + ) + if "cache" in sys.argv: + return consume_group.addoption( "--fork", action="store", @@ -102,23 +114,6 @@ def pytest_addoption(parser): # noqa: D103 "The --html flag can be used to specify a different path." ), ) - consume_group.addoption( - "--cache-folder", - action="store", - dest="fixture_cache_folder", - default=CACHED_DOWNLOADS_DIRECTORY, - help=( - "Specify the path where the downloaded fixtures should be cached. " - f"Defaults to the following directory: '{CACHED_DOWNLOADS_DIRECTORY}'." - ), - ) - consume_group.addoption( - "--cache-only", - action="store_true", - dest="cache_only", - default=False, - help=("Do not run any tests, only cache the fixtures. "), - ) @pytest.hookimpl(tryfirst=True) @@ -132,6 +127,8 @@ def pytest_configure(config): # noqa: D103 it uses the modified `htmlpath` option. 
""" fixtures_source = config.getoption("fixtures_source") + if "cache" in sys.argv and not config.getoption("fixtures_source"): + pytest.exit("The --input flag is required when using the cache command.") config.fixture_source_flags = ["--input", fixtures_source] if fixtures_source is None: @@ -169,7 +166,7 @@ def pytest_configure(config): # noqa: D103 ) config.test_cases = TestCases.from_index_file(index_file) - if config.option.collectonly: + if config.option.collectonly or "cache" in sys.argv: return if not config.getoption("disable_html") and config.getoption("htmlpath") is None: # generate an html report by default, unless explicitly disabled @@ -203,7 +200,7 @@ def pytest_generate_tests(metafunc): Generate test cases for every test fixture in all the JSON fixture files within the specified fixtures directory, or read from stdin if the directory is 'stdin'. """ - if metafunc.config.getoption("cache_only"): + if "cache" in sys.argv: return fork = metafunc.config.getoption("single_fork") diff --git a/src/pytest_plugins/consume/releases.py b/src/pytest_plugins/consume/releases.py index 65b8a379f9..1ea50349f4 100644 --- a/src/pytest_plugins/consume/releases.py +++ b/src/pytest_plugins/consume/releases.py @@ -140,21 +140,39 @@ def is_docker_or_ci() -> bool: return "GITHUB_ACTIONS" in os.environ or Path("/.dockerenv").exists() -def parse_release_information(release_information: dict) -> List[ReleaseInformation]: +def parse_release_information(release_information: List) -> List[ReleaseInformation]: """Parse the release information from the Github API.""" return Releases.model_validate(release_information).root # type: ignore def download_release_information(destination_file: Path | None) -> List[ReleaseInformation]: - """Download the latest stable and develop releases and optionally save them to a file.""" - response = requests.get(RELEASE_INFORMATION_URL) - response.raise_for_status() - release_information = response.json() - if destination_file is not None: + """ + 
Download all releases from the GitHub API, handling pagination properly. + + GitHub's API returns releases in pages of 30 by default. This function + follows the pagination links to ensure we get every release, which is + crucial for finding older versions or the latest releases. + """ + all_releases = [] + current_url: str | None = RELEASE_INFORMATION_URL + max_pages = 2 + while current_url and max_pages > 0: + max_pages -= 1 + response = requests.get(current_url) + response.raise_for_status() + all_releases.extend(response.json()) + current_url = None + if "link" in response.headers: + for link in requests.utils.parse_header_links(response.headers["link"]): + if link["rel"] == "next": + current_url = link["url"] + break + + if destination_file: + destination_file.parent.mkdir(parents=True, exist_ok=True) + with open(destination_file, "w") as file: + json.dump(all_releases, file) + return parse_release_information(all_releases) def parse_release_information_from_file( @@ -187,7 +205,7 @@ def get_release_information() -> List[ReleaseInformation]: """ if CACHED_RELEASE_INFORMATION_FILE.exists(): last_modified = CACHED_RELEASE_INFORMATION_FILE.stat().st_mtime - if (datetime.now().timestamp() - last_modified) < 10 * 60 or is_docker_or_ci(): + if (datetime.now().timestamp() - last_modified) < 4 * 60 * 60 or is_docker_or_ci(): return parse_release_information_from_file(CACHED_RELEASE_INFORMATION_FILE) CACHED_RELEASE_INFORMATION_FILE.unlink() if not CACHED_RELEASE_INFORMATION_FILE.exists():