From 3fb263701829541db79529369da3a129264dd5bc Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 18 Oct 2023 18:58:38 -0400 Subject: [PATCH 01/38] Optimize _FlatDirectorySource to only scan each path once --- src/pip/_internal/index/collector.py | 2 + src/pip/_internal/index/sources.py | 84 ++++++++++++++++++++++++---- 2 files changed, 75 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index b3e293ea3a5..08c8bddcb69 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -473,6 +473,7 @@ def collect_sources( page_validator=self.session.is_secure_origin, expand_dir=False, cache_link_parsing=False, + project_name=project_name, ) for loc in self.search_scope.get_index_urls_locations(project_name) ).values() @@ -483,6 +484,7 @@ def collect_sources( page_validator=self.session.is_secure_origin, expand_dir=True, cache_link_parsing=True, + project_name=project_name, ) for loc in self.find_links ).values() diff --git a/src/pip/_internal/index/sources.py b/src/pip/_internal/index/sources.py index cd9cb8d40f1..f4626d71ab4 100644 --- a/src/pip/_internal/index/sources.py +++ b/src/pip/_internal/index/sources.py @@ -1,8 +1,17 @@ import logging import mimetypes import os -import pathlib -from typing import Callable, Iterable, Optional, Tuple +from collections import defaultdict +from typing import Callable, Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import ( + InvalidSdistFilename, + InvalidVersion, + InvalidWheelFilename, + canonicalize_name, + parse_sdist_filename, + parse_wheel_filename, +) from pip._internal.models.candidate import InstallationCandidate from pip._internal.models.link import Link @@ -36,6 +45,53 @@ def _is_html_file(file_url: str) -> bool: return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" +class _FlatDirectoryToUrls: + """Scans directory and caches results""" + + def __init__(self, path: str) -> None: + self._path = path + self._page_candidates: List[str] = [] + self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list) + self._scanned_directory = False + + def _scan_directory(self) -> None: + """Scans directory once and populates both page_candidates + and project_name_to_urls at the same time + """ + for entry in os.scandir(self._path): + url = path_to_url(entry.path) + if _is_html_file(url): + self._page_candidates.append(url) + continue + + # File must have a valid wheel or sdist name, + # otherwise not worth considering as a package + try: + project_filename = parse_wheel_filename(entry.name)[0] + except (InvalidWheelFilename, InvalidVersion): + try: + project_filename = parse_sdist_filename(entry.name)[0] + except (InvalidSdistFilename, InvalidVersion): + continue + + self._project_name_to_urls[project_filename].append(url) + self._scanned_directory = True + + @property + def page_candidates(self) -> List[str]: + if not self._scanned_directory: + self._scan_directory() + + return self._page_candidates + + @property + def project_name_to_urls(self) -> Dict[str, List[str]]: + if not self._scanned_directory: + self._scan_directory() + + return self._project_name_to_urls + + class _FlatDirectorySource(LinkSource): """Link source specified by ``--find-links=``. @@ -45,30 +101,34 @@ class _FlatDirectorySource(LinkSource): * ``file_candidates``: Archives in the directory. 
""" + _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {} + def __init__( self, candidates_from_page: CandidatesFromPage, path: str, + project_name: str, ) -> None: self._candidates_from_page = candidates_from_page - self._path = pathlib.Path(os.path.realpath(path)) + self._project_name = canonicalize_name(project_name) + + # Get existing instance of _FlatDirectoryToUrls if it exists + if path in self._paths_to_urls: + self._path_to_urls = self._paths_to_urls[path] + else: + self._path_to_urls = _FlatDirectoryToUrls(path=path) + self._paths_to_urls[path] = self._path_to_urls @property def link(self) -> Optional[Link]: return None def page_candidates(self) -> FoundCandidates: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if not _is_html_file(url): - continue + for url in self._path_to_urls.page_candidates: yield from self._candidates_from_page(Link(url)) def file_links(self) -> FoundLinks: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if _is_html_file(url): - continue + for url in self._path_to_urls.project_name_to_urls[self._project_name]: yield Link(url) @@ -170,6 +230,7 @@ def build_source( page_validator: PageValidator, expand_dir: bool, cache_link_parsing: bool, + project_name: str, ) -> Tuple[Optional[str], Optional[LinkSource]]: path: Optional[str] = None url: Optional[str] = None @@ -203,6 +264,7 @@ def build_source( source = _FlatDirectorySource( candidates_from_page=candidates_from_page, path=path, + project_name=project_name, ) else: source = _IndexDirectorySource( From 125ce542cc77d20a65f39a05f828fa38aff1d094 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 18 Oct 2023 18:58:53 -0400 Subject: [PATCH 02/38] Fix and add tests for optimized _FlatDirectorySource --- tests/functional/test_install_config.py | 6 --- tests/unit/test_collector.py | 55 ++++++++++++++++++++++--- tests/unit/test_finder.py | 5 ++- 3 files changed, 54 insertions(+), 12 deletions(-) diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index ecaf2f705a2..5c7b23ed7cb 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -125,9 +125,6 @@ def test_command_line_append_flags( "Fetching project page and analyzing links: https://test.pypi.org" in result.stdout ) - assert ( - f"Skipping link: not a file: {data.find_links}" in result.stdout - ), f"stdout: {result.stdout}" @pytest.mark.network @@ -151,9 +148,6 @@ def test_command_line_appends_correctly( "Fetching project page and analyzing links: https://test.pypi.org" in result.stdout ), result.stdout - assert ( - f"Skipping link: not a file: {data.find_links}" in result.stdout - ), f"stdout: {result.stdout}" def test_config_file_override_stack( diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 3c8b81de44d..1ad431e39a4 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -862,7 +862,7 @@ def test_collect_sources__file_expand_dir(data: TestData) -> None: ) sources = collector.collect_sources( # Shouldn't be used. 
- project_name=None, # type: ignore[arg-type] + project_name="", candidates_from_page=None, # type: ignore[arg-type] ) assert ( @@ -960,7 +960,7 @@ def test_fetch_response(self, mock_get_simple_response: mock.Mock) -> None: session=link_collector.session, ) - def test_collect_sources( + def test_collect_page_sources( self, caplog: pytest.LogCaptureFixture, data: TestData ) -> None: caplog.set_level(logging.DEBUG) @@ -993,9 +993,8 @@ def test_collect_sources( files = list(files_it) pages = list(pages_it) - # Spot-check the returned sources. - assert len(files) > 20 - check_links_include(files, names=["simple-1.0.tar.gz"]) + # Only "twine" should return from collecting sources + assert len(files) == 1 assert [page.link for page in pages] == [Link("https://pypi.org/simple/twine/")] # Check that index URLs are marked as *un*cacheable. @@ -1010,6 +1009,52 @@ def test_collect_sources( ("pip._internal.index.collector", logging.DEBUG, expected_message), ] + def test_collect_file_sources( + self, caplog: pytest.LogCaptureFixture, data: TestData + ) -> None: + caplog.set_level(logging.DEBUG) + + link_collector = make_test_link_collector( + find_links=[data.find_links], + # Include two copies of the URL to check that the second one + # is skipped. + index_urls=[PyPI.simple_url, PyPI.simple_url], + ) + collected_sources = link_collector.collect_sources( + "singlemodule", + candidates_from_page=lambda link: [ + InstallationCandidate("singlemodule", "0.0.1", link) + ], + ) + + files_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + pages_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + files = list(files_it) + _ = list(pages_it) + + # singlemodule should return files + assert len(files) > 0 + check_links_include(files, names=["singlemodule-0.0.1.tar.gz"]) + + expected_message = dedent( + """\ + 1 location(s) to search for versions of singlemodule: + * https://pypi.org/simple/singlemodule/""" + ) + assert caplog.record_tuples == [ + ("pip._internal.index.collector", logging.DEBUG, expected_message), + ] + @pytest.mark.parametrize( "find_links, no_index, suppress_no_index, expected", diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 3404d1498e3..35c7e89b765 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -128,7 +128,10 @@ def test_skip_invalid_wheel_link( with pytest.raises(DistributionNotFound): finder.find_requirement(req, True) - assert "Skipping link: invalid wheel filename:" in caplog.text + assert ( + "Could not find a version that satisfies the requirement invalid" + " (from versions:" in caplog.text + ) def test_not_find_wheel_not_supported(self, data: TestData) -> None: """ From dc0b3138e89aa6ec8a8a8361f62482d2e41c1856 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 18 Oct 2023 18:59:02 -0400 Subject: [PATCH 03/38] Add news entry --- news/12327.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12327.bugfix.rst diff --git a/news/12327.bugfix.rst b/news/12327.bugfix.rst new file mode 100644 index 00000000000..b07ef130a2e --- /dev/null +++ b/news/12327.bugfix.rst @@ -0,0 +1 @@ +Optimized usage of ``--find-links=``, by only scanning the relevant directory once, only considering file names that are valid wheel or sdist names, and only considering files in the directory that are related to the install. 
From 6ff621bcfbfc38f4556ea8b843dc6d393186b6ee Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Sat, 23 Dec 2023 01:59:36 +0100 Subject: [PATCH 04/38] Stop injecting `wheel` as a build dep fallback PEP 517 doesn't mandate depending on `wheel` when a `__legacy__` setuptools fallback is used. Historically, it used to be assumed as necessary, but later it turned out to be wrong. The reason is that `setuptools`' `get_requires_for_build_wheel()` hook already injects this dependency when building wheels is requested [[1]]. It also used to have this hint in the docs, but it was corrected earlier [[2]]. It could be argued that this is an optimization as `pip` will request building wheels anyway. However, it also shows up in the docs, giving the readers a wrong impression of what to put into `[build-system].requires` when they start a new project using setuptools. This patch removes `wheel` from said `requires` list fallback in the docs and the actual runtime. [1]: https://github.com/pypa/setuptools/blob/v40.8.0/setuptools/build_meta.py#L130 [2]: https://github.com/pypa/setuptools/pull/3056 --- docs/html/reference/build-system/pyproject-toml.md | 2 +- news/12449.bugfix.rst | 2 ++ src/pip/_internal/pyproject.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 news/12449.bugfix.rst diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md index a42a3b8c484..c1e7a68c597 100644 --- a/docs/html/reference/build-system/pyproject-toml.md +++ b/docs/html/reference/build-system/pyproject-toml.md @@ -135,7 +135,7 @@ section, it will be assumed to have the following backend settings: ```toml [build-system] -requires = ["setuptools>=40.8.0", "wheel"] +requires = ["setuptools>=40.8.0"] build-backend = "setuptools.build_meta:__legacy__" ``` diff --git a/news/12449.bugfix.rst b/news/12449.bugfix.rst new file mode 100644 index 00000000000..19f1d9809ac --- /dev/null +++ b/news/12449.bugfix.rst @@ -0,0 +1,2 @@ +Removed ``wheel`` from the ``[build-system].requires`` list fallback +that is used when ``pyproject.toml`` is absent. diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index eb8e12b2dec..8de36b873ed 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -123,7 +123,7 @@ def load_pyproject_toml( # a version of setuptools that supports that backend. build_system = { - "requires": ["setuptools>=40.8.0", "wheel"], + "requires": ["setuptools>=40.8.0"], "build-backend": "setuptools.build_meta:__legacy__", } From 3769ad7e0c024952060d76528d64feaf25bc3f15 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Sat, 23 Dec 2023 02:16:48 +0100 Subject: [PATCH 05/38] Stop telling users to use `wheel` as build dep --- docs/html/reference/build-system/pyproject-toml.md | 2 +- news/12449.doc.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 news/12449.doc.rst diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md index c1e7a68c597..9719023cced 100644 --- a/docs/html/reference/build-system/pyproject-toml.md +++ b/docs/html/reference/build-system/pyproject-toml.md @@ -141,7 +141,7 @@ build-backend = "setuptools.build_meta:__legacy__" If a project has a `build-system` section but no `build-backend`, then: -- It is expected to include `setuptools` and `wheel` as build requirements. An +- It is expected to include `setuptools` as a build requirement. 
An error is reported if the available version of `setuptools` is not recent enough. diff --git a/news/12449.doc.rst b/news/12449.doc.rst new file mode 100644 index 00000000000..431475f51eb --- /dev/null +++ b/news/12449.doc.rst @@ -0,0 +1,2 @@ +Updated the ``pyproject.toml`` document to stop suggesting +to depend on ``wheel`` as a build dependency directly. From 4b03b1a90a5eb6e418dfa18ae6ba220b28e7b512 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Edgar=20Ram=C3=ADrez=20Mondrag=C3=B3n?= Date: Fri, 12 Jan 2024 21:37:48 -0600 Subject: [PATCH 06/38] Update supported interpreters in development docs --- docs/html/development/ci.rst | 30 +++++++++++++++++++++++++++++- news/12475.doc.rst | 1 + 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 news/12475.doc.rst diff --git a/docs/html/development/ci.rst b/docs/html/development/ci.rst index ac65f816594..01bec42472e 100644 --- a/docs/html/development/ci.rst +++ b/docs/html/development/ci.rst @@ -22,6 +22,8 @@ pip support a variety of Python interpreters: - CPython 3.8 - CPython 3.9 - CPython 3.10 +- CPython 3.11 +- CPython 3.12 - Latest PyPy3 on different operating systems: @@ -96,6 +98,10 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.10| | | | | +-------+---------------+-----------------+ +| | | CP3.11| | | +| | +-------+---------------+-----------------+ +| | | CP3.12| | | +| | +-------+---------------+-----------------+ | | | PyPy3 | | | | Windows +----------+-------+---------------+-----------------+ | | x64 | CP3.7 | GitHub | GitHub | @@ -104,7 +110,11 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.9 | | | | | +-------+---------------+-----------------+ -| | | CP3.10| GitHub | GitHub | +| | | CP3.10| | | +| | +-------+---------------+-----------------+ +| | | CP3.11| GitHub | GitHub | +| | +-------+---------------+-----------------+ +| | | CP3.12| | | | | +-------+---------------+-----------------+ | | | PyPy3 | | | +-----------+----------+-------+---------------+-----------------+ @@ -114,6 +124,12 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.9 | | | | | +-------+---------------+-----------------+ +| | | CP3.10| | | +| | +-------+---------------+-----------------+ +| | | CP3.11| | | +| | +-------+---------------+-----------------+ +| | | CP3.12| | | +| | +-------+---------------+-----------------+ | | | PyPy3 | | | | Linux +----------+-------+---------------+-----------------+ | | x64 | CP3.7 | GitHub | GitHub | @@ -124,6 +140,10 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.10| GitHub | GitHub | | | +-------+---------------+-----------------+ +| | | CP3.11| GitHub | GitHub | +| | +-------+---------------+-----------------+ +| | | CP3.12| GitHub | GitHub | +| | +-------+---------------+-----------------+ | | | PyPy3 | | | +-----------+----------+-------+---------------+-----------------+ | | arm64 | CP3.7 | | | @@ -134,6 +154,10 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.10| | | | | +-------+---------------+-----------------+ +| | | CP3.11| | | +| | +-------+---------------+-----------------+ +| | | CP3.12| | | +| | +-------+---------------+-----------------+ | | | PyPy3 | | | | macOS +----------+-------+---------------+-----------------+ | | x64 | CP3.7 | GitHub | GitHub | @@ -144,5 +168,9 @@ Actual testing | | +-------+---------------+-----------------+ | | | CP3.10| GitHub | GitHub | | | +-------+---------------+-----------------+ +| | | CP3.11| GitHub | GitHub | +| | 
+-------+---------------+-----------------+ +| | | CP3.12| GitHub | GitHub | +| | +-------+---------------+-----------------+ | | | PyPy3 | | | +-----------+----------+-------+---------------+-----------------+ diff --git a/news/12475.doc.rst b/news/12475.doc.rst new file mode 100644 index 00000000000..2713e1878c9 --- /dev/null +++ b/news/12475.doc.rst @@ -0,0 +1 @@ +Update supported interpreters in development docs From 09f7c662731927d84b242d002ae1119cc2bc995c Mon Sep 17 00:00:00 2001 From: Richard Si Date: Mon, 1 Jan 2024 17:19:45 -0500 Subject: [PATCH 07/38] Fix tests broken by Setuptools 69.0.3 which now preserves underscores in egg_info --- tests/functional/test_check.py | 8 ++++---- tests/functional/test_install_vcs_git.py | 2 +- tests/functional/test_new_resolver.py | 2 +- tests/functional/test_show.py | 8 ++++---- tests/lib/__init__.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/functional/test_check.py b/tests/functional/test_check.py index e2b1c60ef3a..3d8dff5c475 100644 --- a/tests/functional/test_check.py +++ b/tests/functional/test_check.py @@ -119,7 +119,7 @@ def test_check_complicated_name_missing(script: PipTestEnvironment) -> None: # Without dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert "Successfully installed package_A-1.0" in result.stdout, str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) @@ -142,7 +142,7 @@ def test_check_complicated_name_broken(script: PipTestEnvironment) -> None: # With broken dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert "Successfully installed package_A-1.0" in result.stdout, str(result) result = script.pip( "install", @@ -175,7 +175,7 @@ def test_check_complicated_name_clean(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert "Successfully installed package_A-1.0" in result.stdout, str(result) result = script.pip( "install", @@ -203,7 +203,7 @@ def test_check_considers_conditional_reqs(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert "Successfully installed package_A-1.0" in result.stdout, str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 2abc7aa0fd2..e59b269a61f 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -449,7 +449,7 @@ def test_git_with_ambiguous_revs(script: PipTestEnvironment) -> None: assert "Could not find a tag or branch" not in result.stdout # it is 'version-pkg' instead of 'version_pkg' because # egg-link name is version-pkg.egg-link because it is a single .py module - result.assert_installed("version-pkg", with_files=[".git"]) + result.assert_installed("version_pkg", with_files=[".git"]) def test_editable__no_revision(script: PipTestEnvironment) -> None: diff --git a/tests/functional/test_new_resolver.py 
b/tests/functional/test_new_resolver.py index 62e56adb2b5..2ab99834b2f 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -1847,7 +1847,7 @@ def test_new_resolver_succeeds_on_matching_constraint_and_requirement( script.assert_installed(test_pkg="0.1.0") if editable: - assert_editable(script, "test-pkg") + assert_editable(script, "test_pkg") def test_new_resolver_applies_url_constraint_to_dep(script: PipTestEnvironment) -> None: diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index b8ec0510a1e..e1f026ee72e 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -277,7 +277,7 @@ def test_show_required_by_packages_basic( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: requires-simple" in lines + assert "Required-by: requires_simple" in lines def test_show_required_by_packages_capitalized( @@ -294,7 +294,7 @@ def test_show_required_by_packages_capitalized( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: Requires-Capitalized" in lines + assert "Required-by: Requires_Capitalized" in lines def test_show_required_by_packages_requiring_capitalized( @@ -314,8 +314,8 @@ def test_show_required_by_packages_requiring_capitalized( lines = result.stdout.splitlines() print(lines) - assert "Name: Requires-Capitalized" in lines - assert "Required-by: requires-requires-capitalized" in lines + assert "Name: Requires_Capitalized" in lines + assert "Required-by: requires_requires_capitalized" in lines def test_show_skip_work_dir_pkg(script: PipTestEnvironment) -> None: diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index f14837e24ea..b21f1400dbc 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -320,7 +320,7 @@ def assert_installed( e = self.test_env if editable: - pkg_dir = e.venv / "src" / pkg_name.lower() + pkg_dir = e.venv / "src" / canonicalize_name(pkg_name) # If package was installed in a sub directory if sub_dir: pkg_dir = pkg_dir / sub_dir From a109f659ce5cc4b83c45e1a9b46c0a8ea855f88d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 14 Jan 2024 12:54:21 +0100 Subject: [PATCH 08/38] Change .egg-link search algorithm to support setuptools >= 69 --- src/pip/_internal/utils/egg_link.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/utils/egg_link.py b/src/pip/_internal/utils/egg_link.py index eb57ed1519f..4a384a63682 100644 --- a/src/pip/_internal/utils/egg_link.py +++ b/src/pip/_internal/utils/egg_link.py @@ -15,24 +15,31 @@ ] -def _egg_link_name(raw_name: str) -> str: +def _egg_link_names(raw_name: str) -> List[str]: """ Convert a Name metadata value to a .egg-link name, by applying the same substitution as pkg_resources's safe_name function. Note: we cannot use canonicalize_name because it has a different logic. + + We also look for the raw name (without normalization) as setuptools 69 changed + the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167). """ - return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link" + return [ + re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link", + f"{raw_name}.egg-link", + ] def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]: """ Look for a .egg-link file for project name, by walking sys.path. 
""" - egg_link_name = _egg_link_name(raw_name) + egg_link_names = _egg_link_names(raw_name) for path_item in sys.path: - egg_link = os.path.join(path_item, egg_link_name) - if os.path.isfile(egg_link): - return egg_link + for egg_link_name in egg_link_names: + egg_link = os.path.join(path_item, egg_link_name) + if os.path.isfile(egg_link): + return egg_link return None @@ -64,9 +71,10 @@ def egg_link_path_from_location(raw_name: str) -> Optional[str]: sites.append(user_site) sites.append(site_packages) - egg_link_name = _egg_link_name(raw_name) + egg_link_names = _egg_link_names(raw_name) for site in sites: - egglink = os.path.join(site, egg_link_name) - if os.path.isfile(egglink): - return egglink + for egg_link_name in egg_link_names: + egglink = os.path.join(site, egg_link_name) + if os.path.isfile(egglink): + return egglink return None From 98dc4b2f27b177e92c594a73fc318c034e08911e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 14 Jan 2024 13:49:39 +0100 Subject: [PATCH 09/38] More test suite fixes for setuptools 69 compatibility --- news/12477.feature.rst | 2 ++ tests/functional/test_check.py | 20 ++++++++++++++---- tests/functional/test_freeze.py | 6 +++--- tests/functional/test_install.py | 2 +- tests/functional/test_install_reqs.py | 2 +- tests/functional/test_new_resolver.py | 11 +++++++--- tests/functional/test_show.py | 19 ++++++++++++++---- tests/lib/__init__.py | 29 ++++++++++++++++++++------- 8 files changed, 68 insertions(+), 23 deletions(-) create mode 100644 news/12477.feature.rst diff --git a/news/12477.feature.rst b/news/12477.feature.rst new file mode 100644 index 00000000000..e318d2924bb --- /dev/null +++ b/news/12477.feature.rst @@ -0,0 +1,2 @@ +Make pip freeze and pip uninstall of legacy editable installs of packages whose name +contains ``_`` compatible with ``setuptools>=69``. 
diff --git a/tests/functional/test_check.py b/tests/functional/test_check.py index 3d8dff5c475..79b6df39c19 100644 --- a/tests/functional/test_check.py +++ b/tests/functional/test_check.py @@ -119,7 +119,10 @@ def test_check_complicated_name_missing(script: PipTestEnvironment) -> None: # Without dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package_A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) @@ -142,7 +145,10 @@ def test_check_complicated_name_broken(script: PipTestEnvironment) -> None: # With broken dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package_A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip( "install", @@ -175,7 +181,10 @@ def test_check_complicated_name_clean(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package_A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip( "install", @@ -203,7 +212,10 @@ def test_check_considers_conditional_reqs(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package_A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index b2fd1d62982..b7af974ea61 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -220,7 +220,7 @@ def test_freeze_editable_not_vcs(script: PipTestEnvironment) -> None: # the freeze code does. expected = textwrap.dedent( f"""\ - ...# Editable install with no version control (version-pkg==0.1) + ...# Editable install with no version control (version...pkg==0.1) -e {os.path.normcase(pkg_path)} ...""" ) @@ -245,7 +245,7 @@ def test_freeze_editable_git_with_no_remote( # the freeze code does. expected = textwrap.dedent( f"""\ - ...# Editable Git install with no remote (version-pkg==0.1) + ...# Editable Git install with no remote (version...pkg==0.1) -e {os.path.normcase(pkg_path)} ...""" ) @@ -483,7 +483,7 @@ def test_freeze_git_remote(script: PipTestEnvironment) -> None: expected = os.path.normcase( textwrap.dedent( f""" - ...# Editable Git...(version-pkg...)... + ...# Editable Git...(version...pkg...)... # '{other_remote}' -e {repo_dir}... 
""" diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index b18fabc84c9..3411f66d801 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -358,7 +358,7 @@ def test_basic_install_editable_from_svn(script: PipTestEnvironment) -> None: checkout_path = _create_test_package(script.scratch_path) repo_url = _create_svn_repo(script.scratch_path, checkout_path) result = script.pip("install", "-e", "svn+" + repo_url + "#egg=version-pkg") - result.assert_installed("version-pkg", with_files=[".svn"]) + result.assert_installed("version_pkg", with_files=[".svn"]) def _test_install_editable_from_git(script: PipTestEnvironment) -> None: diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index c2d951f2695..993f25a2abf 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -300,7 +300,7 @@ def test_install_local_editable_with_subdirectory(script: PipTestEnvironment) -> ), ) - result.assert_installed("version-subpkg", sub_dir="version_subdir") + result.assert_installed("version_subpkg", sub_dir="version_subdir") @pytest.mark.network diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index 2ab99834b2f..afe07cbf3ec 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -5,6 +5,7 @@ from typing import TYPE_CHECKING, Callable, Dict, List, Tuple import pytest +from packaging.utils import canonicalize_name from tests.conftest import ScriptFactory from tests.lib import ( @@ -27,9 +28,13 @@ def assert_editable(script: PipTestEnvironment, *args: str) -> None: # This simply checks whether all of the listed packages have a # corresponding .egg-link file installed. # TODO: Implement a more rigorous way to test for editable installations. 
- egg_links = {f"{arg}.egg-link" for arg in args} - assert egg_links <= set( - os.listdir(script.site_packages_path) + egg_links = {f"{canonicalize_name(arg)}.egg-link" for arg in args} + actual_egg_links = { + f"{canonicalize_name(p.stem)}.egg-link" + for p in script.site_packages_path.glob("*.egg-link") + } + assert ( + egg_links <= actual_egg_links ), f"{args!r} not all found in {script.site_packages_path!r}" diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index e1f026ee72e..05fccbbe327 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -277,7 +277,10 @@ def test_show_required_by_packages_basic( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: requires_simple" in lines + assert ( + "Required-by: requires_simple" in lines + or "Required-by: requires-simple" in lines + ) def test_show_required_by_packages_capitalized( @@ -294,7 +297,10 @@ def test_show_required_by_packages_capitalized( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: Requires_Capitalized" in lines + assert ( + "Required-by: Requires_Capitalized" in lines + or "Required-by: Requires-Capitalized" in lines + ) def test_show_required_by_packages_requiring_capitalized( @@ -314,8 +320,13 @@ def test_show_required_by_packages_requiring_capitalized( lines = result.stdout.splitlines() print(lines) - assert "Name: Requires_Capitalized" in lines - assert "Required-by: requires_requires_capitalized" in lines + assert ( + "Name: Requires_Capitalized" in lines or "Name: Requires-Capitalized" in lines + ) + assert ( + "Required-by: requires_requires_capitalized" in lines + or "Required-by: requires-requires-capitalized" in lines + ) def test_show_skip_work_dir_pkg(script: PipTestEnvironment) -> None: diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index b21f1400dbc..ea755c83812 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -41,6 +41,7 @@ from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.models.target_python import TargetPython from pip._internal.network.session import PipSession +from pip._internal.utils.egg_link import _egg_link_names from tests.lib.venv import VirtualEnvironment from tests.lib.wheel import make_wheel @@ -305,6 +306,12 @@ def files_updated(self) -> FoundFiles: def files_deleted(self) -> FoundFiles: return FoundFiles(self._impl.files_deleted) + def _get_egg_link_path_created(self, egg_link_paths: List[str]) -> Optional[str]: + for egg_link_path in egg_link_paths: + if egg_link_path in self.files_created: + return egg_link_path + return None + def assert_installed( self, pkg_name: str, @@ -329,22 +336,30 @@ def assert_installed( pkg_dir = e.site_packages / pkg_name if use_user_site: - egg_link_path = e.user_site / f"{pkg_name}.egg-link" + egg_link_paths = [ + e.user_site / egg_link_name + for egg_link_name in _egg_link_names(pkg_name) + ] else: - egg_link_path = e.site_packages / f"{pkg_name}.egg-link" + egg_link_paths = [ + e.site_packages / egg_link_name + for egg_link_name in _egg_link_names(pkg_name) + ] + egg_link_path_created = self._get_egg_link_path_created(egg_link_paths) if without_egg_link: - if egg_link_path in self.files_created: + if egg_link_path_created: raise TestFailure( - f"unexpected egg link file created: {egg_link_path!r}\n{self}" + f"unexpected egg link file created: {egg_link_path_created!r}\n" + f"{self}" ) else: - if egg_link_path not in self.files_created: + if not egg_link_path_created: 
raise TestFailure( - f"expected egg link file missing: {egg_link_path!r}\n{self}" + f"expected egg link file missing: {egg_link_paths!r}\n{self}" ) - egg_link_file = self.files_created[egg_link_path] + egg_link_file = self.files_created[egg_link_path_created] egg_link_contents = egg_link_file.bytes.replace(os.linesep, "\n") # FIXME: I don't understand why there's a trailing . here From ca6971d249080d15cc771c24deb7cb155142760e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Thu, 28 Dec 2023 18:19:49 +0100 Subject: [PATCH 10/38] Upgrade linters --- .pre-commit-config.yaml | 8 ++++---- src/pip/_internal/req/req_uninstall.py | 3 +-- tests/unit/test_vcs.py | 3 +-- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 18d911256d3..66fd1e2cda1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ exclude: 'src/pip/_vendor/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-builtin-literals - id: check-added-large-files @@ -17,18 +17,18 @@ repos: exclude: .patch - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.12.1 hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.4 + rev: v0.1.9 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.1 + rev: v1.8.0 hooks: - id: mypy exclude: tests/data diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 3ca10098cf9..707fde1b2b9 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -172,8 +172,7 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str folders.add(os.path.dirname(path)) files.add(path) - # probably this one https://github.com/python/mypy/issues/390 - _normcased_files = set(map(os.path.normcase, files)) # type: ignore + _normcased_files = set(map(os.path.normcase, files)) folders = compact(folders) diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 5291f129cf7..a52a6217e77 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -458,8 +458,7 @@ def test_version_control__run_command__fails( with mock.patch("pip._internal.vcs.versioncontrol.call_subprocess") as call: call.side_effect = exc_cls with pytest.raises(BadCommand, match=msg_re.format(name=vcs_cls.name)): - # https://github.com/python/mypy/issues/3283 - vcs_cls.run_command([]) # type: ignore[arg-type] + vcs_cls.run_command([]) @pytest.mark.parametrize( From f63d4c17c80fa04cc4fc5a39cd88eb30db26a860 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 15 Jan 2024 11:51:16 +0100 Subject: [PATCH 11/38] pre-commit: use faster mypyc-compiled version of black Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 66fd1e2cda1..5f06add1eb3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ repos: - id: trailing-whitespace exclude: .patch -- repo: https://github.com/psf/black +- repo: https://github.com/psf/black-pre-commit-mirror rev: 23.12.1 hooks: - id: black From a4a1e73495f4f103b90ca2dc2a086de60bbfde42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 11:47:35 +0000 Subject: 
[PATCH 12/38] Bump actions/setup-python from 4 to 5 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/update-rtd-redirects.yml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f7cd942bf4..70a09b6a88f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.x" - run: pip install nox @@ -58,7 +58,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.x" - name: Set up git credentials @@ -82,7 +82,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.x" @@ -113,7 +113,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} allow-prereleases: true @@ -165,7 +165,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} @@ -216,7 +216,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml index c333a09a30d..0beb2b84b97 100644 --- a/.github/workflows/update-rtd-redirects.yml +++ b/.github/workflows/update-rtd-redirects.yml @@ -19,7 +19,7 @@ jobs: environment: RTD Deploys steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.11" - run: pip install httpx pyyaml rich From 6dbe3bf122ccf3402a102a062bcae62c8fbc9785 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 13:22:54 +0100 Subject: [PATCH 13/38] Tweak news fragment --- news/12477.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12477.feature.rst b/news/12477.feature.rst index e318d2924bb..56b6e99b38f 100644 --- a/news/12477.feature.rst +++ b/news/12477.feature.rst @@ -1,2 +1,2 @@ Make pip freeze and pip uninstall of legacy editable installs of packages whose name -contains ``_`` compatible with ``setuptools>=69``. +contains ``_`` compatible with ``setuptools>=69.0.3``. 
From 5014eacc53de8a51f36ec996e8eb2fbb2f44e738 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 13:27:19 +0100 Subject: [PATCH 14/38] Postpone removal of legacy versions and specifiers support --- src/pip/_internal/operations/check.py | 4 ++-- src/pip/_internal/req/req_set.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index 1b7fd7ab7fd..90c6a58a55e 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -168,7 +168,7 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming version number" ), issue=12063, - gone_in="24.0", + gone_in="24.1", ) for dep in package_details.dependencies: if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -183,5 +183,5 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming dependency specifiers" ), issue=12063, - gone_in="24.0", + gone_in="24.1", ) diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index 1bf73d595f6..bf36114e802 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -99,7 +99,7 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="24.0", + gone_in="24.1", ) for dep in req.get_dist().iter_dependencies(): if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -115,5 +115,5 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="24.0", + gone_in="24.1", ) From 429cf3c1a6428dd194c24d8d6cf5e2fcf13eed8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 13:32:56 +0100 Subject: [PATCH 15/38] Postpone removal of --global-option and --build-option --- src/pip/_internal/req/req_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index b61a219df68..b622402270c 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -905,7 +905,7 @@ def check_legacy_setup_py_options( reason="--build-option and --global-option are deprecated.", issue=11859, replacement="to use --config-settings", - gone_in="24.0", + gone_in="24.2", ) logger.warning( "Implying --no-binary=:all: due to the presence of " From fd92c84d316668b8c1aca3e76f2b5adafce24f12 Mon Sep 17 00:00:00 2001 From: Flavio Amurrio <25621374+FlavioAmurrioCS@users.noreply.github.com> Date: Sun, 15 Oct 2023 23:46:33 -0400 Subject: [PATCH 16/38] Upgrade distlib to 0.3.7 --- news/distlib.vendor.rst | 1 + src/pip/_vendor/distlib/__init__.py | 2 +- src/pip/_vendor/distlib/database.py | 9 ++++++--- src/pip/_vendor/distlib/manifest.py | 5 +++-- src/pip/_vendor/distlib/markers.py | 15 ++++++++------- src/pip/_vendor/distlib/metadata.py | 12 ++---------- src/pip/_vendor/distlib/scripts.py | 11 ++++++----- src/pip/_vendor/distlib/util.py | 25 +++++++++++++++++++++---- src/pip/_vendor/distlib/version.py | 21 +++++++++++++++------ src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 64 insertions(+), 39 deletions(-) create mode 100644 news/distlib.vendor.rst diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst new file mode 100644 index 00000000000..b82cd0db822 --- 
/dev/null +++ b/news/distlib.vendor.rst @@ -0,0 +1 @@ +Upgrade distlib to 0.3.7 diff --git a/src/pip/_vendor/distlib/__init__.py b/src/pip/_vendor/distlib/__init__.py index 962173c8d0a..ad8a082a73f 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.3.6' +__version__ = '0.3.7' class DistlibException(Exception): pass diff --git a/src/pip/_vendor/distlib/database.py b/src/pip/_vendor/distlib/database.py index 5db5d7f507c..bc16e889db8 100644 --- a/src/pip/_vendor/distlib/database.py +++ b/src/pip/_vendor/distlib/database.py @@ -903,15 +903,18 @@ def parse_requires_data(data): lines = data.splitlines() for line in lines: line = line.strip() - if line.startswith('['): + # sectioned files have bare newlines (separating sections) + if not line: # pragma: no cover + continue + if line.startswith('['): # pragma: no cover logger.warning('Unexpected line: quitting requirement scan: %r', line) break r = parse_requirement(line) - if not r: + if not r: # pragma: no cover logger.warning('Not recognised as a requirement: %r', line) continue - if r.extras: + if r.extras: # pragma: no cover logger.warning('extra requirements in requires.txt are ' 'not supported') if not r.constraints: diff --git a/src/pip/_vendor/distlib/manifest.py b/src/pip/_vendor/distlib/manifest.py index ca0fe442d9c..18beba3db37 100644 --- a/src/pip/_vendor/distlib/manifest.py +++ b/src/pip/_vendor/distlib/manifest.py @@ -35,8 +35,9 @@ _PYTHON_VERSION = sys.version_info[:2] class Manifest(object): - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. + """ + A list of files built by exploring the filesystem and filtered by applying various + patterns to what we find there. """ def __init__(self, base=None): diff --git a/src/pip/_vendor/distlib/markers.py b/src/pip/_vendor/distlib/markers.py index 9dc68410337..c70400cc889 100644 --- a/src/pip/_vendor/distlib/markers.py +++ b/src/pip/_vendor/distlib/markers.py @@ -24,6 +24,10 @@ __all__ = ['interpret'] _VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') +_VERSION_MARKERS = {'python_version', 'python_full_version'} + +def _is_version_marker(s): + return isinstance(s, string_types) and s in _VERSION_MARKERS def _is_literal(o): if not isinstance(o, string_types) or not o: @@ -31,14 +35,11 @@ def _is_literal(o): return o[0] in '\'"' def _get_versions(s): - result = [] - for m in _VERSION_PATTERN.finditer(s): - result.append(NV(m.groups()[0])) - return set(result) + return {NV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)} class Evaluator(object): """ - This class is used to evaluate marker expessions. + This class is used to evaluate marker expressions. 
""" operations = { @@ -80,11 +81,11 @@ def evaluate(self, expr, context): lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) - if ((elhs == 'python_version' or erhs == 'python_version') and + if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): lhs = NV(lhs) rhs = NV(rhs) - elif elhs == 'python_version' and op in ('in', 'not in'): + elif _is_version_marker(elhs) and op in ('in', 'not in'): lhs = NV(lhs) rhs = _get_versions(rhs) result = self.operations[op](lhs, rhs) diff --git a/src/pip/_vendor/distlib/metadata.py b/src/pip/_vendor/distlib/metadata.py index c329e1977fd..7189aeef229 100644 --- a/src/pip/_vendor/distlib/metadata.py +++ b/src/pip/_vendor/distlib/metadata.py @@ -136,17 +136,9 @@ def _version2fieldlist(version): def _best_version(fields): """Detect the best version depending on the fields used.""" def _has_marker(keys, markers): - for marker in markers: - if marker in keys: - return True - return False - - keys = [] - for key, value in fields.items(): - if value in ([], 'UNKNOWN', None): - continue - keys.append(key) + return any(marker in keys for marker in markers) + keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)] possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2'] # 2.0 removed # first let's try to see if a field is not part of one of the version diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py index d2706242b8a..d8fdb3a95d3 100644 --- a/src/pip/_vendor/distlib/scripts.py +++ b/src/pip/_vendor/distlib/scripts.py @@ -168,15 +168,16 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) else: # pragma: no cover - executable = os.path.join( - sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) - if not os.path.isfile(executable): + if os.name == 'nt': # for Python builds from source on Windows, no Python executables with # a version suffix are created, so we use python.exe executable = os.path.join(sysconfig.get_config_var('BINDIR'), 'python%s' % (sysconfig.get_config_var('EXE'))) + else: + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) if options: executable = self._get_alternate_executable(executable, options) diff --git a/src/pip/_vendor/distlib/util.py b/src/pip/_vendor/distlib/util.py index dd01849d997..04429ad2b52 100644 --- a/src/pip/_vendor/distlib/util.py +++ b/src/pip/_vendor/distlib/util.py @@ -707,7 +707,7 @@ def __eq__(self, other): __hash__ = object.__hash__ -ENTRY_RE = re.compile(r'''(?P(\w|[-.+])+) +ENTRY_RE = re.compile(r'''(?P([^\[]\S*)) \s*=\s*(?P(\w+)([:\.]\w+)*) \s*(\[\s*(?P[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? ''', re.VERBOSE) @@ -1249,6 +1249,19 @@ def check_path(path): for tarinfo in archive.getmembers(): if not isinstance(tarinfo.name, text_type): tarinfo.name = tarinfo.name.decode('utf-8') + + # Limit extraction of dangerous items, if this Python + # allows it easily. If not, just trust the input. 
+ # See: https://docs.python.org/3/library/tarfile.html#extraction-filters + def extraction_filter(member, path): + """Run tarfile.tar_filter, but raise the expected ValueError""" + # This is only called if the current Python has tarfile filters + try: + return tarfile.tar_filter(member, path) + except tarfile.FilterError as exc: + raise ValueError(str(exc)) + archive.extraction_filter = extraction_filter + archive.extractall(dest_dir) finally: @@ -1435,7 +1448,7 @@ def connect(self): context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) if hasattr(ssl, 'OP_NO_SSLv2'): context.options |= ssl.OP_NO_SSLv2 - if self.cert_file: + if getattr(self, 'cert_file', None): context.load_cert_chain(self.cert_file, self.key_file) kwargs = {} if self.ca_certs: @@ -1908,9 +1921,13 @@ def get_host_platform(): if m: release = m.group() elif osname[:6] == 'darwin': - import _osx_support, distutils.sysconfig + import _osx_support + try: + from distutils import sysconfig + except ImportError: + import sysconfig osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), + sysconfig.get_config_vars(), osname, release, machine) return '%s-%s-%s' % (osname, release, machine) diff --git a/src/pip/_vendor/distlib/version.py b/src/pip/_vendor/distlib/version.py index c7c8bb6ff4f..5de88ef0581 100644 --- a/src/pip/_vendor/distlib/version.py +++ b/src/pip/_vendor/distlib/version.py @@ -176,9 +176,9 @@ def __str__(self): return self._string -PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' - r'(\.(post)(\d+))?(\.(dev)(\d+))?' - r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?' + r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?' + r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I) def _pep_440_key(s): @@ -202,15 +202,24 @@ def _pep_440_key(s): if pre == (None, None): pre = () else: - pre = pre[0], int(pre[1]) + if pre[1] is None: + pre = pre[0], 0 + else: + pre = pre[0], int(pre[1]) if post == (None, None): post = () else: - post = post[0], int(post[1]) + if post[1] is None: + post = post[0], 0 + else: + post = post[0], int(post[1]) if dev == (None, None): dev = () else: - dev = dev[0], int(dev[1]) + if dev[1] is None: + dev = dev[0], 0 + else: + dev = dev[0], int(dev[1]) if local is None: local = () else: diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 8dbe1341377..fd2bbca6474 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,6 +1,6 @@ CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 -distlib==0.3.6 +distlib==0.3.7 distro==1.8.0 msgpack==1.0.5 packaging==21.3 From 2a7df17a2b10578d92fdb7f69250afb941453c57 Mon Sep 17 00:00:00 2001 From: Flavio Amurrio <25621374+FlavioAmurrioCS@users.noreply.github.com> Date: Thu, 19 Oct 2023 23:12:42 -0400 Subject: [PATCH 17/38] test update to validate distlib behavior --- tests/functional/test_uninstall.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 69e340a5675..80ffe056ed1 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -238,12 +238,9 @@ def test_uninstall_overlapping_package( "console_scripts", [ "test_ = distutils_install:test", - pytest.param( - "test_:test_ = distutils_install:test_test", - marks=pytest.mark.xfail( - reason="colon not supported in wheel entry point name?" 
- ), - ), + "test_:test_ = distutils_install:test_test", + ",test_ = distutils_install:test_test", + ", = distutils_install:test_test", ], ) def test_uninstall_entry_point_colon_in_name( From 7fdfda1a437744298460120085b85843689864ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 14:25:06 +0100 Subject: [PATCH 18/38] Upgrade distlib to 0.3.8 --- news/distlib.vendor.rst | 2 +- src/pip/_vendor/distlib/__init__.py | 22 ++- src/pip/_vendor/distlib/compat.py | 176 ++++++++++-------- src/pip/_vendor/distlib/database.py | 126 ++++++------- src/pip/_vendor/distlib/index.py | 8 +- src/pip/_vendor/distlib/locators.py | 27 +-- src/pip/_vendor/distlib/manifest.py | 20 +-- src/pip/_vendor/distlib/markers.py | 48 +++-- src/pip/_vendor/distlib/scripts.py | 84 +++++---- src/pip/_vendor/distlib/util.py | 268 ++++++++++++++++++---------- src/pip/_vendor/distlib/version.py | 17 +- src/pip/_vendor/distlib/wheel.py | 113 +++++++----- src/pip/_vendor/vendor.txt | 2 +- 13 files changed, 534 insertions(+), 379 deletions(-) diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst index b82cd0db822..13caa068e0d 100644 --- a/news/distlib.vendor.rst +++ b/news/distlib.vendor.rst @@ -1 +1 @@ -Upgrade distlib to 0.3.7 +Upgrade distlib to 0.3.8 diff --git a/src/pip/_vendor/distlib/__init__.py b/src/pip/_vendor/distlib/__init__.py index ad8a082a73f..e999438fe94 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -1,23 +1,33 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2022 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # import logging -__version__ = '0.3.7' +__version__ = '0.3.8' + class DistlibException(Exception): pass + try: from logging import NullHandler -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): - def handle(self, record): pass - def emit(self, record): pass - def createLock(self): self.lock = None + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) diff --git a/src/pip/_vendor/distlib/compat.py b/src/pip/_vendor/distlib/compat.py index 1fe3d225acb..e93dc27a3eb 100644 --- a/src/pip/_vendor/distlib/compat.py +++ b/src/pip/_vendor/distlib/compat.py @@ -8,6 +8,7 @@ import os import re +import shutil import sys try: @@ -33,9 +34,8 @@ def quote(s): import urllib2 from urllib2 import (Request, urlopen, URLError, HTTPError, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) + HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) if ssl: from urllib2 import HTTPSHandler import httplib @@ -50,15 +50,15 @@ def quote(s): # Leaving this around for now, in case it needs resurrecting in some way # _userprog = None # def splituser(host): - # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" - # global _userprog - # if _userprog is None: - # import re - # _userprog = re.compile('^(.*)@(.*)$') + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') - # match = _userprog.match(host) - # if match: return match.group(1, 2) - # return None, host + # match = _userprog.match(host) 
+ # if match: return match.group(1, 2) + # return None, host else: # pragma: no cover from io import StringIO @@ -67,14 +67,12 @@ def quote(s): from io import TextIOWrapper as file_type import builtins import configparser - import shutil - from urllib.parse import (urlparse, urlunparse, urljoin, quote, - unquote, urlsplit, urlunsplit, splittype) + from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote, + urlsplit, urlunsplit, splittype) from urllib.request import (urlopen, urlretrieve, Request, url2pathname, - pathname2url, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) + pathname2url, HTTPBasicAuthHandler, + HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) if ssl: from urllib.request import HTTPSHandler from urllib.error import HTTPError, URLError, ContentTooShortError @@ -88,14 +86,13 @@ def quote(s): from itertools import filterfalse filter = filter - try: from ssl import match_hostname, CertificateError -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + class CertificateError(ValueError): pass - def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 @@ -145,7 +142,6 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) return pat.match(hostname) - def match_hostname(cert, hostname): """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 @@ -178,24 +174,26 @@ def match_hostname(cert, hostname): dnsnames.append(value) if len(dnsnames) > 1: raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) + "doesn't match either of %s" % + (hostname, ', '.join(map(repr, dnsnames)))) elif len(dnsnames) == 1: raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) + "doesn't match %r" % + (hostname, dnsnames[0])) else: raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") + "subjectAltName fields were found") try: from types import SimpleNamespace as Container except ImportError: # pragma: no cover + class Container(object): """ A generic container for when multiple values need to be returned """ + def __init__(self, **kwargs): self.__dict__.update(kwargs) @@ -214,6 +212,7 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): path. """ + # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. @@ -237,7 +236,7 @@ def _access_check(fn, mode): if sys.platform == "win32": # The current directory takes precedence on Windows. - if not os.curdir in path: + if os.curdir not in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. 
@@ -258,7 +257,7 @@ def _access_check(fn, mode): seen = set() for dir in path: normdir = os.path.normcase(dir) - if not normdir in seen: + if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) @@ -277,6 +276,7 @@ def _access_check(fn, mode): from zipfile import ZipExtFile as BaseZipExtFile class ZipExtFile(BaseZipExtFile): + def __init__(self, base): self.__dict__.update(base.__dict__) @@ -288,6 +288,7 @@ def __exit__(self, *exc_info): # return None, so if an exception occurred, it will propagate class ZipFile(BaseZipFile): + def __enter__(self): return self @@ -299,9 +300,11 @@ def open(self, *args, **kwargs): base = BaseZipFile.open(self, *args, **kwargs) return ZipExtFile(base) + try: from platform import python_implementation -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + def python_implementation(): """Return a string identifying the Python implementation.""" if 'PyPy' in sys.version: @@ -312,12 +315,12 @@ def python_implementation(): return 'IronPython' return 'CPython' -import shutil + import sysconfig try: callable = callable -except NameError: # pragma: no cover +except NameError: # pragma: no cover from collections.abc import Callable def callable(obj): @@ -358,11 +361,11 @@ def fsdecode(filename): raise TypeError("expect bytes or str, not %s" % type(filename).__name__) + try: from tokenize import detect_encoding -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover from codecs import BOM_UTF8, lookup - import re cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") @@ -401,6 +404,7 @@ def detect_encoding(readline): bom_found = False encoding = None default = 'utf-8' + def read_or_stop(): try: return readline() @@ -430,8 +434,8 @@ def find_cookie(line): if filename is None: msg = "unknown encoding: " + encoding else: - msg = "unknown encoding for {!r}: {}".format(filename, - encoding) + msg = "unknown encoding for {!r}: {}".format( + filename, encoding) raise SyntaxError(msg) if bom_found: @@ -440,7 +444,8 @@ def find_cookie(line): if filename is None: msg = 'encoding problem: utf-8' else: - msg = 'encoding problem for {!r}: utf-8'.format(filename) + msg = 'encoding problem for {!r}: utf-8'.format( + filename) raise SyntaxError(msg) encoding += '-sig' return encoding @@ -467,6 +472,7 @@ def find_cookie(line): return default, [first, second] + # For converting & <-> & etc. try: from html import escape @@ -479,12 +485,13 @@ def find_cookie(line): try: from collections import ChainMap -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover from collections import MutableMapping try: from reprlib import recursive_repr as _recursive_repr except ImportError: + def _recursive_repr(fillvalue='...'): ''' Decorator to make a repr function return fillvalue for a recursive @@ -509,13 +516,15 @@ def wrapper(self): wrapper.__module__ = getattr(user_function, '__module__') wrapper.__doc__ = getattr(user_function, '__doc__') wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + wrapper.__annotations__ = getattr(user_function, + '__annotations__', {}) return wrapper return decorating_function class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together + ''' + A ChainMap groups multiple dicts (or other mappings) together to create a single, updateable view. The underlying mappings are stored in a list. 
That list is public and can @@ -524,7 +533,6 @@ class ChainMap(MutableMapping): Lookups search the underlying mappings successively until a key is found. In contrast, writes, updates, and deletions only operate on the first mapping. - ''' def __init__(self, *maps): @@ -532,7 +540,7 @@ def __init__(self, *maps): If no mappings are provided, a single empty dictionary is used. ''' - self.maps = list(maps) or [{}] # always at least one map + self.maps = list(maps) or [{}] # always at least one map def __missing__(self, key): raise KeyError(key) @@ -540,16 +548,19 @@ def __missing__(self, key): def __getitem__(self, key): for mapping in self.maps: try: - return mapping[key] # can't use 'key in mapping' with defaultdict + return mapping[ + key] # can't use 'key in mapping' with defaultdict except KeyError: pass - return self.__missing__(key) # support subclasses that define __missing__ + return self.__missing__( + key) # support subclasses that define __missing__ def get(self, key, default=None): return self[key] if key in self else default def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible + return len(set().union( + *self.maps)) # reuses stored hash values if possible def __iter__(self): return iter(set().union(*self.maps)) @@ -576,12 +587,12 @@ def copy(self): __copy__ = copy - def new_child(self): # like Django's Context.push() + def new_child(self): # like Django's Context.push() 'New ChainMap with a new dict followed by all previous maps.' return self.__class__({}, *self.maps) @property - def parents(self): # like Django's Context.pop() + def parents(self): # like Django's Context.pop() 'New ChainMap from maps[1:].' return self.__class__(*self.maps[1:]) @@ -592,7 +603,8 @@ def __delitem__(self, key): try: del self.maps[0][key] except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) def popitem(self): 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' @@ -606,15 +618,18 @@ def pop(self, key, *args): try: return self.maps[0].pop(key, *args) except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) def clear(self): 'Clear maps[0], leaving maps[1:] intact.' self.maps[0].clear() + try: from importlib.util import cache_from_source # Python >= 3.4 except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): assert path.endswith('.py') if debug_override is None: @@ -625,12 +640,13 @@ def cache_from_source(path, debug_override=None): suffix = 'o' return path + suffix + try: from collections import OrderedDict -except ImportError: # pragma: no cover -## {{{ http://code.activestate.com/recipes/576693/ (r9) -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. +except ImportError: # pragma: no cover + # {{{ http://code.activestate.com/recipes/576693/ (r9) + # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. + # Passes Python2.7's test suite and incorporates all the latest updates. 
try: from thread import get_ident as _get_ident except ImportError: @@ -641,9 +657,9 @@ def cache_from_source(path, debug_override=None): except ImportError: pass - class OrderedDict(dict): 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. @@ -661,11 +677,12 @@ def __init__(self, *args, **kwds): ''' if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) + raise TypeError('expected at most 1 arguments, got %d' % + len(args)) try: self.__root except AttributeError: - self.__root = root = [] # sentinel node + self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) @@ -779,7 +796,7 @@ def update(*args, **kwds): ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) + 'arguments (%d given)' % (len(args), )) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] @@ -825,14 +842,15 @@ def setdefault(self, key, default=None): def __repr__(self, _repr_running=None): 'od.__repr__() <==> repr(od)' - if not _repr_running: _repr_running = {} + if not _repr_running: + _repr_running = {} call_key = id(self), _get_ident() if call_key in _repr_running: return '...' _repr_running[call_key] = 1 try: if not self: - return '%s()' % (self.__class__.__name__,) + return '%s()' % (self.__class__.__name__, ) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] @@ -844,8 +862,8 @@ def __reduce__(self): for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) + return (self.__class__, (items, ), inst_dict) + return self.__class__, (items, ) def copy(self): 'od.copy() -> a shallow copy of od' @@ -868,7 +886,8 @@ def __eq__(self, other): ''' if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() + return len(self) == len( + other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): @@ -888,19 +907,18 @@ def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) + try: from logging.config import BaseConfigurator, valid_ident -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) - def valid_ident(s): m = IDENTIFIER.match(s) if not m: raise ValueError('Not a valid Python identifier: %r' % s) return True - # The ConvertingXXX classes are wrappers around standard Python containers, # and they serve to convert any suitable values in the container. 
The # conversion converts base dicts, lists and tuples to their wrapped @@ -916,7 +934,7 @@ class ConvertingDict(dict): def __getitem__(self, key): value = dict.__getitem__(self, key) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -928,7 +946,7 @@ def __getitem__(self, key): def get(self, key, default=None): value = dict.get(self, key, default) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -949,10 +967,11 @@ def pop(self, key, default=None): class ConvertingList(list): """A converting list wrapper.""" + def __getitem__(self, key): value = list.__getitem__(self, key) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -972,6 +991,7 @@ def pop(self, idx=-1): class ConvertingTuple(tuple): """A converting tuple wrapper.""" + def __getitem__(self, key): value = tuple.__getitem__(self, key) result = self.configurator.convert(value) @@ -995,8 +1015,8 @@ class BaseConfigurator(object): DIGIT_PATTERN = re.compile(r'^\d+$') value_converters = { - 'ext' : 'ext_convert', - 'cfg' : 'cfg_convert', + 'ext': 'ext_convert', + 'cfg': 'cfg_convert', } # We might want to use a different one, e.g. importlib @@ -1042,7 +1062,6 @@ def cfg_convert(self, value): else: rest = rest[m.end():] d = self.config[m.groups()[0]] - #print d, rest while rest: m = self.DOT_PATTERN.match(rest) if m: @@ -1055,7 +1074,9 @@ def cfg_convert(self, value): d = d[idx] else: try: - n = int(idx) # try as number first (most likely) + n = int( + idx + ) # try as number first (most likely) d = d[n] except TypeError: d = d[idx] @@ -1064,7 +1085,7 @@ def cfg_convert(self, value): else: raise ValueError('Unable to convert ' '%r at %r' % (value, rest)) - #rest should be empty + # rest should be empty return d def convert(self, value): @@ -1073,14 +1094,15 @@ def convert(self, value): replaced by their converting alternatives. Strings are checked to see if they have a conversion format and are converted if they do. """ - if not isinstance(value, ConvertingDict) and isinstance(value, dict): + if not isinstance(value, ConvertingDict) and isinstance( + value, dict): value = ConvertingDict(value) value.configurator = self - elif not isinstance(value, ConvertingList) and isinstance(value, list): + elif not isinstance(value, ConvertingList) and isinstance( + value, list): value = ConvertingList(value) value.configurator = self - elif not isinstance(value, ConvertingTuple) and\ - isinstance(value, tuple): + elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple): value = ConvertingTuple(value) value.configurator = self elif isinstance(value, string_types): diff --git a/src/pip/_vendor/distlib/database.py b/src/pip/_vendor/distlib/database.py index bc16e889db8..eb3765f193b 100644 --- a/src/pip/_vendor/distlib/database.py +++ b/src/pip/_vendor/distlib/database.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 The Python Software Foundation. 
+# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """PEP 376 implementation.""" @@ -25,11 +25,10 @@ from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, CSVWriter) - -__all__ = ['Distribution', 'BaseInstalledDistribution', - 'InstalledDistribution', 'EggInfoDistribution', - 'DistributionPath'] - +__all__ = [ + 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', + 'EggInfoDistribution', 'DistributionPath' +] logger = logging.getLogger(__name__) @@ -46,6 +45,7 @@ class _Cache(object): """ A simple cache mapping names and .dist-info paths to distributions """ + def __init__(self): """ Initialise an instance. There is normally one for each DistributionPath. @@ -76,6 +76,7 @@ class DistributionPath(object): """ Represents a set of distributions installed on a path (typically sys.path). """ + def __init__(self, path=None, include_egg=False): """ Create an instance from a path, optionally including legacy (distutils/ @@ -111,7 +112,6 @@ def clear_cache(self): self._cache.clear() self._cache_egg.clear() - def _yield_distributions(self): """ Yield .dist-info and/or .egg(-info) distributions. @@ -134,11 +134,13 @@ def _yield_distributions(self): continue try: if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, - WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME] + possible_filenames = [ + METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME + ] for metadata_filename in possible_filenames: - metadata_path = posixpath.join(entry, metadata_filename) + metadata_path = posixpath.join( + entry, metadata_filename) pydist = finder.find(metadata_path) if pydist: break @@ -146,13 +148,15 @@ def _yield_distributions(self): continue with contextlib.closing(pydist.as_stream()) as stream: - metadata = Metadata(fileobj=stream, scheme='legacy') + metadata = Metadata(fileobj=stream, + scheme='legacy') logger.debug('Found %s', r.path) seen.add(r.path) - yield new_dist_class(r.path, metadata=metadata, + yield new_dist_class(r.path, + metadata=metadata, env=self) - elif self._include_egg and entry.endswith(('.egg-info', - '.egg')): + elif self._include_egg and entry.endswith( + ('.egg-info', '.egg')): logger.debug('Found %s', r.path) seen.add(r.path) yield old_dist_class(r.path, self) @@ -271,7 +275,7 @@ def provides_distribution(self, name, version=None): matcher = self._scheme.matcher('%s (%s)' % (name, version)) except ValueError: raise DistlibException('invalid name or version: %r, %r' % - (name, version)) + (name, version)) for dist in self.get_distributions(): # We hit a problem on Travis where enum34 was installed and doesn't @@ -346,12 +350,12 @@ def __init__(self, metadata): """ self.metadata = metadata self.name = metadata.name - self.key = self.name.lower() # for case-insensitive comparisons + self.key = self.name.lower() # for case-insensitive comparisons self.version = metadata.version self.locator = None self.digest = None - self.extras = None # additional features requested - self.context = None # environment marker overrides + self.extras = None # additional features requested + self.context = None # environment marker overrides self.download_urls = set() self.digests = {} @@ -362,7 +366,7 @@ def source_url(self): """ return self.metadata.source_url - download_url = source_url # Backward compatibility + download_url = source_url # Backward compatibility @property def name_and_version(self): @@ -386,10 
+390,10 @@ def provides(self): def _get_requirements(self, req_attr): md = self.metadata reqts = getattr(md, req_attr) - logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr, - reqts) - return set(md.get_requirements(reqts, extras=self.extras, - env=self.context)) + logger.debug('%s: got requirements %r from metadata: %r', self.name, + req_attr, reqts) + return set( + md.get_requirements(reqts, extras=self.extras, env=self.context)) @property def run_requires(self): @@ -426,12 +430,11 @@ def matches_requirement(self, req): matcher = scheme.matcher(r.requirement) except UnsupportedVersionError: # XXX compat-mode if cannot read the version - logger.warning('could not read version %r - using name only', - req) + logger.warning('could not read version %r - using name only', req) name = req.split()[0] matcher = scheme.matcher(name) - name = matcher.key # case-insensitive + name = matcher.key # case-insensitive result = False for p in self.provides: @@ -466,9 +469,8 @@ def __eq__(self, other): if type(other) is not type(self): result = False else: - result = (self.name == other.name and - self.version == other.version and - self.source_url == other.source_url) + result = (self.name == other.name and self.version == other.version + and self.source_url == other.source_url) return result def __hash__(self): @@ -559,8 +561,8 @@ def __init__(self, path, metadata=None, env=None): if r is None: r = finder.find(LEGACY_METADATA_FILENAME) if r is None: - raise ValueError('no %s found in %s' % (METADATA_FILENAME, - path)) + raise ValueError('no %s found in %s' % + (METADATA_FILENAME, path)) with contextlib.closing(r.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') @@ -571,7 +573,7 @@ def __init__(self, path, metadata=None, env=None): r = finder.find('REQUESTED') self.requested = r is not None - p = os.path.join(path, 'top_level.txt') + p = os.path.join(path, 'top_level.txt') if os.path.exists(p): with open(p, 'rb') as f: data = f.read().decode('utf-8') @@ -596,14 +598,14 @@ def _get_records(self): with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as record_reader: # Base location is parent dir of .dist-info dir - #base_location = os.path.dirname(self.path) - #base_location = os.path.abspath(base_location) + # base_location = os.path.dirname(self.path) + # base_location = os.path.abspath(base_location) for row in record_reader: missing = [None for i in range(len(row), 3)] path, checksum, size = row + missing - #if not os.path.isabs(path): - # path = path.replace('/', os.sep) - # path = os.path.join(base_location, path) + # if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) results.append((path, checksum, size)) return results @@ -701,8 +703,8 @@ def write_installed_files(self, paths, prefix, dry_run=False): size = '%d' % os.path.getsize(path) with open(path, 'rb') as fp: hash_value = self.get_hash(fp.read()) - if path.startswith(base) or (base_under_prefix and - path.startswith(prefix)): + if path.startswith(base) or (base_under_prefix + and path.startswith(prefix)): path = os.path.relpath(path, base) writer.writerow((path, hash_value, size)) @@ -744,7 +746,8 @@ def check_installed_files(self): with open(path, 'rb') as f: actual_hash = self.get_hash(f.read(), hasher) if actual_hash != hash_value: - mismatches.append((path, 'hash', hash_value, actual_hash)) + mismatches.append( + (path, 'hash', hash_value, actual_hash)) return mismatches @cached_property @@ -791,7 
+794,7 @@ def write_shared_locations(self, paths, dry_run=False): for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): path = paths[key] if os.path.isdir(paths[key]): - lines.append('%s=%s' % (key, path)) + lines.append('%s=%s' % (key, path)) for ns in paths.get('namespace', ()): lines.append('namespace=%s' % ns) @@ -854,8 +857,8 @@ def list_distinfo_files(self): yield path def __eq__(self, other): - return (isinstance(other, InstalledDistribution) and - self.path == other.path) + return (isinstance(other, InstalledDistribution) + and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ @@ -867,13 +870,14 @@ class EggInfoDistribution(BaseInstalledDistribution): if the given path happens to be a directory, the metadata is read from the file ``PKG-INFO`` under that directory.""" - requested = True # as we have no way of knowing, assume it was + requested = True # as we have no way of knowing, assume it was shared_locations = {} def __init__(self, path, env=None): + def set_name_and_version(s, n, v): s.name = n - s.key = n.lower() # for case-insensitive comparisons + s.key = n.lower() # for case-insensitive comparisons s.version = v self.path = path @@ -907,8 +911,8 @@ def parse_requires_data(data): if not line: # pragma: no cover continue if line.startswith('['): # pragma: no cover - logger.warning('Unexpected line: quitting requirement scan: %r', - line) + logger.warning( + 'Unexpected line: quitting requirement scan: %r', line) break r = parse_requirement(line) if not r: # pragma: no cover @@ -955,7 +959,8 @@ def parse_requires_path(req_path): metadata = Metadata(fileobj=fileobj, scheme='legacy') try: data = zipf.get_data('EGG-INFO/requires.txt') - tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode( + 'utf-8') requires = parse_requires_data(data.decode('utf-8')) except IOError: requires = None @@ -985,8 +990,8 @@ def parse_requires_path(req_path): return metadata def __repr__(self): - return '' % ( - self.name, self.version, self.path) + return '' % (self.name, self.version, + self.path) def __str__(self): return "%s %s" % (self.name, self.version) @@ -1042,7 +1047,7 @@ def _size(path): logger.warning('Non-existent file: %s', p) if p.endswith(('.pyc', '.pyo')): continue - #otherwise fall through and fail + # otherwise fall through and fail if not os.path.isdir(p): result.append((p, _md5(p), _size(p))) result.append((record_path, None, None)) @@ -1078,12 +1083,13 @@ def list_distinfo_files(self, absolute=False): yield line def __eq__(self, other): - return (isinstance(other, EggInfoDistribution) and - self.path == other.path) + return (isinstance(other, EggInfoDistribution) + and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ + new_dist_class = InstalledDistribution old_dist_class = EggInfoDistribution @@ -1117,7 +1123,7 @@ def add_distribution(self, distribution): """ self.adjacency_list[distribution] = [] self.reverse_list[distribution] = [] - #self.missing[distribution] = [] + # self.missing[distribution] = [] def add_edge(self, x, y, label=None): """Add an edge from distribution *x* to distribution *y* with the given @@ -1177,7 +1183,7 @@ def to_dot(self, f, skip_disconnected=True): if len(adjs) == 0 and not skip_disconnected: disconnected.append(dist) for other, label in adjs: - if not label is None: + if label is not None: f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, 
other.name, label)) else: @@ -1255,8 +1261,8 @@ def make_graph(dists, scheme='default'): # now make the edges for dist in dists: - requires = (dist.run_requires | dist.meta_requires | - dist.build_requires | dist.dev_requires) + requires = (dist.run_requires | dist.meta_requires + | dist.build_requires | dist.dev_requires) for req in requires: try: matcher = scheme.matcher(req) @@ -1267,7 +1273,7 @@ def make_graph(dists, scheme='default'): name = req.split()[0] matcher = scheme.matcher(name) - name = matcher.key # case-insensitive + name = matcher.key # case-insensitive matched = False if name in provided: @@ -1327,7 +1333,7 @@ def get_required_dists(dists, dist): req = set() # required distributions todo = graph.adjacency_list[dist] # list of nodes we should inspect - seen = set(t[0] for t in todo) # already added to todo + seen = set(t[0] for t in todo) # already added to todo while todo: d = todo.pop()[0] diff --git a/src/pip/_vendor/distlib/index.py b/src/pip/_vendor/distlib/index.py index 9b6d129ed69..56cd2867145 100644 --- a/src/pip/_vendor/distlib/index.py +++ b/src/pip/_vendor/distlib/index.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -25,6 +25,7 @@ DEFAULT_INDEX = 'https://pypi.org/pypi' DEFAULT_REALM = 'pypi' + class PackageIndex(object): """ This class represents a package index compatible with PyPI, the Python @@ -119,7 +120,7 @@ def register(self, metadata): # pragma: no cover d = metadata.todict() d[':action'] = 'verify' request = self.encode_request(d.items(), []) - response = self.send_request(request) + self.send_request(request) d[':action'] = 'submit' request = self.encode_request(d.items(), []) return self.send_request(request) @@ -358,8 +359,7 @@ def verify_signature(self, signature_filename, data_filename, keystore) rc, stdout, stderr = self.run_command(cmd) if rc not in (0, 1): - raise DistlibException('verify command failed with error ' - 'code %s' % rc) + raise DistlibException('verify command failed with error code %s' % rc) return rc == 0 def download_file(self, url, destfile, digest=None, reporthook=None): diff --git a/src/pip/_vendor/distlib/locators.py b/src/pip/_vendor/distlib/locators.py index 966ebc0e37d..f9f0788fc2a 100644 --- a/src/pip/_vendor/distlib/locators.py +++ b/src/pip/_vendor/distlib/locators.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2015 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -38,6 +38,7 @@ HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') DEFAULT_INDEX = 'https://pypi.org/pypi' + def get_all_distribution_names(url=None): """ Return all distribution names known by an index. @@ -52,6 +53,7 @@ def get_all_distribution_names(url=None): finally: client('close')() + class RedirectHandler(BaseRedirectHandler): """ A class to work around a bug in some Python 3.2.x releases. @@ -83,6 +85,7 @@ def http_error_302(self, req, fp, code, msg, headers): http_error_301 = http_error_303 = http_error_307 = http_error_302 + class Locator(object): """ A base class for locators - things that locate distributions. 
@@ -272,7 +275,7 @@ def same_project(name1, name2): 'python-version': ', '.join( ['.'.join(list(v[2:])) for v in wheel.pyver]), } - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover logger.warning('invalid path for wheel: %s', path) elif not path.endswith(self.downloadable_extensions): # pragma: no cover logger.debug('Not downloadable: %s', path) @@ -293,7 +296,6 @@ def same_project(name1, name2): 'filename': filename, 'url': urlunparse((scheme, netloc, origpath, params, query, '')), - #'packagetype': 'sdist', } if pyver: # pragma: no cover result['python-version'] = pyver @@ -382,12 +384,9 @@ def locate(self, requirement, prereleases=False): else: if prereleases or not vcls(k).is_prerelease: slist.append(k) - # else: - # logger.debug('skipping pre-release ' - # 'version %s of %s', k, matcher.name) except Exception: # pragma: no cover logger.warning('error matching %s with %r', matcher, k) - pass # slist.append(k) + pass # slist.append(k) if len(slist) > 1: slist = sorted(slist, key=scheme.key) if slist: @@ -456,6 +455,7 @@ def _get_project(self, name): result['digests'][url] = digest return result + class PyPIJSONLocator(Locator): """ This locator uses PyPI's JSON interface. It's very limited in functionality @@ -476,7 +476,7 @@ def _get_project(self, name): url = urljoin(self.base_url, '%s/json' % quote(name)) try: resp = self.opener.open(url) - data = resp.read().decode() # for now + data = resp.read().decode() # for now d = json.loads(data) md = Metadata(scheme=self.scheme) data = d['info'] @@ -487,7 +487,7 @@ def _get_project(self, name): md.summary = data.get('summary') dist = Distribution(md) dist.locator = self - urls = d['urls'] + # urls = d['urls'] result[md.version] = dist for info in d['urls']: url = info['url'] @@ -745,7 +745,7 @@ def _fetch(self): try: self._seen.add(link) if (not self._process_download(link) and - self._should_queue(link, url, rel)): + self._should_queue(link, url, rel)): logger.debug('Queueing %s from %s', link, url) self._to_fetch.put(link) except MetadataInvalidError: # e.g. invalid versions @@ -756,7 +756,7 @@ def _fetch(self): # always do this, to avoid hangs :-) self._to_fetch.task_done() if not url: - #logger.debug('Sentinel seen, quitting.') + # logger.debug('Sentinel seen, quitting.') break def get_page(self, url): @@ -832,6 +832,7 @@ def get_distribution_names(self): result.add(match.group(1)) return result + class DirectoryLocator(Locator): """ This class locates distributions in a directory tree. @@ -897,6 +898,7 @@ def get_distribution_names(self): break return result + class JSONLocator(Locator): """ This locator uses special extended metadata (not available on PyPI) and is @@ -935,6 +937,7 @@ def _get_project(self, name): result['urls'].setdefault(dist.version, set()).add(info['url']) return result + class DistPathLocator(Locator): """ This locator finds installed distributions in a path. 
It can be useful for @@ -1245,7 +1248,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): if name not in self.dists_by_name: self.add_distribution(dist) else: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() other = self.dists_by_name[name] if other != dist: self.try_to_replace(dist, other, problems) diff --git a/src/pip/_vendor/distlib/manifest.py b/src/pip/_vendor/distlib/manifest.py index 18beba3db37..420dcf12ed2 100644 --- a/src/pip/_vendor/distlib/manifest.py +++ b/src/pip/_vendor/distlib/manifest.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2013 Python Software Foundation. +# Copyright (C) 2012-2023 Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """ @@ -34,6 +34,7 @@ # _PYTHON_VERSION = sys.version_info[:2] + class Manifest(object): """ A list of files built by exploring the filesystem and filtered by applying various @@ -155,10 +156,7 @@ def process_directive(self, directive): elif action == 'exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, anchor=True) - #if not found: - # logger.warning('no previously-included files ' - # 'found matching %r', pattern) + self._exclude_pattern(pattern, anchor=True) elif action == 'global-include': for pattern in patterns: @@ -168,11 +166,7 @@ def process_directive(self, directive): elif action == 'global-exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, anchor=False) - #if not found: - # logger.warning('no previously-included files ' - # 'matching %r found anywhere in ' - # 'distribution', pattern) + self._exclude_pattern(pattern, anchor=False) elif action == 'recursive-include': for pattern in patterns: @@ -182,11 +176,7 @@ def process_directive(self, directive): elif action == 'recursive-exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, prefix=thedir) - #if not found: - # logger.warning('no previously-included files ' - # 'matching %r found under directory %r', - # pattern, thedir) + self._exclude_pattern(pattern, prefix=thedir) elif action == 'graft': if not self._include_pattern(None, prefix=dirpattern): diff --git a/src/pip/_vendor/distlib/markers.py b/src/pip/_vendor/distlib/markers.py index c70400cc889..1514d460e70 100644 --- a/src/pip/_vendor/distlib/markers.py +++ b/src/pip/_vendor/distlib/markers.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. 
# @@ -19,23 +19,28 @@ from .compat import string_types from .util import in_venv, parse_marker -from .version import NormalizedVersion as NV +from .version import LegacyVersion as LV __all__ = ['interpret'] -_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') +_VERSION_PATTERN = re.compile( + r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') _VERSION_MARKERS = {'python_version', 'python_full_version'} + def _is_version_marker(s): return isinstance(s, string_types) and s in _VERSION_MARKERS + def _is_literal(o): if not isinstance(o, string_types) or not o: return False return o[0] in '\'"' + def _get_versions(s): - return {NV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)} + return {LV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)} + class Evaluator(object): """ @@ -47,10 +52,10 @@ class Evaluator(object): '===': lambda x, y: x == y, '~=': lambda x, y: x == y or x > y, '!=': lambda x, y: x != y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x == y or x < y, - '>': lambda x, y: x > y, - '>=': lambda x, y: x == y or x > y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, 'and': lambda x, y: x and y, 'or': lambda x, y: x or y, 'in': lambda x, y: x in y, @@ -77,23 +82,27 @@ def evaluate(self, expr, context): elhs = expr['lhs'] erhs = expr['rhs'] if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + raise SyntaxError('invalid comparison: %s %s %s' % + (elhs, op, erhs)) lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) - if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and - op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): - lhs = NV(lhs) - rhs = NV(rhs) + if ((_is_version_marker(elhs) or _is_version_marker(erhs)) + and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + lhs = LV(lhs) + rhs = LV(rhs) elif _is_version_marker(elhs) and op in ('in', 'not in'): - lhs = NV(lhs) + lhs = LV(lhs) rhs = _get_versions(rhs) result = self.operations[op](lhs, rhs) return result + _DIGITS = re.compile(r'\d+\.\d+') + def default_context(): + def format_full_version(info): version = '%s.%s.%s' % (info.major, info.minor, info.micro) kind = info.releaselevel @@ -102,7 +111,8 @@ def format_full_version(info): return version if hasattr(sys, 'implementation'): - implementation_version = format_full_version(sys.implementation.version) + implementation_version = format_full_version( + sys.implementation.version) implementation_name = sys.implementation.name else: implementation_version = '0' @@ -127,11 +137,13 @@ def format_full_version(info): } return result + DEFAULT_CONTEXT = default_context() del default_context evaluator = Evaluator() + def interpret(marker, execution_context=None): """ Interpret a marker and return a result depending on environment. 
@@ -144,9 +156,11 @@ def interpret(marker, execution_context=None): try: expr, rest = parse_marker(marker) except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % + (marker, e)) if rest and rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + raise SyntaxError('unexpected trailing data in marker: %s: %s' % + (marker, rest)) context = dict(DEFAULT_CONTEXT) if execution_context: context.update(execution_context) diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py index d8fdb3a95d3..cfa45d2af18 100644 --- a/src/pip/_vendor/distlib/scripts.py +++ b/src/pip/_vendor/distlib/scripts.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013-2015 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -65,9 +65,11 @@ def enquote_executable(executable): executable = '"%s"' % executable return executable + # Keep the old name around (for now), as there is at least one project using it! _enquote_executable = enquote_executable + class ScriptMaker(object): """ A class to copy or create scripts from source scripts or callable @@ -77,21 +79,25 @@ class ScriptMaker(object): executable = None # for shebangs - def __init__(self, source_dir, target_dir, add_launchers=True, - dry_run=False, fileop=None): + def __init__(self, + source_dir, + target_dir, + add_launchers=True, + dry_run=False, + fileop=None): self.source_dir = source_dir self.target_dir = target_dir self.add_launchers = add_launchers self.force = False self.clobber = False # It only makes sense to set mode bits on POSIX. - self.set_mode = (os.name == 'posix') or (os.name == 'java' and - os._name == 'posix') + self.set_mode = (os.name == 'posix') or (os.name == 'java' + and os._name == 'posix') self.variants = set(('', 'X.Y')) self._fileop = fileop or FileOperator(dry_run) - self._is_nt = os.name == 'nt' or ( - os.name == 'java' and os._name == 'nt') + self._is_nt = os.name == 'nt' or (os.name == 'java' + and os._name == 'nt') self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): @@ -102,6 +108,7 @@ def _get_alternate_executable(self, executable, options): return executable if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): """ Determine if the specified executable is a script @@ -146,8 +153,8 @@ def _build_shebang(self, executable, post_interp): max_shebang_length = 512 else: max_shebang_length = 127 - simple_shebang = ((b' ' not in executable) and - (shebang_length <= max_shebang_length)) + simple_shebang = ((b' ' not in executable) + and (shebang_length <= max_shebang_length)) if simple_shebang: result = b'#!' 
+ executable + post_interp + b'\n' @@ -161,23 +168,25 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): enquote = True if self.executable: executable = self.executable - enquote = False # assume this will be taken care of + enquote = False # assume this will be taken care of elif not sysconfig.is_python_build(): executable = get_executable() elif in_venv(): # pragma: no cover - executable = os.path.join(sysconfig.get_path('scripts'), - 'python%s' % sysconfig.get_config_var('EXE')) + executable = os.path.join( + sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) else: # pragma: no cover if os.name == 'nt': # for Python builds from source on Windows, no Python executables with # a version suffix are created, so we use python.exe - executable = os.path.join(sysconfig.get_config_var('BINDIR'), - 'python%s' % (sysconfig.get_config_var('EXE'))) + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) else: executable = os.path.join( sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) if options: executable = self._get_alternate_executable(executable, options) @@ -202,7 +211,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): executable = executable.encode('utf-8') # in case of IronPython, play safe and enable frames support if (sys.platform == 'cli' and '-X:Frames' not in post_interp - and '-X:FullFrames' not in post_interp): # pragma: no cover + and '-X:FullFrames' not in post_interp): # pragma: no cover post_interp += b' -X:Frames' shebang = self._build_shebang(executable, post_interp) # Python parser starts to read a script using UTF-8 until @@ -213,8 +222,8 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): try: shebang.decode('utf-8') except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable from utf-8' % shebang) + raise ValueError('The shebang (%r) is not decodable from utf-8' % + shebang) # If the script is encoded to a custom encoding (use a # #coding:xxx cookie), the shebang has to be decodable from # the script encoding too. 
@@ -222,15 +231,16 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): try: shebang.decode(encoding) except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable ' - 'from the script encoding (%r)' % (shebang, encoding)) + raise ValueError('The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % + (shebang, encoding)) return shebang def _get_script_text(self, entry): - return self.script_template % dict(module=entry.prefix, - import_name=entry.suffix.split('.')[0], - func=entry.suffix) + return self.script_template % dict( + module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) manifest = _DEFAULT_MANIFEST @@ -255,7 +265,8 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') if source_date_epoch: date_time = time.gmtime(int(source_date_epoch))[:6] - zinfo = ZipInfo(filename='__main__.py', date_time=date_time) + zinfo = ZipInfo(filename='__main__.py', + date_time=date_time) zf.writestr(zinfo, script_bytes) else: zf.writestr('__main__.py', script_bytes) @@ -276,7 +287,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): 'use .deleteme logic') dfname = '%s.deleteme' % outname if os.path.exists(dfname): - os.remove(dfname) # Not allowed to fail here + os.remove(dfname) # Not allowed to fail here os.rename(outname, dfname) # nor here self._fileop.write_binary_file(outname, script_bytes) logger.debug('Able to replace executable using ' @@ -284,9 +295,10 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): try: os.remove(dfname) except Exception: - pass # still in use - ignore error + pass # still in use - ignore error else: - if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover + if self._is_nt and not outname.endswith( + '.' + ext): # pragma: no cover outname = '%s.%s' % (outname, ext) if os.path.exists(outname) and not self.clobber: logger.warning('Skipping existing file %s', outname) @@ -305,8 +317,9 @@ def get_script_filenames(self, name): if 'X' in self.variants: result.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - result.add('%s%s%s.%s' % (name, self.variant_separator, - self.version_info[0], self.version_info[1])) + result.add('%s%s%s.%s' % + (name, self.variant_separator, self.version_info[0], + self.version_info[1])) return result def _make_script(self, entry, filenames, options=None): @@ -384,12 +397,13 @@ def dry_run(self): def dry_run(self, value): self._fileop.dry_run = value - if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + if os.name == 'nt' or (os.name == 'java' + and os._name == 'nt'): # pragma: no cover # Executable launcher support. 
# Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ def _get_launcher(self, kind): - if struct.calcsize('P') == 8: # 64-bit + if struct.calcsize('P') == 8: # 64-bit bits = '64' else: bits = '32' @@ -400,8 +414,8 @@ def _get_launcher(self, kind): distlib_package = __name__.rsplit('.', 1)[0] resource = finder(distlib_package).find(name) if not resource: - msg = ('Unable to find resource %s in package %s' % (name, - distlib_package)) + msg = ('Unable to find resource %s in package %s' % + (name, distlib_package)) raise ValueError(msg) return resource.bytes diff --git a/src/pip/_vendor/distlib/util.py b/src/pip/_vendor/distlib/util.py index 04429ad2b52..ba58858d0fb 100644 --- a/src/pip/_vendor/distlib/util.py +++ b/src/pip/_vendor/distlib/util.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012-2021 The Python Software Foundation. +# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # import codecs @@ -33,7 +33,7 @@ from . import DistlibException from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, xmlrpclib, - splittype, HTTPHandler, BaseConfigurator, valid_ident, + HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, fsdecode, unquote, urlparse) @@ -62,6 +62,7 @@ def parse_marker(marker_string): interpreted as a literal string, and a string not contained in quotes is a variable (such as os_name). """ + def marker_var(remaining): # either identifier, or literal string m = IDENTIFIER.match(remaining) @@ -87,7 +88,8 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % remaining) + raise SyntaxError('error in string literal: %s' % + remaining) parts.append(m.groups()[0]) remaining = remaining[m.end():] else: @@ -95,7 +97,7 @@ def marker_var(remaining): raise SyntaxError('unterminated string: %s' % s) parts.append(q) result = ''.join(parts) - remaining = remaining[1:].lstrip() # skip past closing quote + remaining = remaining[1:].lstrip() # skip past closing quote return result, remaining def marker_expr(remaining): @@ -208,7 +210,8 @@ def get_versions(ver_remaining): ver_remaining = ver_remaining[m.end():] m = VERSION_IDENTIFIER.match(ver_remaining) if not m: - raise SyntaxError('invalid version: %s' % ver_remaining) + raise SyntaxError('invalid version: %s' % + ver_remaining) v = m.groups()[0] versions.append((op, v)) ver_remaining = ver_remaining[m.end():] @@ -221,7 +224,8 @@ def get_versions(ver_remaining): break m = COMPARE_OP.match(ver_remaining) if not m: - raise SyntaxError('invalid constraint: %s' % ver_remaining) + raise SyntaxError('invalid constraint: %s' % + ver_remaining) if not versions: versions = None return versions, ver_remaining @@ -231,7 +235,8 @@ def get_versions(ver_remaining): else: i = remaining.find(')', 1) if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % remaining) + raise SyntaxError('unterminated parenthesis: %s' % + remaining) s = remaining[1:i] remaining = remaining[i + 1:].lstrip() # As a special diversion from PEP 508, allow a version number @@ -262,9 +267,14 @@ def get_versions(ver_remaining): if not versions: rs = distname else: - rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) - return Container(name=distname, extras=extras, constraints=versions, - marker=mark_expr, url=uri, requirement=rs) + rs = '%s %s' % (distname, ', '.join( + ['%s %s' % con for con in versions])) + return 
Container(name=distname, + extras=extras, + constraints=versions, + marker=mark_expr, + url=uri, + requirement=rs) def get_resources_dests(resources_root, rules): @@ -304,15 +314,15 @@ def in_venv(): def get_executable(): -# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as -# changes to the stub launcher mean that sys.executable always points -# to the stub on OS X -# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' -# in os.environ): -# result = os.environ['__PYVENV_LAUNCHER__'] -# else: -# result = sys.executable -# return result + # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as + # changes to the stub launcher mean that sys.executable always points + # to the stub on OS X + # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' + # in os.environ): + # result = os.environ['__PYVENV_LAUNCHER__'] + # else: + # result = sys.executable + # return result # Avoid normcasing: see issue #143 # result = os.path.normcase(sys.executable) result = sys.executable @@ -346,6 +356,7 @@ def extract_by_key(d, keys): result[key] = d[key] return result + def read_exports(stream): if sys.version_info[0] >= 3: # needs to be a text stream @@ -388,7 +399,7 @@ def read_stream(cp, stream): s = '%s = %s' % (name, value) entry = get_export_entry(s) assert entry is not None - #entry.dist = self + # entry.dist = self entries[name] = entry return result @@ -420,6 +431,7 @@ def tempdir(): finally: shutil.rmtree(td) + @contextlib.contextmanager def chdir(d): cwd = os.getcwd() @@ -441,19 +453,21 @@ def socket_timeout(seconds=15): class cached_property(object): + def __init__(self, func): self.func = func - #for attr in ('__name__', '__module__', '__doc__'): - # setattr(self, attr, getattr(func, attr, None)) + # for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) def __get__(self, obj, cls=None): if obj is None: return self value = self.func(obj) object.__setattr__(obj, self.func.__name__, value) - #obj.__dict__[self.func.__name__] = value = self.func(obj) + # obj.__dict__[self.func.__name__] = value = self.func(obj) return value + def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem. 
@@ -482,6 +496,7 @@ def convert_path(pathname): class FileOperator(object): + def __init__(self, dry_run=False): self.dry_run = dry_run self.ensured = set() @@ -586,7 +601,12 @@ def ensure_dir(self, path): if self.record: self.dirs_created.add(path) - def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + def byte_compile(self, + path, + optimize=False, + force=False, + prefix=None, + hashed_invalidation=False): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: @@ -597,9 +617,12 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_in assert path.startswith(prefix) diagpath = path[len(prefix):] compile_kwargs = {} - if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): - compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH - py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + if hashed_invalidation and hasattr(py_compile, + 'PycInvalidationMode'): + compile_kwargs[ + 'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, + **compile_kwargs) # raise error self.record_as_written(dpath) return dpath @@ -661,9 +684,10 @@ def rollback(self): assert flist == ['__pycache__'] sd = os.path.join(d, flist[0]) os.rmdir(sd) - os.rmdir(d) # should fail if non-empty + os.rmdir(d) # should fail if non-empty self._init_record() + def resolve(module_name, dotted_path): if module_name in sys.modules: mod = sys.modules[module_name] @@ -680,6 +704,7 @@ def resolve(module_name, dotted_path): class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): self.name = name self.prefix = prefix @@ -698,20 +723,21 @@ def __eq__(self, other): if not isinstance(other, ExportEntry): result = False else: - result = (self.name == other.name and - self.prefix == other.prefix and - self.suffix == other.suffix and - self.flags == other.flags) + result = (self.name == other.name and self.prefix == other.prefix + and self.suffix == other.suffix + and self.flags == other.flags) return result __hash__ = object.__hash__ -ENTRY_RE = re.compile(r'''(?P([^\[]\S*)) +ENTRY_RE = re.compile( + r'''(?P([^\[]\S*)) \s*=\s*(?P(\w+)([:\.]\w+)*) \s*(\[\s*(?P[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? ''', re.VERBOSE) + def get_export_entry(specification): m = ENTRY_RE.search(specification) if not m: @@ -827,6 +853,7 @@ def get_process_umask(): os.umask(result) return result + def is_string_sequence(seq): result = True i = None @@ -837,8 +864,10 @@ def is_string_sequence(seq): assert i is not None return result -PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' - '([a-z0-9_.+-]+)', re.I) + +PROJECT_NAME_AND_VERSION = re.compile( + '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') @@ -866,10 +895,12 @@ def split_filename(filename, project_name=None): result = m.group(1), m.group(3), pyver return result + # Allow spaces in name because of legacy dists like "Twisted Core" NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' r'\(\s*(?P[^\s)]+)\)$') + def parse_name_and_version(p): """ A utility method used to get name and version from a string. 
@@ -885,6 +916,7 @@ def parse_name_and_version(p): d = m.groupdict() return d['name'].strip().lower(), d['ver'] + def get_extras(requested, available): result = set() requested = set(requested or []) @@ -906,10 +938,13 @@ def get_extras(requested, available): logger.warning('undeclared extra: %s' % r) result.add(r) return result + + # # Extended metadata functionality # + def _get_external_data(url): result = {} try: @@ -923,21 +958,24 @@ def _get_external_data(url): logger.debug('Unexpected response for JSON request: %s', ct) else: reader = codecs.getreader('utf-8')(resp) - #data = reader.read().decode('utf-8') - #result = json.loads(data) + # data = reader.read().decode('utf-8') + # result = json.loads(data) result = json.load(reader) except Exception as e: logger.exception('Failed to get external data for %s: %s', url, e) return result + _external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + def get_project_data(name): url = '%s/%s/project.json' % (name[0].upper(), name) url = urljoin(_external_data_base_url, url) result = _get_external_data(url) return result + def get_package_data(name, version): url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) url = urljoin(_external_data_base_url, url) @@ -992,6 +1030,7 @@ class EventMixin(object): """ A very simple publish/subscribe system. """ + def __init__(self): self._subscribers = {} @@ -1053,18 +1092,20 @@ def publish(self, event, *args, **kwargs): logger.exception('Exception during event publication') value = None result.append(value) - logger.debug('publish %s: args = %s, kwargs = %s, result = %s', - event, args, kwargs, result) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, + args, kwargs, result) return result + # # Simple sequencing # class Sequencer(object): + def __init__(self): self._preds = {} self._succs = {} - self._nodes = set() # nodes with no preds/succs + self._nodes = set() # nodes with no preds/succs def add_node(self, node): self._nodes.add(node) @@ -1104,8 +1145,8 @@ def remove(self, pred, succ): raise ValueError('%r not a successor of %r' % (succ, pred)) def is_step(self, step): - return (step in self._preds or step in self._succs or - step in self._nodes) + return (step in self._preds or step in self._succs + or step in self._nodes) def get_steps(self, final): if not self.is_step(final): @@ -1134,7 +1175,7 @@ def get_steps(self, final): @property def strong_connections(self): - #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm index_counter = [0] stack = [] lowlinks = {} @@ -1159,11 +1200,11 @@ def strongconnect(node): if successor not in lowlinks: # Successor has not yet been visited strongconnect(successor) - lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) elif successor in stack: # the successor is in the stack and hence in the current # strongly connected component (SCC) - lowlinks[node] = min(lowlinks[node],index[successor]) + lowlinks[node] = min(lowlinks[node], index[successor]) # If `node` is a root node, pop the stack and generate an SCC if lowlinks[node] == index[node]: @@ -1172,7 +1213,8 @@ def strongconnect(node): while True: successor = stack.pop() connected_component.append(successor) - if successor == node: break + if successor == node: + break component = tuple(connected_component) # storing the result result.append(component) @@ -1195,12 +1237,14 @@ def dot(self): 
result.append('}') return '\n'.join(result) + # # Unarchiving functionality for zip, tar, tgz, tbz, whl # -ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', - '.tgz', '.tbz', '.whl') +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', + '.whl') + def unarchive(archive_filename, dest_dir, format=None, check=True): @@ -1260,6 +1304,7 @@ def extraction_filter(member, path): return tarfile.tar_filter(member, path) except tarfile.FilterError as exc: raise ValueError(str(exc)) + archive.extraction_filter = extraction_filter archive.extractall(dest_dir) @@ -1282,11 +1327,12 @@ def zip_dir(directory): zf.write(full, dest) return result + # # Simple progress bar # -UNITS = ('', 'K', 'M', 'G','T','P') +UNITS = ('', 'K', 'M', 'G', 'T', 'P') class Progress(object): @@ -1341,8 +1387,8 @@ def percentage(self): def format_duration(self, duration): if (duration <= 0) and self.max is None or self.cur == self.min: result = '??:??:??' - #elif duration < 1: - # result = '--:--:--' + # elif duration < 1: + # result = '--:--:--' else: result = time.strftime('%H:%M:%S', time.gmtime(duration)) return result @@ -1352,7 +1398,7 @@ def ETA(self): if self.done: prefix = 'Done' t = self.elapsed - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() else: prefix = 'ETA ' if self.max is None: @@ -1360,7 +1406,7 @@ def ETA(self): elif self.elapsed == 0 or (self.cur == self.min): t = 0 else: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() t = float(self.max - self.min) t /= self.cur - self.min t = (t - 1) * self.elapsed @@ -1378,6 +1424,7 @@ def speed(self): result /= 1000.0 return '%d %sB/s' % (result, unit) + # # Glob functionality # @@ -1425,22 +1472,23 @@ def _iglob(path_glob): for fn in _iglob(os.path.join(path, radical)): yield fn + if ssl: from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError) - -# -# HTTPSConnection which verifies certificates/matches domains -# + # + # HTTPSConnection which verifies certificates/matches domains + # class HTTPSConnection(httplib.HTTPSConnection): - ca_certs = None # set this to the path to the certs file (.pem) - check_domain = True # only used if ca_certs is not None + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None # noinspection PyPropertyAccess def connect(self): - sock = socket.create_connection((self.host, self.port), self.timeout) + sock = socket.create_connection((self.host, self.port), + self.timeout) if getattr(self, '_tunnel_host', False): self.sock = sock self._tunnel() @@ -1468,6 +1516,7 @@ def connect(self): raise class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): BaseHTTPSHandler.__init__(self) self.ca_certs = ca_certs @@ -1494,8 +1543,9 @@ def https_open(self, req): return self.do_open(self._conn_maker, req) except URLError as e: if 'certificate verify failed' in str(e.reason): - raise CertificateError('Unable to verify server certificate ' - 'for %s' % req.host) + raise CertificateError( + 'Unable to verify server certificate ' + 'for %s' % req.host) else: raise @@ -1509,14 +1559,18 @@ def https_open(self, req): # handler for HTTP itself. 
# class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): - raise URLError('Unexpected HTTP request on what should be a secure ' - 'connection: %s' % req) + raise URLError( + 'Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + # # XML-RPC with timeouts # class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.Transport.__init__(self, use_datetime) @@ -1528,8 +1582,11 @@ def make_connection(self, host): self._connection = host, httplib.HTTPConnection(h) return self._connection[1] + if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.SafeTransport.__init__(self, use_datetime) @@ -1541,12 +1598,13 @@ def make_connection(self, host): kwargs['timeout'] = self.timeout if not self._connection or host != self._connection[0]: self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) + self._connection = host, httplib.HTTPSConnection( + h, None, **kwargs) return self._connection[1] class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): self.timeout = timeout = kwargs.pop('timeout', None) # The above classes only come into play if a timeout @@ -1563,11 +1621,13 @@ def __init__(self, uri, **kwargs): self.transport = t xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + # # CSV functionality. This is provided because on 2.x, the csv module can't # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. # + def _csv_open(fn, mode, **kwargs): if sys.version_info[0] < 3: mode += 'b' @@ -1581,9 +1641,9 @@ def _csv_open(fn, mode, **kwargs): class CSVBase(object): defaults = { - 'delimiter': str(','), # The strs are used because we need native - 'quotechar': str('"'), # str in the csv API (2.x won't take - 'lineterminator': str('\n') # Unicode) + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) } def __enter__(self): @@ -1594,6 +1654,7 @@ def __exit__(self, *exc_info): class CSVReader(CSVBase): + def __init__(self, **kwargs): if 'stream' in kwargs: stream = kwargs['stream'] @@ -1618,7 +1679,9 @@ def next(self): __next__ = next + class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): self.stream = _csv_open(fn, 'w') self.writer = csv.writer(self.stream, **self.defaults) @@ -1633,10 +1696,12 @@ def writerow(self, row): row = r self.writer.writerow(row) + # # Configurator functionality # + class Configurator(BaseConfigurator): value_converters = dict(BaseConfigurator.value_converters) @@ -1647,6 +1712,7 @@ def __init__(self, config, base=None): self.base = base or os.getcwd() def configure_custom(self, config): + def convert(o): if isinstance(o, (list, tuple)): result = type(o)([convert(i) for i in o]) @@ -1696,6 +1762,7 @@ class SubprocessMixin(object): """ Mixin for running subprocesses and capturing their output """ + def __init__(self, verbose=False, progress=None): self.verbose = verbose self.progress = progress @@ -1722,8 +1789,10 @@ def reader(self, stream, context): stream.close() def run_command(self, cmd, **kwargs): - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kwargs) + p = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + **kwargs) t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) t1.start() t2 = 
threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) @@ -1743,15 +1812,17 @@ def normalize_name(name): # https://www.python.org/dev/peps/pep-0503/#normalized-names return re.sub('[-_.]+', '-', name).lower() + # def _get_pypirc_command(): - # """ - # Get the distutils command for interacting with PyPI configurations. - # :return: the command. - # """ - # from distutils.core import Distribution - # from distutils.config import PyPIRCCommand - # d = Distribution() - # return PyPIRCCommand(d) +# """ +# Get the distutils command for interacting with PyPI configurations. +# :return: the command. +# """ +# from distutils.core import Distribution +# from distutils.config import PyPIRCCommand +# d = Distribution() +# return PyPIRCCommand(d) + class PyPIRCFile(object): @@ -1776,9 +1847,10 @@ def read(self): if 'distutils' in sections: # let's get the list of servers index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] + _servers = [ + server.strip() for server in index_servers.split('\n') + if server.strip() != '' + ] if _servers == []: # nothing set, let's try to get the default pypi if 'pypi' in sections: @@ -1789,7 +1861,8 @@ def read(self): result['username'] = config.get(server, 'username') # optional params - for key, default in (('repository', self.DEFAULT_REPOSITORY), + for key, default in (('repository', + self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM), ('password', None)): if config.has_option(server, key): @@ -1800,11 +1873,11 @@ def read(self): # work around people having "repository" for the "pypi" # section of their config set to the HTTP (rather than # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): + if (server == 'pypi' and repository + in (self.DEFAULT_REPOSITORY, 'pypi')): result['repository'] = self.DEFAULT_REPOSITORY - elif (result['server'] != repository and - result['repository'] != repository): + elif (result['server'] != repository + and result['repository'] != repository): result = {} elif 'server-login' in sections: # old format @@ -1834,20 +1907,24 @@ def update(self, username, password): with open(fn, 'w') as f: config.write(f) + def _load_pypirc(index): """ Read the PyPI access configuration as supported by distutils. """ return PyPIRCFile(url=index.url).read() + def _store_pypirc(index): PyPIRCFile().update(index.username, index.password) + # # get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor # tweaks # + def get_host_platform(): """Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built @@ -1899,16 +1976,16 @@ def get_host_platform(): # At least on Linux/Intel, 'machine' is the processor -- # i386, etc. # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) + return "%s-%s" % (osname, machine) elif osname[:5] == 'sunos': - if release[0] >= '5': # SunOS 5 == Solaris 2 + if release[0] >= '5': # SunOS 5 == Solaris 2 osname = 'solaris' release = '%d.%s' % (int(release[0]) - 3, release[2:]) # We can't use 'platform.architecture()[0]' because a # bootstrap problem. We use a dict to get an error # if some suspicious happens. 
- bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} + bitness = {2147483647: '32bit', 9223372036854775807: '64bit'} machine += '.%s' % bitness[sys.maxsize] # fall through to standard osname-release-machine representation elif osname[:3] == 'aix': @@ -1916,7 +1993,7 @@ def get_host_platform(): return aix_platform() elif osname[:6] == 'cygwin': osname = 'cygwin' - rel_re = re.compile (r'[\d.]+', re.ASCII) + rel_re = re.compile(r'[\d.]+', re.ASCII) m = rel_re.match(release) if m: release = m.group() @@ -1927,16 +2004,15 @@ def get_host_platform(): except ImportError: import sysconfig osname, release, machine = _osx_support.get_platform_osx( - sysconfig.get_config_vars(), - osname, release, machine) + sysconfig.get_config_vars(), osname, release, machine) return '%s-%s-%s' % (osname, release, machine) _TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', } diff --git a/src/pip/_vendor/distlib/version.py b/src/pip/_vendor/distlib/version.py index 5de88ef0581..14171ac938d 100644 --- a/src/pip/_vendor/distlib/version.py +++ b/src/pip/_vendor/distlib/version.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 The Python Software Foundation. +# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """ @@ -247,7 +247,6 @@ def _pep_440_key(s): if not dev: dev = ('final',) - #print('%s -> %s' % (s, m.groups())) return epoch, nums, pre, post, dev, local @@ -387,6 +386,7 @@ def _match_compatible(self, version, constraint, prefix): pfx = '.'.join([str(i) for i in release_clause]) return _match_prefix(version, pfx) + _REPLACEMENTS = ( (re.compile('[.+-]$'), ''), # remove trailing puncts (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start @@ -397,7 +397,7 @@ def _match_compatible(self, version, constraint, prefix): (re.compile('[.]{2,}'), '.'), # multiple runs of '.' (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha (re.compile(r'\b(pre-alpha|prealpha)\b'), - 'pre.alpha'), # standardise + 'pre.alpha'), # standardise (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses ) @@ -425,7 +425,7 @@ def _suggest_semantic_version(s): # Now look for numeric prefix, and separate it out from # the rest. - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() m = _NUMERIC_PREFIX.match(result) if not m: prefix = '0.0.0' @@ -443,7 +443,7 @@ def _suggest_semantic_version(s): prefix = '.'.join([str(i) for i in prefix]) suffix = suffix.strip() if suffix: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() # massage the suffix. for pat, repl in _SUFFIX_REPLACEMENTS: suffix = pat.sub(repl, suffix) @@ -513,7 +513,7 @@ def _suggest_normalized_version(s): rs = rs[1:] # Clean leading '0's on numbers. 
- #TODO: unintended side-effect on, e.g., "2003.05.09" + # TODO: unintended side-effect on, e.g., "2003.05.09" # PyPI stats: 77 (~2%) better rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) @@ -572,6 +572,7 @@ def _suggest_normalized_version(s): # Legacy version processing (distribute-compatible) # + _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) _VERSION_REPLACE = { 'pre': 'c', @@ -619,7 +620,7 @@ def is_prerelease(self): result = False for x in self._parts: if (isinstance(x, string_types) and x.startswith('*') and - x < '*final'): + x < '*final'): result = True break return result @@ -650,6 +651,7 @@ def _match_compatible(self, version, constraint, prefix): # Semantic versioning # + _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) @@ -731,6 +733,7 @@ def suggest(self, s): result = self.suggester(s) return result + _SCHEMES = { 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, _suggest_normalized_version), diff --git a/src/pip/_vendor/distlib/wheel.py b/src/pip/_vendor/distlib/wheel.py index 028c2d99b57..4a5a30e1d8d 100644 --- a/src/pip/_vendor/distlib/wheel.py +++ b/src/pip/_vendor/distlib/wheel.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013-2020 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -24,8 +24,7 @@ from . import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution -from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME) +from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, read_exports, tempdir, get_platform) @@ -33,7 +32,7 @@ logger = logging.getLogger(__name__) -cache = None # created when needed +cache = None # created when needed if hasattr(sys, 'pypy_version_info'): # pragma: no cover IMP_PREFIX = 'pp' @@ -45,7 +44,7 @@ IMP_PREFIX = 'cp' VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') -if not VER_SUFFIX: # pragma: no cover +if not VER_SUFFIX: # pragma: no cover VER_SUFFIX = '%s%s' % sys.version_info[:2] PYVER = 'py' + VER_SUFFIX IMPVER = IMP_PREFIX + VER_SUFFIX @@ -56,6 +55,7 @@ if ABI and ABI.startswith('cpython-'): ABI = ABI.replace('cpython-', 'cp').split('-')[0] else: + def _derive_abi(): parts = ['cp', VER_SUFFIX] if sysconfig.get_config_var('Py_DEBUG'): @@ -73,10 +73,12 @@ def _derive_abi(): if us == 4 or (us is None and sys.maxunicode == 0x10FFFF): parts.append('u') return ''.join(parts) + ABI = _derive_abi() del _derive_abi -FILENAME_RE = re.compile(r''' +FILENAME_RE = re.compile( + r''' (?P[^-]+) -(?P\d+[^-]*) (-(?P\d+[^-]*))? 
@@ -86,7 +88,8 @@ def _derive_abi(): \.whl$ ''', re.IGNORECASE | re.VERBOSE) -NAME_VERSION_RE = re.compile(r''' +NAME_VERSION_RE = re.compile( + r''' (?P[^-]+) -(?P\d+[^-]*) (-(?P\d+[^-]*))?$ @@ -109,12 +112,14 @@ def _derive_abi(): import importlib.machinery import importlib.util + def _get_suffixes(): if imp: return [s[0] for s in imp.get_suffixes()] else: return importlib.machinery.EXTENSION_SUFFIXES + def _load_dynamic(name, path): # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly if imp: @@ -126,7 +131,9 @@ def _load_dynamic(name, path): spec.loader.exec_module(module) return module + class Mounter(object): + def __init__(self): self.impure_wheels = {} self.libs = {} @@ -161,6 +168,7 @@ def load_module(self, fullname): result.__package__ = parts[0] return result + _hook = Mounter() @@ -227,8 +235,8 @@ def filename(self): arch = '.'.join(self.arch) # replace - with _ as a local version separator version = self.version.replace('-', '_') - return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, - pyver, abi, arch) + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, + abi, arch) @property def exists(self): @@ -249,14 +257,14 @@ def metadata(self): info_dir = '%s.dist-info' % name_ver wrapper = codecs.getreader('utf-8') with ZipFile(pathname, 'r') as zf: - wheel_metadata = self.get_wheel_metadata(zf) - wv = wheel_metadata['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) + self.get_wheel_metadata(zf) + # wv = wheel_metadata['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) # if file_version < (1, 1): - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, - # LEGACY_METADATA_FILENAME] + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] # else: - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] result = None for fn in fns: @@ -326,13 +334,14 @@ def get_hash(self, data, hash_kind=None): try: hasher = getattr(hashlib, hash_kind) except AttributeError: - raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + raise DistlibException('Unsupported hash algorithm: %r' % + hash_kind) result = hasher(data).digest() result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') return hash_kind, result def write_record(self, records, record_path, archive_record_path): - records = list(records) # make a copy, as mutated + records = list(records) # make a copy, as mutated records.append((archive_record_path, '', '')) with CSVWriter(record_path) as writer: for row in records: @@ -341,7 +350,7 @@ def write_record(self, records, record_path, archive_record_path): def write_records(self, info, libdir, archive_paths): records = [] distinfo, info_dir = info - hasher = getattr(hashlib, self.hash_kind) + # hasher = getattr(hashlib, self.hash_kind) for ap, p in archive_paths: with open(p, 'rb') as f: data = f.read() @@ -466,6 +475,7 @@ def sorter(t): if '.dist-info' in ap: n += 10000 return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) # Now, at last, RECORD. 
@@ -512,7 +522,8 @@ def install(self, paths, maker, **kwargs): dry_run = maker.dry_run warner = kwargs.get('warner') lib_only = kwargs.get('lib_only', False) - bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', + False) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -553,11 +564,11 @@ def install(self, paths, maker, **kwargs): # make a new instance rather than a copy of maker's, # as we mutate it fileop = FileOperator(dry_run=dry_run) - fileop.record = True # so we can rollback if needed + fileop.record = True # so we can rollback if needed - bc = not sys.dont_write_bytecode # Double negatives. Lovely! + bc = not sys.dont_write_bytecode # Double negatives. Lovely! - outfiles = [] # for RECORD writing + outfiles = [] # for RECORD writing # for script copying/shebang processing workdir = tempfile.mkdtemp() @@ -611,7 +622,8 @@ def install(self, paths, maker, **kwargs): # So ... manually preserve permission bits as given in zinfo if os.name == 'posix': # just set the normal permission bits - os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + os.chmod(outfile, + (zinfo.external_attr >> 16) & 0x1FF) outfiles.append(outfile) # Double check the digest of the written file if not dry_run and row[1]: @@ -624,8 +636,9 @@ def install(self, paths, maker, **kwargs): '%s' % outfile) if bc and outfile.endswith('.py'): try: - pyc = fileop.byte_compile(outfile, - hashed_invalidation=bc_hashed_invalidation) + pyc = fileop.byte_compile( + outfile, + hashed_invalidation=bc_hashed_invalidation) outfiles.append(pyc) except Exception: # Don't give up if byte-compilation fails, @@ -700,7 +713,7 @@ def install(self, paths, maker, **kwargs): fileop.set_executable_mode(filenames) if gui_scripts: - options = {'gui': True } + options = {'gui': True} for k, v in gui_scripts.items(): script = '%s = %s' % (k, v) filenames = maker.make(script, options) @@ -710,7 +723,7 @@ def install(self, paths, maker, **kwargs): dist = InstalledDistribution(p) # Write SHARED - paths = dict(paths) # don't change passed in dict + paths = dict(paths) # don't change passed in dict del paths['purelib'] del paths['platlib'] paths['lib'] = libdir @@ -761,7 +774,8 @@ def _get_extensions(self): extract = True else: file_time = os.stat(dest).st_mtime - file_time = datetime.datetime.fromtimestamp(file_time) + file_time = datetime.datetime.fromtimestamp( + file_time) info = zf.getinfo(relpath) wheel_time = datetime.datetime(*info.date_time) extract = wheel_time > file_time @@ -782,7 +796,7 @@ def is_mountable(self): """ Determine if a wheel is asserted as mountable by its metadata. 
""" - return True # for now - metadata details TBD + return True # for now - metadata details TBD def mount(self, append=False): pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) @@ -820,10 +834,10 @@ def unmount(self): def verify(self): pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver + # data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -832,9 +846,9 @@ def verify(self): with ZipFile(pathname, 'r') as zf: with zf.open(wheel_metadata_name) as bwf: wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) + message_from_file(wf) + # wv = message['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) # TODO version verification records = {} @@ -903,25 +917,25 @@ def get_version(path_map, info_dir): def update_version(version, path): updated = None try: - v = NormalizedVersion(version) + NormalizedVersion(version) i = version.find('-') if i < 0: updated = '%s+1' % version else: parts = [int(s) for s in version[i + 1:].split('.')] parts[-1] += 1 - updated = '%s+%s' % (version[:i], - '.'.join(str(i) for i in parts)) + updated = '%s+%s' % (version[:i], '.'.join( + str(i) for i in parts)) except UnsupportedVersionError: - logger.debug('Cannot update non-compliant (PEP-440) ' - 'version %r', version) + logger.debug( + 'Cannot update non-compliant (PEP-440) ' + 'version %r', version) if updated: md = Metadata(path=path) md.version = updated legacy = path.endswith(LEGACY_METADATA_FILENAME) md.write(path=path, legacy=legacy) - logger.debug('Version updated from %r to %r', version, - updated) + logger.debug('Version updated from %r to %r', version, updated) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -963,7 +977,8 @@ def update_version(version, path): os.close(fd) else: if not os.path.isdir(dest_dir): - raise DistlibException('Not a directory: %r' % dest_dir) + raise DistlibException('Not a directory: %r' % + dest_dir) newpath = os.path.join(dest_dir, self.filename) archive_paths = list(path_map.items()) distinfo = os.path.join(workdir, info_dir) @@ -974,6 +989,7 @@ def update_version(version, path): shutil.copyfile(newpath, pathname) return modified + def _get_glibc_version(): import platform ver = platform.libc_ver() @@ -984,13 +1000,14 @@ def _get_glibc_version(): result = tuple(result) return result + def compatible_tags(): """ Return (pyver, abi, arch) tuples compatible with this Python. 
""" versions = [VER_SUFFIX] major = VER_SUFFIX[0] - for minor in range(sys.version_info[1] - 1, - 1, -1): + for minor in range(sys.version_info[1] - 1, -1, -1): versions.append(''.join([major, str(minor)])) abis = [] @@ -1023,7 +1040,7 @@ def compatible_tags(): while minor >= 0: for match in matches: s = '%s_%s_%s_%s' % (name, major, minor, match) - if s != ARCH: # already there + if s != ARCH: # already there arches.append(s) minor -= 1 @@ -1045,9 +1062,9 @@ def compatible_tags(): if parts >= (2, 17): result.append((''.join((IMP_PREFIX, versions[0])), abi, 'manylinux2014_%s' % arch)) - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux_%s_%s_%s' % (parts[0], parts[1], - arch))) + result.append( + (''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) # where no ABI / arch dependency, but IMP_PREFIX dependency for i, version in enumerate(versions): @@ -1071,7 +1088,7 @@ def compatible_tags(): def is_compatible(wheel, tags=None): if not isinstance(wheel, Wheel): - wheel = Wheel(wheel) # assume it's a filename + wheel = Wheel(wheel) # assume it's a filename result = False if tags is None: tags = COMPATIBLE_TAGS diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index fd2bbca6474..5554c38ecbf 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,6 +1,6 @@ CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 -distlib==0.3.7 +distlib==0.3.8 distro==1.8.0 msgpack==1.0.5 packaging==21.3 From a289707ae5283b8d144e329a12dcbc8254ecde1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 18:49:50 +0100 Subject: [PATCH 19/38] Add test that config settings are passed to editable builds --- tests/functional/test_pep660.py | 42 ++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_pep660.py b/tests/functional/test_pep660.py index 8418b26894c..d562d0750db 100644 --- a/tests/functional/test_pep660.py +++ b/tests/functional/test_pep660.py @@ -37,7 +37,7 @@ def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None): def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): with open("log.txt", "a") as f: - print(":build_wheel called", file=f) + print(f":build_wheel called with config_settings={config_settings}", file=f) return _build_wheel(wheel_directory, config_settings, metadata_directory) """ @@ -55,7 +55,7 @@ def prepare_metadata_for_build_editable(metadata_directory, config_settings=None def build_editable(wheel_directory, config_settings=None, metadata_directory=None): with open("log.txt", "a") as f: - print(":build_editable called", file=f) + print(f":build_editable called with config_settings={config_settings}", file=f) return _build_wheel(wheel_directory, config_settings, metadata_directory) """ # fmt: on @@ -88,6 +88,16 @@ def _assert_hook_called(project_dir: Path, hook: str) -> None: assert f":{hook} called" in log, f"{hook} has not been called" +def _assert_hook_called_with_config_settings( + project_dir: Path, hook: str, config_settings: Dict[str, str] +) -> None: + log = project_dir.joinpath("log.txt").read_text() + assert f":{hook} called" in log, f"{hook} has not been called" + assert ( + f":{hook} called with config_settings={config_settings}" in log + ), f"{hook} has not been called with the expected config settings:\n{log}" + + def _assert_hook_not_called(project_dir: Path, hook: str) -> None: log = 
project_dir.joinpath("log.txt").read_text() assert f":{hook} called" not in log, f"{hook} should not have been called" @@ -119,9 +129,35 @@ def test_install_pep660_basic(tmpdir: Path, script: PipTestEnvironment) -> None: "--no-build-isolation", "--editable", project_dir, + "--config-setting", + "x=y", + ) + _assert_hook_called(project_dir, "prepare_metadata_for_build_editable") + _assert_hook_called_with_config_settings(project_dir, "build_editable", {"x": "y"}) + assert ( + result.test_env.site_packages.joinpath("project.egg-link") + not in result.files_created + ), "a .egg-link file should not have been created" + + +def test_install_pep660_from_reqs_file( + tmpdir: Path, script: PipTestEnvironment +) -> None: + """ + Test with backend that supports build_editable. + """ + project_dir = _make_project(tmpdir, BACKEND_WITH_PEP660, with_setup_py=False) + reqs_file = tmpdir / "requirements.txt" + reqs_file.write_text(f"-e {project_dir.as_uri()} --config-setting x=y\n") + result = script.pip( + "install", + "--no-index", + "--no-build-isolation", + "-r", + reqs_file, ) _assert_hook_called(project_dir, "prepare_metadata_for_build_editable") - _assert_hook_called(project_dir, "build_editable") + _assert_hook_called_with_config_settings(project_dir, "build_editable", {"x": "y"}) assert ( result.test_env.site_packages.joinpath("project.egg-link") not in result.files_created From 1f48ca3fc6c13e557b2c88ef46185a525799ed4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 27 Jan 2024 19:02:05 +0100 Subject: [PATCH 20/38] Support per requirement options for editables too --- news/12480.feature.rst | 1 + src/pip/_internal/req/req_file.py | 48 ++++++++++++++++--------------- 2 files changed, 26 insertions(+), 23 deletions(-) create mode 100644 news/12480.feature.rst diff --git a/news/12480.feature.rst b/news/12480.feature.rst new file mode 100644 index 00000000000..1e9d5531996 --- /dev/null +++ b/news/12480.feature.rst @@ -0,0 +1 @@ +Support per requirement options for editable installs. diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index f717c1ccc79..1ef3d5ef6e7 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -75,8 +75,16 @@ cmdoptions.config_settings, ] +SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [ + cmdoptions.config_settings, +] + + # the 'dest' string values SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [ + str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ +] logger = logging.getLogger(__name__) @@ -178,31 +186,25 @@ def handle_requirement_line( assert line.is_requirement + # get the options that apply to requirements if line.is_editable: - # For editable requirements, we don't support per-requirement - # options, so just return the parsed requirement. 
- return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - ) + supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST else: - # get the options that apply to requirements - req_options = {} - for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in line.opts.__dict__ and line.opts.__dict__[dest]: - req_options[dest] = line.opts.__dict__[dest] - - line_source = f"line {line.lineno} of {line.filename}" - return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - options=req_options, - line_source=line_source, - ) + supported_dest = SUPPORTED_OPTIONS_REQ_DEST + req_options = {} + for dest in supported_dest: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) def handle_option_line( From 9cf1c5e0c628993a92775a61fa7908b0e93e1772 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 29 Jan 2024 21:13:25 -1000 Subject: [PATCH 21/38] Add status code 502 to the urllib retry list fixes #11843 related https://github.com/pypa/pip/issues/12008 --- news/11843.feature.rst | 1 + src/pip/_internal/network/session.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 news/11843.feature.rst diff --git a/news/11843.feature.rst b/news/11843.feature.rst new file mode 100644 index 00000000000..56ff2c6d48e --- /dev/null +++ b/news/11843.feature.rst @@ -0,0 +1 @@ +Retry on HTTP status code 502 diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py index 887dc14e796..676e8ee723b 100644 --- a/src/pip/_internal/network/session.py +++ b/src/pip/_internal/network/session.py @@ -355,8 +355,9 @@ def __init__( # is typically considered a transient error so we'll go ahead and # retry it. # A 500 may indicate transient error in Amazon S3 + # A 502 may be a transient error from a CDN # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], + status_forcelist=[500, 502, 503, 520, 527], # Add a small amount of back off between failed requests in # order to prevent hammering the service. backoff_factor=0.25, From f2e77df9f6e73edc874f199dc4f1ae6f9d128e51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 2 Apr 2023 15:28:47 +0200 Subject: [PATCH 22/38] Let --config-settings imply PEP 517 --- news/11915.feature.rst | 2 ++ src/pip/_internal/req/req_install.py | 20 +++++++++++--------- tests/functional/test_config_settings.py | 20 ++++++++++++++++++++ 3 files changed, 33 insertions(+), 9 deletions(-) create mode 100644 news/11915.feature.rst diff --git a/news/11915.feature.rst b/news/11915.feature.rst new file mode 100644 index 00000000000..e2e1fd7ce8f --- /dev/null +++ b/news/11915.feature.rst @@ -0,0 +1,2 @@ +Automatically use the setuptools PEP 517 build backend when ``--config-settings`` is +used for projects without ``pyproject.toml``. 
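The net effect of this change (together with the follow-up commit below, which downgrades the error to a warning) is that passing any --config-settings forces the PEP 517 code path even for a setup.py-only project, and the old branch that merely warned that config settings were being ignored goes away. A rough standalone sketch of that decision rule, for illustration only; the function name is invented here and this is not pip's actual implementation:

    # Simplified model: config settings can only reach the build backend
    # through PEP 517 hooks, so their presence implies PEP 517.
    from typing import Dict, Optional

    def effective_use_pep517(
        use_pep517: Optional[bool], config_settings: Optional[Dict[str, str]]
    ) -> bool:
        """Return True if the PEP 517 build path should be used."""
        if config_settings:
            # Overrides --no-use-pep517; the follow-up commit warns instead
            # of raising in that case.
            return True
        # Otherwise honour an explicit --use-pep517/--no-use-pep517 choice
        # (pip's pyproject.toml auto-detection is not modelled here).
        return bool(use_pep517)

    assert effective_use_pep517(None, {"FOO": "Hello"}) is True
    assert effective_use_pep517(False, {}) is False
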
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index b622402270c..699dfa6c7c9 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -181,6 +181,16 @@ def __init__( # but after loading this flag should be treated as read only. self.use_pep517 = use_pep517 + # If config settings are provided, enforce PEP 517. + if self.config_settings: + if self.use_pep517 is False: + raise InstallationError( + f"Disabling PEP 517 processing is not allowed for {self} " + f"when --config-settings are specified." + ) + else: + self.use_pep517 = True + # This requirement needs more preparation before it can be built self.needs_more_preparation = False @@ -508,15 +518,7 @@ def load_pyproject_toml(self) -> None: ) if pyproject_toml_data is None: - if self.config_settings: - deprecated( - reason=f"Config settings are ignored for project {self}.", - replacement=( - "to use --use-pep517 or add a " - "pyproject.toml file to the project" - ), - gone_in="24.0", - ) + assert not self.config_settings self.use_pep517 = False return diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index f3975de2af5..3f88d9c3924 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -107,6 +107,26 @@ def make_project( return name, version, project_dir +def test_config_settings_implies_pep517( + script: PipTestEnvironment, tmp_path: Path +) -> None: + """Test that setup.py bdist_wheel is not used when config settings are.""" + pkg_path = tmp_path / "pkga" + pkg_path.mkdir() + pkg_path.joinpath("setup.py").write_text( + "from setuptools import setup; setup(name='pkga')\n" + ) + result = script.pip( + "wheel", + "--config-settings", + "FOO=Hello", + pkg_path, + cwd=tmp_path, + ) + assert "Successfully built pkga" in result.stdout + assert "Preparing metadata (pyproject.toml)" in result.stdout + + def test_backend_sees_config(script: PipTestEnvironment) -> None: name, version, project_dir = make_project(script.scratch_path) script.pip( From 84bb3040bbb16a3b4242600ce40b9c113a32fa6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 28 Jan 2024 12:52:50 +0100 Subject: [PATCH 23/38] Warning instead of error when --no-use-pep517 is combined with config settings --- src/pip/_internal/req/req_install.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 699dfa6c7c9..71dc61a87b6 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -184,12 +184,12 @@ def __init__( # If config settings are provided, enforce PEP 517. if self.config_settings: if self.use_pep517 is False: - raise InstallationError( - f"Disabling PEP 517 processing is not allowed for {self} " - f"when --config-settings are specified." 
+ logger.warning( + "--no-use-pep517 ignored for %s " + "because --config-settings are specified.", + self, ) - else: - self.use_pep517 = True + self.use_pep517 = True # This requirement needs more preparation before it can be built self.needs_more_preparation = False From e9b3d95f954b96b5ba7f5efe5c9e9b3dd6fc65e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 28 Jan 2024 13:08:28 +0100 Subject: [PATCH 24/38] Warn if config settings are provided for a legacy editable install --- src/pip/_internal/req/req_install.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 71dc61a87b6..9ae60043189 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -829,6 +829,13 @@ def install( ) if self.editable and not self.is_wheel: + if self.config_settings: + logger.warning( + "--config-settings ignored for legacy editable install of %s. " + "Consider upgrading to a version of setuptools " + "that supports PEP 660.", + self, + ) install_editable_legacy( global_options=global_options if global_options is not None else [], prefix=prefix, From cfab072a81d9db6e4ffcbe156529af079ed6ca96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 30 Jan 2024 20:49:34 +0100 Subject: [PATCH 25/38] Mention minimum setuptools version that supports PEP 660 --- src/pip/_internal/req/req_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 9ae60043189..a65611c320b 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -833,7 +833,7 @@ def install( logger.warning( "--config-settings ignored for legacy editable install of %s. " "Consider upgrading to a version of setuptools " - "that supports PEP 660.", + "that supports PEP 660 (>= 64).", self, ) install_editable_legacy( From d131618d347430f4c0bae510e748cc2bf14f252e Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Wed, 31 Jan 2024 05:49:12 +0100 Subject: [PATCH 26/38] fix a small typo: "an newer" --- docs/html/reference/build-system/pyproject-toml.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md index 9719023cced..a6652d7a4d7 100644 --- a/docs/html/reference/build-system/pyproject-toml.md +++ b/docs/html/reference/build-system/pyproject-toml.md @@ -29,7 +29,7 @@ that build requirements are handled independently of the user's runtime environment. For example, a project that needs an older version of setuptools to build can -still be installed, even if the user has an newer version installed (and +still be installed, even if the user has a newer version installed (and without silently replacing that version). 
### Build-time dependencies From 04d6992d44ebc5437f38e1e53c6f8d09cb06bef7 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Wed, 31 Jan 2024 06:00:51 +0100 Subject: [PATCH 27/38] add a news entry as described here: https://pip.pypa.io/en/latest/development/contributing/#choosing-the-type-of-news-entry --- news/fixtypo.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/fixtypo.trivial.rst diff --git a/news/fixtypo.trivial.rst b/news/fixtypo.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From aa930c49983e11c89d00d959c5d412a3211c2cbf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 1 Feb 2024 13:42:30 -0600 Subject: [PATCH 28/38] Update src/pip/_internal/network/session.py --- src/pip/_internal/network/session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py index 676e8ee723b..f17efc52992 100644 --- a/src/pip/_internal/network/session.py +++ b/src/pip/_internal/network/session.py @@ -355,7 +355,7 @@ def __init__( # is typically considered a transient error so we'll go ahead and # retry it. # A 500 may indicate transient error in Amazon S3 - # A 502 may be a transient error from a CDN + # A 502 may be a transient error from a CDN like CloudFlare or CloudFront # A 520 or 527 - may indicate transient error in CloudFlare status_forcelist=[500, 502, 503, 520, 527], # Add a small amount of back off between failed requests in From 0b93dfc7c319449a71c50233c4fb344f9c24a668 Mon Sep 17 00:00:00 2001 From: Jean Abou Samra Date: Fri, 2 Feb 2024 13:33:34 +0100 Subject: [PATCH 29/38] Add mailmap entry for @jeanas --- .mailmap | 1 + 1 file changed, 1 insertion(+) diff --git a/.mailmap b/.mailmap index d0c64300fd2..875dba24ed3 100644 --- a/.mailmap +++ b/.mailmap @@ -27,6 +27,7 @@ Hugo van Kemenade hugovk Ilya Baryshev Jakub Stasiak +Jean Abou Samra John-Scott Atlakson Jorge Niedbalski From 53e5f192bab27cad793e9dbfd9849717c1ed3617 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Thu, 1 Feb 2024 20:21:06 +0100 Subject: [PATCH 30/38] Tweak news before release --- NEWS.rst | 1 + news/12389.bugfix.rst | 1 - news/12480.feature.rst | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 news/12389.bugfix.rst diff --git a/NEWS.rst b/NEWS.rst index 8738e181e2e..7930a63a020 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,7 @@ .. towncrier release notes start + 23.3.2 (2023-12-17) =================== diff --git a/news/12389.bugfix.rst b/news/12389.bugfix.rst deleted file mode 100644 index 84871873328..00000000000 --- a/news/12389.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Update mypy to 1.6.1 and fix/ignore types diff --git a/news/12480.feature.rst b/news/12480.feature.rst index 1e9d5531996..dd6101c244e 100644 --- a/news/12480.feature.rst +++ b/news/12480.feature.rst @@ -1 +1 @@ -Support per requirement options for editable installs. +Support per requirement ``--config-settings`` for editable installs. 
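Taken together, the retry-related commits above (PATCH 21 and the comment tweak in PATCH 28) simply add HTTP 502 to the set of status codes that pip's HTTP session treats as transient and retries with back-off. A minimal standalone sketch using urllib3 directly; pip builds an equivalent Retry object from its vendored urllib3 and derives the retry count from --retries, so the total=3 below is only an illustrative placeholder:

    from urllib3.util.retry import Retry

    # 500: transient S3 errors; 502: transient CDN errors (e.g. CloudFlare,
    # CloudFront); 503: service unavailable; 520/527: CloudFlare-specific.
    retry = Retry(
        total=3,  # placeholder; pip takes this from the --retries option
        status_forcelist=[500, 502, 503, 520, 527],
        backoff_factor=0.25,  # small back-off so a flaky index is not hammered
    )
    print(sorted(retry.status_forcelist))
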
From ef56d47f90eb3881dba5134241de94d504576f1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 3 Feb 2024 09:57:13 +0100 Subject: [PATCH 31/38] Update AUTHORS.txt --- AUTHORS.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index e02de32bcf3..0e6354892d8 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -160,6 +160,7 @@ Cristina Muñoz Curtis Doty cytolentino Daan De Meyer +Dale Damian Damian Quiroga Damian Shaw @@ -226,6 +227,7 @@ Dustin Ingram Dwayne Bailey Ed Morley Edgar Ramírez +Edgar Ramírez Mondragón Ee Durbin Efflam Lemaillet efflamlemaillet @@ -258,6 +260,7 @@ Filip Kokosiński Filipe Laíns Finn Womack finnagin +Flavio Amurrio Florian Briand Florian Rathgeber Francesco @@ -320,6 +323,7 @@ Ionel Cristian Mărieș Ionel Maries Cristian Itamar Turner-Trauring Ivan Pozdeev +J. Nick Koston Jacob Kim Jacob Walls Jaime Sanz @@ -342,6 +346,7 @@ Jason R. Coombs JasonMo JasonMo1 Jay Graves +Jean Abou Samra Jean-Christophe Fillion-Robin Jeff Barber Jeff Dairiki @@ -581,6 +586,7 @@ Przemek Wrzos Pulkit Goyal q0w Qiangning Hong +Qiming Xu Quentin Lee Quentin Pradet R. David Murray @@ -719,6 +725,7 @@ Vincent Philippon Vinicyus Macedo Vipul Kumar Vitaly Babiy +Vladimir Fokow Vladimir Rutsky W. Trevor King Wil Tan From 6681c02710b81278adb874007286960108867768 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 3 Feb 2024 09:57:14 +0100 Subject: [PATCH 32/38] Bump for release --- NEWS.rst | 38 ++++++++++++++++++++++++++++++++++++++ news/11815.doc.rst | 1 - news/11843.feature.rst | 1 - news/11909.process.rst | 1 - news/11915.feature.rst | 2 -- news/12327.bugfix.rst | 1 - news/12390.trivial.rst | 1 - news/12393.trivial.rst | 1 - news/12417.doc.rst | 1 - news/12434.doc.rst | 1 - news/12449.bugfix.rst | 2 -- news/12449.doc.rst | 2 -- news/12475.doc.rst | 1 - news/12477.feature.rst | 2 -- news/12480.feature.rst | 1 - news/distlib.vendor.rst | 1 - news/fixtypo.trivial.rst | 0 src/pip/__init__.py | 2 +- 18 files changed, 39 insertions(+), 20 deletions(-) delete mode 100644 news/11815.doc.rst delete mode 100644 news/11843.feature.rst delete mode 100644 news/11909.process.rst delete mode 100644 news/11915.feature.rst delete mode 100644 news/12327.bugfix.rst delete mode 100644 news/12390.trivial.rst delete mode 100644 news/12393.trivial.rst delete mode 100644 news/12417.doc.rst delete mode 100644 news/12434.doc.rst delete mode 100644 news/12449.bugfix.rst delete mode 100644 news/12449.doc.rst delete mode 100644 news/12475.doc.rst delete mode 100644 news/12477.feature.rst delete mode 100644 news/12480.feature.rst delete mode 100644 news/distlib.vendor.rst delete mode 100644 news/fixtypo.trivial.rst diff --git a/NEWS.rst b/NEWS.rst index 7930a63a020..3c6e4be151c 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,7 +9,45 @@ .. towncrier release notes start +24.0 (2024-02-03) +================= + +Process +------- + +- Most project metadata is now defined statically via pip's ``pyproject.toml`` file. + +Features +-------- + +- Retry on HTTP status code 502 (`#11843 `_) +- Automatically use the setuptools PEP 517 build backend when ``--config-settings`` is + used for projects without ``pyproject.toml``. (`#11915 `_) +- Make pip freeze and pip uninstall of legacy editable installs of packages whose name + contains ``_`` compatible with ``setuptools>=69.0.3``. (`#12477 `_) +- Support per requirement ``--config-settings`` for editable installs. 
(`#12480 `_) + +Bug Fixes +--------- + +- Optimized usage of ``--find-links=``, by only scanning the relevant directory once, only considering file names that are valid wheel or sdist names, and only considering files in the directory that are related to the install. (`#12327 `_) +- Removed ``wheel`` from the ``[build-system].requires`` list fallback + that is used when ``pyproject.toml`` is absent. (`#12449 `_) + +Vendored Libraries +------------------ + +- Upgrade distlib to 0.3.8 + +Improved Documentation +---------------------- +- Fix explanation of how PIP_CONFIG_FILE works (`#11815 `_) +- Fix outdated pip install argument description in documentation. (`#12417 `_) +- Replace some links to PEPs with links to the canonical specifications on the :doc:`pypug:index` (`#12434 `_) +- Updated the ``pyproject.toml`` document to stop suggesting + to depend on ``wheel`` as a build dependency directly. (`#12449 `_) +- Update supported interpreters in development docs (`#12475 `_) 23.3.2 (2023-12-17) =================== diff --git a/news/11815.doc.rst b/news/11815.doc.rst deleted file mode 100644 index 8e7e8d21bef..00000000000 --- a/news/11815.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Fix explanation of how PIP_CONFIG_FILE works diff --git a/news/11843.feature.rst b/news/11843.feature.rst deleted file mode 100644 index 56ff2c6d48e..00000000000 --- a/news/11843.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Retry on HTTP status code 502 diff --git a/news/11909.process.rst b/news/11909.process.rst deleted file mode 100644 index a396d93d963..00000000000 --- a/news/11909.process.rst +++ /dev/null @@ -1 +0,0 @@ -Most project metadata is now defined statically via pip's ``pyproject.toml`` file. diff --git a/news/11915.feature.rst b/news/11915.feature.rst deleted file mode 100644 index e2e1fd7ce8f..00000000000 --- a/news/11915.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Automatically use the setuptools PEP 517 build backend when ``--config-settings`` is -used for projects without ``pyproject.toml``. diff --git a/news/12327.bugfix.rst b/news/12327.bugfix.rst deleted file mode 100644 index b07ef130a2e..00000000000 --- a/news/12327.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Optimized usage of ``--find-links=``, by only scanning the relevant directory once, only considering file names that are valid wheel or sdist names, and only considering files in the directory that are related to the install. diff --git a/news/12390.trivial.rst b/news/12390.trivial.rst deleted file mode 100644 index 52b21413ca0..00000000000 --- a/news/12390.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Update ruff versions and config for dev diff --git a/news/12393.trivial.rst b/news/12393.trivial.rst deleted file mode 100644 index 15452737aef..00000000000 --- a/news/12393.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Enforce and update code to use f-strings via Ruff rule UP032 diff --git a/news/12417.doc.rst b/news/12417.doc.rst deleted file mode 100644 index efde79a5808..00000000000 --- a/news/12417.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Fix outdated pip install argument description in documentation. 
diff --git a/news/12434.doc.rst b/news/12434.doc.rst deleted file mode 100644 index c1d3635df78..00000000000 --- a/news/12434.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Replace some links to PEPs with links to the canonical specifications on the :doc:`pypug:index` diff --git a/news/12449.bugfix.rst b/news/12449.bugfix.rst deleted file mode 100644 index 19f1d9809ac..00000000000 --- a/news/12449.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Removed ``wheel`` from the ``[build-system].requires`` list fallback -that is used when ``pyproject.toml`` is absent. diff --git a/news/12449.doc.rst b/news/12449.doc.rst deleted file mode 100644 index 431475f51eb..00000000000 --- a/news/12449.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Updated the ``pyproject.toml`` document to stop suggesting -to depend on ``wheel`` as a build dependency directly. diff --git a/news/12475.doc.rst b/news/12475.doc.rst deleted file mode 100644 index 2713e1878c9..00000000000 --- a/news/12475.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Update supported interpreters in development docs diff --git a/news/12477.feature.rst b/news/12477.feature.rst deleted file mode 100644 index 56b6e99b38f..00000000000 --- a/news/12477.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make pip freeze and pip uninstall of legacy editable installs of packages whose name -contains ``_`` compatible with ``setuptools>=69.0.3``. diff --git a/news/12480.feature.rst b/news/12480.feature.rst deleted file mode 100644 index dd6101c244e..00000000000 --- a/news/12480.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Support per requirement ``--config-settings`` for editable installs. diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst deleted file mode 100644 index 13caa068e0d..00000000000 --- a/news/distlib.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade distlib to 0.3.8 diff --git a/news/fixtypo.trivial.rst b/news/fixtypo.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 46e56014998..be0e3edbc4b 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "24.0.dev0" +__version__ = "24.0" def main(args: Optional[List[str]] = None) -> int: From ef78c129b1a966dbbbdb8ebfffc43723e89110d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 3 Feb 2024 10:01:04 +0100 Subject: [PATCH 33/38] Tweak NEWS.rst --- NEWS.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index 3c6e4be151c..6b1dff81e80 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -12,11 +12,6 @@ 24.0 (2024-02-03) ================= -Process -------- - -- Most project metadata is now defined statically via pip's ``pyproject.toml`` file. - Features -------- @@ -48,6 +43,12 @@ Improved Documentation - Updated the ``pyproject.toml`` document to stop suggesting to depend on ``wheel`` as a build dependency directly. (`#12449 `_) - Update supported interpreters in development docs (`#12475 `_) + +Process +------- + +- Most project metadata is now defined statically via pip's ``pyproject.toml`` file. 
+ 23.3.2 (2023-12-17) =================== From f8dd7ddb1b9049f5524779ee8c79fbf7ba5d5e2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 3 Feb 2024 09:57:16 +0100 Subject: [PATCH 34/38] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index be0e3edbc4b..13523d261f0 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "24.0" +__version__ = "24.1.dev0" def main(args: Optional[List[str]] = None) -> int: From dfa1db26853ebe054250d2e48d24adfb99f444e6 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Sun, 4 Feb 2024 10:57:38 -0500 Subject: [PATCH 35/38] Update ruff to 0.2.0 and fix deprications --- .pre-commit-config.yaml | 2 +- pyproject.toml | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5f06add1eb3..b932b146f18 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.2.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/pyproject.toml b/pyproject.toml index 7496a08ee2c..5edbbc4d00d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -138,20 +138,22 @@ distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" [tool.ruff] +target-version = "py37" +line-length = 88 extend-exclude = [ "_vendor", "./build", ".scratch", "data", ] + +[tool.ruff.lint] ignore = [ "B019", "B020", "B904", # Ruff enables opinionated warnings by default "B905", # Ruff enables opinionated warnings by default ] -target-version = "py37" -line-length = 88 select = [ "ASYNC", "B", @@ -170,22 +172,22 @@ select = [ "UP032", ] -[tool.ruff.isort] +[tool.ruff.lint.isort] # We need to explicitly make pip "first party" as it's imported by code in # the docs and tests directories. 
known-first-party = ["pip"] known-third-party = ["pip._vendor"] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 33 # default is 10 -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "noxfile.py" = ["G"] "src/pip/_internal/*" = ["PERF203"] "tests/*" = ["B011"] "tests/unit/test_finder.py" = ["C414"] -[tool.ruff.pylint] +[tool.ruff.lint.pylint] max-args = 15 # default is 5 max-branches = 28 # default is 12 max-returns = 13 # default is 6 From 1fc81e309c26beb5787c6482479fbb9db0873851 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Sun, 4 Feb 2024 10:57:50 -0500 Subject: [PATCH 36/38] Fix RUF100 [*] Unused blanket `noqa` directive --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index 878dbbd0ad6..34430e30077 100644 --- a/noxfile.py +++ b/noxfile.py @@ -13,7 +13,7 @@ # fmt: off sys.path.append(".") -from tools import release # isort:skip # noqa +from tools import release # isort:skip sys.path.pop() # fmt: on From 2eb68a9b987433b16b03d3f5ba0b3cd56dbe4955 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Sun, 4 Feb 2024 10:59:26 -0500 Subject: [PATCH 37/38] Update source location to source --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5edbbc4d00d..2da7793a026 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -138,6 +138,7 @@ distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" [tool.ruff] +src = ["src"] target-version = "py37" line-length = 88 extend-exclude = [ @@ -173,9 +174,8 @@ select = [ ] [tool.ruff.lint.isort] -# We need to explicitly make pip "first party" as it's imported by code in -# the docs and tests directories. -known-first-party = ["pip"] +# Explicitly make tests "first party" as it's not in the "src" directory +known-first-party = ["tests"] known-third-party = ["pip._vendor"] [tool.ruff.lint.mccabe] From 5ca52ff78d41acf0caf4a70b385a28959bc83bec Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Sun, 4 Feb 2024 11:03:19 -0500 Subject: [PATCH 38/38] Add NEWS entry --- news/12510.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12510.trivial.rst diff --git a/news/12510.trivial.rst b/news/12510.trivial.rst new file mode 100644 index 00000000000..d41d5425b56 --- /dev/null +++ b/news/12510.trivial.rst @@ -0,0 +1 @@ +Update ruff to 0.2.0 and update ruff config to reflect